2025-12-08 05:42:52 +01:00
parent 80d8b7aa87
commit 5d483b0df5
32 changed files with 9837 additions and 579 deletions

View File

@@ -71,6 +71,9 @@ async def submit_benchmark(
         # RAM
         ram_total_mb=hw.ram.total_mb if hw.ram else None,
+        ram_used_mb=hw.ram.used_mb if hw.ram else None,  # NEW
+        ram_free_mb=hw.ram.free_mb if hw.ram else None,  # NEW
+        ram_shared_mb=hw.ram.shared_mb if hw.ram else None,  # NEW
         ram_slots_total=hw.ram.slots_total if hw.ram else None,
         ram_slots_used=hw.ram.slots_used if hw.ram else None,
         ram_ecc=hw.ram.ecc if hw.ram else None,
@@ -128,6 +131,16 @@ async def submit_benchmark(
     if results.global_score is not None:
         global_score = results.global_score
 
+    # Extract network results for easier frontend access
+    network_results = None
+    if results.network:
+        network_results = {
+            "upload_mbps": results.network.upload_mbps if hasattr(results.network, 'upload_mbps') else None,
+            "download_mbps": results.network.download_mbps if hasattr(results.network, 'download_mbps') else None,
+            "ping_ms": results.network.ping_ms if hasattr(results.network, 'ping_ms') else None,
+            "score": results.network.score
+        }
+
     benchmark = Benchmark(
         device_id=device.id,
         hardware_snapshot_id=snapshot.id,
@@ -141,7 +154,8 @@ async def submit_benchmark(
         network_score=results.network.score if results.network else None,
         gpu_score=results.gpu.score if results.gpu else None,
-        details_json=json.dumps(results.dict())
+        details_json=json.dumps(results.dict()),
+        network_results_json=json.dumps(network_results) if network_results else None
     )
     db.add(benchmark)
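The denormalized dict round-trips as plain JSON on the read side. A minimal sketch of decoding it when serving a benchmark; the benchmark_to_dict helper is hypothetical, only the column and field names come from this commit:

import json

def benchmark_to_dict(benchmark):
    """Serialize a Benchmark row, decoding the denormalized network results."""
    # Rows written before migration 002 have network_results_json = NULL,
    # so guard the json.loads call.
    network = json.loads(benchmark.network_results_json) if benchmark.network_results_json else None
    return {
        "id": benchmark.id,
        "network_score": benchmark.network_score,
        # e.g. {"upload_mbps": ..., "download_mbps": ..., "ping_ms": ..., "score": ...}
        "network_results": network,
    }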

View File

@@ -10,5 +10,6 @@ Base = declarative_base()
 from app.models.device import Device  # noqa
 from app.models.hardware_snapshot import HardwareSnapshot  # noqa
 from app.models.benchmark import Benchmark  # noqa
+from app.models.disk_smart import DiskSMART  # noqa
 from app.models.manufacturer_link import ManufacturerLink  # noqa
 from app.models.document import Document  # noqa

View File

@@ -30,6 +30,7 @@ class Benchmark(Base):
     # Details
     details_json = Column(Text, nullable=False)  # JSON object with all raw results
+    network_results_json = Column(Text, nullable=True)  # Network benchmark details (iperf3)
     notes = Column(Text, nullable=True)
 
     # Relationships

View File

@@ -0,0 +1,48 @@
"""
Linux BenchTools - Disk SMART Data Model
"""
from sqlalchemy import Column, Integer, String, Float, DateTime, ForeignKey
from sqlalchemy.orm import relationship
from datetime import datetime

from app.db.base import Base


class DiskSMART(Base):
    """
    SMART health and aging data for storage devices
    """
    __tablename__ = "disk_smart_data"

    id = Column(Integer, primary_key=True, index=True, autoincrement=True)
    hardware_snapshot_id = Column(Integer, ForeignKey("hardware_snapshots.id"), nullable=False, index=True)
    captured_at = Column(DateTime, nullable=False, default=datetime.utcnow)

    # Disk identification
    device_name = Column(String(50), nullable=False)  # e.g., "sda", "nvme0n1"
    model = Column(String(255), nullable=True)
    serial_number = Column(String(100), nullable=True)
    size_gb = Column(Float, nullable=True)
    disk_type = Column(String(20), nullable=True)  # "ssd" or "hdd"
    interface = Column(String(50), nullable=True)  # "sata", "nvme", "usb"

    # SMART Health Status
    health_status = Column(String(20), nullable=True)  # "PASSED", "FAILED", or null
    temperature_celsius = Column(Integer, nullable=True)

    # Aging indicators
    power_on_hours = Column(Integer, nullable=True)
    power_cycle_count = Column(Integer, nullable=True)
    reallocated_sectors = Column(Integer, nullable=True)  # Critical: bad sectors
    pending_sectors = Column(Integer, nullable=True)  # Very critical: imminent failure
    udma_crc_errors = Column(Integer, nullable=True)  # Cable/interface issues

    # SSD-specific
    wear_leveling_count = Column(Integer, nullable=True)  # 0-100 (higher is better)
    total_lbas_written = Column(Float, nullable=True)  # Total data written

    # Relationship
    hardware_snapshot = relationship("HardwareSnapshot", back_populates="disk_smart_data")

    def __repr__(self):
        return f"<DiskSMART(id={self.id}, device='{self.device_name}', health='{self.health_status}')>"
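Rows for this table would be created from parsed SMART output at snapshot time. A minimal sketch of that write path; the store_disk_smart helper, the parsed dict, and the session/snapshot objects are assumptions, not part of this commit — only the column names come from the model above:

def store_disk_smart(session, snapshot, parsed: dict) -> DiskSMART:
    """Hypothetical: persist one parsed SMART record for a given snapshot."""
    record = DiskSMART(
        hardware_snapshot_id=snapshot.id,
        device_name=parsed["device_name"],            # e.g. "nvme0n1"
        model=parsed.get("model"),
        disk_type=parsed.get("disk_type"),            # "ssd" or "hdd"
        health_status=parsed.get("health_status"),    # "PASSED" / "FAILED"
        temperature_celsius=parsed.get("temperature_celsius"),
        power_on_hours=parsed.get("power_on_hours"),
        reallocated_sectors=parsed.get("reallocated_sectors"),
        pending_sectors=parsed.get("pending_sectors"),
    )
    session.add(record)
    return record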

View File

@@ -34,6 +34,9 @@ class HardwareSnapshot(Base):
     # RAM
     ram_total_mb = Column(Integer, nullable=True)
+    ram_used_mb = Column(Integer, nullable=True)  # NEW: RAM in use
+    ram_free_mb = Column(Integer, nullable=True)  # NEW: free RAM
+    ram_shared_mb = Column(Integer, nullable=True)  # NEW: shared RAM (tmpfs/video)
     ram_slots_total = Column(Integer, nullable=True)
     ram_slots_used = Column(Integer, nullable=True)
     ram_ecc = Column(Boolean, nullable=True)
@@ -74,6 +77,7 @@ class HardwareSnapshot(Base):
     # Relationships
     device = relationship("Device", back_populates="hardware_snapshots")
     benchmarks = relationship("Benchmark", back_populates="hardware_snapshot")
+    disk_smart_data = relationship("DiskSMART", back_populates="hardware_snapshot", cascade="all, delete-orphan")
 
     def __repr__(self):
         return f"<HardwareSnapshot(id={self.id}, device_id={self.device_id}, captured_at='{self.captured_at}')>"
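With cascade="all, delete-orphan", SMART rows live and die with their snapshot (the SQL migration below mirrors this with ON DELETE CASCADE). A quick sketch of the intended ORM behavior, assuming a SQLAlchemy session and a placeholder id:

snapshot = session.get(HardwareSnapshot, 42)  # 42 is a placeholder id
session.delete(snapshot)
session.commit()
# Any disk_smart_data rows that pointed at that snapshot are deleted with it.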

View File

@@ -35,6 +35,9 @@ class RAMSlot(BaseModel):
 class RAMInfo(BaseModel):
     """RAM information schema"""
     total_mb: int
+    used_mb: Optional[int] = None  # NEW
+    free_mb: Optional[int] = None  # NEW
+    shared_mb: Optional[int] = None  # NEW
     slots_total: Optional[int] = None
     slots_used: Optional[int] = None
     ecc: Optional[bool] = None
@@ -56,7 +59,7 @@ class StorageDevice(BaseModel):
     name: str
     type: Optional[str] = None
     interface: Optional[str] = None
-    capacity_gb: Optional[int] = None
+    capacity_gb: Optional[float] = None  # Changed from int to float
     vendor: Optional[str] = None
     model: Optional[str] = None
     smart_health: Optional[str] = None
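Because the new RAMInfo fields are Optional with None defaults, payloads from older agents that omit them still validate. A quick sketch (the payload values are made up):

# Old payload without used/free/shared: still accepted, fields default to None.
old = RAMInfo(total_mb=16384)
assert old.used_mb is None

# New payload: the extra RAM stats are carried through.
new = RAMInfo(total_mb=16384, used_mb=9210, free_mb=5120, shared_mb=2054)

# capacity_gb is now a float, so fractional sizes such as 476.94 GB validate.
disk = StorageDevice(name="nvme0n1", capacity_gb=476.94)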

backend/apply_migration.py Executable file
View File

@@ -0,0 +1,75 @@
#!/usr/bin/env python3
"""
Apply SQL migration to existing database
Usage: python apply_migration.py
"""
import sqlite3
import os

# Database path
DB_PATH = os.path.join(os.path.dirname(__file__), "data", "data.db")
MIGRATION_PATH = os.path.join(os.path.dirname(__file__), "migrations", "001_add_ram_stats_and_smart.sql")


def apply_migration():
    """Apply the SQL migration"""
    if not os.path.exists(DB_PATH):
        print(f"❌ Database not found at {DB_PATH}")
        print("   The database will be created automatically on first run.")
        return

    if not os.path.exists(MIGRATION_PATH):
        print(f"❌ Migration file not found at {MIGRATION_PATH}")
        return

    print(f"📂 Database: {DB_PATH}")
    print(f"📄 Migration: {MIGRATION_PATH}")
    print()

    # Read migration SQL
    with open(MIGRATION_PATH, 'r') as f:
        migration_sql = f.read()

    # Connect to database
    conn = sqlite3.connect(DB_PATH)
    cursor = conn.cursor()

    try:
        # Check if columns already exist
        cursor.execute("PRAGMA table_info(hardware_snapshots)")
        columns = [row[1] for row in cursor.fetchall()]

        if 'ram_used_mb' in columns:
            print("⚠️ Migration already applied (ram_used_mb column exists)")

            # Check if disk_smart_data table exists
            cursor.execute("SELECT name FROM sqlite_master WHERE type='table' AND name='disk_smart_data'")
            if cursor.fetchone():
                print("⚠️ disk_smart_data table already exists")

            print("✅ Database is up to date")
            return

        # Apply migration
        print("🔄 Applying migration...")
        cursor.executescript(migration_sql)
        conn.commit()

        print("✅ Migration applied successfully!")
        print()
        print("New columns added to hardware_snapshots:")
        print("   - ram_used_mb")
        print("   - ram_free_mb")
        print("   - ram_shared_mb")
        print()
        print("New table created:")
        print("   - disk_smart_data")

    except sqlite3.Error as e:
        print(f"❌ Error applying migration: {e}")
        conn.rollback()
    finally:
        conn.close()


if __name__ == "__main__":
    apply_migration()
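Both migration scripts repeat the same PRAGMA-based idempotence check before running executescript. A possible shared helper, purely as a sketch; column_exists is not part of this commit:

def column_exists(cursor, table: str, column: str) -> bool:
    """Return True if `column` is already present on `table` (SQLite PRAGMA)."""
    cursor.execute(f"PRAGMA table_info({table})")
    return column in [row[1] for row in cursor.fetchall()]

# e.g. skip migration 001 when its first new column is already there:
#   if column_exists(cursor, "hardware_snapshots", "ram_used_mb"): return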

View File

@@ -0,0 +1,66 @@
#!/usr/bin/env python3
"""
Apply SQL migration 002 to existing database
Migration 002: Add network_results_json column to benchmarks table
Usage: python apply_migration_002.py
"""
import sqlite3
import os

# Database path
DB_PATH = os.path.join(os.path.dirname(__file__), "data", "data.db")
MIGRATION_PATH = os.path.join(os.path.dirname(__file__), "migrations", "002_add_network_results.sql")


def apply_migration():
    """Apply the SQL migration 002"""
    if not os.path.exists(DB_PATH):
        print(f"❌ Database not found at {DB_PATH}")
        print("   The database will be created automatically on first run.")
        return

    if not os.path.exists(MIGRATION_PATH):
        print(f"❌ Migration file not found at {MIGRATION_PATH}")
        return

    print(f"📂 Database: {DB_PATH}")
    print(f"📄 Migration: {MIGRATION_PATH}")
    print()

    # Read migration SQL
    with open(MIGRATION_PATH, 'r') as f:
        migration_sql = f.read()

    # Connect to database
    conn = sqlite3.connect(DB_PATH)
    cursor = conn.cursor()

    try:
        # Check if column already exists
        cursor.execute("PRAGMA table_info(benchmarks)")
        columns = [row[1] for row in cursor.fetchall()]

        if 'network_results_json' in columns:
            print("⚠️ Migration 002 already applied (network_results_json column exists)")
            print("✅ Database is up to date")
            return

        # Apply migration
        print("🔄 Applying migration 002...")
        cursor.executescript(migration_sql)
        conn.commit()

        print("✅ Migration 002 applied successfully!")
        print()
        print("New column added to benchmarks:")
        print("   - network_results_json")

    except sqlite3.Error as e:
        print(f"❌ Error applying migration: {e}")
        conn.rollback()
    finally:
        conn.close()


if __name__ == "__main__":
    apply_migration()
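Existing benchmark rows keep network_results_json = NULL after this migration; only new submissions populate it. If historical rows ever needed the denormalized field, it could be backfilled from details_json, which already stores the full results dict. A rough, hypothetical sketch assuming the "network" key layout produced by results.dict():

import json
import sqlite3

def backfill_network_results(db_path: str) -> None:
    """Hypothetical one-off backfill; not part of this commit."""
    conn = sqlite3.connect(db_path)
    cur = conn.cursor()
    cur.execute("SELECT id, details_json FROM benchmarks WHERE network_results_json IS NULL")
    for bench_id, details_json in cur.fetchall():
        network = json.loads(details_json).get("network")
        if network:
            conn.execute(
                "UPDATE benchmarks SET network_results_json = ? WHERE id = ?",
                (json.dumps(network), bench_id),
            )
    conn.commit()
    conn.close()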

View File

@@ -0,0 +1,43 @@
-- Migration 001: Add RAM statistics and SMART data table
-- Date: 2025-12-07
-- Description: Adds used_mb, free_mb, shared_mb to hardware_snapshots and creates disk_smart_data table
-- Add new RAM columns to hardware_snapshots
ALTER TABLE hardware_snapshots ADD COLUMN ram_used_mb INTEGER;
ALTER TABLE hardware_snapshots ADD COLUMN ram_free_mb INTEGER;
ALTER TABLE hardware_snapshots ADD COLUMN ram_shared_mb INTEGER;
-- Create disk_smart_data table
CREATE TABLE IF NOT EXISTS disk_smart_data (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    hardware_snapshot_id INTEGER NOT NULL,
    captured_at TIMESTAMP NOT NULL DEFAULT CURRENT_TIMESTAMP,

    -- Disk identification
    device_name VARCHAR(50) NOT NULL,
    model VARCHAR(255),
    serial_number VARCHAR(100),
    size_gb REAL,
    disk_type VARCHAR(20),        -- 'ssd' or 'hdd'
    interface VARCHAR(50),        -- 'sata', 'nvme', 'usb'

    -- SMART Health Status
    health_status VARCHAR(20),    -- 'PASSED', 'FAILED', or NULL
    temperature_celsius INTEGER,

    -- Aging indicators
    power_on_hours INTEGER,
    power_cycle_count INTEGER,
    reallocated_sectors INTEGER,  -- Critical: bad sectors
    pending_sectors INTEGER,      -- Very critical: imminent failure
    udma_crc_errors INTEGER,      -- Cable/interface issues

    -- SSD-specific
    wear_leveling_count INTEGER,  -- 0-100 (higher is better)
    total_lbas_written REAL,      -- Total data written

    FOREIGN KEY (hardware_snapshot_id) REFERENCES hardware_snapshots(id) ON DELETE CASCADE
);
CREATE INDEX IF NOT EXISTS idx_disk_smart_hardware_snapshot ON disk_smart_data(hardware_snapshot_id);
CREATE INDEX IF NOT EXISTS idx_disk_smart_device ON disk_smart_data(device_name);
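For a sense of how the new table gets read, a small sketch flagging disks with early failure signs; the query shape and the failing_disks helper are illustrative, only the table and column names come from this migration:

import sqlite3

def failing_disks(db_path: str):
    """Return disks whose SMART counters suggest trouble (illustrative query)."""
    conn = sqlite3.connect(db_path)
    rows = conn.execute(
        """
        SELECT device_name, model, health_status, reallocated_sectors, pending_sectors
        FROM disk_smart_data
        WHERE health_status = 'FAILED'
           OR reallocated_sectors > 0
           OR pending_sectors > 0
        """
    ).fetchall()
    conn.close()
    return rows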

View File

@@ -0,0 +1,4 @@
-- Migration 002: Add network_results_json column to benchmarks table
-- Date: 2025-12-07
ALTER TABLE benchmarks ADD COLUMN network_results_json TEXT;