1
This commit is contained in:
0
backend/Dockerfile
Normal file → Executable file
0
backend/Dockerfile
Normal file → Executable file
0
backend/README.md
Normal file → Executable file
0
backend/README.md
Normal file → Executable file
@@ -9,11 +9,24 @@ from datetime import datetime
|
||||
|
||||
from app.db.session import get_db
|
||||
from app.core.security import verify_token
|
||||
from app.schemas.benchmark import BenchmarkPayload, BenchmarkResponse, BenchmarkDetail, BenchmarkSummary
|
||||
from app.schemas.benchmark import (
|
||||
BenchmarkPayload,
|
||||
BenchmarkResponse,
|
||||
BenchmarkDetail,
|
||||
BenchmarkSummary,
|
||||
BenchmarkUpdate,
|
||||
)
|
||||
from app.models.device import Device
|
||||
from app.models.hardware_snapshot import HardwareSnapshot
|
||||
from app.models.benchmark import Benchmark
|
||||
from app.utils.scoring import calculate_global_score
|
||||
from app.utils.scoring import (
|
||||
calculate_global_score,
|
||||
calculate_cpu_score,
|
||||
calculate_memory_score,
|
||||
calculate_disk_score,
|
||||
calculate_network_score,
|
||||
calculate_gpu_score
|
||||
)
|
||||
|
||||
router = APIRouter()
|
||||
|
||||
@@ -91,7 +104,7 @@ async def submit_benchmark(
|
||||
snapshot.ram_slots_total = hw.ram.slots_total if hw.ram else None
|
||||
snapshot.ram_slots_used = hw.ram.slots_used if hw.ram else None
|
||||
snapshot.ram_ecc = hw.ram.ecc if hw.ram else None
|
||||
snapshot.ram_layout_json = json.dumps([slot.dict() for slot in hw.ram.layout]) if hw.ram and hw.ram.layout else None
|
||||
snapshot.ram_layout_json = json.dumps([slot.model_dump() for slot in hw.ram.layout]) if hw.ram and hw.ram.layout else None
|
||||
|
||||
# GPU
|
||||
snapshot.gpu_summary = f"{hw.gpu.vendor} {hw.gpu.model}" if hw.gpu and hw.gpu.model else None
|
||||
@@ -104,11 +117,12 @@ async def submit_benchmark(
|
||||
|
||||
# Storage
|
||||
snapshot.storage_summary = f"{len(hw.storage.devices)} device(s)" if hw.storage and hw.storage.devices else None
|
||||
snapshot.storage_devices_json = json.dumps([d.dict() for d in hw.storage.devices]) if hw.storage and hw.storage.devices else None
|
||||
snapshot.partitions_json = json.dumps([p.dict() for p in hw.storage.partitions]) if hw.storage and hw.storage.partitions else None
|
||||
snapshot.storage_devices_json = json.dumps([d.model_dump() for d in hw.storage.devices]) if hw.storage and hw.storage.devices else None
|
||||
snapshot.partitions_json = json.dumps([p.model_dump() for p in hw.storage.partitions]) if hw.storage and hw.storage.partitions else None
|
||||
|
||||
# Network
|
||||
snapshot.network_interfaces_json = json.dumps([i.dict() for i in hw.network.interfaces]) if hw.network and hw.network.interfaces else None
|
||||
snapshot.network_interfaces_json = json.dumps([i.model_dump() for i in hw.network.interfaces]) if hw.network and hw.network.interfaces else None
|
||||
snapshot.network_shares_json = json.dumps([share.model_dump() for share in hw.network_shares]) if hw.network_shares else None
|
||||
|
||||
# OS / Motherboard
|
||||
snapshot.os_name = hw.os.name if hw.os else None
|
||||
@@ -116,15 +130,29 @@ async def submit_benchmark(
|
||||
snapshot.kernel_version = hw.os.kernel_version if hw.os else None
|
||||
snapshot.architecture = hw.os.architecture if hw.os else None
|
||||
snapshot.virtualization_type = hw.os.virtualization_type if hw.os else None
|
||||
snapshot.screen_resolution = hw.os.screen_resolution if hw.os else None
|
||||
snapshot.display_server = hw.os.display_server if hw.os else None
|
||||
snapshot.session_type = hw.os.session_type if hw.os else None
|
||||
snapshot.last_boot_time = hw.os.last_boot_time if hw.os else None
|
||||
snapshot.uptime_seconds = hw.os.uptime_seconds if hw.os else None
|
||||
snapshot.battery_percentage = hw.os.battery_percentage if hw.os else None
|
||||
snapshot.battery_status = hw.os.battery_status if hw.os else None
|
||||
snapshot.battery_health = hw.os.battery_health if hw.os else None
|
||||
snapshot.hostname = hw.os.hostname if hw.os else None
|
||||
snapshot.desktop_environment = hw.os.desktop_environment if hw.os else None
|
||||
snapshot.motherboard_vendor = hw.motherboard.vendor if hw.motherboard else None
|
||||
snapshot.motherboard_model = hw.motherboard.model if hw.motherboard else None
|
||||
snapshot.bios_vendor = hw.motherboard.bios_vendor if hw.motherboard and hasattr(hw.motherboard, 'bios_vendor') else None
|
||||
snapshot.bios_version = hw.motherboard.bios_version if hw.motherboard else None
|
||||
snapshot.bios_date = hw.motherboard.bios_date if hw.motherboard else None
|
||||
|
||||
# PCI and USB Devices
|
||||
snapshot.pci_devices_json = json.dumps([d.model_dump(by_alias=True) for d in hw.pci_devices]) if hw.pci_devices else None
|
||||
snapshot.usb_devices_json = json.dumps([d.model_dump() for d in hw.usb_devices]) if hw.usb_devices else None
|
||||
|
||||
# Misc
|
||||
snapshot.sensors_json = json.dumps(hw.sensors.dict()) if hw.sensors else None
|
||||
snapshot.raw_info_json = json.dumps(hw.raw_info.dict()) if hw.raw_info else None
|
||||
snapshot.sensors_json = json.dumps(hw.sensors.model_dump()) if hw.sensors else None
|
||||
snapshot.raw_info_json = json.dumps(hw.raw_info.model_dump()) if hw.raw_info else None
|
||||
|
||||
# Add to session only if it's a new snapshot
|
||||
if not existing_snapshot:
|
||||
@@ -135,18 +163,61 @@ async def submit_benchmark(
|
||||
# 3. Create benchmark
|
||||
results = payload.results
|
||||
|
||||
# Calculate global score if not provided or recalculate
|
||||
global_score = calculate_global_score(
|
||||
cpu_score=results.cpu.score if results.cpu else None,
|
||||
memory_score=results.memory.score if results.memory else None,
|
||||
disk_score=results.disk.score if results.disk else None,
|
||||
network_score=results.network.score if results.network else None,
|
||||
gpu_score=results.gpu.score if results.gpu else None
|
||||
)
|
||||
# Recalculate scores from raw metrics using new formulas
|
||||
cpu_score = None
|
||||
cpu_score_single = None
|
||||
cpu_score_multi = None
|
||||
|
||||
# Use provided global_score if available and valid
|
||||
if results.global_score is not None:
|
||||
global_score = results.global_score
|
||||
if results.cpu:
|
||||
# Use scores from script if available (preferred), otherwise calculate
|
||||
if results.cpu.score_single is not None:
|
||||
cpu_score_single = results.cpu.score_single
|
||||
elif results.cpu.events_per_sec_single:
|
||||
cpu_score_single = calculate_cpu_score(results.cpu.events_per_sec_single)
|
||||
|
||||
if results.cpu.score_multi is not None:
|
||||
cpu_score_multi = results.cpu.score_multi
|
||||
elif results.cpu.events_per_sec_multi:
|
||||
cpu_score_multi = calculate_cpu_score(results.cpu.events_per_sec_multi)
|
||||
|
||||
# Use score from script if available, otherwise calculate
|
||||
if results.cpu.score is not None:
|
||||
cpu_score = results.cpu.score
|
||||
elif results.cpu.events_per_sec_multi:
|
||||
cpu_score = cpu_score_multi
|
||||
elif results.cpu.events_per_sec:
|
||||
cpu_score = calculate_cpu_score(results.cpu.events_per_sec)
|
||||
|
||||
memory_score = None
|
||||
if results.memory and results.memory.throughput_mib_s:
|
||||
memory_score = calculate_memory_score(results.memory.throughput_mib_s)
|
||||
|
||||
disk_score = None
|
||||
if results.disk:
|
||||
disk_score = calculate_disk_score(
|
||||
read_mb_s=results.disk.read_mb_s,
|
||||
write_mb_s=results.disk.write_mb_s
|
||||
)
|
||||
|
||||
network_score = None
|
||||
if results.network:
|
||||
network_score = calculate_network_score(
|
||||
upload_mbps=results.network.upload_mbps,
|
||||
download_mbps=results.network.download_mbps
|
||||
)
|
||||
|
||||
gpu_score = None
|
||||
if results.gpu and results.gpu.glmark2_score:
|
||||
gpu_score = calculate_gpu_score(results.gpu.glmark2_score)
|
||||
|
||||
# Calculate global score from recalculated component scores
|
||||
global_score = calculate_global_score(
|
||||
cpu_score=cpu_score,
|
||||
memory_score=memory_score,
|
||||
disk_score=disk_score,
|
||||
network_score=network_score,
|
||||
gpu_score=gpu_score
|
||||
)
|
||||
|
||||
# Extract network results for easier frontend access
|
||||
network_results = None
|
||||
@@ -155,7 +226,7 @@ async def submit_benchmark(
|
||||
"upload_mbps": results.network.upload_mbps if hasattr(results.network, 'upload_mbps') else None,
|
||||
"download_mbps": results.network.download_mbps if hasattr(results.network, 'download_mbps') else None,
|
||||
"ping_ms": results.network.ping_ms if hasattr(results.network, 'ping_ms') else None,
|
||||
"score": results.network.score
|
||||
"score": network_score
|
||||
}
|
||||
|
||||
benchmark = Benchmark(
|
||||
@@ -165,11 +236,13 @@ async def submit_benchmark(
|
||||
bench_script_version=payload.bench_script_version,
|
||||
|
||||
global_score=global_score,
|
||||
cpu_score=results.cpu.score if results.cpu else None,
|
||||
memory_score=results.memory.score if results.memory else None,
|
||||
disk_score=results.disk.score if results.disk else None,
|
||||
network_score=results.network.score if results.network else None,
|
||||
gpu_score=results.gpu.score if results.gpu else None,
|
||||
cpu_score=cpu_score,
|
||||
cpu_score_single=cpu_score_single,
|
||||
cpu_score_multi=cpu_score_multi,
|
||||
memory_score=memory_score,
|
||||
disk_score=disk_score,
|
||||
network_score=network_score,
|
||||
gpu_score=gpu_score,
|
||||
|
||||
details_json=json.dumps(results.dict()),
|
||||
network_results_json=json.dumps(network_results) if network_results else None
|
||||
@@ -210,9 +283,54 @@ async def get_benchmark(
|
||||
bench_script_version=benchmark.bench_script_version,
|
||||
global_score=benchmark.global_score,
|
||||
cpu_score=benchmark.cpu_score,
|
||||
cpu_score_single=benchmark.cpu_score_single,
|
||||
cpu_score_multi=benchmark.cpu_score_multi,
|
||||
memory_score=benchmark.memory_score,
|
||||
disk_score=benchmark.disk_score,
|
||||
network_score=benchmark.network_score,
|
||||
gpu_score=benchmark.gpu_score,
|
||||
details=json.loads(benchmark.details_json)
|
||||
details=json.loads(benchmark.details_json),
|
||||
notes=benchmark.notes
|
||||
)
|
||||
|
||||
|
||||
@router.patch("/benchmarks/{benchmark_id}", response_model=BenchmarkSummary)
|
||||
async def update_benchmark_entry(
|
||||
benchmark_id: int,
|
||||
payload: BenchmarkUpdate,
|
||||
db: Session = Depends(get_db)
|
||||
):
|
||||
"""
|
||||
Update editable benchmark fields (currently only notes).
|
||||
"""
|
||||
benchmark = db.query(Benchmark).filter(Benchmark.id == benchmark_id).first()
|
||||
|
||||
if not benchmark:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail=f"Benchmark {benchmark_id} not found"
|
||||
)
|
||||
|
||||
update_data = payload.model_dump(exclude_unset=True)
|
||||
|
||||
if "notes" in update_data:
|
||||
benchmark.notes = update_data["notes"]
|
||||
|
||||
db.add(benchmark)
|
||||
db.commit()
|
||||
db.refresh(benchmark)
|
||||
|
||||
return BenchmarkSummary(
|
||||
id=benchmark.id,
|
||||
run_at=benchmark.run_at.isoformat(),
|
||||
global_score=benchmark.global_score,
|
||||
cpu_score=benchmark.cpu_score,
|
||||
cpu_score_single=benchmark.cpu_score_single,
|
||||
cpu_score_multi=benchmark.cpu_score_multi,
|
||||
memory_score=benchmark.memory_score,
|
||||
disk_score=benchmark.disk_score,
|
||||
network_score=benchmark.network_score,
|
||||
gpu_score=benchmark.gpu_score,
|
||||
bench_script_version=benchmark.bench_script_version,
|
||||
notes=benchmark.notes
|
||||
)
|
||||
|
||||
@@ -3,7 +3,7 @@ Linux BenchTools - Devices API
|
||||
"""
|
||||
|
||||
import json
|
||||
from fastapi import APIRouter, Depends, HTTPException, status, Query
|
||||
from fastapi import APIRouter, Depends, HTTPException, status, Query, Response
|
||||
from sqlalchemy.orm import Session
|
||||
from typing import List
|
||||
|
||||
@@ -68,7 +68,8 @@ async def get_devices(
|
||||
disk_score=last_bench.disk_score,
|
||||
network_score=last_bench.network_score,
|
||||
gpu_score=last_bench.gpu_score,
|
||||
bench_script_version=last_bench.bench_script_version
|
||||
bench_script_version=last_bench.bench_script_version,
|
||||
notes=last_bench.notes
|
||||
)
|
||||
|
||||
items.append(DeviceSummary(
|
||||
@@ -80,6 +81,9 @@ async def get_devices(
|
||||
location=device.location,
|
||||
owner=device.owner,
|
||||
tags=device.tags,
|
||||
purchase_store=device.purchase_store,
|
||||
purchase_date=device.purchase_date,
|
||||
purchase_price=device.purchase_price,
|
||||
created_at=device.created_at.isoformat(),
|
||||
updated_at=device.updated_at.isoformat(),
|
||||
last_benchmark=last_bench_summary
|
||||
@@ -125,7 +129,8 @@ async def get_device(
|
||||
disk_score=last_bench.disk_score,
|
||||
network_score=last_bench.network_score,
|
||||
gpu_score=last_bench.gpu_score,
|
||||
bench_script_version=last_bench.bench_script_version
|
||||
bench_script_version=last_bench.bench_script_version,
|
||||
notes=last_bench.notes
|
||||
)
|
||||
|
||||
# Get last hardware snapshot
|
||||
@@ -146,20 +151,40 @@ async def get_device(
|
||||
cpu_base_freq_ghz=last_snapshot.cpu_base_freq_ghz,
|
||||
cpu_max_freq_ghz=last_snapshot.cpu_max_freq_ghz,
|
||||
ram_total_mb=last_snapshot.ram_total_mb,
|
||||
ram_used_mb=last_snapshot.ram_used_mb,
|
||||
ram_free_mb=last_snapshot.ram_free_mb,
|
||||
ram_shared_mb=last_snapshot.ram_shared_mb,
|
||||
ram_slots_total=last_snapshot.ram_slots_total,
|
||||
ram_slots_used=last_snapshot.ram_slots_used,
|
||||
gpu_summary=last_snapshot.gpu_summary,
|
||||
gpu_model=last_snapshot.gpu_model,
|
||||
storage_summary=last_snapshot.storage_summary,
|
||||
storage_devices_json=last_snapshot.storage_devices_json,
|
||||
partitions_json=last_snapshot.partitions_json,
|
||||
network_interfaces_json=last_snapshot.network_interfaces_json,
|
||||
network_shares_json=last_snapshot.network_shares_json,
|
||||
os_name=last_snapshot.os_name,
|
||||
os_version=last_snapshot.os_version,
|
||||
kernel_version=last_snapshot.kernel_version,
|
||||
architecture=last_snapshot.architecture,
|
||||
virtualization_type=last_snapshot.virtualization_type,
|
||||
screen_resolution=last_snapshot.screen_resolution,
|
||||
display_server=last_snapshot.display_server,
|
||||
session_type=last_snapshot.session_type,
|
||||
last_boot_time=last_snapshot.last_boot_time,
|
||||
uptime_seconds=last_snapshot.uptime_seconds,
|
||||
battery_percentage=last_snapshot.battery_percentage,
|
||||
battery_status=last_snapshot.battery_status,
|
||||
battery_health=last_snapshot.battery_health,
|
||||
hostname=last_snapshot.hostname,
|
||||
desktop_environment=last_snapshot.desktop_environment,
|
||||
motherboard_vendor=last_snapshot.motherboard_vendor,
|
||||
motherboard_model=last_snapshot.motherboard_model
|
||||
motherboard_model=last_snapshot.motherboard_model,
|
||||
bios_vendor=last_snapshot.bios_vendor,
|
||||
bios_version=last_snapshot.bios_version,
|
||||
bios_date=last_snapshot.bios_date,
|
||||
pci_devices_json=last_snapshot.pci_devices_json,
|
||||
usb_devices_json=last_snapshot.usb_devices_json
|
||||
)
|
||||
|
||||
# Get documents for this device
|
||||
@@ -189,6 +214,9 @@ async def get_device(
|
||||
location=device.location,
|
||||
owner=device.owner,
|
||||
tags=device.tags,
|
||||
purchase_store=device.purchase_store,
|
||||
purchase_date=device.purchase_date,
|
||||
purchase_price=device.purchase_price,
|
||||
created_at=device.created_at.isoformat(),
|
||||
updated_at=device.updated_at.isoformat(),
|
||||
last_benchmark=last_bench_summary,
|
||||
@@ -232,7 +260,8 @@ async def get_device_benchmarks(
|
||||
disk_score=b.disk_score,
|
||||
network_score=b.network_score,
|
||||
gpu_score=b.gpu_score,
|
||||
bench_script_version=b.bench_script_version
|
||||
bench_script_version=b.bench_script_version,
|
||||
notes=b.notes
|
||||
)
|
||||
for b in benchmarks
|
||||
]
|
||||
@@ -276,3 +305,25 @@ async def update_device(
|
||||
|
||||
# Return updated device (reuse get_device logic)
|
||||
return await get_device(device_id, db)
|
||||
|
||||
|
||||
@router.delete("/devices/{device_id}", status_code=status.HTTP_204_NO_CONTENT)
|
||||
async def delete_device(
|
||||
device_id: int,
|
||||
db: Session = Depends(get_db)
|
||||
):
|
||||
"""
|
||||
Delete a device and all related data
|
||||
"""
|
||||
device = db.query(Device).filter(Device.id == device_id).first()
|
||||
|
||||
if not device:
|
||||
raise HTTPException(
|
||||
status_code=status.HTTP_404_NOT_FOUND,
|
||||
detail=f"Device {device_id} not found"
|
||||
)
|
||||
|
||||
db.delete(device)
|
||||
db.commit()
|
||||
|
||||
return Response(status_code=status.HTTP_204_NO_CONTENT)
|
||||
|
||||
@@ -94,6 +94,16 @@ async def get_stats(db: Session = Depends(get_db)):
|
||||
}
|
||||
|
||||
|
||||
# Config endpoint (for frontend to get API token and server info)
|
||||
@app.get(f"{settings.API_PREFIX}/config")
|
||||
async def get_config():
|
||||
"""Get frontend configuration (API token, server URLs, etc.)"""
|
||||
return {
|
||||
"api_token": settings.API_TOKEN,
|
||||
"iperf_server": "10.0.1.97"
|
||||
}
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import uvicorn
|
||||
uvicorn.run("app.main:app", host="0.0.0.0", port=8007, reload=True)
|
||||
|
||||
@@ -23,6 +23,8 @@ class Benchmark(Base):
|
||||
# Scores
|
||||
global_score = Column(Float, nullable=False)
|
||||
cpu_score = Column(Float, nullable=True)
|
||||
cpu_score_single = Column(Float, nullable=True) # Monocore CPU score
|
||||
cpu_score_multi = Column(Float, nullable=True) # Multicore CPU score
|
||||
memory_score = Column(Float, nullable=True)
|
||||
disk_score = Column(Float, nullable=True)
|
||||
network_score = Column(Float, nullable=True)
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
Linux BenchTools - Device Model
|
||||
"""
|
||||
|
||||
from sqlalchemy import Column, Integer, String, DateTime, Text
|
||||
from sqlalchemy import Column, Integer, String, DateTime, Text, Float
|
||||
from sqlalchemy.orm import relationship
|
||||
from datetime import datetime
|
||||
from app.db.base import Base
|
||||
@@ -22,6 +22,9 @@ class Device(Base):
|
||||
location = Column(String(255), nullable=True)
|
||||
owner = Column(String(100), nullable=True)
|
||||
tags = Column(Text, nullable=True) # JSON or comma-separated
|
||||
purchase_store = Column(String(255), nullable=True)
|
||||
purchase_date = Column(String(50), nullable=True)
|
||||
purchase_price = Column(Float, nullable=True)
|
||||
created_at = Column(DateTime, nullable=False, default=datetime.utcnow)
|
||||
updated_at = Column(DateTime, nullable=False, default=datetime.utcnow, onupdate=datetime.utcnow)
|
||||
|
||||
|
||||
@@ -58,6 +58,7 @@ class HardwareSnapshot(Base):
|
||||
|
||||
# Network
|
||||
network_interfaces_json = Column(Text, nullable=True) # JSON array
|
||||
network_shares_json = Column(Text, nullable=True) # JSON array
|
||||
|
||||
# OS / Motherboard
|
||||
os_name = Column(String(100), nullable=True)
|
||||
@@ -65,11 +66,25 @@ class HardwareSnapshot(Base):
|
||||
kernel_version = Column(String(100), nullable=True)
|
||||
architecture = Column(String(50), nullable=True)
|
||||
virtualization_type = Column(String(50), nullable=True)
|
||||
screen_resolution = Column(String(50), nullable=True)
|
||||
display_server = Column(String(50), nullable=True)
|
||||
session_type = Column(String(50), nullable=True)
|
||||
last_boot_time = Column(String(50), nullable=True)
|
||||
uptime_seconds = Column(Integer, nullable=True)
|
||||
battery_percentage = Column(Float, nullable=True)
|
||||
battery_status = Column(String(50), nullable=True)
|
||||
battery_health = Column(String(50), nullable=True)
|
||||
motherboard_vendor = Column(String(100), nullable=True)
|
||||
motherboard_model = Column(String(255), nullable=True)
|
||||
bios_vendor = Column(String(100), nullable=True)
|
||||
bios_version = Column(String(100), nullable=True)
|
||||
bios_date = Column(String(50), nullable=True)
|
||||
hostname = Column(String(255), nullable=True)
|
||||
desktop_environment = Column(String(100), nullable=True)
|
||||
|
||||
# PCI and USB Devices
|
||||
pci_devices_json = Column(Text, nullable=True) # JSON array
|
||||
usb_devices_json = Column(Text, nullable=True) # JSON array
|
||||
|
||||
# Misc
|
||||
sensors_json = Column(Text, nullable=True) # JSON object
|
||||
|
||||
@@ -10,8 +10,12 @@ from app.schemas.hardware import HardwareData
|
||||
class CPUResults(BaseModel):
|
||||
"""CPU benchmark results"""
|
||||
events_per_sec: Optional[float] = Field(None, ge=0)
|
||||
events_per_sec_single: Optional[float] = Field(None, ge=0) # Monocore
|
||||
events_per_sec_multi: Optional[float] = Field(None, ge=0) # Multicore
|
||||
duration_s: Optional[float] = Field(None, ge=0)
|
||||
score: Optional[float] = Field(None, ge=0, le=10000)
|
||||
score_single: Optional[float] = Field(None, ge=0, le=10000) # Monocore score
|
||||
score_multi: Optional[float] = Field(None, ge=0, le=10000) # Multicore score
|
||||
|
||||
|
||||
class MemoryResults(BaseModel):
|
||||
@@ -82,12 +86,15 @@ class BenchmarkDetail(BaseModel):
|
||||
|
||||
global_score: float
|
||||
cpu_score: Optional[float] = None
|
||||
cpu_score_single: Optional[float] = None
|
||||
cpu_score_multi: Optional[float] = None
|
||||
memory_score: Optional[float] = None
|
||||
disk_score: Optional[float] = None
|
||||
network_score: Optional[float] = None
|
||||
gpu_score: Optional[float] = None
|
||||
|
||||
details: dict # details_json parsed
|
||||
notes: Optional[str] = None
|
||||
|
||||
class Config:
|
||||
from_attributes = True
|
||||
@@ -99,11 +106,19 @@ class BenchmarkSummary(BaseModel):
|
||||
run_at: str
|
||||
global_score: float
|
||||
cpu_score: Optional[float] = None
|
||||
cpu_score_single: Optional[float] = None
|
||||
cpu_score_multi: Optional[float] = None
|
||||
memory_score: Optional[float] = None
|
||||
disk_score: Optional[float] = None
|
||||
network_score: Optional[float] = None
|
||||
gpu_score: Optional[float] = None
|
||||
bench_script_version: Optional[str] = None
|
||||
notes: Optional[str] = None
|
||||
|
||||
class Config:
|
||||
from_attributes = True
|
||||
|
||||
|
||||
class BenchmarkUpdate(BaseModel):
|
||||
"""Fields allowed when updating a benchmark"""
|
||||
notes: Optional[str] = None
|
||||
|
||||
@@ -18,6 +18,9 @@ class DeviceBase(BaseModel):
|
||||
location: Optional[str] = None
|
||||
owner: Optional[str] = None
|
||||
tags: Optional[str] = None
|
||||
purchase_store: Optional[str] = None
|
||||
purchase_date: Optional[str] = None
|
||||
purchase_price: Optional[float] = None
|
||||
|
||||
|
||||
class DeviceCreate(DeviceBase):
|
||||
@@ -34,6 +37,9 @@ class DeviceUpdate(BaseModel):
|
||||
location: Optional[str] = None
|
||||
owner: Optional[str] = None
|
||||
tags: Optional[str] = None
|
||||
purchase_store: Optional[str] = None
|
||||
purchase_date: Optional[str] = None
|
||||
purchase_price: Optional[float] = None
|
||||
|
||||
|
||||
class DeviceSummary(DeviceBase):
|
||||
|
||||
@@ -2,7 +2,7 @@
|
||||
Linux BenchTools - Hardware Schemas
|
||||
"""
|
||||
|
||||
from pydantic import BaseModel
|
||||
from pydantic import BaseModel, ConfigDict, Field
|
||||
from typing import Optional, List
|
||||
|
||||
|
||||
@@ -73,6 +73,7 @@ class Partition(BaseModel):
|
||||
fs_type: Optional[str] = None
|
||||
used_gb: Optional[float] = None
|
||||
total_gb: Optional[float] = None
|
||||
free_gb: Optional[float] = None
|
||||
|
||||
|
||||
class StorageInfo(BaseModel):
|
||||
@@ -89,6 +90,7 @@ class NetworkInterface(BaseModel):
|
||||
ip: Optional[str] = None
|
||||
speed_mbps: Optional[int] = None
|
||||
driver: Optional[str] = None
|
||||
ssid: Optional[str] = None
|
||||
wake_on_lan: Optional[bool] = None
|
||||
|
||||
|
||||
@@ -97,6 +99,18 @@ class NetworkInfo(BaseModel):
|
||||
interfaces: Optional[List[NetworkInterface]] = None
|
||||
|
||||
|
||||
class NetworkShare(BaseModel):
|
||||
"""Mounted network share information"""
|
||||
protocol: Optional[str] = None
|
||||
source: Optional[str] = None
|
||||
mount_point: Optional[str] = None
|
||||
fs_type: Optional[str] = None
|
||||
options: Optional[str] = None
|
||||
total_gb: Optional[float] = None
|
||||
used_gb: Optional[float] = None
|
||||
free_gb: Optional[float] = None
|
||||
|
||||
|
||||
class MotherboardInfo(BaseModel):
|
||||
"""Motherboard information schema"""
|
||||
vendor: Optional[str] = None
|
||||
@@ -113,6 +127,34 @@ class OSInfo(BaseModel):
|
||||
kernel_version: Optional[str] = None
|
||||
architecture: Optional[str] = None
|
||||
virtualization_type: Optional[str] = None
|
||||
hostname: Optional[str] = None
|
||||
desktop_environment: Optional[str] = None
|
||||
session_type: Optional[str] = None
|
||||
display_server: Optional[str] = None
|
||||
screen_resolution: Optional[str] = None
|
||||
last_boot_time: Optional[str] = None
|
||||
uptime_seconds: Optional[int] = None
|
||||
battery_percentage: Optional[float] = None
|
||||
battery_status: Optional[str] = None
|
||||
battery_health: Optional[str] = None
|
||||
|
||||
|
||||
class PCIDevice(BaseModel):
|
||||
"""PCI device information"""
|
||||
model_config = ConfigDict(populate_by_name=True)
|
||||
slot: str
|
||||
class_: Optional[str] = Field(default=None, alias="class")
|
||||
vendor: Optional[str] = None
|
||||
device: Optional[str] = None
|
||||
|
||||
|
||||
class USBDevice(BaseModel):
|
||||
"""USB device information"""
|
||||
bus: str
|
||||
device: str
|
||||
vendor_id: Optional[str] = None
|
||||
product_id: Optional[str] = None
|
||||
name: Optional[str] = None
|
||||
|
||||
|
||||
class SensorsInfo(BaseModel):
|
||||
@@ -135,10 +177,13 @@ class HardwareData(BaseModel):
|
||||
gpu: Optional[GPUInfo] = None
|
||||
storage: Optional[StorageInfo] = None
|
||||
network: Optional[NetworkInfo] = None
|
||||
network_shares: Optional[List[NetworkShare]] = None
|
||||
motherboard: Optional[MotherboardInfo] = None
|
||||
os: Optional[OSInfo] = None
|
||||
sensors: Optional[SensorsInfo] = None
|
||||
raw_info: Optional[RawInfo] = None
|
||||
pci_devices: Optional[List[PCIDevice]] = None
|
||||
usb_devices: Optional[List[USBDevice]] = None
|
||||
|
||||
|
||||
class HardwareSnapshotResponse(BaseModel):
|
||||
@@ -157,6 +202,9 @@ class HardwareSnapshotResponse(BaseModel):
|
||||
|
||||
# RAM
|
||||
ram_total_mb: Optional[int] = None
|
||||
ram_used_mb: Optional[int] = None
|
||||
ram_free_mb: Optional[int] = None
|
||||
ram_shared_mb: Optional[int] = None
|
||||
ram_slots_total: Optional[int] = None
|
||||
ram_slots_used: Optional[int] = None
|
||||
|
||||
@@ -167,18 +215,37 @@ class HardwareSnapshotResponse(BaseModel):
|
||||
# Storage
|
||||
storage_summary: Optional[str] = None
|
||||
storage_devices_json: Optional[str] = None
|
||||
partitions_json: Optional[str] = None
|
||||
|
||||
# Network
|
||||
network_interfaces_json: Optional[str] = None
|
||||
network_shares_json: Optional[str] = None
|
||||
|
||||
# OS / Motherboard
|
||||
# OS / Motherboard / BIOS
|
||||
os_name: Optional[str] = None
|
||||
os_version: Optional[str] = None
|
||||
kernel_version: Optional[str] = None
|
||||
architecture: Optional[str] = None
|
||||
virtualization_type: Optional[str] = None
|
||||
hostname: Optional[str] = None
|
||||
desktop_environment: Optional[str] = None
|
||||
screen_resolution: Optional[str] = None
|
||||
display_server: Optional[str] = None
|
||||
session_type: Optional[str] = None
|
||||
last_boot_time: Optional[str] = None
|
||||
uptime_seconds: Optional[int] = None
|
||||
battery_percentage: Optional[float] = None
|
||||
battery_status: Optional[str] = None
|
||||
battery_health: Optional[str] = None
|
||||
motherboard_vendor: Optional[str] = None
|
||||
motherboard_model: Optional[str] = None
|
||||
bios_vendor: Optional[str] = None
|
||||
bios_version: Optional[str] = None
|
||||
bios_date: Optional[str] = None
|
||||
|
||||
# PCI and USB Devices
|
||||
pci_devices_json: Optional[str] = None
|
||||
usb_devices_json: Optional[str] = None
|
||||
|
||||
class Config:
|
||||
from_attributes = True
|
||||
|
||||
@@ -1,10 +1,103 @@
|
||||
"""
|
||||
Linux BenchTools - Scoring Utilities
|
||||
|
||||
New normalized scoring formulas (0-100 scale):
|
||||
- CPU: events_per_second / 100
|
||||
- Memory: throughput_mib_s / 1000
|
||||
- Disk: (read_mb_s + write_mb_s) / 20
|
||||
- Network: (upload_mbps + download_mbps) / 20
|
||||
- GPU: glmark2_score / 50
|
||||
"""
|
||||
|
||||
from app.core.config import settings
|
||||
|
||||
|
||||
def calculate_cpu_score(events_per_second: float = None) -> float:
|
||||
"""
|
||||
Calculate CPU score from sysbench events per second.
|
||||
|
||||
Formula: events_per_second / 100
|
||||
Range: 0-100 (capped)
|
||||
|
||||
Example: 3409.87 events/s → 34.1 score
|
||||
"""
|
||||
if events_per_second is None or events_per_second <= 0:
|
||||
return 0.0
|
||||
|
||||
score = events_per_second / 100.0
|
||||
return min(100.0, max(0.0, score))
|
||||
|
||||
|
||||
def calculate_memory_score(throughput_mib_s: float = None) -> float:
|
||||
"""
|
||||
Calculate Memory score from sysbench throughput.
|
||||
|
||||
Formula: throughput_mib_s / 1000
|
||||
Range: 0-100 (capped)
|
||||
|
||||
Example: 13806.03 MiB/s → 13.8 score
|
||||
"""
|
||||
if throughput_mib_s is None or throughput_mib_s <= 0:
|
||||
return 0.0
|
||||
|
||||
score = throughput_mib_s / 1000.0
|
||||
return min(100.0, max(0.0, score))
|
||||
|
||||
|
||||
def calculate_disk_score(read_mb_s: float = None, write_mb_s: float = None) -> float:
|
||||
"""
|
||||
Calculate Disk score from fio read/write bandwidth.
|
||||
|
||||
Formula: (read_mb_s + write_mb_s) / 20
|
||||
Range: 0-100 (capped)
|
||||
|
||||
Example: (695 + 695) MB/s → 69.5 score
|
||||
"""
|
||||
if read_mb_s is None and write_mb_s is None:
|
||||
return 0.0
|
||||
|
||||
read = read_mb_s if read_mb_s is not None and read_mb_s > 0 else 0.0
|
||||
write = write_mb_s if write_mb_s is not None and write_mb_s > 0 else 0.0
|
||||
|
||||
score = (read + write) / 20.0
|
||||
return min(100.0, max(0.0, score))
|
||||
|
||||
|
||||
def calculate_network_score(upload_mbps: float = None, download_mbps: float = None) -> float:
|
||||
"""
|
||||
Calculate Network score from iperf3 upload/download speeds.
|
||||
|
||||
Formula: (upload_mbps + download_mbps) / 20
|
||||
Range: 0-100 (capped)
|
||||
|
||||
Example: (484.67 + 390.13) Mbps → 43.7 score
|
||||
"""
|
||||
if upload_mbps is None and download_mbps is None:
|
||||
return 0.0
|
||||
|
||||
upload = upload_mbps if upload_mbps is not None and upload_mbps > 0 else 0.0
|
||||
download = download_mbps if download_mbps is not None and download_mbps > 0 else 0.0
|
||||
|
||||
score = (upload + download) / 20.0
|
||||
return min(100.0, max(0.0, score))
|
||||
|
||||
|
||||
def calculate_gpu_score(glmark2_score: int = None) -> float:
|
||||
"""
|
||||
Calculate GPU score from glmark2 benchmark.
|
||||
|
||||
Formula: glmark2_score / 50
|
||||
Range: 0-100 (capped)
|
||||
|
||||
Example: 2500 glmark2 → 50.0 score
|
||||
"""
|
||||
if glmark2_score is None or glmark2_score <= 0:
|
||||
return 0.0
|
||||
|
||||
score = glmark2_score / 50.0
|
||||
return min(100.0, max(0.0, score))
|
||||
|
||||
|
||||
def calculate_global_score(
|
||||
cpu_score: float = None,
|
||||
memory_score: float = None,
|
||||
|
||||
0
backend/apply_migration.py
Executable file → Normal file
0
backend/apply_migration.py
Executable file → Normal file
115
backend/apply_migration_003.py
Normal file
115
backend/apply_migration_003.py
Normal file
@@ -0,0 +1,115 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Apply SQL migration 003 to existing database
|
||||
Migration 003: Add cpu_score_single and cpu_score_multi columns to benchmarks table
|
||||
Usage: python apply_migration_003.py
|
||||
"""
|
||||
|
||||
import os
|
||||
import sqlite3
|
||||
from typing import Dict, List, Tuple
|
||||
|
||||
# Database path
|
||||
# Absolute path to the SQLite database, resolved relative to this script so
# it works regardless of the current working directory.
DB_PATH = os.path.join(os.path.dirname(__file__), "data", "data.db")
# SQL file holding migration 003's ALTER TABLE statements.
MIGRATION_PATH = os.path.join(
    os.path.dirname(__file__), "migrations", "003_add_cpu_scores.sql"
)

# Columns added by this migration: (column_name, human description).
COLUMNS_TO_ADD: List[Tuple[str, str]] = [
    ("cpu_score_single", "Score CPU monocœur"),
    ("cpu_score_multi", "Score CPU multicœur"),
]
|
||||
|
||||
|
||||
def _load_statements(migration_path=None, columns=None) -> Dict[str, str]:
    """Load SQL statements mapped by column name from the migration file.

    Args:
        migration_path: Path to the .sql migration file. Defaults to
            MIGRATION_PATH (backward compatible with the original no-arg call).
        columns: Iterable of (column_name, description) pairs used to match
            statements. Defaults to COLUMNS_TO_ADD.

    Returns:
        Dict mapping each matched column name to its SQL statement
        (without the trailing semicolon).
    """
    # Defaults resolved at call time so the module constants stay the single
    # source of truth while the function remains testable with explicit args.
    if migration_path is None:
        migration_path = MIGRATION_PATH
    if columns is None:
        columns = COLUMNS_TO_ADD

    with open(migration_path, "r", encoding="utf-8") as f:
        raw_sql = f.read()

    # Remove comment-only and blank lines so splitting on ";" yields clean
    # executable statements.
    filtered_lines = []
    for line in raw_sql.splitlines():
        stripped = line.strip()
        if not stripped or stripped.startswith("--"):
            continue
        filtered_lines.append(line)

    statements: Dict[str, str] = {}
    for statement in "\n".join(filtered_lines).split(";"):
        stmt = statement.strip()
        if not stmt:
            continue
        # First column name mentioned in the statement claims it.
        for column_name, _ in columns:
            if column_name in stmt:
                statements[column_name] = stmt
                break

    return statements
|
||||
|
||||
|
||||
def apply_migration():
    """Apply the SQL migration 003.

    Adds the cpu_score_single and cpu_score_multi columns to the benchmarks
    table. Idempotent: columns that already exist are skipped, so the script
    can be re-run safely, including after a partial application.
    """

    # Both the database and the migration file must exist before we start.
    if not os.path.exists(DB_PATH):
        print(f"❌ Database not found at {DB_PATH}")
        print(" The database will be created automatically on first run.")
        return

    if not os.path.exists(MIGRATION_PATH):
        print(f"❌ Migration file not found at {MIGRATION_PATH}")
        return

    print(f"📂 Database: {DB_PATH}")
    print(f"📄 Migration: {MIGRATION_PATH}")
    print()

    conn = sqlite3.connect(DB_PATH)
    cursor = conn.cursor()

    try:
        # PRAGMA table_info returns one row per column; row[1] is the name.
        cursor.execute("PRAGMA table_info(benchmarks)")
        existing_columns = {row[1] for row in cursor.fetchall()}

        missing_columns = [
            col for col, _ in COLUMNS_TO_ADD if col not in existing_columns
        ]
        if not missing_columns:
            print("⚠️ Migration 003 already applied (CPU score columns exist)")
            print("✅ Database is up to date")
            return

        # Map column name -> ALTER TABLE statement parsed from the .sql file.
        statements = _load_statements()

        print("🔄 Applying migration 003...")
        for column_name, description in COLUMNS_TO_ADD:
            if column_name not in missing_columns:
                print(f"⏩ Column {column_name} already present, skipping")
                continue

            statement = statements.get(column_name)
            if not statement:
                # COLUMNS_TO_ADD and the migration file are out of sync.
                raise RuntimeError(
                    f"No SQL statement found for column '{column_name}' in migration file"
                )

            print(f"➕ Adding {description} ({column_name})...")
            cursor.execute(statement)

        conn.commit()

        print("✅ Migration 003 applied successfully!")
        print("New columns added to benchmarks table:")
        for column_name, description in COLUMNS_TO_ADD:
            if column_name in missing_columns:
                print(f" - {column_name}: {description}")

    except (sqlite3.Error, RuntimeError) as e:
        # Roll back any ALTERs applied during this run before bailing out.
        print(f"❌ Error applying migration: {e}")
        conn.rollback()
    finally:
        conn.close()
|
||||
|
||||
|
||||
# Script entry point: run the migration when executed directly.
if __name__ == "__main__":
    apply_migration()
|
||||
107
backend/apply_migration_004.py
Normal file
107
backend/apply_migration_004.py
Normal file
@@ -0,0 +1,107 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Apply SQL migration 004 to existing database.
|
||||
Migration 004: Add hostname/desktop environment/PCI/USB columns to hardware_snapshots.
|
||||
Usage: python apply_migration_004.py
|
||||
"""
|
||||
|
||||
import os
|
||||
import sqlite3
|
||||
from typing import Dict, List, Tuple
|
||||
|
||||
# Database path
|
||||
# Paths resolved relative to this script so it works from any CWD.
DB_PATH = os.path.join(os.path.dirname(__file__), "data", "data.db")
# SQL file holding migration 004's ALTER TABLE statements.
MIGRATION_PATH = os.path.join(
    os.path.dirname(__file__), "migrations", "004_add_snapshot_details.sql"
)

# Columns added by this migration: (column_name, human description).
COLUMNS_TO_ADD: List[Tuple[str, str]] = [
    ("hostname", "Nom d'hôte du snapshot"),
    ("desktop_environment", "Environnement de bureau détecté"),
    ("pci_devices_json", "Liste PCI en JSON"),
    ("usb_devices_json", "Liste USB en JSON"),
]
|
||||
|
||||
|
||||
def _load_statements(migration_path=None, columns=None) -> Dict[str, str]:
    """Return ALTER TABLE statements indexed by column name.

    Args:
        migration_path: Path to the .sql migration file. Defaults to
            MIGRATION_PATH (backward compatible with the original no-arg call).
        columns: Iterable of (column_name, description) pairs used to match
            statements. Defaults to COLUMNS_TO_ADD.

    Returns:
        Dict mapping each matched column name to its SQL statement
        (without the trailing semicolon).
    """
    # Defaults resolved at call time so the module constants stay the single
    # source of truth while the function remains testable with explicit args.
    if migration_path is None:
        migration_path = MIGRATION_PATH
    if columns is None:
        columns = COLUMNS_TO_ADD

    with open(migration_path, "r", encoding="utf-8") as f:
        # Drop comment-only and blank lines so splitting on ";" yields clean
        # executable statements.
        filtered = []
        for line in f:
            stripped = line.strip()
            if not stripped or stripped.startswith("--"):
                continue
            filtered.append(line.rstrip("\n"))

    statements: Dict[str, str] = {}
    for statement in "\n".join(filtered).split(";"):
        stmt = statement.strip()
        if not stmt:
            continue
        # First column name mentioned in the statement claims it.
        for column, _ in columns:
            if column in stmt:
                statements[column] = stmt
                break
    return statements
|
||||
|
||||
|
||||
def apply_migration():
    """Apply the SQL migration 004.

    Adds hostname, desktop-environment and PCI/USB device-list columns to the
    hardware_snapshots table. Idempotent: columns that already exist are
    skipped, so the script can be re-run safely.
    """
    # Both the database and the migration file must exist before we start.
    if not os.path.exists(DB_PATH):
        print(f"❌ Database not found at {DB_PATH}")
        print(" The database will be created automatically on first run.")
        return

    if not os.path.exists(MIGRATION_PATH):
        print(f"❌ Migration file not found at {MIGRATION_PATH}")
        return

    print(f"📂 Database: {DB_PATH}")
    print(f"📄 Migration: {MIGRATION_PATH}")
    print()

    conn = sqlite3.connect(DB_PATH)
    cursor = conn.cursor()

    try:
        # PRAGMA table_info returns one row per column; row[1] is the name.
        cursor.execute("PRAGMA table_info(hardware_snapshots)")
        existing_columns = {row[1] for row in cursor.fetchall()}

        missing = [col for col, _ in COLUMNS_TO_ADD if col not in existing_columns]
        if not missing:
            print("⚠️ Migration 004 already applied (columns exist)")
            print("✅ Database is up to date")
            return

        # Map column name -> ALTER TABLE statement parsed from the .sql file.
        statements = _load_statements()

        print("🔄 Applying migration 004...")
        for column, description in COLUMNS_TO_ADD:
            if column not in missing:
                print(f"⏩ Column {column} already present, skipping")
                continue
            statement = statements.get(column)
            if not statement:
                # COLUMNS_TO_ADD and the migration file are out of sync.
                raise RuntimeError(
                    f"No SQL statement found for column '{column}' in migration file"
                )
            print(f"➕ Adding {description} ({column})...")
            cursor.execute(statement)

        conn.commit()

        print("✅ Migration 004 applied successfully!")
        print("New columns added to hardware_snapshots:")
        for column, description in COLUMNS_TO_ADD:
            if column in missing:
                print(f" - {column}: {description}")

    except (sqlite3.Error, RuntimeError) as exc:
        # Roll back any ALTERs applied during this run before bailing out.
        print(f"❌ Error applying migration: {exc}")
        conn.rollback()
    finally:
        conn.close()
|
||||
|
||||
|
||||
# Script entry point: run the migration when executed directly.
if __name__ == "__main__":
    apply_migration()
|
||||
112
backend/apply_migration_005.py
Normal file
112
backend/apply_migration_005.py
Normal file
@@ -0,0 +1,112 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Apply SQL migration 005 to existing database.
|
||||
Migration 005: Add OS/display/battery metadata columns to hardware_snapshots.
|
||||
Usage: python apply_migration_005.py
|
||||
"""
|
||||
|
||||
import os
|
||||
import sqlite3
|
||||
from typing import Dict, List, Tuple
|
||||
|
||||
# Paths resolved relative to this script so it works from any CWD.
DB_PATH = os.path.join(os.path.dirname(__file__), "data", "data.db")
# SQL file holding migration 005's ALTER TABLE statements.
MIGRATION_PATH = os.path.join(
    os.path.dirname(__file__), "migrations", "005_add_os_display_and_battery.sql"
)

# Columns added by this migration: (column_name, human description).
COLUMNS_TO_ADD: List[Tuple[str, str]] = [
    ("screen_resolution", "Résolution écran"),
    ("display_server", "Serveur d'affichage"),
    ("session_type", "Type de session"),
    ("last_boot_time", "Dernier boot"),
    ("uptime_seconds", "Uptime en secondes"),
    ("battery_percentage", "Pourcentage batterie"),
    ("battery_status", "Statut batterie"),
    ("battery_health", "Santé batterie"),
]
|
||||
|
||||
|
||||
def _load_statements(migration_path=None, columns=None) -> Dict[str, str]:
    """Load ALTER statements from migration file keyed by column name.

    Args:
        migration_path: Path to the .sql migration file. Defaults to
            MIGRATION_PATH (backward compatible with the original no-arg call).
        columns: Iterable of (column_name, description) pairs used to match
            statements. Defaults to COLUMNS_TO_ADD.

    Returns:
        Dict mapping each matched column name to its SQL statement
        (without the trailing semicolon).
    """
    # Defaults resolved at call time so the module constants stay the single
    # source of truth while the function remains testable with explicit args.
    if migration_path is None:
        migration_path = MIGRATION_PATH
    if columns is None:
        columns = COLUMNS_TO_ADD

    with open(migration_path, "r", encoding="utf-8") as fh:
        # Drop comment-only and blank lines so splitting on ";" yields clean
        # executable statements.
        filtered = []
        for line in fh:
            stripped = line.strip()
            if not stripped or stripped.startswith("--"):
                continue
            filtered.append(line.rstrip("\n"))

    statements: Dict[str, str] = {}
    for statement in "\n".join(filtered).split(";"):
        stmt = statement.strip()
        if not stmt:
            continue
        # First column name mentioned in the statement claims it.
        for column, _ in columns:
            if column in stmt:
                statements[column] = stmt
                break
    return statements
|
||||
|
||||
|
||||
def apply_migration():
    """Apply migration 005 to the SQLite database.

    Adds OS/display/battery metadata columns to the hardware_snapshots table.
    Idempotent: columns that already exist are skipped, so the script can be
    re-run safely.
    """
    # Both the database and the migration file must exist before we start.
    if not os.path.exists(DB_PATH):
        print(f"❌ Database not found at {DB_PATH}")
        print(" The database will be created automatically on first run.")
        return

    if not os.path.exists(MIGRATION_PATH):
        print(f"❌ Migration file not found at {MIGRATION_PATH}")
        return

    print(f"📂 Database: {DB_PATH}")
    print(f"📄 Migration: {MIGRATION_PATH}")
    print()

    conn = sqlite3.connect(DB_PATH)
    cursor = conn.cursor()

    try:
        # PRAGMA table_info returns one row per column; row[1] is the name.
        cursor.execute("PRAGMA table_info(hardware_snapshots)")
        existing_columns = {row[1] for row in cursor.fetchall()}

        missing = [col for col, _ in COLUMNS_TO_ADD if col not in existing_columns]
        if not missing:
            print("⚠️ Migration 005 already applied (columns exist)")
            print("✅ Database is up to date")
            return

        # Map column name -> ALTER TABLE statement parsed from the .sql file.
        statements = _load_statements()

        print("🔄 Applying migration 005...")
        for column, description in COLUMNS_TO_ADD:
            if column not in missing:
                print(f"⏩ Column {column} already present, skipping")
                continue

            statement = statements.get(column)
            if not statement:
                # COLUMNS_TO_ADD and the migration file are out of sync.
                raise RuntimeError(
                    f"No SQL statement found for column '{column}' in migration file"
                )

            print(f"➕ Adding {description} ({column})...")
            cursor.execute(statement)

        conn.commit()

        print("✅ Migration 005 applied successfully!")
        print("New columns added to hardware_snapshots:")
        for column, description in COLUMNS_TO_ADD:
            if column in missing:
                print(f" - {column}: {description}")

    except (sqlite3.Error, RuntimeError) as exc:
        # Roll back any ALTERs applied during this run before bailing out.
        print(f"❌ Error applying migration: {exc}")
        conn.rollback()
    finally:
        conn.close()
|
||||
|
||||
|
||||
# Script entry point: run the migration when executed directly.
if __name__ == "__main__":
    apply_migration()
|
||||
61
backend/apply_migration_006.py
Normal file
61
backend/apply_migration_006.py
Normal file
@@ -0,0 +1,61 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Apply SQL migration 006 to existing database
|
||||
Migration 006: Add purchase metadata fields to devices table
|
||||
"""
|
||||
|
||||
import os
|
||||
import sqlite3
|
||||
|
||||
# Paths resolved relative to this script so it works from any CWD.
DB_PATH = os.path.join(os.path.dirname(__file__), "data", "data.db")
# SQL file holding migration 006's ALTER TABLE statements.
MIGRATION_PATH = os.path.join(
    os.path.dirname(__file__), "migrations", "006_add_purchase_fields.sql"
)

# Purchase metadata columns this migration adds to the devices table.
COLUMNS = ["purchase_store", "purchase_date", "purchase_price"]
|
||||
|
||||
|
||||
def apply_migration():
    """Apply SQL migration 006 (purchase metadata columns on devices).

    Idempotent, consistent with migrations 003-005: only the ALTER statements
    for columns that are still missing are executed, so a partially applied
    migration can be completed safely on a re-run.
    """
    # Both the database and the migration file must exist before we start.
    if not os.path.exists(DB_PATH):
        print(f"❌ Database not found at {DB_PATH}")
        print(" It will be created automatically on first backend start.")
        return

    if not os.path.exists(MIGRATION_PATH):
        print(f"❌ Migration file not found at {MIGRATION_PATH}")
        return

    conn = sqlite3.connect(DB_PATH)
    cursor = conn.cursor()

    try:
        # PRAGMA table_info returns one row per column; row[1] is the name.
        cursor.execute("PRAGMA table_info(devices)")
        existing_columns = {row[1] for row in cursor.fetchall()}

        missing = [col for col in COLUMNS if col not in existing_columns]
        if not missing:
            print("⚠️ Migration 006 already applied (purchase columns exist)")
            return

        print("🔄 Applying migration 006 (purchase fields)...")
        with open(MIGRATION_PATH, "r", encoding="utf-8") as f:
            statements = [
                stmt.strip()
                for stmt in f.read().split(";")
                if stmt.strip()
            ]

        for stmt in statements:
            # Bug fix: previously every statement ran unconditionally, so a
            # partially applied migration aborted with "duplicate column name"
            # and rolled back. Only execute statements that add a column which
            # is still missing.
            if any(col in stmt for col in missing):
                cursor.execute(stmt)

        conn.commit()
        print("✅ Migration 006 applied successfully.")
    except sqlite3.Error as exc:
        conn.rollback()
        print(f"❌ Error applying migration 006: {exc}")
    finally:
        conn.close()
|
||||
|
||||
|
||||
# Script entry point: run the migration when executed directly.
if __name__ == "__main__":
    apply_migration()
|
||||
0
backend/migrations/001_add_ram_stats_and_smart.sql
Normal file → Executable file
0
backend/migrations/001_add_ram_stats_and_smart.sql
Normal file → Executable file
0
backend/migrations/002_add_network_results.sql
Normal file → Executable file
0
backend/migrations/002_add_network_results.sql
Normal file → Executable file
5
backend/migrations/003_add_cpu_scores.sql
Executable file
5
backend/migrations/003_add_cpu_scores.sql
Executable file
@@ -0,0 +1,5 @@
|
||||
-- Migration 003: Add CPU subscore columns to benchmarks table
-- Date: 2025-12-15
-- Applied by backend/apply_migration_003.py; each statement below is matched
-- to its column by name, so keep one ALTER per column.

ALTER TABLE benchmarks ADD COLUMN cpu_score_single FLOAT;
ALTER TABLE benchmarks ADD COLUMN cpu_score_multi FLOAT;
|
||||
7
backend/migrations/004_add_snapshot_details.sql
Executable file
7
backend/migrations/004_add_snapshot_details.sql
Executable file
@@ -0,0 +1,7 @@
|
||||
-- Migration 004: Add extra hardware snapshot metadata columns
-- Date: 2025-12-17
-- Applied by backend/apply_migration_004.py; each statement below is matched
-- to its column by name, so keep one ALTER per column.

ALTER TABLE hardware_snapshots ADD COLUMN hostname VARCHAR(255);
ALTER TABLE hardware_snapshots ADD COLUMN desktop_environment VARCHAR(100);
ALTER TABLE hardware_snapshots ADD COLUMN pci_devices_json TEXT;
ALTER TABLE hardware_snapshots ADD COLUMN usb_devices_json TEXT;
|
||||
11
backend/migrations/005_add_os_display_and_battery.sql
Executable file
11
backend/migrations/005_add_os_display_and_battery.sql
Executable file
@@ -0,0 +1,11 @@
|
||||
-- Migration 005: Extend hardware_snapshots with OS/display/battery metadata
-- Date: 2025-12-17
-- Applied by backend/apply_migration_005.py; each statement below is matched
-- to its column by name, so keep one ALTER per column.

ALTER TABLE hardware_snapshots ADD COLUMN screen_resolution VARCHAR(50);
ALTER TABLE hardware_snapshots ADD COLUMN display_server VARCHAR(50);
ALTER TABLE hardware_snapshots ADD COLUMN session_type VARCHAR(50);
ALTER TABLE hardware_snapshots ADD COLUMN last_boot_time VARCHAR(50);
ALTER TABLE hardware_snapshots ADD COLUMN uptime_seconds INTEGER;
ALTER TABLE hardware_snapshots ADD COLUMN battery_percentage FLOAT;
ALTER TABLE hardware_snapshots ADD COLUMN battery_status VARCHAR(50);
ALTER TABLE hardware_snapshots ADD COLUMN battery_health VARCHAR(50);
|
||||
4
backend/migrations/006_add_purchase_fields.sql
Executable file
4
backend/migrations/006_add_purchase_fields.sql
Executable file
@@ -0,0 +1,4 @@
|
||||
-- Migration 006: Add purchase metadata columns to devices table
-- Applied by backend/apply_migration_006.py
ALTER TABLE devices ADD COLUMN purchase_store TEXT;
ALTER TABLE devices ADD COLUMN purchase_date TEXT;
ALTER TABLE devices ADD COLUMN purchase_price REAL;
|
||||
0
backend/migrations/add_bios_vendor.sql
Normal file → Executable file
0
backend/migrations/add_bios_vendor.sql
Normal file → Executable file
0
backend/requirements.txt
Normal file → Executable file
0
backend/requirements.txt
Normal file → Executable file
Reference in New Issue
Block a user