#!/usr/bin/env bash
#
# Linux BenchTools - Client Benchmark Script
# Version: 1.0.0
#
# This script collects hardware information and runs benchmarks on Linux machines,
# then sends the results to the BenchTools backend API.
#

set -e

# Script version
BENCH_SCRIPT_VERSION="1.0.0"

# Default values
SERVER_URL=""
API_TOKEN=""
DEVICE_IDENTIFIER=""
IPERF_SERVER=""
SHORT_MODE=false
SKIP_CPU=false
SKIP_MEMORY=false
SKIP_DISK=false
SKIP_NETWORK=false
SKIP_GPU=false

# Colors for output
RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m' # No Color

# Function to check and renew sudo timestamp
check_sudo() {
    log_info "Requesting sudo permissions for benchmark execution..."
    # Ask for password once and keep the timestamp active.
    # Note: with `set -e`, a failed `sudo -v` would abort the script before a
    # separate `$? -ne 0` test could run, so the check is folded into the `if`.
    if ! sudo -v; then
        log_error "Sudo authentication failed. Required for some hardware checks (dmidecode, fio) and benchmarks."
        exit 1
    fi
}
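
# For long runs, the cached sudo timestamp can expire mid-benchmark. A common
# (optional) pattern is a background refresher; this is a sketch, not part of
# the original flow, and would have to be started from main() after check_sudo:
#
#   keep_sudo_alive() {
#       while kill -0 "$$" 2>/dev/null; do
#           sudo -n true 2>/dev/null || true
#           sleep 60
#       done &
#   }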

# Logging functions
log_info() {
    echo -e "${GREEN}[INFO]${NC} $1"
}

log_warn() {
    echo -e "${YELLOW}[WARN]${NC} $1"
}

log_error() {
    echo -e "${RED}[ERROR]${NC} $1"
}

# Usage information
show_usage() {
    cat <<EOF
Linux BenchTools - Client Benchmark Script v${BENCH_SCRIPT_VERSION}

Usage: $0 --server <URL> --token <TOKEN> [OPTIONS]

Required:
  --server <URL>         Backend API URL (e.g., http://server:8007/api/benchmark)
  --token <TOKEN>        API authentication token

Optional:
  --device <NAME>        Device identifier (default: hostname)
  --iperf-server <HOST>  iperf3 server for network tests
  --short                Run quick tests (reduced duration)
  --skip-cpu             Skip CPU benchmark
  --skip-memory          Skip memory benchmark
  --skip-disk            Skip disk benchmark
  --skip-network         Skip network benchmark
  --skip-gpu             Skip GPU benchmark
  --help                 Show this help message

Example:
  $0 --server http://192.168.1.100:8007/api/benchmark \\
     --token YOUR_TOKEN \\
     --iperf-server 192.168.1.100

EOF
}

# Parse command line arguments
parse_args() {
    while [[ $# -gt 0 ]]; do
        case $1 in
            --server)
                SERVER_URL="$2"
                shift 2
                ;;
            --token)
                API_TOKEN="$2"
                shift 2
                ;;
            --device)
                DEVICE_IDENTIFIER="$2"
                shift 2
                ;;
            --iperf-server)
                IPERF_SERVER="$2"
                shift 2
                ;;
            --short)
                SHORT_MODE=true
                shift
                ;;
            --skip-cpu)
                SKIP_CPU=true
                shift
                ;;
            --skip-memory)
                SKIP_MEMORY=true
                shift
                ;;
            --skip-disk)
                SKIP_DISK=true
                shift
                ;;
            --skip-network)
                SKIP_NETWORK=true
                shift
                ;;
            --skip-gpu)
                SKIP_GPU=true
                shift
                ;;
            --help)
                show_usage
                exit 0
                ;;
            *)
                log_error "Unknown option: $1"
                show_usage
                exit 1
                ;;
        esac
    done

    # Validate required parameters
    if [[ -z "$SERVER_URL" || -z "$API_TOKEN" ]]; then
        log_error "Missing required parameters: --server and --token"
        show_usage
        exit 1
    fi

    # Set device identifier to hostname if not provided
    if [[ -z "$DEVICE_IDENTIFIER" ]]; then
        DEVICE_IDENTIFIER=$(hostname)
    fi
}

# Check dependencies (no automatic installation)
check_dependencies() {
    log_info "Checking dependencies..."

    local missing_essential=()
    local missing_optional=()

    # Essential tools (required for hardware detection)
    for tool in curl jq; do
        if ! command -v "$tool" &> /dev/null; then
            missing_essential+=("$tool")
        fi
    done

    # Optional hardware detection tools
    for tool in lscpu free dmidecode lsblk; do
        if ! command -v "$tool" &> /dev/null; then
            missing_optional+=("$tool")
        fi
    done

    # Optional benchmark tools
    if [[ "$SKIP_CPU" == false ]] && ! command -v sysbench &> /dev/null; then
        missing_optional+=(sysbench)
        SKIP_CPU=true
        log_warn "sysbench not found - CPU benchmark will be skipped"
    fi

    if [[ "$SKIP_DISK" == false ]] && ! command -v fio &> /dev/null; then
        missing_optional+=(fio)
        SKIP_DISK=true
        log_warn "fio not found - Disk benchmark will be skipped"
    fi

    if [[ "$SKIP_NETWORK" == false && -n "$IPERF_SERVER" ]] && ! command -v iperf3 &> /dev/null; then
        missing_optional+=(iperf3)
        SKIP_NETWORK=true
        log_warn "iperf3 not found - Network benchmark will be skipped"
    fi

    # Check essential dependencies
    if [[ ${#missing_essential[@]} -gt 0 ]]; then
        log_error "Missing essential dependencies: ${missing_essential[*]}"
        log_error "Please install them manually. Example (Debian/Ubuntu):"
        log_error "  sudo apt-get install ${missing_essential[*]}"
        exit 1
    fi

    # Info about optional dependencies
    if [[ ${#missing_optional[@]} -gt 0 ]]; then
        log_warn "Missing optional tools: ${missing_optional[*]}"
        log_info "Some hardware info or benchmarks may be limited"
        log_info "To install (Debian/Ubuntu): sudo apt-get install ${missing_optional[*]}"
    fi

    log_info "Dependency check completed"
}

# Collect CPU information
collect_cpu_info() {
    local cpu_json="{}"

    cpu_json=$(jq -n \
        --arg vendor "$(lscpu | grep 'Vendor ID' | awk '{print $3}' || echo 'Unknown')" \
        --arg model "$(lscpu | grep 'Model name' | sed 's/Model name: *//')" \
        --argjson cores "$(lscpu | grep '^CPU(s):' | awk '{print $2}')" \
        --argjson threads "$(nproc)" \
        '{
            vendor: $vendor,
            model: $model,
            cores: $cores,
            threads: $threads
        }'
    )

    echo "$cpu_json"
}
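
# Parsing note (observation, not original behavior): lscpu labels such as
# "Model name" are translated on non-English locales, so the greps above can
# come up empty there. If that becomes an issue, forcing the C locale is a
# minimal workaround, e.g.:
#
#   LC_ALL=C lscpu | grep 'Model name' | sed 's/Model name: *//'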

# Collect RAM information
collect_ram_info() {
    local ram_total_mb=$(free -m | grep '^Mem:' | awk '{print $2}')

    local ram_json=$(jq -n \
        --argjson total_mb "$ram_total_mb" \
        '{
            total_mb: $total_mb
        }'
    )

    echo "$ram_json"
}

# Collect OS information
collect_os_info() {
    local os_name=$(grep '^ID=' /etc/os-release | cut -d= -f2 | tr -d '"')
    local os_version=$(grep '^VERSION=' /etc/os-release | cut -d= -f2 | tr -d '"')
    local kernel=$(uname -r)
    local arch=$(uname -m)

    local os_json=$(jq -n \
        --arg name "$os_name" \
        --arg version "$os_version" \
        --arg kernel_version "$kernel" \
        --arg architecture "$arch" \
        '{
            name: $name,
            version: $version,
            kernel_version: $kernel_version,
            architecture: $architecture
        }'
    )

    echo "$os_json"
}

# Collect system manufacturer (defined for completeness; not currently called
# from send_benchmark)
collect_manufacturer_info() {
    # Run dmidecode with sudo
    local manufacturer=$(sudo dmidecode -s system-manufacturer 2>/dev/null || echo 'Unknown')

    local json=$(jq -n \
        --arg manufacturer "$manufacturer" \
        '{
            manufacturer: $manufacturer
        }'
    )
    echo "$json"
}

# Run CPU benchmark
run_cpu_benchmark() {
    if [[ "$SKIP_CPU" == true ]]; then
        echo "null"
        return
    fi

    log_info "Running CPU benchmark..."

    local prime=10000
    [[ "$SHORT_MODE" == false ]] && prime=20000

    local result=$(sysbench cpu --cpu-max-prime=$prime --threads=$(nproc) run 2>&1)

    local events_per_sec=$(echo "$result" | grep 'events per second' | awk '{print $4}')
    local score=$(echo "scale=2; $events_per_sec / 100" | bc)

    local cpu_result=$(jq -n \
        --argjson events_per_sec "${events_per_sec:-0}" \
        --argjson score "${score:-0}" \
        '{
            events_per_sec: $events_per_sec,
            score: $score
        }'
    )

    echo "$cpu_result"
}

# Run memory benchmark
run_memory_benchmark() {
    if [[ "$SKIP_MEMORY" == true ]]; then
        echo "null"
        return
    fi

    log_info "Running memory benchmark..."

    local size="512M"
    [[ "$SHORT_MODE" == false ]] && size="2G"

    local result=$(sysbench memory --memory-total-size=$size --memory-oper=write run 2>&1)

    # sysbench reports "<total> MiB transferred (<rate> MiB/sec)"; strip the
    # parentheses so the rate can be fed to bc/jq as a plain number
    local throughput=$(echo "$result" | grep 'transferred' | awk '{print $4}' | tr -d '()')
    local score=$(echo "scale=2; $throughput / 200" | bc)

    local mem_result=$(jq -n \
        --argjson throughput_mib_s "${throughput:-0}" \
        --argjson score "${score:-0}" \
        '{
            throughput_mib_s: $throughput_mib_s,
            score: $score
        }'
    )

    echo "$mem_result"
}

# Run disk benchmark
run_disk_benchmark() {
    if [[ "$SKIP_DISK" == true ]]; then
        echo "null"
        return
    fi

    log_info "Running disk benchmark..."

    local size="256M"
    [[ "$SHORT_MODE" == false ]] && size="1G"

    local tmpfile="/tmp/fio_benchfile_$$"

    # Keep stderr out of the JSON output file so jq can parse it
    fio --name=bench --rw=readwrite --bs=1M --size=$size --numjobs=1 \
        --iodepth=16 --filename=$tmpfile --direct=1 --group_reporting \
        --output-format=json > /tmp/fio_result_$$.json 2>/dev/null

    local read_mb_s=$(jq -r '.jobs[0].read.bw_bytes / 1048576' /tmp/fio_result_$$.json 2>/dev/null || echo 0)
    local write_mb_s=$(jq -r '.jobs[0].write.bw_bytes / 1048576' /tmp/fio_result_$$.json 2>/dev/null || echo 0)
    local score=$(echo "scale=2; ($read_mb_s + $write_mb_s) / 20" | bc)

    rm -f $tmpfile /tmp/fio_result_$$.json

    local disk_result=$(jq -n \
        --argjson read_mb_s "${read_mb_s:-0}" \
        --argjson write_mb_s "${write_mb_s:-0}" \
        --argjson score "${score:-0}" \
        '{
            read_mb_s: $read_mb_s,
            write_mb_s: $write_mb_s,
            score: $score
        }'
    )

    echo "$disk_result"
}
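
# Caveat (observation, not original behavior): /tmp is tmpfs on many distros,
# and tmpfs does not support O_DIRECT, so the --direct=1 run can fail there;
# under `set -e`, a failing fio aborts the whole script. If that is a concern,
# one hedged option is to target a real filesystem and tolerate failure, e.g.:
#
#   tmpfile="${BENCH_TMPDIR:-/var/tmp}/fio_benchfile_$$"   # BENCH_TMPDIR is a hypothetical override
#   fio ... || log_warn "fio run failed; disk result will default to 0"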

# Run network benchmark
run_network_benchmark() {
    if [[ "$SKIP_NETWORK" == true || -z "$IPERF_SERVER" ]]; then
        echo "null"
        return
    fi

    log_info "Running network benchmark..."

    local download=$(iperf3 -c "$IPERF_SERVER" -R -J 2>/dev/null | jq -r '.end.sum_received.bits_per_second / 1000000' 2>/dev/null || echo 0)
    local upload=$(iperf3 -c "$IPERF_SERVER" -J 2>/dev/null | jq -r '.end.sum_sent.bits_per_second / 1000000' 2>/dev/null || echo 0)

    local score=$(echo "scale=2; ($download + $upload) / 20" | bc)

    local net_result=$(jq -n \
        --argjson download_mbps "${download:-0}" \
        --argjson upload_mbps "${upload:-0}" \
        --argjson score "${score:-0}" \
        '{
            download_mbps: $download_mbps,
            upload_mbps: $upload_mbps,
            score: $score
        }'
    )

    echo "$net_result"
}
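
# Note (suggestion, not original behavior): --short does not currently shorten
# the network test; iperf3 runs for its default 10 seconds in each direction.
# If a quicker pass is wanted, iperf3's -t flag sets the duration in seconds:
#
#   [[ "$SHORT_MODE" == true ]] && IPERF_OPTS="-t 5"   # IPERF_OPTS is hypothetical, for illustration only
#   iperf3 -c "$IPERF_SERVER" $IPERF_OPTS -R -J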

# Calculate global score
calculate_global_score() {
    local cpu_score=$1
    local mem_score=$2
    local disk_score=$3
    local net_score=$4

    local global=$(echo "scale=2; ($cpu_score * 0.3) + ($mem_score * 0.2) + ($disk_score * 0.25) + ($net_score * 0.15)" | bc)

    echo "$global"
}
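
# Weighting note: the weights above sum to 0.90 (CPU 0.30, memory 0.20,
# disk 0.25, network 0.15); the remaining 0.10 appears to be reserved for the
# GPU benchmark, which is not implemented yet. Example: with all four component
# scores at 100, the global score is 100*0.30 + 100*0.20 + 100*0.25 + 100*0.15 = 90.00.
# One caveat worth knowing: bc prints values below 1 without a leading zero
# (e.g. ".45"), which `jq --argjson` rejects as invalid JSON; a guard such as
#   [[ $global == .* ]] && global="0$global"
# would cover that edge case (suggestion only, not part of the original script).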

# Build and send JSON payload
send_benchmark() {
    log_info "Collecting hardware information..."

    local cpu_info=$(collect_cpu_info)
    local ram_info=$(collect_ram_info)
    local os_info=$(collect_os_info)

    log_info "Running benchmarks..."

    local cpu_result=$(run_cpu_benchmark)
    local memory_result=$(run_memory_benchmark)
    local disk_result=$(run_disk_benchmark)
    local network_result=$(run_network_benchmark)

    # Extract scores
    local cpu_score=$(echo "$cpu_result" | jq -r '.score // 0' 2>/dev/null || echo 0)
    local mem_score=$(echo "$memory_result" | jq -r '.score // 0' 2>/dev/null || echo 0)
    local disk_score=$(echo "$disk_result" | jq -r '.score // 0' 2>/dev/null || echo 0)
    local net_score=$(echo "$network_result" | jq -r '.score // 0' 2>/dev/null || echo 0)

    # Calculate global score
    local global_score=$(calculate_global_score "$cpu_score" "$mem_score" "$disk_score" "$net_score")

    log_info "Building JSON payload..."

    local payload=$(jq -n \
        --arg device_identifier "$DEVICE_IDENTIFIER" \
        --arg bench_script_version "$BENCH_SCRIPT_VERSION" \
        --argjson cpu "$cpu_info" \
        --argjson ram "$ram_info" \
        --argjson os "$os_info" \
        --argjson cpu_result "$cpu_result" \
        --argjson memory_result "$memory_result" \
        --argjson disk_result "$disk_result" \
        --argjson network_result "$network_result" \
        --argjson global_score "$global_score" \
        '{
            device_identifier: $device_identifier,
            bench_script_version: $bench_script_version,
            hardware: {
                cpu: $cpu,
                ram: $ram,
                os: $os
            },
            results: {
                cpu: $cpu_result,
                memory: $memory_result,
                disk: $disk_result,
                network: $network_result,
                gpu: null,
                global_score: $global_score
            }
        }'
    )

    log_info "Sending results to server..."

    local response=$(curl -s -w "\n%{http_code}" \
        -X POST "$SERVER_URL" \
        -H "Content-Type: application/json" \
        -H "Authorization: Bearer $API_TOKEN" \
        -d "$payload")

    local http_code=$(echo "$response" | tail -n1)
    local body=$(echo "$response" | head -n-1)

    if [[ "$http_code" == "200" ]]; then
        log_info "Benchmark submitted successfully!"
        log_info "Response: $body"
    else
        log_error "Failed to submit benchmark (HTTP $http_code)"
        log_error "Response: $body"
        exit 1
    fi
}

# Main execution
main() {
    log_info "Linux BenchTools Client v${BENCH_SCRIPT_VERSION}"
    # Parse arguments first so that --help and argument errors do not trigger
    # a sudo prompt
    parse_args "$@"
    check_sudo
    check_dependencies
    send_benchmark

    log_info "Benchmark completed!"
}

main "$@"