Compare commits

..

32 Commits

Author SHA1 Message Date
dependabot[bot] 77f3a84c23 Bump flatted from 3.2.7 to 3.4.2 in /webapp/frontend
Bumps [flatted](https://github.com/WebReflection/flatted) from 3.2.7 to 3.4.2.
- [Commits](https://github.com/WebReflection/flatted/compare/v3.2.7...v3.4.2)

---
updated-dependencies:
- dependency-name: flatted
  dependency-version: 3.4.2
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
2026-03-19 17:51:36 +00:00
Aram Akhavan e4c40f7e80 Update issue triage template (#962) 2026-03-14 22:27:52 -07:00
Aram Akhavan 6cc9ff7fc5 Update docker building (#961)
* Remove old entry and dependencies from Makefile
* Update Dockerfiles to only COPY needed files for faster builds and better caching
* Test the docker files getting built from the Makefile in CI
2026-03-14 22:11:33 -07:00
mcarbonne 0aea6b96ca Add devcontainer config (#861)
Closes #853

---------

Co-authored-by: Aram Akhavan <github@aram.nubmail.ca>
Co-authored-by: Aram Akhavan <1147328+kaysond@users.noreply.github.com>
2026-03-13 14:40:51 -07:00
slydetector afbf1450c2 Build and distribute latest smartmontools 7.5 as part of image (#924)
Co-authored-by: slydetector <slydetector>
Co-authored-by: Aram Akhavan <1147328+kaysond@users.noreply.github.com>
2026-03-08 13:24:23 -07:00
Merlin 6a278bc2cf Add support for topic in Zulip notifications and truncate long topics
Subjects over 60 characters long, such as the test notification, are rejected by shoutrrr. This truncates the subject to the max length.

Users may want all Scrutiny notifications to be sent to a particular topic rather than whatever Scrutiny happens to decide.
2026-02-28 10:27:16 -08:00
enoch85 9d1ce790d0 Update docker compose example (#685) 2026-02-22 08:06:55 -08:00
Alliot fb5d4818b0 fix: page smart attribute queries with limit and sort (#869) 2026-02-21 21:04:33 -08:00
Aram Akhavan 3a06920354 Make default temperature history length 1 week (#939)
Closes #356
2026-02-21 20:48:44 -08:00
Aram Akhavan dd8a6757d1 Add telegram message thread format to example.scrutiny.yaml (#938)
Closes #765
2026-02-21 20:43:20 -08:00
Aram Akhavan d433a6a54e Bump base image to debian trixie (#935)
Closes #929
2026-02-21 19:55:21 -08:00
Aram Akhavan c365988a52 Update Makefile docker image tags to use ghcr.io (#936)
Also remove outdated note on building frontend (it's built in the Dockerfiles)
2026-02-21 16:26:35 -08:00
Aram Akhavan 6a1a985306 Switch to maintained fork of shoutrrr (#934)
Closes #817
2026-02-21 16:13:37 -08:00
Aram Akhavan 02996d6288 Bump influxdb to 2.8 (#933)
Closes #863
2026-02-21 16:02:10 -08:00
Aram Akhavan 3d2671650e Change LBA metrics to uint64 (#932)
Fixes #800
2026-02-21 15:54:45 -08:00
Aram Akhavan 28658790c8 Fix notify urls env var in docs (#931)
Closes #862
2026-02-21 15:50:31 -08:00
Kevin Thomer 18f10a9295 Add documentation for rootless systemd service and podman quadlets (#927) 2026-02-19 11:29:55 -08:00
Liu Xiaoyi 67b7a08e4a feat: add "day" as resolution for temperature graph (#823) 2026-02-13 09:58:15 -08:00
packagrio-bot a014337167 (v0.8.6) Automated packaging of release by Packagr 2026-02-09 21:17:48 +00:00
Aram Akhavan 3a5ee0a762 Remove armv7 from omnibus builds (#916) 2026-02-09 13:14:39 -08:00
packagrio-bot 625a0244e2 (v0.8.5) Automated packaging of release by Packagr 2026-02-09 20:26:21 +00:00
Aram Akhavan a269ba57df Fix omnibus release builds (#915) 2026-02-09 12:24:03 -08:00
packagrio-bot 6a76b5aa26 (v0.8.4) Automated packaging of release by Packagr 2026-02-09 06:20:42 +00:00
Aram Akhavan 939d40eb20 Fix Dockerfiles in release workflow (#907) 2026-02-08 22:18:24 -08:00
Aram Akhavan ad738508e5 Update README.md CI badge (#906) 2026-02-08 22:10:13 -08:00
dependabot[bot] 971249ba3f Bump js-yaml from 3.14.1 to 3.14.2 in /webapp/frontend (#905)
Bumps [js-yaml](https://github.com/nodeca/js-yaml) from 3.14.1 to 3.14.2.
- [Changelog](https://github.com/nodeca/js-yaml/blob/master/CHANGELOG.md)
- [Commits](https://github.com/nodeca/js-yaml/compare/3.14.1...3.14.2)

---
updated-dependencies:
- dependency-name: js-yaml
  dependency-version: 3.14.2
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
2026-02-08 21:52:36 -08:00
dependabot[bot] 73417ca653 Bump follow-redirects from 1.15.2 to 1.15.6 in /webapp/frontend (#604)
Bumps [follow-redirects](https://github.com/follow-redirects/follow-redirects) from 1.15.2 to 1.15.6.
- [Release notes](https://github.com/follow-redirects/follow-redirects/releases)
- [Commits](https://github.com/follow-redirects/follow-redirects/compare/v1.15.2...v1.15.6)

---
updated-dependencies:
- dependency-name: follow-redirects
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Aram Akhavan <github@aram.nubmail.ca>
2026-02-08 21:52:14 -08:00
dependabot[bot] a6d092983d Bump express from 4.18.2 to 4.19.2 in /webapp/frontend (#613)
Bumps [express](https://github.com/expressjs/express) from 4.18.2 to 4.19.2.
- [Release notes](https://github.com/expressjs/express/releases)
- [Changelog](https://github.com/expressjs/express/blob/master/History.md)
- [Commits](https://github.com/expressjs/express/compare/4.18.2...4.19.2)

---
updated-dependencies:
- dependency-name: express
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Aram Akhavan <github@aram.nubmail.ca>
2026-02-08 21:52:07 -08:00
dependabot[bot] 1988b101e1 Bump lodash from 4.17.21 to 4.17.23 in /webapp/frontend (#856)
Bumps [lodash](https://github.com/lodash/lodash) from 4.17.21 to 4.17.23.
- [Release notes](https://github.com/lodash/lodash/releases)
- [Commits](https://github.com/lodash/lodash/compare/4.17.21...4.17.23)

---
updated-dependencies:
- dependency-name: lodash
  dependency-version: 4.17.23
  dependency-type: direct:production
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Aram Akhavan <github@aram.nubmail.ca>
2026-02-08 21:37:51 -08:00
dependabot[bot] 746ae76cfc Bump node-forge from 1.3.1 to 1.3.3 in /webapp/frontend (#857)
Bumps [node-forge](https://github.com/digitalbazaar/forge) from 1.3.1 to 1.3.3.
- [Changelog](https://github.com/digitalbazaar/forge/blob/main/CHANGELOG.md)
- [Commits](https://github.com/digitalbazaar/forge/compare/v1.3.1...v1.3.3)

---
updated-dependencies:
- dependency-name: node-forge
  dependency-version: 1.3.3
  dependency-type: indirect
...

Signed-off-by: dependabot[bot] <support@github.com>
Co-authored-by: dependabot[bot] <49699333+dependabot[bot]@users.noreply.github.com>
Co-authored-by: Aram Akhavan <github@aram.nubmail.ca>
2026-02-08 21:37:44 -08:00
Aram Akhavan 3380023ad0 Run CI on master (#904)
* Run CI on master

* Consolidate go linting into main CI
2026-02-08 21:21:48 -08:00
mcarbonne 6362512406 Update go to 1.25 (#875)
Closes #872

* update go to 1.25

* update deprecated gomock

* remove deprecated ioutil

* update (and fix) ci

* add golang lint (as warning)

* enable formatters + freeze golang version
2026-02-08 20:46:36 -08:00
64 changed files with 1795 additions and 1325 deletions
+25
View File
@@ -0,0 +1,25 @@
services:
app:
image: mcr.microsoft.com/devcontainers/base:ubuntu-22.04
volumes:
- ..:/workspaces/scrutiny:cached
command: sleep infinity
network_mode: service:influxdb
influxdb:
image: influxdb:2.8
restart: unless-stopped
ports:
- "8086:8086"
environment:
- DOCKER_INFLUXDB_INIT_MODE=setup
- DOCKER_INFLUXDB_INIT_USERNAME=admin
- DOCKER_INFLUXDB_INIT_PASSWORD=password12345
- DOCKER_INFLUXDB_INIT_ORG=scrutiny
- DOCKER_INFLUXDB_INIT_BUCKET=metrics
- DOCKER_INFLUXDB_INIT_ADMIN_TOKEN=my-super-secret-auth-token
volumes:
- scrutiny-influxdb-data:/var/lib/influxdb2
volumes:
scrutiny-influxdb-data:
@@ -0,0 +1,30 @@
{
"name": "Scrutiny Dev (rootless docker)",
"dockerComposeFile": "../docker-compose.yml",
"service": "app",
"workspaceFolder": "/workspaces/scrutiny",
"features": {
"ghcr.io/devcontainers/features/go:1": "1.25",
"ghcr.io/devcontainers/features/node:1": "lts"
},
"onCreateCommand": "sudo apt-get update && sudo apt-get install -y smartmontools iputils-ping chromium-browser",
"customizations": {
"vscode": {
"extensions": [
"golang.go",
"dbaeumer.vscode-eslint",
"esbenp.prettier-vscode"
]
}
},
"forwardPorts": [8080, 8086],
"postCreateCommand": "bash .devcontainer/setup.sh",
"remoteUser": "root",
"containerUser": "root",
"updateRemoteUserUID": false
}
+28
View File
@@ -0,0 +1,28 @@
{
"name": "Scrutiny Dev (docker)",
"dockerComposeFile": "../docker-compose.yml",
"service": "app",
"workspaceFolder": "/workspaces/scrutiny",
"features": {
"ghcr.io/devcontainers/features/go:1": "1.25",
"ghcr.io/devcontainers/features/node:1": "lts"
},
"onCreateCommand": "sudo apt-get update && sudo apt-get install -y smartmontools iputils-ping chromium-browser",
"customizations": {
"vscode": {
"extensions": [
"golang.go",
"dbaeumer.vscode-eslint",
"esbenp.prettier-vscode"
]
}
},
"forwardPorts": [8080, 8086],
"postCreateCommand": "bash .devcontainer/setup.sh",
"remoteUser": "vscode"
}
+32
View File
@@ -0,0 +1,32 @@
{
"name": "Scrutiny Dev (podman)",
"dockerComposeFile": "../docker-compose.yml",
"service": "app",
"workspaceFolder": "/workspaces/scrutiny",
"features": {
"ghcr.io/devcontainers/features/go:1": "1.25",
"ghcr.io/devcontainers/features/node:1": "lts"
},
"onCreateCommand": "sudo apt-get update && sudo apt-get install -y smartmontools iputils-ping chromium-browser",
"customizations": {
"vscode": {
"extensions": [
"golang.go",
"dbaeumer.vscode-eslint",
"esbenp.prettier-vscode"
]
}
},
"forwardPorts": [8080, 8086],
"postCreateCommand": "bash .devcontainer/setup.sh",
"remoteEnv": {
"PODMAN_USERNS": "keep-id"
},
"containerUser": "vscode",
"updateRemoteUserUID": true
}
+40
View File
@@ -0,0 +1,40 @@
#!/bin/bash
echo "Starting Scrutiny Setup..."
if [ ! -f "scrutiny.yaml" ]; then
echo "Creating scrutiny.yaml from template..."
cat <<EOF > scrutiny.yaml
version: 1
web:
listen:
port: 8080
host: 0.0.0.0
database:
location: ./scrutiny.db
src:
frontend:
path: ./dist
influxdb:
retention_policy: false
token: "my-super-secret-auth-token"
org: "scrutiny"
bucket: "metrics"
host: "localhost"
port: 8086
log:
file: 'web.log'
level: DEBUG
EOF
else
echo "scrutiny.yaml already exists."
fi
echo "Vendoring Go modules..."
go mod vendor
echo "Installing Node modules..."
cd webapp/frontend
npm install
echo "Setup Complete! Ready to code."
+10 -1
View File
@@ -50,9 +50,10 @@ body:
required: true
- type: textarea
attributes:
label: scrutiny logs
label: scrutiny debug logs
description: |
Provide any captured scrutiny logs or panic dumps during your issue reproduction in this field.
Make sure to turn on debug logging with the environment variable DEBUG=true
render: text
- type: input
attributes:
@@ -112,6 +113,14 @@ body:
render: json
validations:
required: false
- type: textarea
attributes:
label: docker-compose.yml
description: |
If using docker, please provide your full docker-compose.yml file.
render: yaml
validations:
required: false
- type: textarea
attributes:
label: scrutiny.yaml
+53 -10
View File
@@ -1,6 +1,13 @@
name: CI
# This workflow is triggered on pushes & pull requests
on: [pull_request]
on:
push:
branches:
- master
pull_request:
permissions:
contents: read
jobs:
test-frontend:
@@ -21,11 +28,10 @@ jobs:
test-backend:
name: Test Backend
runs-on: ubuntu-latest
container: ghcr.io/packagrio/packagr:latest-golang
# Service containers to run with `build` (Required for end-to-end testing)
services:
influxdb:
image: influxdb:2.2
image: influxdb:2.8
env:
DOCKER_INFLUXDB_INIT_MODE: setup
DOCKER_INFLUXDB_INIT_USERNAME: admin
@@ -38,13 +44,10 @@ jobs:
env:
STATIC: true
steps:
- name: Git
run: |
apt-get update && apt-get install -y software-properties-common
add-apt-repository ppa:git-core/ppa && apt-get update && apt-get install -y git
git --version
- name: Add influxdb to hosts
run: echo "127.0.0.1 influxdb" | sudo tee -a /etc/hosts
- name: Checkout
uses: actions/checkout@v2
uses: actions/checkout@v6
- name: Test Backend
run: |
make binary-clean binary-test-coverage
@@ -76,6 +79,19 @@ jobs:
fail_ci_if_error: true
verbose: true
golangci:
name: lint
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v6
- uses: actions/setup-go@v6
with:
go-version: 1.25
- name: golangci-lint
uses: golangci/golangci-lint-action@v9
with:
args: --issues-exit-code=0
build:
name: Build ${{ matrix.cfg.goos }}/${{ matrix.cfg.goarch }}
runs-on: ${{ matrix.cfg.on }}
@@ -102,7 +118,7 @@ jobs:
uses: actions/checkout@v2
- uses: actions/setup-go@v3
with:
go-version: '^1.20.1'
go-version: '^1.25'
- name: Build Binaries
run: |
make binary-clean binary-all
@@ -113,3 +129,30 @@ jobs:
path: |
scrutiny-web-*
scrutiny-collector-metrics-*
makefile-docker-omnibus:
name: Build Docker Omnibus From Makefile
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v6
- name: Build
run: make docker-omnibus
makefile-docker-web:
name: Build Docker Web From Makefile
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v6
- name: Build
run: make docker-web
makefile-docker-collector:
name: Build Docker Collector From Makefile
runs-on: ubuntu-latest
steps:
- name: Checkout
uses: actions/checkout@v6
- name: Build
run: make docker-collector
+4 -4
View File
@@ -108,7 +108,7 @@ jobs:
with:
platforms: linux/amd64,linux/arm64,linux/arm/v7
context: .
file: docker/Dockerfile.collector
file: docker/Dockerfile.web
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
@@ -145,10 +145,10 @@ jobs:
- name: Extract Docker metadata
id: meta
uses: docker/metadata-action@v5
# tag latest and latest-omnibus
with:
flavor: |
latest=true
# tag latest and latest-omnibus
suffix=-omnibus,onlatest=false
tags: |
type=raw,value=latest
@@ -162,9 +162,9 @@ jobs:
- name: Build and push Docker image
uses: docker/build-push-action@v6
with:
platforms: linux/amd64,linux/arm64,linux/arm/v7
platforms: linux/amd64,linux/arm64
context: .
file: docker/Dockerfile.collector
file: docker/Dockerfile
push: true
tags: ${{ steps.meta.outputs.tags }}
labels: ${{ steps.meta.outputs.labels }}
+2 -2
View File
@@ -20,7 +20,7 @@ jobs:
# Service containers to run with `build` (Required for end-to-end testing)
services:
influxdb:
image: influxdb:2.2
image: influxdb:2.8
env:
DOCKER_INFLUXDB_INIT_MODE: setup
DOCKER_INFLUXDB_INIT_USERNAME: admin
@@ -97,7 +97,7 @@ jobs:
name: workspace
- uses: actions/setup-go@v6
with:
go-version: '1.20.1' # The Go version to download (if necessary) and use.
go-version: '1.25' # The Go version to download (if necessary) and use.
- name: Build Binaries
run: |
make binary-clean binary-all
+11
View File
@@ -0,0 +1,11 @@
version: "2"
formatters:
enable:
- gofmt
- goimports
linters:
enable:
- bodyclose
settings:
errcheck:
check-blank: true
+37
View File
@@ -0,0 +1,37 @@
{
"version": "0.2.0",
"configurations": [
{
"name": "Run Scrutiny",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}/webapp/backend/cmd/scrutiny/scrutiny.go",
"args": ["start", "--config", "./scrutiny.yaml"],
"cwd": "${workspaceFolder}",
"env": {
"DEBUG": "true"
},
"console": "integratedTerminal",
"preLaunchTask": "Build Frontend",
"serverReadyAction": {
"action": "openExternally",
"pattern": "Listening and serving HTTP on",
"uriFormat": "http://localhost:8080/web/"
}
},
{
"name": "Run Collector",
"type": "go",
"request": "launch",
"mode": "auto",
"program": "${workspaceFolder}/collector/cmd/collector-metrics/collector-metrics.go",
"args": ["run", "--debug"],
"cwd": "${workspaceFolder}",
"env": {
"COLLECTOR_DEBUG": "true"
},
"console": "integratedTerminal"
}
]
}
+10
View File
@@ -0,0 +1,10 @@
{
"version": "2.0.0",
"tasks": [
{
"label": "Build Frontend",
"type": "shell",
"command": "cd webapp/frontend && npm run build:prod -- --output-path=../../dist"
}
]
}
+10 -5
View File
@@ -147,9 +147,14 @@ The Scrutiny repository is a [monorepo](https://en.wikipedia.org/wiki/Monorepo)
Depending on the functionality you are adding, you may need to setup a development environment for 1 or more projects.
# Devcontainer
Devcontainer configurations are available to build and run Scrutiny (WebUI and Collector) in a fully isolated environment.
When opening the project with vscode, choose "Reopen in Container". Three configurations are available depending on your
container runtime and setup: docker, docker-rootless, and podman.
# Modifying the Scrutiny Backend Server (API)
1. install the [Go runtime](https://go.dev/doc/install) (v1.20+)
1. install the [Go runtime](https://go.dev/doc/install) (v1.25)
2. download the `scrutiny-web-frontend.tar.gz` for
the [latest release](https://github.com/AnalogJ/scrutiny/releases/latest). Extract to a folder named `dist`
3. create a `scrutiny.yaml` config file
@@ -177,7 +182,7 @@ Depending on the functionality you are adding, you may need to setup a developme
```
4. start a InfluxDB docker container.
```bash
docker run -p 8086:8086 --rm influxdb:2.2
docker run -p 8086:8086 --rm influxdb:2.8
```
5. start the scrutiny web server
```bash
@@ -230,7 +235,7 @@ you'll need to follow the steps below:
```
4. start a InfluxDB docker container.
```bash
docker run -p 8086:8086 --rm influxdb:2.2
docker run -p 8086:8086 --rm influxdb:2.8
```
5. build the Angular Frontend Application
```bash
@@ -254,7 +259,7 @@ If you'd like to populate the database with some test data, you can run the fol
> This is done automatically by the `webapp/backend/pkg/models/testdata/helper.go` script
```
docker run -p 8086:8086 --rm influxdb:2.2
docker run -p 8086:8086 --rm influxdb:2.8
# curl -X POST -H "Content-Type: application/json" -d @webapp/backend/pkg/web/testdata/register-devices-req.json localhost:8080/api/devices/register
@@ -322,7 +327,7 @@ docker run -p 8086:8086 -d --rm \
-e DOCKER_INFLUXDB_INIT_ORG=scrutiny \
-e DOCKER_INFLUXDB_INIT_BUCKET=metrics \
-e DOCKER_INFLUXDB_INIT_ADMIN_TOKEN=my-super-secret-auth-token \
influxdb:2.2
influxdb:2.8
go test ./...
```
+9 -4
View File
@@ -1,5 +1,6 @@
.ONESHELL: # Applies to every targets in the file! .ONESHELL instructs make to invoke a single instance of the shell and provide it with the entire recipe, regardless of how many lines it contains.
.SHELLFLAGS = -ec
export GOTOOLCHAIN=go1.25.5
########################################################################################################################
# Global Env Settings
@@ -66,6 +67,11 @@ binary-dep:
binary-test: binary-dep
go test -v $(STATIC_TAGS) ./...
.PHONY: lint
lint:
GOTOOLCHAIN=go1.25.5 go install github.com/golangci/golangci-lint/v2/cmd/golangci-lint@v2.8.0
golangci-lint run ./...
.PHONY: binary-test-coverage
binary-test-coverage: binary-dep
go test -coverprofile=coverage.txt -covermode=atomic -v $(STATIC_TAGS) ./...
@@ -115,19 +121,18 @@ binary-frontend-test-coverage:
########################################################################################################################
# Docker
# NOTE: these docker make targets are only used for local development (not used by Github Actions/CI)
# NOTE: docker-web and docker-omnibus require `make binary-frontend` or frontend.tar.gz content in /dist before executing.
########################################################################################################################
.PHONY: docker-collector
docker-collector:
@echo "building collector docker image"
docker build $(DOCKER_TARGETARCH_BUILD_ARG) -f docker/Dockerfile.collector -t analogj/scrutiny-dev:collector .
docker build $(DOCKER_TARGETARCH_BUILD_ARG) -f docker/Dockerfile.collector -t ghcr.io/analogj/scrutiny-dev:collector .
.PHONY: docker-web
docker-web:
@echo "building web docker image"
docker build $(DOCKER_TARGETARCH_BUILD_ARG) -f docker/Dockerfile.web -t analogj/scrutiny-dev:web .
docker build $(DOCKER_TARGETARCH_BUILD_ARG) -f docker/Dockerfile.web -t ghcr.io/analogj/scrutiny-dev:web .
.PHONY: docker-omnibus
docker-omnibus:
@echo "building omnibus docker image"
docker build $(DOCKER_TARGETARCH_BUILD_ARG) -f docker/Dockerfile -t analogj/scrutiny-dev:omnibus .
docker build $(DOCKER_TARGETARCH_BUILD_ARG) -f docker/Dockerfile -t ghcr.io/analogj/scrutiny-dev:omnibus .
+7 -3
View File
@@ -7,7 +7,7 @@
# scrutiny
[![CI](https://github.com/AnalogJ/scrutiny/workflows/CI/badge.svg?branch=master)](https://github.com/AnalogJ/scrutiny/actions?query=workflow%3ACI)
[![CI](https://github.com/AnalogJ/scrutiny/actions/workflows/ci.yaml/badge.svg)](https://github.com/AnalogJ/scrutiny/actions/workflows/ci.yaml)
[![codecov](https://codecov.io/gh/AnalogJ/scrutiny/branch/master/graph/badge.svg)](https://codecov.io/gh/AnalogJ/scrutiny)
[![GitHub license](https://img.shields.io/github/license/AnalogJ/scrutiny.svg?style=flat-square)](https://github.com/AnalogJ/scrutiny/blob/master/LICENSE)
[![Godoc](https://img.shields.io/badge/godoc-reference-blue.svg?style=flat-square)](https://godoc.org/github.com/analogj/scrutiny)
@@ -102,7 +102,7 @@ other Docker images:
- `ghcr.io/analogj/scrutiny:latest-collector` - Contains the Scrutiny data collector, `smartctl` binary and cron-like
scheduler. You can run one collector on each server.
- `ghcr.io/analogj/scrutiny:latest-web` - Contains the Web UI and API. Only one container necessary
- `influxdb:2.2` - InfluxDB image, used by the Web container to persist SMART data. Only one container necessary
- `influxdb:2.8` - InfluxDB image, used by the Web container to persist SMART data. Only one container necessary
See [docs/TROUBLESHOOTING_INFLUXDB.md](./docs/TROUBLESHOOTING_INFLUXDB.md)
> See [docker/example.hubspoke.docker-compose.yml](https://github.com/AnalogJ/scrutiny/blob/master/docker/example.hubspoke.docker-compose.yml) for a docker-compose file.
@@ -111,7 +111,7 @@ other Docker images:
docker run -p 8086:8086 --restart unless-stopped \
-v `pwd`/influxdb2:/var/lib/influxdb2 \
--name scrutiny-influxdb \
influxdb:2.2
influxdb:2.8
docker run -p 8080:8080 --restart unless-stopped \
-v `pwd`/scrutiny:/opt/scrutiny/config \
@@ -128,6 +128,10 @@ docker run --restart unless-stopped \
ghcr.io/analogj/scrutiny:latest-collector
```
### Hub rootless installation using Podman Quadlets
See [docs/INSTALL_ROOTLESS_PODMAN.md](docs/INSTALL_ROOTLESS_PODMAN.md) for instructions.
## Manual Installation (without-Docker)
While the easiest way to get started with [Scrutiny is using Docker](https://github.com/AnalogJ/scrutiny#docker),
@@ -3,17 +3,18 @@ package main
import (
"encoding/json"
"fmt"
"github.com/analogj/scrutiny/collector/pkg/collector"
"github.com/analogj/scrutiny/collector/pkg/config"
"github.com/analogj/scrutiny/collector/pkg/errors"
"github.com/analogj/scrutiny/webapp/backend/pkg/version"
"github.com/sirupsen/logrus"
"io"
"log"
"os"
"strings"
"time"
"github.com/analogj/scrutiny/collector/pkg/collector"
"github.com/analogj/scrutiny/collector/pkg/config"
"github.com/analogj/scrutiny/collector/pkg/errors"
"github.com/analogj/scrutiny/webapp/backend/pkg/version"
"github.com/sirupsen/logrus"
utils "github.com/analogj/go-util/utils"
"github.com/fatih/color"
"github.com/urfave/cli/v2"
@@ -37,8 +38,8 @@ func main() {
}
//we're going to load the config file manually, since we need to validate it.
err = config.ReadConfig(configFilePath) // Find and read the config file
if _, ok := err.(errors.ConfigFileMissingError); ok { // Handle errors reading the config file
err = config.ReadConfig(configFilePath) // Find and read the config file
if _, ok := err.(errors.ConfigFileMissingError); ok { // Handle errors reading the config file
//ignore "could not find config file"
} else if err != nil {
os.Exit(1)
@@ -81,7 +82,7 @@ OPTIONS:
subtitle := collectorMetrics + utils.LeftPad2Len(versionInfo, " ", 65-len(collectorMetrics))
color.New(color.FgGreen).Fprintf(c.App.Writer, fmt.Sprintf(utils.StripIndent(
color.New(color.FgGreen).Fprintf(c.App.Writer, utils.StripIndent(
`
___ ___ ____ __ __ ____ ____ _ _ _ _
/ __) / __)( _ \( )( )(_ _)(_ _)( \( )( \/ )
@@ -89,7 +90,7 @@ OPTIONS:
(___/ \___)(_)\_)(______) (__) (____)(_)\_) (__)
%s
`), subtitle))
`), subtitle)
return nil
},
@@ -2,14 +2,15 @@ package main
import (
"fmt"
"github.com/analogj/scrutiny/collector/pkg/collector"
"github.com/analogj/scrutiny/webapp/backend/pkg/version"
"github.com/sirupsen/logrus"
"io"
"log"
"os"
"time"
"github.com/analogj/scrutiny/collector/pkg/collector"
"github.com/analogj/scrutiny/webapp/backend/pkg/version"
"github.com/sirupsen/logrus"
utils "github.com/analogj/go-util/utils"
"github.com/fatih/color"
"github.com/urfave/cli/v2"
@@ -57,7 +58,7 @@ OPTIONS:
subtitle := collectorSelfTest + utils.LeftPad2Len(versionInfo, " ", 65-len(collectorSelfTest))
color.New(color.FgGreen).Fprintf(c.App.Writer, fmt.Sprintf(utils.StripIndent(
color.New(color.FgGreen).Fprintf(c.App.Writer, utils.StripIndent(
`
___ ___ ____ __ __ ____ ____ _ _ _ _
/ __) / __)( _ \( )( )(_ _)(_ _)( \( )( \/ )
@@ -65,7 +66,7 @@ OPTIONS:
(___/ \___)(_)\_)(______) (__) (____)(_)\_) (__)
%s
`), subtitle))
`), subtitle)
return nil
},
+2 -12
View File
@@ -3,9 +3,10 @@ package collector
import (
"bytes"
"encoding/json"
"github.com/sirupsen/logrus"
"net/http"
"time"
"github.com/sirupsen/logrus"
)
var httpClient = &http.Client{Timeout: 60 * time.Second}
@@ -14,17 +15,6 @@ type BaseCollector struct {
logger *logrus.Entry
}
func (c *BaseCollector) getJson(url string, target interface{}) error {
r, err := httpClient.Get(url)
if err != nil {
return err
}
defer r.Body.Close()
return json.NewDecoder(r.Body).Decode(target)
}
func (c *BaseCollector) postJson(url string, body interface{}, target interface{}) error {
requestBody, err := json.Marshal(body)
if err != nil {
+3 -2
View File
@@ -3,11 +3,12 @@ package shell
import (
"bytes"
"errors"
"github.com/sirupsen/logrus"
"io"
"os/exec"
"path"
"strings"
"github.com/sirupsen/logrus"
)
type localShell struct{}
@@ -36,7 +37,7 @@ func (s *localShell) Command(logger *logrus.Entry, cmdName string, cmdArgs []str
if workingDir != "" && path.IsAbs(workingDir) {
cmd.Dir = workingDir
} else if workingDir != "" {
return "", errors.New("Working Directory must be an absolute path")
return "", errors.New("working directory must be an absolute path")
}
err := cmd.Run()
@@ -7,8 +7,8 @@ package mock_shell
import (
reflect "reflect"
gomock "github.com/golang/mock/gomock"
logrus "github.com/sirupsen/logrus"
gomock "go.uber.org/mock/gomock"
)
// MockInterface is a mock of Interface interface.
+10 -9
View File
@@ -2,15 +2,16 @@ package config
import (
"fmt"
"github.com/analogj/go-util/utils"
"github.com/analogj/scrutiny/collector/pkg/errors"
"github.com/analogj/scrutiny/collector/pkg/models"
"github.com/mitchellh/mapstructure"
"github.com/spf13/viper"
"log"
"os"
"sort"
"strings"
"github.com/analogj/go-util/utils"
"github.com/analogj/scrutiny/collector/pkg/errors"
"github.com/analogj/scrutiny/collector/pkg/models"
"github.com/go-viper/mapstructure/v2"
"github.com/spf13/viper"
)
// When initializing this class the following methods must be called:
@@ -20,7 +21,7 @@ import (
type configuration struct {
*viper.Viper
deviceOverrides []models.ScanOverride
deviceOverrides []models.ScanOverride
}
//Viper uses the following precedence order. Each item takes precedence over the item below it:
@@ -53,7 +54,7 @@ func (c *configuration) Init() error {
c.SetEnvPrefix("COLLECTOR")
c.SetEnvKeyReplacer(strings.NewReplacer("-", "_", ".", "_"))
c.AutomaticEnv()
//c.SetDefault("collect.short.command", "-a -o on -S on")
c.SetDefault("allow_listed_devices", []string{})
@@ -167,7 +168,7 @@ func (c *configuration) GetCommandMetricsInfoArgs(deviceName string) string {
overrides := c.GetDeviceOverrides()
for _, deviceOverrides := range overrides {
if strings.ToLower(deviceName) == strings.ToLower(deviceOverrides.Device) {
if strings.EqualFold(deviceName, deviceOverrides.Device) {
//found matching device
if len(deviceOverrides.Commands.MetricsInfoArgs) > 0 {
return deviceOverrides.Commands.MetricsInfoArgs
@@ -183,7 +184,7 @@ func (c *configuration) GetCommandMetricsSmartArgs(deviceName string) string {
overrides := c.GetDeviceOverrides()
for _, deviceOverrides := range overrides {
if strings.ToLower(deviceName) == strings.ToLower(deviceOverrides.Device) {
if strings.EqualFold(deviceName, deviceOverrides.Device) {
//found matching device
if len(deviceOverrides.Commands.MetricsSmartArgs) > 0 {
return deviceOverrides.Commands.MetricsSmartArgs
+1 -1
View File
@@ -8,8 +8,8 @@ import (
reflect "reflect"
models "github.com/analogj/scrutiny/collector/pkg/models"
gomock "github.com/golang/mock/gomock"
viper "github.com/spf13/viper"
gomock "go.uber.org/mock/gomock"
)
// MockInterface is a mock of Interface interface.
+2 -2
View File
@@ -107,8 +107,8 @@ func (d *Detect) SmartCtlInfo(device *models.Device) error {
if len(device.WWN) == 0 {
// no WWN populated after WWN lookup and fallback. we need to throw an error
errMsg := fmt.Sprintf("no WWN (or fallback) populated for device: %s. Device will be registered, but no data will be published for this device. ", device.DeviceName)
d.Logger.Errorf(errMsg)
return fmt.Errorf(errMsg)
d.Logger.Errorf("%v", errMsg)
return fmt.Errorf("%v", errMsg)
}
return nil
+1 -12
View File
@@ -9,16 +9,15 @@ import (
mock_config "github.com/analogj/scrutiny/collector/pkg/config/mock"
"github.com/analogj/scrutiny/collector/pkg/detect"
"github.com/analogj/scrutiny/collector/pkg/models"
"github.com/golang/mock/gomock"
"github.com/sirupsen/logrus"
"github.com/stretchr/testify/assert"
"github.com/stretchr/testify/require"
"go.uber.org/mock/gomock"
)
func TestDetect_SmartctlScan(t *testing.T) {
// setup
mockCtrl := gomock.NewController(t)
defer mockCtrl.Finish()
fakeConfig := mock_config.NewMockInterface(mockCtrl)
fakeConfig.EXPECT().GetString("host.id").AnyTimes().Return("")
fakeConfig.EXPECT().GetDeviceOverrides().AnyTimes().Return([]models.ScanOverride{})
@@ -48,7 +47,6 @@ func TestDetect_SmartctlScan(t *testing.T) {
func TestDetect_SmartctlScan_Megaraid(t *testing.T) {
// setup
mockCtrl := gomock.NewController(t)
defer mockCtrl.Finish()
fakeConfig := mock_config.NewMockInterface(mockCtrl)
fakeConfig.EXPECT().GetString("host.id").AnyTimes().Return("")
fakeConfig.EXPECT().GetDeviceOverrides().AnyTimes().Return([]models.ScanOverride{})
@@ -81,7 +79,6 @@ func TestDetect_SmartctlScan_Megaraid(t *testing.T) {
func TestDetect_SmartctlScan_Nvme(t *testing.T) {
// setup
mockCtrl := gomock.NewController(t)
defer mockCtrl.Finish()
fakeConfig := mock_config.NewMockInterface(mockCtrl)
fakeConfig.EXPECT().GetString("host.id").AnyTimes().Return("")
fakeConfig.EXPECT().GetDeviceOverrides().AnyTimes().Return([]models.ScanOverride{})
@@ -113,7 +110,6 @@ func TestDetect_SmartctlScan_Nvme(t *testing.T) {
func TestDetect_TransformDetectedDevices_Empty(t *testing.T) {
// setup
mockCtrl := gomock.NewController(t)
defer mockCtrl.Finish()
fakeConfig := mock_config.NewMockInterface(mockCtrl)
fakeConfig.EXPECT().GetString("host.id").AnyTimes().Return("")
fakeConfig.EXPECT().GetDeviceOverrides().AnyTimes().Return([]models.ScanOverride{})
@@ -147,7 +143,6 @@ func TestDetect_TransformDetectedDevices_Empty(t *testing.T) {
func TestDetect_TransformDetectedDevices_Ignore(t *testing.T) {
// setup
mockCtrl := gomock.NewController(t)
defer mockCtrl.Finish()
fakeConfig := mock_config.NewMockInterface(mockCtrl)
fakeConfig.EXPECT().GetString("host.id").AnyTimes().Return("")
fakeConfig.EXPECT().GetDeviceOverrides().AnyTimes().Return([]models.ScanOverride{{Device: "/dev/sda", DeviceType: nil, Ignore: true}})
@@ -180,7 +175,6 @@ func TestDetect_TransformDetectedDevices_Ignore(t *testing.T) {
func TestDetect_TransformDetectedDevices_Raid(t *testing.T) {
// setup
mockCtrl := gomock.NewController(t)
defer mockCtrl.Finish()
fakeConfig := mock_config.NewMockInterface(mockCtrl)
fakeConfig.EXPECT().GetString("host.id").AnyTimes().Return("")
fakeConfig.EXPECT().GetString("commands.metrics_smartctl_bin").AnyTimes().Return("smartctl")
@@ -223,7 +217,6 @@ func TestDetect_TransformDetectedDevices_Raid(t *testing.T) {
func TestDetect_TransformDetectedDevices_Simple(t *testing.T) {
// setup
mockCtrl := gomock.NewController(t)
defer mockCtrl.Finish()
fakeConfig := mock_config.NewMockInterface(mockCtrl)
fakeConfig.EXPECT().GetString("host.id").AnyTimes().Return("")
fakeConfig.EXPECT().GetString("commands.metrics_smartctl_bin").AnyTimes().Return("smartctl")
@@ -257,7 +250,6 @@ func TestDetect_TransformDetectedDevices_Simple(t *testing.T) {
func TestDetect_TransformDetectedDevices_WithoutDeviceTypeOverride(t *testing.T) {
// setup
mockCtrl := gomock.NewController(t)
defer mockCtrl.Finish()
fakeConfig := mock_config.NewMockInterface(mockCtrl)
fakeConfig.EXPECT().GetString("host.id").AnyTimes().Return("")
fakeConfig.EXPECT().GetString("commands.metrics_smartctl_bin").AnyTimes().Return("smartctl")
@@ -290,7 +282,6 @@ func TestDetect_TransformDetectedDevices_WithoutDeviceTypeOverride(t *testing.T)
func TestDetect_TransformDetectedDevices_WhenDeviceNotDetected(t *testing.T) {
// setup
mockCtrl := gomock.NewController(t)
defer mockCtrl.Finish()
fakeConfig := mock_config.NewMockInterface(mockCtrl)
fakeConfig.EXPECT().GetString("host.id").AnyTimes().Return("")
fakeConfig.EXPECT().GetString("commands.metrics_smartctl_bin").AnyTimes().Return("smartctl")
@@ -312,7 +303,6 @@ func TestDetect_TransformDetectedDevices_WhenDeviceNotDetected(t *testing.T) {
func TestDetect_TransformDetectedDevices_AllowListFilters(t *testing.T) {
mockCtrl := gomock.NewController(t)
defer mockCtrl.Finish()
fakeConfig := mock_config.NewMockInterface(mockCtrl)
fakeConfig.EXPECT().GetString("host.id").AnyTimes().Return("")
@@ -353,7 +343,6 @@ func TestDetect_TransformDetectedDevices_AllowListFilters(t *testing.T) {
func TestDetect_SmartCtlInfo(t *testing.T) {
t.Run("should report nvme info", func(t *testing.T) {
ctrl := gomock.NewController(t)
defer ctrl.Finish()
const (
someArgs = "--info --json"
+9 -10
View File
@@ -2,12 +2,13 @@ package detect
import (
"fmt"
"os"
"path/filepath"
"strings"
"github.com/analogj/scrutiny/collector/pkg/common/shell"
"github.com/analogj/scrutiny/collector/pkg/models"
"github.com/jaypipes/ghw"
"io/ioutil"
"path/filepath"
"strings"
)
func DevicePrefix() string {
@@ -23,15 +24,15 @@ func (d *Detect) Start() ([]models.Device, error) {
}
//inflate device info for detected devices.
for ndx, _ := range detectedDevices {
d.SmartCtlInfo(&detectedDevices[ndx]) //ignore errors.
for ndx := range detectedDevices {
d.SmartCtlInfo(&detectedDevices[ndx]) //ignore errors.
populateUdevInfo(&detectedDevices[ndx]) //ignore errors.
}
return detectedDevices, nil
}
//WWN values NVMe and SCSI
// WWN values NVMe and SCSI
func (d *Detect) wwnFallback(detectedDevice *models.Device) {
block, err := ghw.Block()
if err == nil {
@@ -61,7 +62,7 @@ func (d *Detect) wwnFallback(detectedDevice *models.Device) {
func populateUdevInfo(detectedDevice *models.Device) error {
// Get device major:minor numbers
// `cat /sys/class/block/sda/dev`
devNo, err := ioutil.ReadFile(filepath.Join("/sys/class/block/", detectedDevice.DeviceName, "dev"))
devNo, err := os.ReadFile(filepath.Join("/sys/class/block/", detectedDevice.DeviceName, "dev"))
if err != nil {
return err
}
@@ -69,7 +70,7 @@ func populateUdevInfo(detectedDevice *models.Device) error {
// Look up block device in udev runtime database
// `cat /run/udev/data/b8:0`
udevID := "b" + strings.TrimSpace(string(devNo))
udevBytes, err := ioutil.ReadFile(filepath.Join("/run/udev/data/", udevID))
udevBytes, err := os.ReadFile(filepath.Join("/run/udev/data/", udevID))
if err != nil {
return err
}
@@ -97,7 +98,5 @@ func populateUdevInfo(detectedDevice *models.Device) error {
detectedDevice.DeviceSerialID = fmt.Sprintf("%s-%s", udevInfo["ID_BUS"], deviceSerialID)
}
return nil
}
+3 -4
View File
@@ -1,10 +1,10 @@
package detect_test
import (
"fmt"
"testing"
"github.com/analogj/scrutiny/collector/pkg/detect"
"github.com/stretchr/testify/require"
"testing"
)
func TestWwn_FromStringTable(t *testing.T) {
@@ -25,8 +25,7 @@ func TestWwn_FromStringTable(t *testing.T) {
}
//test
for _, tt := range tests {
testname := fmt.Sprintf("%s", tt.wwnStr)
t.Run(testname, func(t *testing.T) {
t.Run(tt.wwnStr, func(t *testing.T) {
str := tt.wwn.ToString()
require.Equal(t, tt.wwnStr, str)
})
+28 -5
View File
@@ -6,16 +6,20 @@
######## Build the frontend
FROM --platform=${BUILDPLATFORM} node AS frontendbuild
WORKDIR /go/src/github.com/analogj/scrutiny
COPY --link . /go/src/github.com/analogj/scrutiny
COPY --link Makefile /go/src/github.com/analogj/scrutiny/
COPY --link webapp/frontend /go/src/github.com/analogj/scrutiny/webapp/frontend
RUN make binary-frontend
######## Build the backend
FROM golang:1.20-bookworm as backendbuild
FROM golang:1.25-trixie as backendbuild
WORKDIR /go/src/github.com/analogj/scrutiny
COPY --link . /go/src/github.com/analogj/scrutiny
COPY --link Makefile /go/src/github.com/analogj/scrutiny/
COPY --link go.mod go.sum /go/src/github.com/analogj/scrutiny/
COPY --link collector /go/src/github.com/analogj/scrutiny/collector
COPY --link webapp/backend /go/src/github.com/analogj/scrutiny/webapp/backend
RUN apt-get update && DEBIAN_FRONTEND=noninteractive \
apt-get install -y --no-install-recommends \
file \
@@ -23,8 +27,25 @@ RUN apt-get update && DEBIAN_FRONTEND=noninteractive \
RUN make binary-clean binary-all WEB_BINARY_NAME=scrutiny
######## Build smartmontools from source
FROM debian:trixie-slim AS smartmontoolsbuild
ARG SMARTMONTOOLS_VER=7.5
RUN apt-get update && DEBIAN_FRONTEND=noninteractive \
apt-get install -y --no-install-recommends \
ca-certificates curl gcc g++ gnupg make \
&& rm -rf /var/lib/apt/lists/*
RUN curl -L "https://github.com/smartmontools/smartmontools/releases/download/RELEASE_$(echo ${SMARTMONTOOLS_VER} | tr '.' '_')/smartmontools-${SMARTMONTOOLS_VER}.tar.gz" -o /tmp/smartmontools.tar.gz \
&& tar -xzf /tmp/smartmontools.tar.gz -C /tmp \
&& cd /tmp/smartmontools-${SMARTMONTOOLS_VER} \
&& ./configure --prefix=/usr LDFLAGS='-static' --without-libcap-ng --without-libsystemd \
&& make -j"$(nproc)" \
&& make install \
&& /usr/sbin/update-smart-drivedb \
&& rm -rf /tmp/smartmontools*
######## Combine build artifacts in runtime image
FROM debian:bookworm-slim as runtime
FROM debian:trixie-slim AS runtime
ARG TARGETARCH
EXPOSE 8080
WORKDIR /opt/scrutiny
@@ -40,7 +61,6 @@ RUN apt-get update && DEBIAN_FRONTEND=noninteractive \
ca-certificates \
cron \
curl \
smartmontools \
tzdata \
procps \
xz-utils \
@@ -62,6 +82,9 @@ RUN curl -L https://dl.influxdata.com/influxdb/releases/influxdb2-${INFLUXVER}-$
COPY /rootfs /
COPY --from=smartmontoolsbuild /usr/sbin/smartctl /usr/sbin/smartctl
COPY --from=smartmontoolsbuild /usr/share/smartmontools/ /usr/share/smartmontools/
COPY --link --from=backendbuild --chmod=755 /go/src/github.com/analogj/scrutiny/scrutiny /opt/scrutiny/bin/
COPY --link --from=backendbuild --chmod=755 /go/src/github.com/analogj/scrutiny/scrutiny-collector-metrics /opt/scrutiny/bin/
COPY --link --from=frontendbuild --chmod=644 /go/src/github.com/analogj/scrutiny/dist /opt/scrutiny/web
+26 -4
View File
@@ -4,21 +4,43 @@
########
FROM golang:1.20-bookworm as backendbuild
FROM golang:1.25-trixie AS backendbuild
WORKDIR /go/src/github.com/analogj/scrutiny
COPY . /go/src/github.com/analogj/scrutiny
COPY --link Makefile /go/src/github.com/analogj/scrutiny/
COPY --link go.mod go.sum /go/src/github.com/analogj/scrutiny/
COPY --link collector /go/src/github.com/analogj/scrutiny/collector
COPY --link webapp/backend /go/src/github.com/analogj/scrutiny/webapp/backend
RUN apt-get update && apt-get install -y file && rm -rf /var/lib/apt/lists/*
RUN make binary-clean binary-collector
######## Build smartmontools from source
FROM debian:trixie-slim AS smartmontoolsbuild
ARG SMARTMONTOOLS_VER=7.5
RUN apt-get update && DEBIAN_FRONTEND=noninteractive \
apt-get install -y --no-install-recommends \
ca-certificates curl gcc g++ gnupg make \
&& rm -rf /var/lib/apt/lists/*
RUN curl -L "https://github.com/smartmontools/smartmontools/releases/download/RELEASE_$(echo ${SMARTMONTOOLS_VER} | tr '.' '_')/smartmontools-${SMARTMONTOOLS_VER}.tar.gz" -o /tmp/smartmontools.tar.gz \
&& tar -xzf /tmp/smartmontools.tar.gz -C /tmp \
&& cd /tmp/smartmontools-${SMARTMONTOOLS_VER} \
&& ./configure --prefix=/usr LDFLAGS='-static' --without-libcap-ng --without-libsystemd \
&& make -j"$(nproc)" \
&& make install \
&& /usr/sbin/update-smart-drivedb \
&& rm -rf /tmp/smartmontools*
########
FROM debian:bookworm-slim as runtime
FROM debian:trixie-slim AS runtime
WORKDIR /opt/scrutiny
ENV PATH="/opt/scrutiny/bin:${PATH}"
RUN apt-get update && apt-get install -y cron smartmontools ca-certificates tzdata && rm -rf /var/lib/apt/lists/* && update-ca-certificates
RUN apt-get update && apt-get install -y cron ca-certificates tzdata && rm -rf /var/lib/apt/lists/* && update-ca-certificates
COPY --from=smartmontoolsbuild /usr/sbin/smartctl /usr/sbin/smartctl
COPY --from=smartmontoolsbuild /usr/share/smartmontools/ /usr/share/smartmontools/
COPY /docker/entrypoint-collector.sh /entrypoint-collector.sh
COPY /rootfs/etc/cron.d/scrutiny /etc/cron.d/scrutiny
+20
View File
@@ -0,0 +1,20 @@
########################################################################################################################
# Smartmontools Builder
# - Builds smartctl from source as a static binary.
# - Updates the drive database to include the latest drive models since it can change between releases.
# - Used as a shared build stage by Dockerfile and Dockerfile.collector.
########################################################################################################################
FROM debian:trixie-slim
ARG SMARTMONTOOLS_VER=7.5
RUN apt-get update && DEBIAN_FRONTEND=noninteractive \
apt-get install -y --no-install-recommends \
ca-certificates curl gcc g++ gnupg make \
&& rm -rf /var/lib/apt/lists/*
RUN curl -L "https://github.com/smartmontools/smartmontools/releases/download/RELEASE_$(echo ${SMARTMONTOOLS_VER} | tr '.' '_')/smartmontools-${SMARTMONTOOLS_VER}.tar.gz" -o /tmp/smartmontools.tar.gz \
&& tar -xzf /tmp/smartmontools.tar.gz -C /tmp \
&& cd /tmp/smartmontools-${SMARTMONTOOLS_VER} \
&& ./configure --prefix=/usr LDFLAGS='-static' --without-libcap-ng --without-libsystemd \
&& make -j"$(nproc)" \
&& make install \
&& /usr/sbin/update-smart-drivedb \
&& rm -rf /tmp/smartmontools*
+8 -4
View File
@@ -6,22 +6,26 @@
######## Build the frontend
FROM --platform=${BUILDPLATFORM} node AS frontendbuild
WORKDIR /go/src/github.com/analogj/scrutiny
COPY --link . /go/src/github.com/analogj/scrutiny
COPY --link Makefile /go/src/github.com/analogj/scrutiny/
COPY --link webapp/frontend /go/src/github.com/analogj/scrutiny/webapp/frontend
RUN make binary-frontend
######## Build the backend
FROM golang:1.20-bookworm as backendbuild
FROM golang:1.25-trixie as backendbuild
WORKDIR /go/src/github.com/analogj/scrutiny
COPY --link . /go/src/github.com/analogj/scrutiny
COPY --link Makefile /go/src/github.com/analogj/scrutiny/
COPY --link go.mod go.sum /go/src/github.com/analogj/scrutiny/
COPY --link collector /go/src/github.com/analogj/scrutiny/collector
COPY --link webapp/backend /go/src/github.com/analogj/scrutiny/webapp/backend
RUN apt-get update && apt-get install -y file && rm -rf /var/lib/apt/lists/*
RUN make binary-clean binary-all WEB_BINARY_NAME=scrutiny
######## Combine build artifacts in runtime image
FROM debian:bookworm-slim as runtime
FROM debian:trixie-slim as runtime
EXPOSE 8080
WORKDIR /opt/scrutiny
ENV PATH="/opt/scrutiny/bin:${PATH}"
+1 -1
View File
@@ -3,7 +3,7 @@ version: '2.4'
services:
influxdb:
restart: unless-stopped
image: influxdb:2.2
image: influxdb:2.8
ports:
- '8086:8086'
volumes:
+11 -13
View File
@@ -49,19 +49,15 @@ contains the connection and notification details but I always find it easier to
docker-compose.
```yaml
version: "3.4"
networks:
monitoring: # A common network for all monitoring services to communicate into
external: true
notifications: # To Gotify or another Notification service
external: true
services:
influxdb:
restart: unless-stopped
container_name: influxdb
image: influxdb:2.1-alpine
image: influxdb:2.8
ports:
- 8086:8086
volumes:
@@ -73,7 +69,8 @@ services:
- DOCKER_INFLUXDB_INIT_PASSWORD=${PASSWORD}
- DOCKER_INFLUXDB_INIT_ORG=homelab
- DOCKER_INFLUXDB_INIT_BUCKET=scrutiny
- DOCKER_INFLUXDB_INIT_ADMIN_TOKEN=your-very-secret-token
- DOCKER_INFLUXDB_INIT_ADMIN_TOKEN=SUPER-SECRET-TOKEN
- TZ=Europe/Stockholm
networks:
- monitoring
@@ -85,17 +82,20 @@ services:
ports:
- 8080:8080
volumes:
- ${DIR_CONFIG}/scrutiny/config:/opt/scrutiny/config
- ${DIR_CONFIG}/config:/opt/scrutiny/config
environment:
- SCRUTINY_WEB_INFLUXDB_HOST=influxdb
- SCRUTINY_WEB_INFLUXDB_PORT=8086
- SCRUTINY_WEB_INFLUXDB_TOKEN=your-very-secret-token
- SCRUTINY_WEB_INFLUXDB_TOKEN=SUPER-SECRET-TOKEN
- SCRUTINY_WEB_INFLUXDB_ORG=homelab
- SCRUTINY_WEB_INFLUXDB_BUCKET=scrutiny
# Optional but highly recommended to notify you in case of a problem
- SCRUTINY_NOTIFY_URLS=["http://gotify:80/message?token=a-gotify-token"]
# Optional but highly recommended to notify you in case of a problem; space-separated list of shoutrrr uri's
# https://github.com/AnalogJ/scrutiny/blob/master/docs/TROUBLESHOOTING_NOTIFICATIONS.md
- SCRUTINY_NOTIFY_URLS=http://gotify:80/message?token=a-gotify-token ntfy://username:password@host:port/topic
- TZ=Europe/Stockholm
depends_on:
- influxdb
influxdb:
condition: service_healthy
networks:
- notifications
- monitoring
@@ -164,8 +164,6 @@ Also all drives that you wish to monitor need to be presented to the container u
The image handles the periodic scanning of the drives.
```yaml
version: "3.4"
services:
collector:
+315 -13
View File
@@ -10,9 +10,9 @@ Scrutiny is made up of three components: an influxdb Database, a collector and a
## InfluxDB
Please follow the official InfluxDB installation guide. Note, you'll need to install v2.2.0+.
Please follow the official InfluxDB installation guide. Note, you'll need to install v2.8.0+.
https://docs.influxdata.com/influxdb/v2.2/install/
https://docs.influxdata.com/influxdb/v2/install/
## Webapp/API
@@ -122,6 +122,11 @@ So you'll need to install the v7+ version using one of the following commands:
- `dnf install smartmontools`
- **FreeBSD:** `pkg install smartmontools`
The following additional dependencies are needed if you want to run the collector as an unprivileged user:
- systemd version > 235
- a restricted user account
### Directory Structure
Now let's create a directory structure to contain the Scrutiny collector binary.
@@ -133,40 +138,337 @@ mkdir -p /opt/scrutiny/bin
### Download Files
Next, we'll download the Scrutiny collector binary from the [latest Github release](https://github.com/analogj/scrutiny/releases).
The file you need to download is named:
Next, we'll download the Scrutiny collector binary from the [latest Github release](https://github.com/analogj/scrutiny/releases). You are looking for the one titled **scrutiny-collector-metrics-linux-amd64** unless you know you are on arm.
- **scrutiny-collector-metrics-linux-amd64** - save this file to `/opt/scrutiny/bin`
```sh
wget -O /tmp/scrutiny-collector-metrics https://github.com/AnalogJ/scrutiny/releases/latest/download/scrutiny-collector-metrics-linux-amd64
```
Optional, but recommended: Before continuing it's recommended you compare the sha from the release page with the downloaded file to ensure it's the same file and not corrupted/tampered with. The command to do this is:
`echo "SHA_GOES_HERE /tmp/scrutiny-collector-metrics" | sha256sum -c`
example for the v0.8.6 release:
`echo "4c163645ce24e5487f4684a25ec73485d77a82a57f084808ff5aad0c11499ad2 /tmp/scrutiny-collector-metrics" | sha256sum -c`
followed by:
`sudo mv /tmp/scrutiny-collector-metrics /opt/scrutiny/bin/`
to move the binary to its final resting place
### Prepare Scrutiny
Now that we have downloaded the required files, let's prepare the filesystem.
```
```sh
# Let's make sure the Scrutiny collector is executable.
chmod +x /opt/scrutiny/bin/scrutiny-collector-metrics-linux-amd64
chmod +x /opt/scrutiny/bin/scrutiny-collector-metrics
```
if you are using SELinux, you may need to also do the following:
```sh
# tell SELinux to allow these binaries
sudo semanage fcontext -a -t bin_t "/opt/scrutiny/bin(/.*)?"
# update labels
sudo restorecon -Rv /opt/scrutiny/bin
```
### Start Scrutiny Collector, Populate Webapp
Next, we will manually trigger the collector, to populate the Scrutiny dashboard:
> NOTE: if you need to pass a config file to the scrutiny collector, you can provide it using the `--config` flag.
```
/opt/scrutiny/bin/scrutiny-collector-metrics-linux-amd64 run --api-endpoint "http://localhost:8080"
```sh
/opt/scrutiny/bin/scrutiny-collector-metrics run --api-endpoint "http://localhost:8080"
```
### Schedule Collector with Cron
### Schedule Collector with (root) Cron
Finally you need to schedule the collector to run periodically.
This may be different depending on your OS/environment, but it may look something like this:
```
```sh
# open crontab
crontab -e
sudo crontab -e
# add a line for Scrutiny
*/15 * * * * . /etc/profile; /opt/scrutiny/bin/scrutiny-collector-metrics-linux-amd64 run --api-endpoint "http://localhost:8080"
*/15 * * * * . /etc/profile; /opt/scrutiny/bin/scrutiny-collector-metrics run --api-endpoint "http://localhost:8080"
```
### Schedule Collector with Systemd (rootless)
Alternatively you can run `scrutiny-collector-metrics` as non-root so long as the relevant capabilities and permissions are granted.
#### Creating a Restricted Service Account
This is the account that will run `scrutiny-collector-metrics`. Note this isn't strictly needed for all setups, but is useful from a logging/auditing perspective.
- Debian-based distros:
- `sudo adduser --system scrutiny-svc --group --home /opt/scrutiny-svc`
- RHEL-based distros:
- `sudo useradd --system --home-dir /opt/scrutiny-svc --shell /sbin/nologin scrutiny-svc`
Next, add the user to the `disk` group:
```sh
sudo usermod -aG disk scrutiny-svc
```
#### Creating a Restricted Systemd Service using AmbientCapabilities (easier)
This is the simpler setup, which allows you to run scrutiny rootless, but depending on what you want, may require granting more permissions to scrutiny than you would like to.
1. go to `/etc/systemd/system`
2. create scrutiny-collector.service with the following contents:
```ini
[Unit]
Description=Daily Restricted Scrutiny Collector
After=network.target
[Service]
[Unit]
Description=Daily Restricted Scrutiny Collector
After=network.target
[Service]
Type=oneshot
User=scrutiny-svc
Group=disk
ExecStart=/opt/scrutiny/bin/scrutiny-collector-metrics run --api-endpoint "http://localhost:8080"
# --- PRIVILEGE LOCKDOWN ---
## CAP_SYS_RAWIO is needed for SATA drives
AmbientCapabilities=CAP_SYS_RAWIO
CapabilityBoundingSet=CAP_SYS_RAWIO
## unfortunately nvme drives require CAP_SYS_ADMIN
## if you want nvme drives you must do the following:
#AmbientCapabilities=CAP_SYS_RAWIO CAP_SYS_ADMIN
#CapabilityBoundingSet=
NoNewPrivileges=yes
# Security/sandboxing settings
KeyringMode=private
LockPersonality=yes
MemoryDenyWriteExecute=yes
ProtectSystem=strict
ProtectHome=yes
PrivateDevices=no
## you can restrict devices using:
#DevicePolicy=closed
#DeviceAllow=/dev/sda r
#DeviceAllow=/dev/nvme0 r
ProtectKernelModules=yes
ProtectKernelTunables=yes
ProtectControlGroups=yes
ProtectClock=yes
ProtectHostname=yes
ProtectKernelLogs=yes
RemoveIPC=yes
RestrictSUIDSGID=true
# --- NETWORK LOCKDOWN
## use these to restrict what scrutiny can talk to over the network
## if using a hub on a different host you will need to change the values accordingly
RestrictAddressFamilies=AF_INET AF_INET6 AF_UNIX
IPAddressDeny=any
IPAddressAllow=localhost
[Install]
WantedBy=multi-user.target
```
Additionally, for nvme drives you may need to create a udev rule on many systems, as /dev/nvme* is often owned only by root:
##### add udev rule `/etc/udev/rules.d/99-nvme.rules` with contents:
```
KERNEL=="nvme[0-9]*", GROUP="disk", MODE="0640"
```
then run the following commands to load the udev rule:
```sh
sudo udevadm control --reload-rules
sudo udevadm trigger --subsystem-match=nvme --action=add
```
##### Pros:
- easy to maintain
- much better than running as root (especially if you don't need nvme drives)
- there are no privilege escalations needed
##### Cons:
NOTE: These cons basically only apply if a major supply-chain attack happens against scrutiny, and reflect a worst-case scenario that is unlikely to ever occur:
- CAP_SYS_RAWIO allows for data exfiltration/modification from SATA drives (ssh keys, /etc/shadow, etc)
- CAP_SYS_ADMIN would theoretically allow for significant system compromise
- nvme drives requires a udev rule for reliable access
If you are happy with that, you can jump to [Create a Systemd Timer to run scrutiny-collector.service](#create-a-systemd-timer-to-run-scrutiny-collectorservice)
#### Creating a Restricted Systemd Service using sudo and Shim Script
If granting scrutiny `CAP_SYS_RAWIO` and/or `CAP_SYS_ADMIN` exceeds your risk appetite, you have another option, though one more complicated and with its own set of pros/cons
1. run `sudo mkdir -p /opt/smartctl-shim/bin`
2. edit `/opt/smartctl-shim/bin/smartctl` with the following content:
```sh
#!/bin/bash
# Shim for accounts to use smartctl without being root
# for automation requires the account be in sudoers
exec /usr/bin/sudo /usr/sbin/smartctl "$@"
```
3. create a new `scrutiny-collector` file in `/etc/sudoers.d/`
4. inside `/etc/sudoers.d/scrutiny-collector` add the following:
```sh
scrutiny-svc ALL=(root) NOPASSWD: /usr/sbin/smartctl *
```
5. go to `/etc/systemd/system`
6. create scrutiny-collector.service with the following contents:
```ini
[Unit]
Description=Daily Restricted Scrutiny Collector
After=network.target
[Service]
Type=oneshot
User=scrutiny-svc
Environment="PATH=/opt/smartctl-shim/bin:/usr/bin:/bin"
ExecStart=/opt/scrutiny/bin/scrutiny-collector-metrics run --api-endpoint "http://localhost:8080"
# --- PRIVILEGE LOCKDOWN ---
## we use sudo to elevate privileges for smartctl only, so no Ambient Capabilities are needed
AmbientCapabilities=
## CAP_SYS_RAWIO is needed for SATA drives
CapabilityBoundingSet=CAP_SETUID CAP_SETGID CAP_AUDIT_WRITE CAP_SYS_RAWIO CAP_SYS_RESOURCE
## unfortunately nvme drives require CAP_SYS_ADMIN
## if you want nvme drives you must do the following:
# CapabilityBoundingSet=CAP_SETUID CAP_SETGID CAP_AUDIT_WRITE CAP_SYS_RAWIO CAP_SYS_ADMIN CAP_SYS_RESOURCE
## since sudo needs to be used to elevate permissions in this setup, we need to allow new privileges
NoNewPrivileges=no
# Security/sandboxing settings
KeyringMode=private
LockPersonality=yes
MemoryDenyWriteExecute=yes
ProtectSystem=strict
ProtectHome=yes
PrivateDevices=no
ProtectKernelModules=yes
ProtectKernelTunables=yes
ProtectControlGroups=yes
ProtectClock=yes
ProtectHostname=yes
ProtectKernelLogs=yes
RemoveIPC=yes
RestrictSUIDSGID=true
# --- NETWORK LOCKDOWN
## use these to restrict what scrutiny can talk to over the network
## if using a hub on a different host you will need to change the values accordingly
RestrictAddressFamilies=AF_INET AF_INET6 AF_UNIX
IPAddressDeny=any
IPAddressAllow=localhost
[Install]
WantedBy=multi-user.target
```
##### Pros:
- the scrutiny binary itself will not have permissions like CAP_SYS_ADMIN
- much better than running as root (especially if you don't need nvme drives)
- `sudo` restricts privilege escalation to just `smartctl`
- no udev rule needed
##### Cons:
NOTE: These cons basically only apply if a major supply-chain attack happens against scrutiny, and reflect a worst-case scenario that is unlikely to ever occur:
- Any sort of privilege escalation attack in sudo could theoretically allow a compromised scrutiny to gain additional privileges, since the process has permission to escalate privileges in general
- Even though sudo only allows `smartctl`, it still has `CAP_SYS_RAWIO` and `CAP_SYS_ADMIN` so in theory the same attacks from the first method are possible, though now only with an exploit using smartctl instead of scrutiny directly
- even though you don't need a udev rule, this adds a lot of additional administrative overhead
- while the scrutiny binary itself isn't elevated, it has a sub-process that is running as root (sudo/smartctl)
#### Create a Systemd Timer to run scrutiny-collector.service
First, lets test our service. It doesn't matter which method you used above, as either way you need to load and run it.
```sh
# reload changes for systemd services
sudo systemctl daemon-reload
# enable the service
sudo systemctl enable scrutiny-collector.service
# now run the service
sudo systemctl start scrutiny-collector.service
```
You should see the data in your hub instance of scrutiny now. If you run into issues, I recommend turning on debug logging for scrutiny and checking your system logs using journalctl. It may be that a permission is missing or wrong.
Now that things have been validated, let's create the systemd timer to run the service for us on a schedule:
1. if you are not still there, go to `/etc/systemd/system`
2. create scrutiny-collector.timer with the following contents:
```ini
[Unit]
Description=Run Scrutiny Collector daily at 2am
[Timer]
# Standard calendar trigger
OnCalendar=*-*-* 02:00:00
# Ensures the job runs if the computer was off at 2am
Persistent=true
# Minimizes I/O spikes by staggering start time
RandomizedDelaySec=30
[Install]
WantedBy=timers.target
```
Update the schedule as you see fit for your needs
Once you are satisfied with your timer, you'll need to load and enable it:
```sh
# reload changes for systemd services
sudo systemctl daemon-reload
# now enable the timer
sudo systemctl enable --now scrutiny-collector.timer
```
That's it! You're done. You can check the status of the timer using `sudo systemctl status scrutiny-collector.timer`
+170
View File
@@ -0,0 +1,170 @@
# Rootless Podman Quadlet Install
Note: These instructions are written with Podman 4.9 in mind, as that's what's available on Ubuntu 24.04. Podman 5+ can simplify the process using a .pod file to run both the hub and influxdb instance in the same pod, sharing localhost. This is a fairly trivial change should anyone want to add the documentation for it. While this document isn't Ubuntu-specific, this is being purposefully done to allow it to apply to the vast majority of Podman users, regardless of what Linux distro they use.
### Dependencies
- Podman > 4.9
- Systemd > 250 (for quadlet support)
- a restricted service account
### Creating a Service Account
See [Creating a Restricted Service Account](INSTALL_MANUAL.md#creating-a-restricted-service-account) for instructions.
While you do not need to use the same account as the collector, this guide will assume you do for all its examples.
In addition to those steps, you will need to create sub ids and enable lingering for the user:
```sh
# add sub-uids and sub-gids, you may need to adjust numbers if you have other rootless quadlets running for other users already
# it is not recommended to go below 100000
# we choose to start at 500000 in the event you have some other podman accounts
sudo usermod --add-subuids 500000-565535 scrutiny-svc
sudo usermod --add-subgids 500000-565535 scrutiny-svc
# We want the quadlets to stay running even if the user isn't logged in
sudo loginctl enable-linger scrutiny-svc
```
### Directory Structure
Once the account is created, you will need to grab its id to create a few directories for the data files and rootless quadlet files:
```sh
# create folders for config and influxdb
sudo mkdir -p /opt/scrutiny-svc/scrutiny/{config,influxdb}
# get the config file for scrutiny hub
sudo wget -O /opt/scrutiny-svc/scrutiny/config/scrutiny.yaml https://raw.githubusercontent.com/AnalogJ/scrutiny/refs/heads/master/example.scrutiny.yaml
# set permissions on everything
sudo chown -R scrutiny-svc:scrutiny-svc /opt/scrutiny-svc
# Get the ID of scrutiny-svc so you know it for your own record-keeping
id -u scrutiny-svc
# create a directory
sudo mkdir -p /etc/containers/systemd/users/$(id -u scrutiny-svc)
## go into the directory you just created for the rest of the guide
cd /etc/containers/systemd/users/$(id -u scrutiny-svc)
```
### Quadlet Files
Now that everything is set up and configured for the account to run quadlets, we just need to create a few quadlet files.
All remaining system actions will take place in `/etc/containers/systemd/users/$(id -u scrutiny-svc)` which is why we had you cd into it.
#### Networking
We need the hub and influxdb instances to be able to talk to each other, and in the case of Podman 4.9, they will run separately not sharing a localhost, and as such we need to configure a network for them to share. The file is pretty simple:
##### scrutiny-net.network
```ini
[Network]
NetworkName=scrutiny-net
```
#### Containers
Now we're ready for creating the containers
##### influxdb.container
```ini
[Unit]
Description=influxdb
[Container]
ContainerName=influxdb
Image=docker.io/library/influxdb:2.8
AutoUpdate=registry
Timezone=local
## not strictly necessary, but keeps file permission sane for influxdb
PodmanArgs=--group-add keep-groups
## versions of podman after 5.1 should do the below instead
#GroupAdd=keep-groups
Volume=/opt/scrutiny-svc/scrutiny/influxdb:/var/lib/influxdb2:Z
Network=scrutiny-net
[Service]
Restart=on-failure
[Install]
# Start by default on boot
WantedBy=default.target
```
##### scrutiny-web.container
```ini
[Unit]
Description=scrutiny-web
After=influxdb.service
Requires=influxdb.service
[Container]
ContainerName=scrutiny-web
Image=ghcr.io/analogj/scrutiny:latest-web
AutoUpdate=registry
Timezone=local
Volume=/opt/scrutiny-svc/scrutiny/config:/opt/scrutiny/config:Z
Network=scrutiny-net
PublishPort=8080:8080/tcp
[Service]
Restart=on-failure
[Install]
# Start by default on boot
WantedBy=default.target
```
#### Update scrutiny config
Since our containers are running separately, we need to update `/opt/scrutiny-svc/scrutiny/config/scrutiny.yaml` to the new influxdb host:
1. edit `/opt/scrutiny-svc/scrutiny/config/scrutiny.yaml`
2. under `influxdb` section, change `host: 0.0.0.0` to `host: influxdb` -- remember that yaml is whitespace-sensitive! so be mindful of the indents
```yaml
influxdb:
# scheme: 'http'
host: influxdb
port: 8086
```
# Running the Hub
With that done, we're now ready to start up the services:
```sh
# reload all the systemd user files for scrutiny-svc
sudo systemctl --user -M scrutiny-svc@ daemon-reload
# start the scrutiny-net network:
sudo systemctl --user -M scrutiny-svc@ start scrutiny-net-network.service
# start influxdb first and wait for it to come up
sudo systemctl --user -M scrutiny-svc@ start influxdb.service
# check if it's fully up
sudo systemctl --user -M scrutiny-svc@ status influxdb.service
# now start scrutiny
sudo systemctl --user -M scrutiny-svc@ start scrutiny-web.service
```
You are now ready to run the collector, if you would like to run that rootless as well, see the guide at [Schedule Collector with Systemd (rootless)](INSTALL_MANUAL.md#schedule-collector-with-systemd-rootless)
+2 -2
View File
@@ -41,14 +41,14 @@ The growth rate is pretty unintuitive -- see https://github.com/AnalogJ/scrutiny
InfluxDB is a required dependency for Scrutiny v0.4.0+.
https://docs.influxdata.com/influxdb/v2.2/install/
https://docs.influxdata.com/influxdb/v2/install/
## Persistence
To ensure that all data is correctly stored, you must also persist the InfluxDB database directory
- If you're using the Official Scrutiny Omnibus image (`ghcr.io/analogj/scrutiny:master-omnibus`), the path is `/opt/scrutiny/influxdb`
- If you're deploying in Hub/Spoke mode with the InfluxDB maintained image (`influxdb:2.2`), the path is `/var/lib/influxdb2`
- If you're deploying in Hub/Spoke mode with the InfluxDB maintained image (`influxdb:2.8`), the path is `/var/lib/influxdb2`
If you attempt to restart Scrutiny but you forgot to persist the InfluxDB directory, you will get an error message like follows:
+2 -2
View File
@@ -3,8 +3,8 @@
As documented in [example.scrutiny.yaml](https://github.com/AnalogJ/scrutiny/blob/master/example.scrutiny.yaml#L59-L75)
there are multiple ways to configure notifications for Scrutiny.
Under the hood we use a library called [Shoutrrr](https://github.com/containrrr/shoutrrr) to send our notifications, and you should use their documentation if you run into
any issues: https://containrrr.dev/shoutrrr/services/overview/
Under the hood we use a library called [Shoutrrr](https://github.com/nicholas-fedor/shoutrrr) to send our notifications, and you should use their documentation if you run into
any issues: https://shoutrrr.nickfedor.com/services/overview/
# Script Notifications
+24 -39
View File
@@ -59,7 +59,7 @@ log:
# Notification "urls" look like the following. For more information about service specific configuration see
# Shoutrrr's documentation: https://containrrr.dev/shoutrrr/services/overview/
# Shoutrrr's documentation: https://shoutrrr.nickfedor.com/services/overview/
#
# note, usernames and passwords containing special characters will need to be urlencoded.
# if your username is: "myname@example.com" and your password is "124@34$1"
@@ -67,41 +67,26 @@ log:
#notify:
# urls:
# - "discord://token@webhookid"
# - "telegram://token@telegram?channels=channel-1[,channel-2,...]"
# - "pushover://shoutrrr:apiToken@userKey/?priority=1&devices=device1[,device2, ...]"
# - "slack://[botname@]token-a/token-b/token-c"
# - "smtp://username:password@host:port/?fromAddress=fromAddress&toAddresses=recipient1[,recipient2,...]"
# - "teams://token-a/token-b/token-c"
# - "gotify://gotify-host/token"
# - "pushbullet://api-token[/device/#channel/email]"
# - "ifttt://key/?events=event1[,event2,...]&value1=value1&value2=value2&value3=value3"
# - "mattermost://[username@]mattermost-host/token[/channel]"
# - "ntfy://username:password@host:port/topic"
# - "hangouts://chat.googleapis.com/v1/spaces/FOO/messages?key=bar&token=baz"
# - "zulip://bot-mail:bot-key@zulip-domain/?stream=name-or-id&topic=name"
# - "join://shoutrrr:api-key@join/?devices=device1[,device2, ...][&icon=icon][&title=title]"
# - "script:///file/path/on/disk"
# - "https://www.example.com/path"
########################################################################################################################
# FEATURES COMING SOON
#
# The following commented out sections are a preview of additional configuration options that will be available soon.
#
########################################################################################################################
#limits:
# ata:
# critical:
# error: 10
# standard:
# error: 20
# warn: 10
# scsi:
# critical: true
# standard: true
# nvme:
# critical: true
# standard: true
# - discord://token@id[?thread_id=threadid]
# - googlechat://chat.googleapis.com/v1/spaces/FOO/messages?key=bar&token=baz
# - hangouts://chat.googleapis.com/v1/spaces/FOO/messages?key=bar&token=baz
# - lark://host/token?secret=secret&title=title&link=url
# - matrix://username:password@host:port/[?rooms=!roomID1[,roomAlias2]]
# - mattermost://[username@]mattermost-host/token[/channel]
# - rocketchat://[username@]rocketchat-host/token[/channel|@recipient]
# - signal://[user[:password]@]host[:port]/source_phone/recipient1[,recipient2,...]
# - slack://[botname@]token-a/token-b/token-c
# - teams://group@tenant/altId/groupOwner?host=organization.webhook.office.com
# - telegram://token@telegram?chats=@channel-1[,chat-id-1,chat-id-2:message-thread-id,...]
# - wecom://key
# - zulip://bot-mail:bot-key@zulip-domain/?stream=name-or-id&topic=name
# - bark://devicekey@host
# - gotify://gotify-host/token
# - ifttt://key/?events=event1[,event2,...]&value1=value1&value2=value2&value3=value3
# - join://shoutrrr:api-key@join/?devices=device1[,device2, ...][&icon=icon][&title=title]
# - ntfy://username:password@ntfy.sh/topic
# - pushbullet://api-token[/device/#channel/email]
# - pushover://shoutrrr:apiToken@userKey/?devices=device1[,device2, ...]
# - opsgenie://host/token?responders=responder1[,responder2]
# - pagerduty://[host[:port]]/integration-key[?query-parameters]
# - smtp://username:password@host:port/?fromaddress=fromAddress&toaddresses=recipient1[,recipient2,...][&additional_params]
+73 -62
View File
@@ -1,81 +1,92 @@
module github.com/analogj/scrutiny
go 1.20
go 1.25
require (
github.com/analogj/go-util v0.0.0-20190301173314-5295e364eb14
github.com/containrrr/shoutrrr v0.8.0
github.com/fatih/color v1.15.0
github.com/gin-gonic/gin v1.6.3
github.com/glebarez/sqlite v1.4.5
github.com/go-gormigrate/gormigrate/v2 v2.0.0
github.com/golang/mock v1.6.0
github.com/influxdata/influxdb-client-go/v2 v2.9.0
github.com/jaypipes/ghw v0.6.1
github.com/mitchellh/mapstructure v1.5.0
github.com/samber/lo v1.25.0
github.com/sirupsen/logrus v1.6.0
github.com/spf13/viper v1.15.0
github.com/stretchr/testify v1.8.1
github.com/urfave/cli/v2 v2.2.0
golang.org/x/sync v0.1.0
gorm.io/gorm v1.23.5
github.com/analogj/go-util v0.0.0-20210417161720-39b497cca03b
github.com/fatih/color v1.18.0
github.com/gin-gonic/gin v1.11.0
github.com/glebarez/sqlite v1.11.0
github.com/go-gormigrate/gormigrate/v2 v2.1.5
github.com/go-viper/mapstructure/v2 v2.5.0
github.com/influxdata/influxdb-client-go/v2 v2.14.0
github.com/jaypipes/ghw v0.21.2
github.com/nicholas-fedor/shoutrrr v0.13.2
github.com/samber/lo v1.52.0
github.com/sirupsen/logrus v1.9.4
github.com/spf13/viper v1.21.0
github.com/stretchr/testify v1.11.1
github.com/urfave/cli/v2 v2.27.7
go.uber.org/mock v0.6.0
golang.org/x/sync v0.19.0
gorm.io/gorm v1.31.1
)
require (
github.com/StackExchange/wmi v0.0.0-20190523213315-cbe66965904d // indirect
github.com/cpuguy83/go-md2man/v2 v2.0.2 // indirect
github.com/apapsch/go-jsonmerge/v2 v2.0.0 // indirect
github.com/bytedance/gopkg v0.1.3 // indirect
github.com/bytedance/sonic v1.15.0 // indirect
github.com/bytedance/sonic/loader v0.5.0 // indirect
github.com/cloudwego/base64x v0.1.6 // indirect
github.com/cpuguy83/go-md2man/v2 v2.0.7 // indirect
github.com/davecgh/go-spew v1.1.1 // indirect
github.com/deepmap/oapi-codegen v1.8.2 // indirect
github.com/fsnotify/fsnotify v1.6.0 // indirect
github.com/ghodss/yaml v1.0.0 // indirect
github.com/gin-contrib/sse v0.1.0 // indirect
github.com/glebarez/go-sqlite v1.17.2 // indirect
github.com/go-ole/go-ole v1.2.4 // indirect
github.com/go-playground/locales v0.13.0 // indirect
github.com/go-playground/universal-translator v0.17.0 // indirect
github.com/go-playground/validator/v10 v10.2.0 // indirect
github.com/golang/protobuf v1.5.3 // indirect
github.com/google/uuid v1.3.0 // indirect
github.com/hashicorp/hcl v1.0.0 // indirect
github.com/influxdata/line-protocol v0.0.0-20200327222509-2487e7298839 // indirect
github.com/jaypipes/pcidb v0.5.0 // indirect
github.com/dustin/go-humanize v1.0.1 // indirect
github.com/fsnotify/fsnotify v1.9.0 // indirect
github.com/gabriel-vasile/mimetype v1.4.13 // indirect
github.com/gin-contrib/sse v1.1.0 // indirect
github.com/glebarez/go-sqlite v1.22.0 // indirect
github.com/go-ole/go-ole v1.3.0 // indirect
github.com/go-playground/locales v0.14.1 // indirect
github.com/go-playground/universal-translator v0.18.1 // indirect
github.com/go-playground/validator/v10 v10.30.1 // indirect
github.com/goccy/go-json v0.10.5 // indirect
github.com/goccy/go-yaml v1.19.2 // indirect
github.com/google/uuid v1.6.0 // indirect
github.com/influxdata/line-protocol v0.0.0-20210922203350-b1ad95c89adf // indirect
github.com/jaypipes/pcidb v1.1.1 // indirect
github.com/jinzhu/inflection v1.0.0 // indirect
github.com/jinzhu/now v1.1.4 // indirect
github.com/jinzhu/now v1.1.5 // indirect
github.com/json-iterator/go v1.1.12 // indirect
github.com/konsorten/go-windows-terminal-sequences v1.0.3 // indirect
github.com/kvz/logstreamer v0.0.0-20201023134116-02d20f4338f5 // indirect
github.com/leodido/go-urn v1.2.0 // indirect
github.com/magiconair/properties v1.8.7 // indirect
github.com/mattn/go-colorable v0.1.13 // indirect
github.com/mattn/go-isatty v0.0.18 // indirect
github.com/klauspost/cpuid/v2 v2.3.0 // indirect
github.com/kvz/logstreamer v0.0.0-20221024075423-bf5cfbd32e39 // indirect
github.com/leodido/go-urn v1.4.0 // indirect
github.com/mattn/go-colorable v0.1.14 // indirect
github.com/mattn/go-isatty v0.0.20 // indirect
github.com/mitchellh/go-homedir v1.1.0 // indirect
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
github.com/modern-go/reflect2 v1.0.2 // indirect
github.com/pelletier/go-toml/v2 v2.0.6 // indirect
github.com/ncruces/go-strftime v1.0.0 // indirect
github.com/oapi-codegen/runtime v1.1.2 // indirect
github.com/pelletier/go-toml/v2 v2.2.4 // indirect
github.com/pkg/errors v0.9.1 // indirect
github.com/pmezard/go-difflib v1.0.0 // indirect
github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0 // indirect
github.com/quic-go/qpack v0.6.0 // indirect
github.com/quic-go/quic-go v0.59.0 // indirect
github.com/remyoudompheng/bigfft v0.0.0-20230129092748-24d4a6f8daec // indirect
github.com/russross/blackfriday/v2 v2.1.0 // indirect
github.com/spf13/afero v1.9.3 // indirect
github.com/spf13/cast v1.5.0 // indirect
github.com/spf13/jwalterweatherman v1.1.0 // indirect
github.com/spf13/pflag v1.0.5 // indirect
github.com/subosito/gotenv v1.4.2 // indirect
github.com/ugorji/go/codec v1.1.7 // indirect
golang.org/x/crypto v0.1.0 // indirect
golang.org/x/exp v0.0.0-20220303212507-bbda1eaf7a17 // indirect
golang.org/x/net v0.8.0 // indirect
golang.org/x/sys v0.7.0 // indirect
golang.org/x/term v0.6.0 // indirect
golang.org/x/text v0.8.0 // indirect
google.golang.org/protobuf v1.28.1 // indirect
gopkg.in/ini.v1 v1.67.0 // indirect
github.com/sagikazarmark/locafero v0.12.0 // indirect
github.com/spf13/afero v1.15.0 // indirect
github.com/spf13/cast v1.10.0 // indirect
github.com/spf13/pflag v1.0.10 // indirect
github.com/subosito/gotenv v1.6.0 // indirect
github.com/twitchyliquid64/golang-asm v0.15.1 // indirect
github.com/ugorji/go/codec v1.3.1 // indirect
github.com/xrash/smetrics v0.0.0-20250705151800-55b8f293f342 // indirect
github.com/yusufpapurcu/wmi v1.2.4 // indirect
go.yaml.in/yaml/v3 v3.0.4 // indirect
golang.org/x/arch v0.23.0 // indirect
golang.org/x/crypto v0.47.0 // indirect
golang.org/x/exp v0.0.0-20260112195511-716be5621a96 // indirect
golang.org/x/net v0.49.0 // indirect
golang.org/x/sys v0.40.0 // indirect
golang.org/x/term v0.39.0 // indirect
golang.org/x/text v0.33.0 // indirect
google.golang.org/protobuf v1.36.11 // indirect
gopkg.in/yaml.v2 v2.4.0 // indirect
gopkg.in/yaml.v3 v3.0.1 // indirect
howett.net/plist v0.0.0-20181124034731-591f970eefbb // indirect
modernc.org/libc v1.16.8 // indirect
modernc.org/mathutil v1.4.1 // indirect
modernc.org/memory v1.1.1 // indirect
modernc.org/sqlite v1.17.2 // indirect
howett.net/plist v1.0.2-0.20250314012144-ee69052608d9 // indirect
modernc.org/libc v1.67.7 // indirect
modernc.org/mathutil v1.7.1 // indirect
modernc.org/memory v1.11.0 // indirect
modernc.org/sqlite v1.44.3 // indirect
)
+213 -779
View File
File diff suppressed because it is too large Load Diff
+9 -8
View File
@@ -3,15 +3,16 @@ package main
import (
"encoding/json"
"fmt"
"io"
"log"
"os"
"time"
"github.com/analogj/scrutiny/webapp/backend/pkg/config"
"github.com/analogj/scrutiny/webapp/backend/pkg/errors"
"github.com/analogj/scrutiny/webapp/backend/pkg/version"
"github.com/analogj/scrutiny/webapp/backend/pkg/web"
"github.com/sirupsen/logrus"
"io"
"log"
"os"
"time"
utils "github.com/analogj/go-util/utils"
"github.com/fatih/color"
@@ -36,8 +37,8 @@ func main() {
}
//we're going to load the config file manually, since we need to validate it.
err = config.ReadConfig(configFilePath) // Find and read the config file
if _, ok := err.(errors.ConfigFileMissingError); ok { // Handle errors reading the config file
err = config.ReadConfig(configFilePath) // Find and read the config file
if _, ok := err.(errors.ConfigFileMissingError); ok { // Handle errors reading the config file
//ignore "could not find config file"
} else if err != nil {
log.Print(color.HiRedString("CONFIG ERROR: %v", err))
@@ -81,7 +82,7 @@ OPTIONS:
subtitle := scrutiny + utils.LeftPad2Len(versionInfo, " ", 65-len(scrutiny))
color.New(color.FgGreen).Fprintf(c.App.Writer, fmt.Sprintf(utils.StripIndent(
color.New(color.FgGreen).Fprintf(c.App.Writer, utils.StripIndent(
`
___ ___ ____ __ __ ____ ____ _ _ _ _
/ __) / __)( _ \( )( )(_ _)(_ _)( \( )( \/ )
@@ -89,7 +90,7 @@ OPTIONS:
(___/ \___)(_)\_)(______) (__) (____)(_)\_) (__)
%s
`), subtitle))
`), subtitle)
return nil
},
@@ -8,8 +8,8 @@ import (
reflect "reflect"
config "github.com/analogj/scrutiny/webapp/backend/pkg/config"
gomock "github.com/golang/mock/gomock"
viper "github.com/spf13/viper"
gomock "go.uber.org/mock/gomock"
)
// MockInterface is a mock of Interface interface.
@@ -12,7 +12,7 @@ import (
models "github.com/analogj/scrutiny/webapp/backend/pkg/models"
collector "github.com/analogj/scrutiny/webapp/backend/pkg/models/collector"
measurements "github.com/analogj/scrutiny/webapp/backend/pkg/models/measurements"
gomock "github.com/golang/mock/gomock"
gomock "go.uber.org/mock/gomock"
)
// MockDeviceRepo is a mock of DeviceRepo interface.
@@ -5,6 +5,11 @@ import (
"crypto/tls"
"encoding/json"
"fmt"
"io"
"net/http"
"net/url"
"time"
"github.com/analogj/scrutiny/webapp/backend/pkg/config"
"github.com/analogj/scrutiny/webapp/backend/pkg/models"
"github.com/glebarez/sqlite"
@@ -13,10 +18,6 @@ import (
"github.com/influxdata/influxdb-client-go/v2/domain"
"github.com/sirupsen/logrus"
"gorm.io/gorm"
"io/ioutil"
"net/http"
"net/url"
"time"
)
const (
@@ -29,6 +30,7 @@ const (
// 60seconds * 60minutes * 24hours * 7 days * (52 + 52 + 4)weeks
RETENTION_PERIOD_25_MONTHS_IN_SECONDS = 65_318_400
DURATION_KEY_DAY = "day"
DURATION_KEY_WEEK = "week"
DURATION_KEY_MONTH = "month"
DURATION_KEY_YEAR = "year"
@@ -82,7 +84,7 @@ func NewScrutinyRepository(appConfig config.Interface, globalLogger logrus.Field
DisableForeignKeyConstraintWhenMigrating: true,
})
if err != nil {
return nil, fmt.Errorf("Failed to connect to database! - %v", err)
return nil, fmt.Errorf("failed to connect to database! - %v", err)
}
globalLogger.Infof("Successfully connected to scrutiny sqlite db: %s\n", appConfig.GetString("web.database.location"))
@@ -146,7 +148,7 @@ func NewScrutinyRepository(appConfig config.Interface, globalLogger logrus.Field
taskAPI := client.TasksAPI()
if writeAPI == nil || queryAPI == nil || taskAPI == nil {
return nil, fmt.Errorf("Failed to connect to influxdb!")
return nil, fmt.Errorf("failed to connect to influxdb")
}
deviceRepo := scrutinyRepository{
@@ -238,13 +240,13 @@ func InfluxSetupComplete(influxEndpoint string, tlsConfig *tls.Config) (bool, er
return false, err
}
client := &http.Client{Transport: &http.Transport{TLSClientConfig: tlsConfig}}
client := &http.Client{Transport: &http.Transport{TLSClientConfig: tlsConfig}}
res, err := client.Get(influxUri.String())
if err != nil {
return false, err
}
body, err := ioutil.ReadAll(res.Body)
body, err := io.ReadAll(res.Body)
if err != nil {
return false, err
}
@@ -445,6 +447,7 @@ func (sr *scrutinyRepository) GetSummary(ctx context.Context) (map[string]*model
func (sr *scrutinyRepository) lookupBucketName(durationKey string) string {
switch durationKey {
case DURATION_KEY_DAY:
case DURATION_KEY_WEEK:
//data stored in the last week
return sr.appConfig.GetString("web.influxdb.bucket")
@@ -462,8 +465,10 @@ func (sr *scrutinyRepository) lookupBucketName(durationKey string) string {
}
func (sr *scrutinyRepository) lookupDuration(durationKey string) []string {
switch durationKey {
case DURATION_KEY_DAY:
//data stored in the last day
return []string{"-1d", "now()"}
case DURATION_KEY_WEEK:
//data stored in the last week
return []string{"-1w", "now()"}
@@ -480,8 +485,22 @@ func (sr *scrutinyRepository) lookupDuration(durationKey string) []string {
return []string{"-1w", "now()"}
}
func (sr *scrutinyRepository) lookupResolution(durationKey string) string {
switch durationKey {
case DURATION_KEY_DAY:
// Return data with higher resolution for daily summaries
return "10m"
default:
// Return data with 1h resolution for other summaries
return "1h"
}
}
func (sr *scrutinyRepository) lookupNestedDurationKeys(durationKey string) []string {
switch durationKey {
case DURATION_KEY_DAY:
//all data is stored in a single bucket, but we want a finer resolution
return []string{DURATION_KEY_DAY}
case DURATION_KEY_WEEK:
//all data is stored in a single bucket
return []string{DURATION_KEY_WEEK}
@@ -3,11 +3,12 @@ package database
import (
"context"
"fmt"
"time"
"github.com/analogj/scrutiny/webapp/backend/pkg"
"github.com/analogj/scrutiny/webapp/backend/pkg/models"
"github.com/analogj/scrutiny/webapp/backend/pkg/models/collector"
"gorm.io/gorm/clause"
"time"
)
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
@@ -31,7 +32,7 @@ func (sr *scrutinyRepository) GetDevices(ctx context.Context) ([]models.Device,
//Get a list of all the active devices.
devices := []models.Device{}
if err := sr.gormClient.WithContext(ctx).Find(&devices).Error; err != nil {
return nil, fmt.Errorf("Could not get device summary from DB: %v", err)
return nil, fmt.Errorf("could not get device summary from DB: %v", err)
}
return devices, nil
}
@@ -40,7 +41,7 @@ func (sr *scrutinyRepository) GetDevices(ctx context.Context) ([]models.Device,
func (sr *scrutinyRepository) UpdateDevice(ctx context.Context, wwn string, collectorSmartData collector.SmartInfo) (models.Device, error) {
var device models.Device
if err := sr.gormClient.WithContext(ctx).Where("wwn = ?", wwn).First(&device).Error; err != nil {
return device, fmt.Errorf("Could not get device from DB: %v", err)
return device, fmt.Errorf("could not get device from DB: %v", err)
}
//TODO catch GormClient err
@@ -55,7 +56,7 @@ func (sr *scrutinyRepository) UpdateDevice(ctx context.Context, wwn string, coll
func (sr *scrutinyRepository) UpdateDeviceStatus(ctx context.Context, wwn string, status pkg.DeviceStatus) (models.Device, error) {
var device models.Device
if err := sr.gormClient.WithContext(ctx).Where("wwn = ?", wwn).First(&device).Error; err != nil {
return device, fmt.Errorf("Could not get device from DB: %v", err)
return device, fmt.Errorf("could not get device from DB: %v", err)
}
device.DeviceStatus = pkg.DeviceStatusSet(device.DeviceStatus, status)
@@ -78,7 +79,7 @@ func (sr *scrutinyRepository) GetDeviceDetails(ctx context.Context, wwn string)
func (sr *scrutinyRepository) UpdateDeviceArchived(ctx context.Context, wwn string, archived bool) error {
var device models.Device
if err := sr.gormClient.WithContext(ctx).Where("wwn = ?", wwn).First(&device).Error; err != nil {
return fmt.Errorf("Could not get device from DB: %v", err)
return fmt.Errorf("could not get device from DB: %v", err)
}
return sr.gormClient.Model(&device).Where("wwn = ?", wwn).Update("archived", archived).Error
@@ -177,7 +177,7 @@ func (sr *scrutinyRepository) aggregateSmartAttributesQuery(wwn string, duration
`|> sort(columns: ["_time"], desc: true)`,
}...)
if selectEntries > 0 {
partialQueryStr = append(partialQueryStr, fmt.Sprintf(`|> tail(n: %d, offset: %d)`, selectEntries, selectEntriesOffset))
partialQueryStr = append(partialQueryStr, fmt.Sprintf(`|> limit(n: %d, offset: %d)`, selectEntries, selectEntriesOffset))
}
partialQueryStr = append(partialQueryStr, `|> yield(name: "last")`)
@@ -196,9 +196,11 @@ func (sr *scrutinyRepository) generateSmartAttributesSubquery(wwn string, durati
}
partialQueryStr = append(partialQueryStr, `|> aggregateWindow(every: 1d, fn: last, createEmpty: false)`)
// ensure we are selecting the latest entries when paging
partialQueryStr = append(partialQueryStr, `|> sort(columns: ["_time"], desc: true)`)
if selectEntries > 0 {
partialQueryStr = append(partialQueryStr, fmt.Sprintf(`|> tail(n: %d, offset: %d)`, selectEntries, selectEntriesOffset))
partialQueryStr = append(partialQueryStr, fmt.Sprintf(`|> limit(n: %d, offset: %d)`, selectEntries, selectEntriesOffset))
}
partialQueryStr = append(partialQueryStr, "|> schema.fieldsAsCols()")
@@ -647,7 +647,7 @@ func m20201107210306_FromPreInfluxDBSmartResultsCreatePostInfluxDBSmartResults(d
}
postDeviceSmartData.ProcessScsiSmartInfo(postScsiGrownDefectList, postScsiErrorCounterLog)
} else {
return fmt.Errorf("Unknown device protocol: %s", preDevice.DeviceProtocol), postDeviceSmartData
return fmt.Errorf("unknown device protocol: %s", preDevice.DeviceProtocol), postDeviceSmartData
}
return nil, postDeviceSmartData
@@ -3,17 +3,18 @@ package database
import (
"context"
"fmt"
"strings"
"github.com/analogj/scrutiny/webapp/backend/pkg/config"
"github.com/analogj/scrutiny/webapp/backend/pkg/models"
"github.com/mitchellh/mapstructure"
"strings"
"github.com/go-viper/mapstructure/v2"
)
// LoadSettings will retrieve settings from the database, store them in the AppConfig object, and return a Settings struct
func (sr *scrutinyRepository) LoadSettings(ctx context.Context) (*models.Settings, error) {
settingsEntries := []models.SettingEntry{}
if err := sr.gormClient.WithContext(ctx).Find(&settingsEntries).Error; err != nil {
return nil, fmt.Errorf("Could not get settings from DB: %v", err)
return nil, fmt.Errorf("could not get settings from DB: %v", err)
}
// store retrieved settings in the AppConfig obj
@@ -58,7 +59,7 @@ func (sr *scrutinyRepository) SaveSettings(ctx context.Context, settings models.
//retrieve current settings from the database
settingsEntries := []models.SettingEntry{}
if err := sr.gormClient.WithContext(ctx).Find(&settingsEntries).Error; err != nil {
return fmt.Errorf("Could not get settings from DB: %v", err)
return fmt.Errorf("could not get settings from DB: %v", err)
}
//update settingsEntries
@@ -1,10 +1,11 @@
package database
import (
mock_config "github.com/analogj/scrutiny/webapp/backend/pkg/config/mock"
"github.com/golang/mock/gomock"
"github.com/stretchr/testify/require"
"testing"
mock_config "github.com/analogj/scrutiny/webapp/backend/pkg/config/mock"
"github.com/stretchr/testify/require"
"go.uber.org/mock/gomock"
)
func Test_DownsampleScript_Weekly(t *testing.T) {
@@ -12,7 +13,6 @@ func Test_DownsampleScript_Weekly(t *testing.T) {
//setup
mockCtrl := gomock.NewController(t)
defer mockCtrl.Finish()
fakeConfig := mock_config.NewMockInterface(mockCtrl)
fakeConfig.EXPECT().GetString("web.influxdb.bucket").Return("metrics").AnyTimes()
fakeConfig.EXPECT().GetString("web.influxdb.org").Return("scrutiny").AnyTimes()
@@ -64,7 +64,6 @@ func Test_DownsampleScript_Monthly(t *testing.T) {
//setup
mockCtrl := gomock.NewController(t)
defer mockCtrl.Finish()
fakeConfig := mock_config.NewMockInterface(mockCtrl)
fakeConfig.EXPECT().GetString("web.influxdb.bucket").Return("metrics").AnyTimes()
fakeConfig.EXPECT().GetString("web.influxdb.org").Return("scrutiny").AnyTimes()
@@ -116,7 +115,6 @@ func Test_DownsampleScript_Yearly(t *testing.T) {
//setup
mockCtrl := gomock.NewController(t)
defer mockCtrl.Finish()
fakeConfig := mock_config.NewMockInterface(mockCtrl)
fakeConfig.EXPECT().GetString("web.influxdb.bucket").Return("metrics").AnyTimes()
fakeConfig.EXPECT().GetString("web.influxdb.org").Return("scrutiny").AnyTimes()
@@ -140,13 +140,14 @@ func (sr *scrutinyRepository) aggregateTempQuery(durationKey string) string {
for _, nestedDurationKey := range nestedDurationKeys {
bucketName := sr.lookupBucketName(nestedDurationKey)
durationRange := sr.lookupDuration(nestedDurationKey)
durationResolution := sr.lookupResolution(nestedDurationKey)
subQueryNames = append(subQueryNames, fmt.Sprintf(`%sData`, nestedDurationKey))
partialQueryStr = append(partialQueryStr, []string{
fmt.Sprintf(`%sData = from(bucket: "%s")`, nestedDurationKey, bucketName),
fmt.Sprintf(`|> range(start: %s, stop: %s)`, durationRange[0], durationRange[1]),
`|> filter(fn: (r) => r["_measurement"] == "temp" )`,
`|> aggregateWindow(every: 1h, fn: mean, createEmpty: false)`,
fmt.Sprintf(`|> aggregateWindow(every: %s, fn: mean, createEmpty: false)`, durationResolution),
`|> group(columns: ["device_wwn"])`,
`|> toInt()`,
"",
@@ -1,10 +1,11 @@
package database
import (
mock_config "github.com/analogj/scrutiny/webapp/backend/pkg/config/mock"
"github.com/golang/mock/gomock"
"github.com/stretchr/testify/require"
"testing"
mock_config "github.com/analogj/scrutiny/webapp/backend/pkg/config/mock"
"github.com/stretchr/testify/require"
"go.uber.org/mock/gomock"
)
func Test_aggregateTempQuery_Week(t *testing.T) {
@@ -12,7 +13,6 @@ func Test_aggregateTempQuery_Week(t *testing.T) {
//setup
mockCtrl := gomock.NewController(t)
defer mockCtrl.Finish()
fakeConfig := mock_config.NewMockInterface(mockCtrl)
fakeConfig.EXPECT().GetString("web.influxdb.bucket").Return("metrics").AnyTimes()
fakeConfig.EXPECT().GetString("web.influxdb.org").Return("scrutiny").AnyTimes()
@@ -45,7 +45,6 @@ func Test_aggregateTempQuery_Month(t *testing.T) {
//setup
mockCtrl := gomock.NewController(t)
defer mockCtrl.Finish()
fakeConfig := mock_config.NewMockInterface(mockCtrl)
fakeConfig.EXPECT().GetString("web.influxdb.bucket").Return("metrics").AnyTimes()
fakeConfig.EXPECT().GetString("web.influxdb.org").Return("scrutiny").AnyTimes()
@@ -86,7 +85,6 @@ func Test_aggregateTempQuery_Year(t *testing.T) {
//setup
mockCtrl := gomock.NewController(t)
defer mockCtrl.Finish()
fakeConfig := mock_config.NewMockInterface(mockCtrl)
fakeConfig.EXPECT().GetString("web.influxdb.bucket").Return("metrics").AnyTimes()
fakeConfig.EXPECT().GetString("web.influxdb.org").Return("scrutiny").AnyTimes()
@@ -134,7 +132,6 @@ func Test_aggregateTempQuery_Forever(t *testing.T) {
//setup
mockCtrl := gomock.NewController(t)
defer mockCtrl.Finish()
fakeConfig := mock_config.NewMockInterface(mockCtrl)
fakeConfig.EXPECT().GetString("web.influxdb.bucket").Return("metrics").AnyTimes()
fakeConfig.EXPECT().GetString("web.influxdb.org").Return("scrutiny").AnyTimes()
+13 -13
View File
@@ -143,21 +143,21 @@ type SmartInfo struct {
ErrorNumber int `json:"error_number"`
LifetimeHours int `json:"lifetime_hours"`
CompletionRegisters struct {
Error int `json:"error"`
Status int `json:"status"`
Count int `json:"count"`
Lba int `json:"lba"`
Device int `json:"device"`
Error int `json:"error"`
Status int `json:"status"`
Count int `json:"count"`
Lba uint64 `json:"lba"`
Device int `json:"device"`
} `json:"completion_registers"`
ErrorDescription string `json:"error_description"`
PreviousCommands []struct {
Registers struct {
Command int `json:"command"`
Features int `json:"features"`
Count int `json:"count"`
Lba int `json:"lba"`
Device int `json:"device"`
DeviceControl int `json:"device_control"`
Command int `json:"command"`
Features int `json:"features"`
Count int `json:"count"`
Lba uint64 `json:"lba"`
Device int `json:"device"`
DeviceControl int `json:"device_control"`
} `json:"registers"`
PowerupMilliseconds int `json:"powerup_milliseconds"`
CommandName string `json:"command_name"`
@@ -188,8 +188,8 @@ type SmartInfo struct {
AtaSmartSelectiveSelfTestLog struct {
Revision int `json:"revision"`
Table []struct {
LbaMin int `json:"lba_min"`
LbaMax int `json:"lba_max"`
LbaMin uint64 `json:"lba_min"`
LbaMax uint64 `json:"lba_max"`
Status struct {
Value int `json:"value"`
String string `json:"string"`
@@ -2,13 +2,14 @@ package measurements
import (
"fmt"
"github.com/analogj/scrutiny/webapp/backend/pkg"
"github.com/analogj/scrutiny/webapp/backend/pkg/models/collector"
"github.com/analogj/scrutiny/webapp/backend/pkg/thresholds"
"log"
"strconv"
"strings"
"time"
"github.com/analogj/scrutiny/webapp/backend/pkg"
"github.com/analogj/scrutiny/webapp/backend/pkg/models/collector"
"github.com/analogj/scrutiny/webapp/backend/pkg/thresholds"
)
type Smart struct {
@@ -102,7 +103,7 @@ func NewSmartFromInfluxDB(attrs map[string]interface{}) (*Smart, error) {
} else if sm.DeviceProtocol == pkg.DeviceProtocolScsi {
sm.Attributes[attributeId] = &SmartScsiAttribute{}
} else {
return nil, fmt.Errorf("Unknown Device Protocol: %s", sm.DeviceProtocol)
return nil, fmt.Errorf("unknown Device Protocol: %s", sm.DeviceProtocol)
}
}
@@ -116,7 +117,7 @@ func NewSmartFromInfluxDB(attrs map[string]interface{}) (*Smart, error) {
return &sm, nil
}
//Parse Collector SMART data results and create Smart object (and associated SmartAtaAttribute entries)
// Parse Collector SMART data results and create Smart object (and associated SmartAtaAttribute entries)
func (sm *Smart) FromCollectorSmartInfo(wwn string, info collector.SmartInfo) error {
sm.DeviceWWN = wwn
sm.Date = time.Unix(info.LocalTime.TimeT, 0)
@@ -143,7 +144,7 @@ func (sm *Smart) FromCollectorSmartInfo(wwn string, info collector.SmartInfo) er
return nil
}
//generate SmartAtaAttribute entries from Scrutiny Collector Smart data.
// generate SmartAtaAttribute entries from Scrutiny Collector Smart data.
func (sm *Smart) ProcessAtaSmartInfo(tableItems []collector.AtaSmartAttributesTableItem) {
for _, collectorAttr := range tableItems {
attrModel := SmartAtaAttribute{
@@ -171,7 +172,7 @@ func (sm *Smart) ProcessAtaSmartInfo(tableItems []collector.AtaSmartAttributesTa
}
}
//generate SmartNvmeAttribute entries from Scrutiny Collector Smart data.
// generate SmartNvmeAttribute entries from Scrutiny Collector Smart data.
func (sm *Smart) ProcessNvmeSmartInfo(nvmeSmartHealthInformationLog collector.NvmeSmartHealthInformationLog) {
sm.Attributes = map[string]SmartAttribute{
@@ -201,7 +202,7 @@ func (sm *Smart) ProcessNvmeSmartInfo(nvmeSmartHealthInformationLog collector.Nv
}
}
//generate SmartScsiAttribute entries from Scrutiny Collector Smart data.
// generate SmartScsiAttribute entries from Scrutiny Collector Smart data.
func (sm *Smart) ProcessScsiSmartInfo(defectGrownList int64, scsiErrorCounterLog collector.ScsiErrorCounterLog) {
sm.Attributes = map[string]SmartAttribute{
"scsi_grown_defect_list": (&SmartScsiAttribute{AttributeId: "scsi_grown_defect_list", Value: defectGrownList, Threshold: 0}).PopulateAttributeStatus(),
@@ -91,7 +91,7 @@ func (sa *SmartAtaAttribute) Inflate(key string, val interface{}) {
}
}
//populate attribute status, using SMART Thresholds & Observed Metadata
// populate attribute status, using SMART Thresholds & Observed Metadata
// Chainable
func (sa *SmartAtaAttribute) PopulateAttributeStatus() *SmartAtaAttribute {
if strings.ToUpper(sa.WhenFailed) == pkg.AttributeWhenFailedFailingNow {
@@ -165,6 +165,4 @@ func (sa *SmartAtaAttribute) ValidateThreshold(smartMetadata thresholds.AtaAttri
sa.Status = pkg.AttributeStatusSet(sa.Status, pkg.AttributeStatusWarningScrutiny)
sa.StatusReason = "Could not determine Observed Failure Rate for Critical Attribute"
}
return
}
@@ -2,14 +2,15 @@ package measurements_test
import (
"encoding/json"
"io"
"os"
"testing"
"time"
"github.com/analogj/scrutiny/webapp/backend/pkg"
"github.com/analogj/scrutiny/webapp/backend/pkg/models/collector"
"github.com/analogj/scrutiny/webapp/backend/pkg/models/measurements"
"github.com/stretchr/testify/require"
"io/ioutil"
"os"
"testing"
"time"
)
func TestSmart_Flatten(t *testing.T) {
@@ -312,7 +313,7 @@ func TestFromCollectorSmartInfo(t *testing.T) {
var smartJson collector.SmartInfo
smartDataBytes, err := ioutil.ReadAll(smartDataFile)
smartDataBytes, err := io.ReadAll(smartDataFile)
require.NoError(t, err)
err = json.Unmarshal(smartDataBytes, &smartJson)
require.NoError(t, err)
@@ -344,7 +345,7 @@ func TestFromCollectorSmartInfo_Fail_Smart(t *testing.T) {
var smartJson collector.SmartInfo
smartDataBytes, err := ioutil.ReadAll(smartDataFile)
smartDataBytes, err := io.ReadAll(smartDataFile)
require.NoError(t, err)
err = json.Unmarshal(smartDataBytes, &smartJson)
require.NoError(t, err)
@@ -368,7 +369,7 @@ func TestFromCollectorSmartInfo_Fail_ScrutinySmart(t *testing.T) {
var smartJson collector.SmartInfo
smartDataBytes, err := ioutil.ReadAll(smartDataFile)
smartDataBytes, err := io.ReadAll(smartDataFile)
require.NoError(t, err)
err = json.Unmarshal(smartDataBytes, &smartJson)
require.NoError(t, err)
@@ -392,7 +393,7 @@ func TestFromCollectorSmartInfo_Fail_ScrutinyNonCriticalFailed(t *testing.T) {
var smartJson collector.SmartInfo
smartDataBytes, err := ioutil.ReadAll(smartDataFile)
smartDataBytes, err := io.ReadAll(smartDataFile)
require.NoError(t, err)
err = json.Unmarshal(smartDataBytes, &smartJson)
require.NoError(t, err)
@@ -425,7 +426,7 @@ func TestFromCollectorSmartInfo_NVMe_Fail_Scrutiny(t *testing.T) {
var smartJson collector.SmartInfo
smartDataBytes, err := ioutil.ReadAll(smartDataFile)
smartDataBytes, err := io.ReadAll(smartDataFile)
require.NoError(t, err)
err = json.Unmarshal(smartDataBytes, &smartJson)
require.NoError(t, err)
@@ -456,7 +457,7 @@ func TestFromCollectorSmartInfo_Nvme(t *testing.T) {
var smartJson collector.SmartInfo
smartDataBytes, err := ioutil.ReadAll(smartDataFile)
smartDataBytes, err := io.ReadAll(smartDataFile)
require.NoError(t, err)
err = json.Unmarshal(smartDataBytes, &smartJson)
require.NoError(t, err)
@@ -483,7 +484,7 @@ func TestFromCollectorSmartInfo_Scsi(t *testing.T) {
var smartJson collector.SmartInfo
smartDataBytes, err := ioutil.ReadAll(smartDataFile)
smartDataBytes, err := io.ReadAll(smartDataFile)
require.NoError(t, err)
err = json.Unmarshal(smartDataBytes, &smartJson)
require.NoError(t, err)
+2 -3
View File
@@ -5,7 +5,6 @@ import (
"encoding/json"
"fmt"
"io"
"io/ioutil"
"log"
"net/http"
"os"
@@ -68,7 +67,7 @@ func SendPostRequest(url string, file io.Reader) ([]byte, error) {
log.Printf("%v\n", response.Status)
return ioutil.ReadAll(response.Body)
return io.ReadAll(response.Body)
}
// InfluxDB will throw an error/ignore any submitted data with a timestamp older than the
@@ -79,7 +78,7 @@ func readSmartDataFileFixTimestamp(daysToSubtract int, smartDataFilepath string)
return nil, err
}
metricsFileData, err := ioutil.ReadAll(metricsfile)
metricsFileData, err := io.ReadAll(metricsfile)
if err != nil {
return nil, err
}
+16 -5
View File
@@ -19,9 +19,9 @@ import (
"github.com/analogj/scrutiny/webapp/backend/pkg/models"
"github.com/analogj/scrutiny/webapp/backend/pkg/models/measurements"
"github.com/analogj/scrutiny/webapp/backend/pkg/thresholds"
"github.com/containrrr/shoutrrr"
shoutrrrTypes "github.com/containrrr/shoutrrr/pkg/types"
"github.com/gin-gonic/gin"
"github.com/nicholas-fedor/shoutrrr"
shoutrrrTypes "github.com/nicholas-fedor/shoutrrr/pkg/types"
"github.com/sirupsen/logrus"
"golang.org/x/sync/errgroup"
)
@@ -64,7 +64,7 @@ func ShouldNotify(logger logrus.FieldLogger, device models.Device, smartAttrs me
var failingAttributes []string
// Loop through the attributes to find the failing ones
for attrId, attrData := range smartAttrs.Attributes {
var status pkg.AttributeStatus = attrData.GetStatus()
var status = attrData.GetStatus()
// Skip over passing attributes
if status == pkg.AttributeStatusPassed {
continue
@@ -147,7 +147,7 @@ func NewPayload(device models.Device, test bool, currentTime ...time.Time) Paylo
//validate that the Payload is populated
var sendDate time.Time
if currentTime != nil && len(currentTime) > 0 {
if len(currentTime) > 0 {
sendDate = currentTime[0]
} else {
sendDate = time.Now()
@@ -318,7 +318,7 @@ func (n *Notify) SendScriptNotification(scriptUrl string) error {
if !utils.FileExists(scriptPath) {
n.Logger.Errorf("Script does not exist: %s", scriptPath)
return errors.New(fmt.Sprintf("custom script path does not exist: %s", scriptPath))
return fmt.Errorf("custom script path does not exist: %s", scriptPath)
}
copyEnv := os.Environ()
@@ -424,6 +424,17 @@ func (n *Notify) GenShoutrrrNotificationParams(shoutrrrUrl string) (string, *sho
case "telegram":
(*params)["title"] = subject
case "zulip":
query := serviceURL.Query()
urlTopic := query["topic"]
delete(query, "topic")
if len(urlTopic) > 0 && urlTopic[len(urlTopic)-1] != "" {
subject = urlTopic[len(urlTopic)-1]
}
subjectRunes := []rune(subject)
if len(subjectRunes) > 60 {
n.Logger.Warningf("Zulip notification subject too long (%d characters), truncating to 60 characters", len(subjectRunes))
subject = string(subjectRunes[:60])
}
(*params)["topic"] = subject
}
+1 -13
View File
@@ -12,9 +12,9 @@ import (
"github.com/analogj/scrutiny/webapp/backend/pkg/models"
"github.com/analogj/scrutiny/webapp/backend/pkg/models/measurements"
"github.com/gin-gonic/gin"
"github.com/golang/mock/gomock"
"github.com/sirupsen/logrus"
"github.com/stretchr/testify/require"
"go.uber.org/mock/gomock"
)
func TestShouldNotify_MustSkipPassingDevices(t *testing.T) {
@@ -28,7 +28,6 @@ func TestShouldNotify_MustSkipPassingDevices(t *testing.T) {
notifyFilterAttributes := pkg.MetricsStatusFilterAttributesAll
mockCtrl := gomock.NewController(t)
defer mockCtrl.Finish()
fakeDatabase := mock_database.NewMockDeviceRepo(mockCtrl)
//assert
require.False(t, ShouldNotify(logrus.StandardLogger(), device, smartAttrs, statusThreshold, notifyFilterAttributes, true, &gin.Context{}, fakeDatabase))
@@ -44,7 +43,6 @@ func TestShouldNotify_MetricsStatusThresholdBoth_FailingSmartDevice(t *testing.T
statusThreshold := pkg.MetricsStatusThresholdBoth
notifyFilterAttributes := pkg.MetricsStatusFilterAttributesAll
mockCtrl := gomock.NewController(t)
defer mockCtrl.Finish()
fakeDatabase := mock_database.NewMockDeviceRepo(mockCtrl)
//assert
require.True(t, ShouldNotify(logrus.StandardLogger(), device, smartAttrs, statusThreshold, notifyFilterAttributes, true, &gin.Context{}, fakeDatabase))
@@ -60,7 +58,6 @@ func TestShouldNotify_MetricsStatusThresholdSmart_FailingSmartDevice(t *testing.
statusThreshold := pkg.MetricsStatusThresholdSmart
notifyFilterAttributes := pkg.MetricsStatusFilterAttributesAll
mockCtrl := gomock.NewController(t)
defer mockCtrl.Finish()
fakeDatabase := mock_database.NewMockDeviceRepo(mockCtrl)
//assert
require.True(t, ShouldNotify(logrus.StandardLogger(), device, smartAttrs, statusThreshold, notifyFilterAttributes, true, &gin.Context{}, fakeDatabase))
@@ -76,7 +73,6 @@ func TestShouldNotify_MetricsStatusThresholdScrutiny_FailingSmartDevice(t *testi
statusThreshold := pkg.MetricsStatusThresholdScrutiny
notifyFilterAttributes := pkg.MetricsStatusFilterAttributesAll
mockCtrl := gomock.NewController(t)
defer mockCtrl.Finish()
fakeDatabase := mock_database.NewMockDeviceRepo(mockCtrl)
//assert
require.False(t, ShouldNotify(logrus.StandardLogger(), device, smartAttrs, statusThreshold, notifyFilterAttributes, true, &gin.Context{}, fakeDatabase))
@@ -96,7 +92,6 @@ func TestShouldNotify_MetricsStatusFilterAttributesCritical_WithCriticalAttrs(t
statusThreshold := pkg.MetricsStatusThresholdBoth
notifyFilterAttributes := pkg.MetricsStatusFilterAttributesCritical
mockCtrl := gomock.NewController(t)
defer mockCtrl.Finish()
fakeDatabase := mock_database.NewMockDeviceRepo(mockCtrl)
//assert
@@ -120,7 +115,6 @@ func TestShouldNotify_MetricsStatusFilterAttributesCritical_WithMultipleCritical
statusThreshold := pkg.MetricsStatusThresholdBoth
notifyFilterAttributes := pkg.MetricsStatusFilterAttributesCritical
mockCtrl := gomock.NewController(t)
defer mockCtrl.Finish()
fakeDatabase := mock_database.NewMockDeviceRepo(mockCtrl)
//assert
@@ -141,7 +135,6 @@ func TestShouldNotify_MetricsStatusFilterAttributesCritical_WithNoCriticalAttrs(
statusThreshold := pkg.MetricsStatusThresholdBoth
notifyFilterAttributes := pkg.MetricsStatusFilterAttributesCritical
mockCtrl := gomock.NewController(t)
defer mockCtrl.Finish()
fakeDatabase := mock_database.NewMockDeviceRepo(mockCtrl)
//assert
@@ -162,7 +155,6 @@ func TestShouldNotify_MetricsStatusFilterAttributesCritical_WithNoFailingCritica
statusThreshold := pkg.MetricsStatusThresholdBoth
notifyFilterAttributes := pkg.MetricsStatusFilterAttributesCritical
mockCtrl := gomock.NewController(t)
defer mockCtrl.Finish()
fakeDatabase := mock_database.NewMockDeviceRepo(mockCtrl)
//assert
@@ -186,7 +178,6 @@ func TestShouldNotify_MetricsStatusFilterAttributesCritical_MetricsStatusThresho
statusThreshold := pkg.MetricsStatusThresholdSmart
notifyFilterAttributes := pkg.MetricsStatusFilterAttributesCritical
mockCtrl := gomock.NewController(t)
defer mockCtrl.Finish()
fakeDatabase := mock_database.NewMockDeviceRepo(mockCtrl)
//assert
@@ -206,7 +197,6 @@ func TestShouldNotify_NoRepeat_DatabaseFailure(t *testing.T) {
statusThreshold := pkg.MetricsStatusThresholdBoth
notifyFilterAttributes := pkg.MetricsStatusFilterAttributesAll
mockCtrl := gomock.NewController(t)
defer mockCtrl.Finish()
fakeDatabase := mock_database.NewMockDeviceRepo(mockCtrl)
fakeDatabase.EXPECT().GetSmartAttributeHistory(&gin.Context{}, "", database.DURATION_KEY_FOREVER, 1, 1, []string{"5"}).Return([]measurements.Smart{}, errors.New("")).Times(1)
@@ -228,7 +218,6 @@ func TestShouldNotify_NoRepeat_NoDatabaseData(t *testing.T) {
statusThreshold := pkg.MetricsStatusThresholdBoth
notifyFilterAttributes := pkg.MetricsStatusFilterAttributesAll
mockCtrl := gomock.NewController(t)
defer mockCtrl.Finish()
fakeDatabase := mock_database.NewMockDeviceRepo(mockCtrl)
fakeDatabase.EXPECT().GetSmartAttributeHistory(&gin.Context{}, "", database.DURATION_KEY_FOREVER, 1, 1, []string{"5"}).Return([]measurements.Smart{}, nil).Times(1)
@@ -250,7 +239,6 @@ func TestShouldNotify_NoRepeat(t *testing.T) {
statusThreshold := pkg.MetricsStatusThresholdBoth
notifyFilterAttributes := pkg.MetricsStatusFilterAttributesAll
mockCtrl := gomock.NewController(t)
defer mockCtrl.Finish()
fakeDatabase := mock_database.NewMockDeviceRepo(mockCtrl)
fakeDatabase.EXPECT().GetSmartAttributeHistory(&gin.Context{}, "", database.DURATION_KEY_FOREVER, 1, 1, []string{"5"}).Return([]measurements.Smart{smartAttrs}, nil).Times(1)
+1 -1
View File
@@ -2,4 +2,4 @@ package version
// VERSION is the app-global version string, which will be replaced with a
// new value during packaging
const VERSION = "0.8.3"
const VERSION = "0.8.6"
+6 -6
View File
@@ -3,15 +3,15 @@ package middleware
import (
"bytes"
"fmt"
"github.com/gin-gonic/gin"
"github.com/sirupsen/logrus"
"io"
"io/ioutil"
"math"
"net/http"
"os"
"strings"
"time"
"github.com/gin-gonic/gin"
"github.com/sirupsen/logrus"
)
// Middleware based on https://github.com/toorop/gin-logrus/blob/master/logger.go
@@ -40,9 +40,9 @@ func LoggerMiddleware(logger *logrus.Entry) gin.HandlerFunc {
//clone the request body reader.
var reqBody string
if c.Request.Body != nil {
buf, _ := ioutil.ReadAll(c.Request.Body)
reqBodyReader1 := ioutil.NopCloser(bytes.NewBuffer(buf))
reqBodyReader2 := ioutil.NopCloser(bytes.NewBuffer(buf)) //We have to create a new Buffer, because reqBodyReader1 will be read.
buf, _ := io.ReadAll(c.Request.Body)
reqBodyReader1 := io.NopCloser(bytes.NewBuffer(buf))
reqBodyReader2 := io.NopCloser(bytes.NewBuffer(buf)) //We have to create a new Buffer, because reqBodyReader1 will be read.
c.Request.Body = reqBodyReader2
reqBody = readBody(reqBodyReader1)
}
+12 -23
View File
@@ -5,7 +5,6 @@ import (
"encoding/json"
"fmt"
"io"
"io/ioutil"
"net/http"
"net/http/httptest"
"os"
@@ -20,10 +19,10 @@ import (
"github.com/analogj/scrutiny/webapp/backend/pkg/models"
"github.com/analogj/scrutiny/webapp/backend/pkg/models/collector"
"github.com/analogj/scrutiny/webapp/backend/pkg/web"
"github.com/golang/mock/gomock"
"github.com/sirupsen/logrus"
"github.com/stretchr/testify/require"
"github.com/stretchr/testify/suite"
"go.uber.org/mock/gomock"
)
/*
@@ -52,7 +51,7 @@ func helperReadSmartDataFileFixTimestamp(t *testing.T, smartDataFilepath string)
metricsfile, err := os.Open(smartDataFilepath)
require.NoError(t, err)
metricsFileData, err := ioutil.ReadAll(metricsfile)
metricsFileData, err := io.ReadAll(metricsfile)
require.NoError(t, err)
//unmarshal because we need to change the timestamp
@@ -87,10 +86,9 @@ func TestServerTestSuite_WithCustomBasePath(t *testing.T) {
func (suite *ServerTestSuite) TestHealthRoute() {
//setup
parentPath, _ := ioutil.TempDir("", "")
parentPath, _ := os.MkdirTemp("", "")
defer os.RemoveAll(parentPath)
mockCtrl := gomock.NewController(suite.T())
defer mockCtrl.Finish()
fakeConfig := mock_config.NewMockInterface(mockCtrl)
fakeConfig.EXPECT().SetDefault(gomock.Any(), gomock.Any()).AnyTimes()
fakeConfig.EXPECT().UnmarshalKey(gomock.Any(), gomock.Any()).AnyTimes().Return(nil)
@@ -131,10 +129,9 @@ func (suite *ServerTestSuite) TestHealthRoute() {
func (suite *ServerTestSuite) TestRegisterDevicesRoute() {
//setup
parentPath, _ := ioutil.TempDir("", "")
parentPath, _ := os.MkdirTemp("", "")
defer os.RemoveAll(parentPath)
mockCtrl := gomock.NewController(suite.T())
defer mockCtrl.Finish()
fakeConfig := mock_config.NewMockInterface(mockCtrl)
fakeConfig.EXPECT().SetDefault(gomock.Any(), gomock.Any()).AnyTimes()
fakeConfig.EXPECT().UnmarshalKey(gomock.Any(), gomock.Any()).AnyTimes().Return(nil)
@@ -174,10 +171,9 @@ func (suite *ServerTestSuite) TestRegisterDevicesRoute() {
func (suite *ServerTestSuite) TestUploadDeviceMetricsRoute() {
//setup
parentPath, _ := ioutil.TempDir("", "")
parentPath, _ := os.MkdirTemp("", "")
defer os.RemoveAll(parentPath)
mockCtrl := gomock.NewController(suite.T())
defer mockCtrl.Finish()
fakeConfig := mock_config.NewMockInterface(mockCtrl)
fakeConfig.EXPECT().SetDefault(gomock.Any(), gomock.Any()).AnyTimes()
fakeConfig.EXPECT().UnmarshalKey(gomock.Any(), gomock.Any()).AnyTimes().Return(nil)
@@ -229,10 +225,9 @@ func (suite *ServerTestSuite) TestUploadDeviceMetricsRoute() {
func (suite *ServerTestSuite) TestPopulateMultiple() {
//setup
parentPath, _ := ioutil.TempDir("", "")
parentPath, _ := os.MkdirTemp("", "")
defer os.RemoveAll(parentPath)
mockCtrl := gomock.NewController(suite.T())
defer mockCtrl.Finish()
fakeConfig := mock_config.NewMockInterface(mockCtrl)
fakeConfig.EXPECT().SetDefault(gomock.Any(), gomock.Any()).AnyTimes()
fakeConfig.EXPECT().UnmarshalKey(gomock.Any(), gomock.Any()).AnyTimes().Return(nil)
@@ -311,10 +306,9 @@ func (suite *ServerTestSuite) TestPopulateMultiple() {
//TODO: this test should use a recorded request/response playback.
//func TestSendTestNotificationRoute(t *testing.T) {
// //setup
// parentPath, _ := ioutil.TempDir("", "")
// parentPath, _ := os.MkdirTemp("", "")
// defer os.RemoveAll(parentPath)
// mockCtrl := gomock.NewController(t)
// defer mockCtrl.Finish()
// fakeConfig := mock_config.NewMockInterface(mockCtrl)
// fakeConfig.EXPECT().GetString("web.database.location").AnyTimes().Return(path.Join(parentPath, "scrutiny_test.db"))
// fakeConfig.EXPECT().GetString("web.src.frontend.path").AnyTimes().Return(parentPath)
@@ -335,10 +329,9 @@ func (suite *ServerTestSuite) TestPopulateMultiple() {
func (suite *ServerTestSuite) TestSendTestNotificationRoute_WebhookFailure() {
//setup
parentPath, _ := ioutil.TempDir("", "")
parentPath, _ := os.MkdirTemp("", "")
defer os.RemoveAll(parentPath)
mockCtrl := gomock.NewController(suite.T())
defer mockCtrl.Finish()
fakeConfig := mock_config.NewMockInterface(mockCtrl)
fakeConfig.EXPECT().SetDefault(gomock.Any(), gomock.Any()).AnyTimes()
fakeConfig.EXPECT().UnmarshalKey(gomock.Any(), gomock.Any()).AnyTimes().Return(nil)
@@ -381,10 +374,9 @@ func (suite *ServerTestSuite) TestSendTestNotificationRoute_WebhookFailure() {
func (suite *ServerTestSuite) TestSendTestNotificationRoute_ScriptFailure() {
//setup
parentPath, _ := ioutil.TempDir("", "")
parentPath, _ := os.MkdirTemp("", "")
defer os.RemoveAll(parentPath)
mockCtrl := gomock.NewController(suite.T())
defer mockCtrl.Finish()
fakeConfig := mock_config.NewMockInterface(mockCtrl)
fakeConfig.EXPECT().SetDefault(gomock.Any(), gomock.Any()).AnyTimes()
fakeConfig.EXPECT().UnmarshalKey(gomock.Any(), gomock.Any()).AnyTimes().Return(nil)
@@ -427,10 +419,9 @@ func (suite *ServerTestSuite) TestSendTestNotificationRoute_ScriptFailure() {
func (suite *ServerTestSuite) TestSendTestNotificationRoute_ScriptSuccess() {
//setup
parentPath, _ := ioutil.TempDir("", "")
parentPath, _ := os.MkdirTemp("", "")
defer os.RemoveAll(parentPath)
mockCtrl := gomock.NewController(suite.T())
defer mockCtrl.Finish()
fakeConfig := mock_config.NewMockInterface(mockCtrl)
fakeConfig.EXPECT().SetDefault(gomock.Any(), gomock.Any()).AnyTimes()
fakeConfig.EXPECT().UnmarshalKey(gomock.Any(), gomock.Any()).AnyTimes().Return(nil)
@@ -473,10 +464,9 @@ func (suite *ServerTestSuite) TestSendTestNotificationRoute_ScriptSuccess() {
func (suite *ServerTestSuite) TestSendTestNotificationRoute_ShoutrrrFailure() {
//setup
parentPath, _ := ioutil.TempDir("", "")
parentPath, _ := os.MkdirTemp("", "")
defer os.RemoveAll(parentPath)
mockCtrl := gomock.NewController(suite.T())
defer mockCtrl.Finish()
fakeConfig := mock_config.NewMockInterface(mockCtrl)
fakeConfig.EXPECT().SetDefault(gomock.Any(), gomock.Any()).AnyTimes()
fakeConfig.EXPECT().UnmarshalKey(gomock.Any(), gomock.Any()).AnyTimes().Return(nil)
@@ -518,10 +508,9 @@ func (suite *ServerTestSuite) TestSendTestNotificationRoute_ShoutrrrFailure() {
func (suite *ServerTestSuite) TestGetDevicesSummaryRoute_Nvme() {
//setup
parentPath, _ := ioutil.TempDir("", "")
parentPath, _ := os.MkdirTemp("", "")
defer os.RemoveAll(parentPath)
mockCtrl := gomock.NewController(suite.T())
defer mockCtrl.Finish()
fakeConfig := mock_config.NewMockInterface(mockCtrl)
fakeConfig.EXPECT().SetDefault(gomock.Any(), gomock.Any()).AnyTimes()
fakeConfig.EXPECT().UnmarshalKey(gomock.Any(), gomock.Any()).AnyTimes().Return(nil)
+400 -167
View File
@@ -24,7 +24,7 @@
"crypto-js": "^4.1.1",
"highlight.js": "^11.6.0",
"humanize-duration": "^3.27.3",
"lodash": "4.17.21",
"lodash": "4.17.23",
"moment": "^2.29.4",
"ng-apexcharts": "^1.7.4",
"ngx-markdown": "^13.1.0",
@@ -3993,23 +3993,24 @@
}
},
"node_modules/body-parser": {
"version": "1.20.2",
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.2.tgz",
"integrity": "sha512-ml9pReCu3M61kGlqoTm2umSXTlRTuGTx0bfYj+uIUKKYycG5NtSbeetV3faSU6R7ajOPw0g/J1PvK4qNy7s5bA==",
"version": "1.20.4",
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.4.tgz",
"integrity": "sha512-ZTgYYLMOXY9qKU/57FAo8F+HA2dGX7bqGc71txDRC1rS4frdFI5R7NhluHxH6M0YItAP0sHB4uqAOcYKxO6uGA==",
"dev": true,
"license": "MIT",
"dependencies": {
"bytes": "3.1.2",
"bytes": "~3.1.2",
"content-type": "~1.0.5",
"debug": "2.6.9",
"depd": "2.0.0",
"destroy": "1.2.0",
"http-errors": "2.0.0",
"iconv-lite": "0.4.24",
"on-finished": "2.4.1",
"qs": "6.11.0",
"raw-body": "2.5.2",
"destroy": "~1.2.0",
"http-errors": "~2.0.1",
"iconv-lite": "~0.4.24",
"on-finished": "~2.4.1",
"qs": "~6.14.0",
"raw-body": "~2.5.3",
"type-is": "~1.6.18",
"unpipe": "1.0.0"
"unpipe": "~1.0.0"
},
"engines": {
"node": ">= 0.8",
@@ -4200,6 +4201,7 @@
"resolved": "https://registry.npmjs.org/bytes/-/bytes-3.1.2.tgz",
"integrity": "sha512-/Nf7TyzTx6S3yRJObOAV7956r8cr2+Oj8AC5dt8wSP3BQAoeX58NoHyCU8P8zGkNXStjTSi6fzO6F0pBdcYbEg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.8"
}
@@ -4263,6 +4265,36 @@
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/call-bind-apply-helpers": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/call-bind-apply-helpers/-/call-bind-apply-helpers-1.0.2.tgz",
"integrity": "sha512-Sp1ablJ0ivDkSzjcaJdxEunN5/XvksFJ2sMBFfq6x0ryhQV/2b/KwFe21cMpmHtPOSij8K99/wSfoEuTObmuMQ==",
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0",
"function-bind": "^1.1.2"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/call-bound": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/call-bound/-/call-bound-1.0.4.tgz",
"integrity": "sha512-+ys997U96po4Kx/ABpBCqhA9EuxJaQWDQg7295H4hBphv3IZg0boBKuwYpt4YXp6MZ5AmZQnU/tyMTlRpaSejg==",
"dev": true,
"license": "MIT",
"dependencies": {
"call-bind-apply-helpers": "^1.0.2",
"get-intrinsic": "^1.3.0"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/callsites": {
"version": "3.1.0",
"resolved": "https://registry.npmjs.org/callsites/-/callsites-3.1.0.tgz",
@@ -5572,6 +5604,20 @@
"url": "https://github.com/fb55/domutils?sponsor=1"
}
},
"node_modules/dunder-proto": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/dunder-proto/-/dunder-proto-1.0.1.tgz",
"integrity": "sha512-KIN/nDJBQRcXw0MLVhZE9iQHmG68qAVIBg9CqmUYjmQIhgij9U5MFvrqkUL5FbtyyzZuOeOt0zdeRe4UY7ct+A==",
"license": "MIT",
"dependencies": {
"call-bind-apply-helpers": "^1.0.1",
"es-errors": "^1.3.0",
"gopd": "^1.2.0"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/ecc-jsbn": {
"version": "0.1.2",
"resolved": "https://registry.npmjs.org/ecc-jsbn/-/ecc-jsbn-0.1.2.tgz",
@@ -5741,12 +5787,42 @@
"is-arrayish": "^0.2.1"
}
},
"node_modules/es-define-property": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/es-define-property/-/es-define-property-1.0.1.tgz",
"integrity": "sha512-e3nRfgfUZ4rNGL232gUgX06QNyyez04KdjFrF+LTRoOXmrOgFKDg4BCdsjW8EnT69eqdYGmRpJwiPVYNrCaW3g==",
"license": "MIT",
"engines": {
"node": ">= 0.4"
}
},
"node_modules/es-errors": {
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/es-errors/-/es-errors-1.3.0.tgz",
"integrity": "sha512-Zf5H2Kxt2xjTvbJvP2ZWLEICxA6j+hAmMzIlypy4xcBg1vKVnx89Wy0GbS+kf5cwCVFFzdCFh2XSCFNULS6csw==",
"license": "MIT",
"engines": {
"node": ">= 0.4"
}
},
"node_modules/es-module-lexer": {
"version": "0.9.3",
"resolved": "https://registry.npmjs.org/es-module-lexer/-/es-module-lexer-0.9.3.tgz",
"integrity": "sha512-1HQ2M2sPtxwnvOvT1ZClHyQDiggdNjURWpY2we6aMKCQiUVxTmVs2UYPLIrD84sS+kMdUwfBSylbJPwNnBrnHQ==",
"dev": true
},
"node_modules/es-object-atoms": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/es-object-atoms/-/es-object-atoms-1.1.1.tgz",
"integrity": "sha512-FGgH2h8zKNim9ljj7dankFPcICIK9Cp5bm+c2gQSYePhpaG5+esrLODihIorn+Pe6FGJzWhXQotPv73jTaldXA==",
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/es6-promise": {
"version": "4.2.8",
"resolved": "https://registry.npmjs.org/es6-promise/-/es6-promise-4.2.8.tgz",
@@ -6207,6 +6283,7 @@
"resolved": "https://registry.npmjs.org/etag/-/etag-1.8.1.tgz",
"integrity": "sha512-aIL5Fx7mawVa300al2BnEE4iNvo1qETxLrPI/o05L7z6go7fCw1J6EQmbK4FmJ2AS7kgVF/KEZWufBfdClMcPg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.6"
}
@@ -6265,45 +6342,50 @@
}
},
"node_modules/express": {
"version": "4.18.2",
"resolved": "https://registry.npmjs.org/express/-/express-4.18.2.tgz",
"integrity": "sha512-5/PsL6iGPdfQ/lKM1UuielYgv3BUoJfz1aUwU9vHZ+J7gyvwdQXFEBIEIaxeGf0GIcreATNyBExtalisDbuMqQ==",
"version": "4.22.1",
"resolved": "https://registry.npmjs.org/express/-/express-4.22.1.tgz",
"integrity": "sha512-F2X8g9P1X7uCPZMA3MVf9wcTqlyNp7IhH5qPCI0izhaOIYXaW9L535tGA3qmjRzpH+bZczqq7hVKxTR4NWnu+g==",
"dev": true,
"license": "MIT",
"dependencies": {
"accepts": "~1.3.8",
"array-flatten": "1.1.1",
"body-parser": "1.20.1",
"content-disposition": "0.5.4",
"body-parser": "~1.20.3",
"content-disposition": "~0.5.4",
"content-type": "~1.0.4",
"cookie": "0.5.0",
"cookie-signature": "1.0.6",
"cookie": "~0.7.1",
"cookie-signature": "~1.0.6",
"debug": "2.6.9",
"depd": "2.0.0",
"encodeurl": "~1.0.2",
"encodeurl": "~2.0.0",
"escape-html": "~1.0.3",
"etag": "~1.8.1",
"finalhandler": "1.2.0",
"fresh": "0.5.2",
"http-errors": "2.0.0",
"merge-descriptors": "1.0.1",
"finalhandler": "~1.3.1",
"fresh": "~0.5.2",
"http-errors": "~2.0.0",
"merge-descriptors": "1.0.3",
"methods": "~1.1.2",
"on-finished": "2.4.1",
"on-finished": "~2.4.1",
"parseurl": "~1.3.3",
"path-to-regexp": "0.1.7",
"path-to-regexp": "~0.1.12",
"proxy-addr": "~2.0.7",
"qs": "6.11.0",
"qs": "~6.14.0",
"range-parser": "~1.2.1",
"safe-buffer": "5.2.1",
"send": "0.18.0",
"serve-static": "1.15.0",
"send": "~0.19.0",
"serve-static": "~1.16.2",
"setprototypeof": "1.2.0",
"statuses": "2.0.1",
"statuses": "~2.0.1",
"type-is": "~1.6.18",
"utils-merge": "1.0.1",
"vary": "~1.1.2"
},
"engines": {
"node": ">= 0.10.0"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/express"
}
},
"node_modules/express/node_modules/array-flatten": {
@@ -6312,35 +6394,12 @@
"integrity": "sha512-PCVAQswWemu6UdxsDFFX/+gVeYqKAod3D3UVm91jHwynguOwAvYPhx8nNlM++NqRcK6CxxpUafjmhIdKiHibqg==",
"dev": true
},
"node_modules/express/node_modules/body-parser": {
"version": "1.20.1",
"resolved": "https://registry.npmjs.org/body-parser/-/body-parser-1.20.1.tgz",
"integrity": "sha512-jWi7abTbYwajOytWCQc37VulmWiRae5RyTpaCyDcS5/lMdtwSz5lOpDE67srw/HYe35f1z3fDQw+3txg7gNtWw==",
"dev": true,
"dependencies": {
"bytes": "3.1.2",
"content-type": "~1.0.4",
"debug": "2.6.9",
"depd": "2.0.0",
"destroy": "1.2.0",
"http-errors": "2.0.0",
"iconv-lite": "0.4.24",
"on-finished": "2.4.1",
"qs": "6.11.0",
"raw-body": "2.5.1",
"type-is": "~1.6.18",
"unpipe": "1.0.0"
},
"engines": {
"node": ">= 0.8",
"npm": "1.2.8000 || >= 1.4.16"
}
},
"node_modules/express/node_modules/cookie": {
"version": "0.5.0",
"resolved": "https://registry.npmjs.org/cookie/-/cookie-0.5.0.tgz",
"integrity": "sha512-YZ3GUyn/o8gfKJlnlX7g7xq4gyO6OSuhGPKaaGssGB2qgDUS0gPgtTvoyZLTt9Ab6dC4hfc9dV5arkvc/OCmrw==",
"version": "0.7.2",
"resolved": "https://registry.npmjs.org/cookie/-/cookie-0.7.2.tgz",
"integrity": "sha512-yki5XnKuf750l50uGTllt6kKILY4nQ1eNIQatoXEByZ5dWgnKqbnqmTrBE5B4N7lrMJKQ2ytWMiTO2o0v6Ew/w==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.6"
}
@@ -6350,22 +6409,34 @@
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
"integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
"dev": true,
"license": "MIT",
"dependencies": {
"ms": "2.0.0"
}
},
"node_modules/express/node_modules/finalhandler": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.2.0.tgz",
"integrity": "sha512-5uXcUVftlQMFnWC9qu/svkWv3GTd2PfUhK/3PLkYNAe7FbqJMt3515HaxE6eRL74GdsriiwujiawdaB1BpEISg==",
"node_modules/express/node_modules/encodeurl": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz",
"integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.8"
}
},
"node_modules/express/node_modules/finalhandler": {
"version": "1.3.2",
"resolved": "https://registry.npmjs.org/finalhandler/-/finalhandler-1.3.2.tgz",
"integrity": "sha512-aA4RyPcd3badbdABGDuTXCMTtOneUCAYH/gxoYRTZlIJdF0YPWuGqiAsIrhNnnqdXGswYk6dGujem4w80UJFhg==",
"dev": true,
"license": "MIT",
"dependencies": {
"debug": "2.6.9",
"encodeurl": "~1.0.2",
"encodeurl": "~2.0.0",
"escape-html": "~1.0.3",
"on-finished": "2.4.1",
"on-finished": "~2.4.1",
"parseurl": "~1.3.3",
"statuses": "2.0.1",
"statuses": "~2.0.2",
"unpipe": "~1.0.0"
},
"engines": {
@@ -6376,28 +6447,15 @@
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
"integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==",
"dev": true
},
"node_modules/express/node_modules/raw-body": {
"version": "2.5.1",
"resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.1.tgz",
"integrity": "sha512-qqJBtEyVgS0ZmPGdCFPWJ3FreoqvG4MVQln/kCgF7Olq95IbOp0/BWyMwbdtn4VTvkM8Y7khCQ2Xgk/tcrCXig==",
"dev": true,
"dependencies": {
"bytes": "3.1.2",
"http-errors": "2.0.0",
"iconv-lite": "0.4.24",
"unpipe": "1.0.0"
},
"engines": {
"node": ">= 0.8"
}
"license": "MIT"
},
"node_modules/express/node_modules/statuses": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz",
"integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==",
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz",
"integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.8"
}
@@ -6593,15 +6651,16 @@
}
},
"node_modules/flatted": {
"version": "3.2.7",
"resolved": "https://registry.npmjs.org/flatted/-/flatted-3.2.7.tgz",
"integrity": "sha512-5nqDSxl8nn5BSNxyR3n4I6eDmbolI6WT+QqR547RwxQapgjQBmtktdP+HTBb/a/zLsbzERTONyUB5pefh5TtjQ==",
"dev": true
"version": "3.4.2",
"resolved": "https://registry.npmjs.org/flatted/-/flatted-3.4.2.tgz",
"integrity": "sha512-PjDse7RzhcPkIJwy5t7KPWQSZ9cAbzQXcafsetQoD7sOJRQlGikNbx7yZp2OotDnJyrDcbyRq3Ttb18iYOqkxA==",
"dev": true,
"license": "ISC"
},
"node_modules/follow-redirects": {
"version": "1.15.2",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.2.tgz",
"integrity": "sha512-VQLG33o04KaQ8uYi2tVNbdrWp1QWxNNea+nmIB4EVM28v0hmP17z7aG1+wAkNzVq4KeXTq3221ye5qTJP91JwA==",
"version": "1.15.11",
"resolved": "https://registry.npmjs.org/follow-redirects/-/follow-redirects-1.15.11.tgz",
"integrity": "sha512-deG2P0JfjrTxl50XGCDyfI97ZGVCxIpfKYmfyrQ54n5FO/0gfIES8C/Psl6kWVDolizcaaxZJnTS0QSMxvnsBQ==",
"dev": true,
"funding": [
{
@@ -6609,6 +6668,7 @@
"url": "https://github.com/sponsors/RubenVerborgh"
}
],
"license": "MIT",
"engines": {
"node": ">=4.0"
},
@@ -6668,6 +6728,7 @@
"resolved": "https://registry.npmjs.org/fresh/-/fresh-0.5.2.tgz",
"integrity": "sha512-zJ2mQYM18rEFOudeV4GShTGIQ7RbzA7ozbU9I/XBpm7kqgMywgmylMwXHxZJmkVoYkna9d2pVXVXPdYTP9ej8Q==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.6"
}
@@ -6725,9 +6786,13 @@
}
},
"node_modules/function-bind": {
"version": "1.1.1",
"resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.1.tgz",
"integrity": "sha512-yIovAzMX49sF8Yl58fSCWJ5svSLuaibPxXQJFLmBObTuCr0Mf1KiPopGM9NiFjiYBCbfaa2Fh6breQ6ANVTI0A=="
"version": "1.1.2",
"resolved": "https://registry.npmjs.org/function-bind/-/function-bind-1.1.2.tgz",
"integrity": "sha512-7XHNxH7qX9xG5mIwxkhumTox/MIRNcOgDrxWsMt2pAr23WHp6MrRlN7FBSFpCpr+oVO0F744iUgR82nJMfG2SA==",
"license": "MIT",
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/functions-have-names": {
"version": "1.2.3",
@@ -6775,13 +6840,24 @@
}
},
"node_modules/get-intrinsic": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.2.0.tgz",
"integrity": "sha512-L049y6nFOuom5wGyRc3/gdTLO94dySVKRACj1RmJZBQXlbTMhtNIgkWkUHq+jYmZvKf14EW1EoJnnjbmoHij0Q==",
"version": "1.3.0",
"resolved": "https://registry.npmjs.org/get-intrinsic/-/get-intrinsic-1.3.0.tgz",
"integrity": "sha512-9fSjSaos/fRIVIp+xSJlE6lfwhES7LNtKaCBIamHsjr2na1BiABJPo0mOjjz8GJDURarmCPGqaiVg5mfjb98CQ==",
"license": "MIT",
"dependencies": {
"function-bind": "^1.1.1",
"has": "^1.0.3",
"has-symbols": "^1.0.3"
"call-bind-apply-helpers": "^1.0.2",
"es-define-property": "^1.0.1",
"es-errors": "^1.3.0",
"es-object-atoms": "^1.1.1",
"function-bind": "^1.1.2",
"get-proto": "^1.0.1",
"gopd": "^1.2.0",
"has-symbols": "^1.1.0",
"hasown": "^2.0.2",
"math-intrinsics": "^1.1.0"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
@@ -6796,6 +6872,19 @@
"node": ">=8.0.0"
}
},
"node_modules/get-proto": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/get-proto/-/get-proto-1.0.1.tgz",
"integrity": "sha512-sTSfBjoXBp89JvIKIefqw7U2CCebsc74kiY6awiGogKtoSGbgjYE/G/+l9sF3MWFPNc9IcoOC4ODfKHfxFmp0g==",
"license": "MIT",
"dependencies": {
"dunder-proto": "^1.0.1",
"es-object-atoms": "^1.0.0"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/get-stream": {
"version": "6.0.1",
"resolved": "https://registry.npmjs.org/get-stream/-/get-stream-6.0.1.tgz",
@@ -6884,6 +6973,18 @@
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/gopd": {
"version": "1.2.0",
"resolved": "https://registry.npmjs.org/gopd/-/gopd-1.2.0.tgz",
"integrity": "sha512-ZUKRh6/kUFoAiTAtTYPZJ3hw9wNxx+BIBOijnlG9PnrJsCcSjs1wyyD6vJpaYtgnzDrKYRSqf3OO6Rfa93xsRg==",
"license": "MIT",
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/graceful-fs": {
"version": "4.2.11",
"resolved": "https://registry.npmjs.org/graceful-fs/-/graceful-fs-4.2.11.tgz",
@@ -6945,6 +7046,7 @@
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/has/-/has-1.0.3.tgz",
"integrity": "sha512-f2dvO0VU6Oej7RkWJGrehjbzMAjFp5/VKPp5tTpWIV4JHHZK1/BxbFRtf/siA2SWTe09caDmVtYYzWEIbBS4zw==",
"dev": true,
"dependencies": {
"function-bind": "^1.1.1"
},
@@ -6994,9 +7096,10 @@
}
},
"node_modules/has-symbols": {
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.0.3.tgz",
"integrity": "sha512-l3LCuF6MgDNwTDKkdYGEihYjt5pRPbEg46rtlmnSPlUbgmB8LOIrKJbYYFBSbnPaJexMKtiPO8hmeRjRz2Td+A==",
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/has-symbols/-/has-symbols-1.1.0.tgz",
"integrity": "sha512-1cDNdwJ2Jaohmb3sg4OmKaMBwuC48sYni5HUw2DvsC8LjGTLK9h+eb1X6RyuOHe4hT0ULCW68iomhjUoKUqlPQ==",
"license": "MIT",
"engines": {
"node": ">= 0.4"
},
@@ -7024,6 +7127,18 @@
"integrity": "sha512-8Rf9Y83NBReMnx0gFzA8JImQACstCYWUplepDa9xprwwtmgEZUF0h/i5xSA625zB/I37EtrswSST6OXxwaaIJQ==",
"dev": true
},
"node_modules/hasown": {
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/hasown/-/hasown-2.0.2.tgz",
"integrity": "sha512-0hJU9SCPvmMzIBdZFqNPXWa6dqh7WdH0cII9y+CyS8rG3nL48Bclra9HmKhVVUHyPWNH5Y7xDwAB7bfgSjkUMQ==",
"license": "MIT",
"dependencies": {
"function-bind": "^1.1.2"
},
"engines": {
"node": ">= 0.4"
}
},
"node_modules/hdr-histogram-js": {
"version": "2.0.3",
"resolved": "https://registry.npmjs.org/hdr-histogram-js/-/hdr-histogram-js-2.0.3.tgz",
@@ -7146,26 +7261,32 @@
"dev": true
},
"node_modules/http-errors": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.0.tgz",
"integrity": "sha512-FtwrG/euBzaEjYeRqOgly7G0qviiXoJWnvEH2Z1plBdXgbyjv34pHTSb9zoeHMyDy33+DWy5Wt9Wo+TURtOYSQ==",
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/http-errors/-/http-errors-2.0.1.tgz",
"integrity": "sha512-4FbRdAX+bSdmo4AUFuS0WNiPz8NgFt+r8ThgNWmlrjQjt1Q7ZR9+zTlce2859x4KSXrwIsaeTqDoKQmtP8pLmQ==",
"dev": true,
"license": "MIT",
"dependencies": {
"depd": "2.0.0",
"inherits": "2.0.4",
"setprototypeof": "1.2.0",
"statuses": "2.0.1",
"toidentifier": "1.0.1"
"depd": "~2.0.0",
"inherits": "~2.0.4",
"setprototypeof": "~1.2.0",
"statuses": "~2.0.2",
"toidentifier": "~1.0.1"
},
"engines": {
"node": ">= 0.8"
},
"funding": {
"type": "opencollective",
"url": "https://opencollective.com/express"
}
},
"node_modules/http-errors/node_modules/statuses": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz",
"integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==",
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz",
"integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.8"
}
@@ -8020,10 +8141,11 @@
"dev": true
},
"node_modules/js-yaml": {
"version": "3.14.1",
"resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.1.tgz",
"integrity": "sha512-okMH7OXXJ7YrN9Ok3/SXrnu4iX9yOk+25nqX4imS2npuvTYDmo/QEZoqwZkYaIDk3jVvBOTOIEgEhaLOynBS9g==",
"version": "3.14.2",
"resolved": "https://registry.npmjs.org/js-yaml/-/js-yaml-3.14.2.tgz",
"integrity": "sha512-PMSmkqxr106Xa156c2M265Z+FTrPl+oxd/rgOQy2tijQeK5TxQ43psO1ZCwhVOSdnn+RzkzlRz/eY4BgJBYVpg==",
"dev": true,
"license": "MIT",
"dependencies": {
"argparse": "^1.0.7",
"esprima": "^4.0.0"
@@ -8576,9 +8698,10 @@
}
},
"node_modules/lodash": {
"version": "4.17.21",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.21.tgz",
"integrity": "sha512-v2kDEe57lecTulaDIuNTPy3Ry4gLGJ6Z1O3vE1krgXZNrsQ+LFTGHVxVjcXPs17LhbZVGedAJv8XZ1tvj5FvSg=="
"version": "4.17.23",
"resolved": "https://registry.npmjs.org/lodash/-/lodash-4.17.23.tgz",
"integrity": "sha512-LgVTMpQtIopCi79SJeDiP0TfWi5CNEc/L/aRdTh3yIvmZXTnheWpKjSZhnvMl8iXbC1tFg9gdHHDMLoV7CnG+w==",
"license": "MIT"
},
"node_modules/lodash.debounce": {
"version": "4.0.8",
@@ -8809,6 +8932,15 @@
"node": ">= 12"
}
},
"node_modules/math-intrinsics": {
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/math-intrinsics/-/math-intrinsics-1.1.0.tgz",
"integrity": "sha512-/IXtbwEk5HTPyEwyKX6hGkYXxM9nbj64B+ilVJnC/R6B0pH5G4V3b0pVbL7DBj4tkhBAppbQUlf6F6Xl9LHu1g==",
"license": "MIT",
"engines": {
"node": ">= 0.4"
}
},
"node_modules/media-typer": {
"version": "0.3.0",
"resolved": "https://registry.npmjs.org/media-typer/-/media-typer-0.3.0.tgz",
@@ -8831,10 +8963,14 @@
}
},
"node_modules/merge-descriptors": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.1.tgz",
"integrity": "sha512-cCi6g3/Zr1iqQi6ySbseM1Xvooa98N0w31jzUYrXPX2xqObmFGHJ0tQ5u74H3mVh7wLouTseZyYIq39g8cNp1w==",
"dev": true
"version": "1.0.3",
"resolved": "https://registry.npmjs.org/merge-descriptors/-/merge-descriptors-1.0.3.tgz",
"integrity": "sha512-gaNvAS7TZ897/rVaZ0nMtAyxNyi/pdbjbAwUpFQpN70GqnVfOiXpeUUMKRBmzXaSQ8DdTX4/0ms62r2K+hE6mQ==",
"dev": true,
"license": "MIT",
"funding": {
"url": "https://github.com/sponsors/sindresorhus"
}
},
"node_modules/merge-stream": {
"version": "2.0.0",
@@ -9272,10 +9408,11 @@
"optional": true
},
"node_modules/node-forge": {
"version": "1.3.1",
"resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.1.tgz",
"integrity": "sha512-dPEtOeMvF9VMcYV/1Wb8CPoVAXtp6MKMlcbAt4ddqmGqUJ6fQZFXkNZNkNlfevtNkGtaSoXf/vNNNSvgrdXwtA==",
"version": "1.3.3",
"resolved": "https://registry.npmjs.org/node-forge/-/node-forge-1.3.3.tgz",
"integrity": "sha512-rLvcdSyRCyouf6jcOIPe/BgwG/d7hKjzMKOas33/pHEr6gbq18IK9zV7DiPvzsz0oBJPme6qr6H6kGZuI9/DZg==",
"dev": true,
"license": "(BSD-3-Clause OR GPL-2.0)",
"engines": {
"node": ">= 6.13.0"
}
@@ -9747,10 +9884,14 @@
}
},
"node_modules/object-inspect": {
"version": "1.12.3",
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.12.3.tgz",
"integrity": "sha512-geUvdk7c+eizMNUDkRpW1wJwgfOiOeHbxBR/hLXK1aT6zmVSO0jsQcs7fj6MGw89jC/cjGfLcNOrtMYtGqm81g==",
"version": "1.13.4",
"resolved": "https://registry.npmjs.org/object-inspect/-/object-inspect-1.13.4.tgz",
"integrity": "sha512-W67iLl4J2EXEGTbfeHCffrjDfitvLANg0UlX3wFUUSTx92KXRFegMHUVgSqE+wvhAbi4WqjGg9czysTV2Epbew==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
@@ -10205,10 +10346,11 @@
"dev": true
},
"node_modules/path-to-regexp": {
"version": "0.1.7",
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.7.tgz",
"integrity": "sha512-5DFkuoqlv1uYQKxy8omFBeJPQcdoE07Kv2sferDCrAq1ohOU+MSDswDIbnx3YAM60qIOnYa53wBhXW0EbMonrQ==",
"dev": true
"version": "0.1.12",
"resolved": "https://registry.npmjs.org/path-to-regexp/-/path-to-regexp-0.1.12.tgz",
"integrity": "sha512-RA1GjUVMnvYFxuqovrEqZoxxW5NUZqbwKtYz/Tt7nXerk0LbLblQmrsgdeOxV5SFHf0UDggjS/bSeOZwt1pmEQ==",
"dev": true,
"license": "MIT"
},
"node_modules/path-type": {
"version": "4.0.0",
@@ -11353,12 +11495,13 @@
}
},
"node_modules/qs": {
"version": "6.11.0",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.11.0.tgz",
"integrity": "sha512-MvjoMCJwEarSbUYk5O+nmoSzSutSsTwF85zcHPQ9OrlFoZOYIjaqBAJIqIXjptyD5vThxGq52Xu/MaJzRkIk4Q==",
"version": "6.14.1",
"resolved": "https://registry.npmjs.org/qs/-/qs-6.14.1.tgz",
"integrity": "sha512-4EK3+xJl8Ts67nLYNwqw/dsFVnCf+qR7RgXSK9jEEm9unao3njwMDdmsdvoKBKHzxd7tCYz5e5M+SnMjdtXGQQ==",
"dev": true,
"license": "BSD-3-Clause",
"dependencies": {
"side-channel": "^1.0.4"
"side-channel": "^1.1.0"
},
"engines": {
"node": ">=0.6"
@@ -11449,15 +11592,16 @@
}
},
"node_modules/raw-body": {
"version": "2.5.2",
"resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.2.tgz",
"integrity": "sha512-8zGqypfENjCIqGhgXToC8aB2r7YrBX+AQAfIPs/Mlk+BtPTztOvTS01NRW/3Eh60J+a48lt8qsCzirQ6loCVfA==",
"version": "2.5.3",
"resolved": "https://registry.npmjs.org/raw-body/-/raw-body-2.5.3.tgz",
"integrity": "sha512-s4VSOf6yN0rvbRZGxs8Om5CWj6seneMwK3oDb4lWDH0UPhWcxwOWw5+qk24bxq87szX1ydrwylIOp2uG1ojUpA==",
"dev": true,
"license": "MIT",
"dependencies": {
"bytes": "3.1.2",
"http-errors": "2.0.0",
"iconv-lite": "0.4.24",
"unpipe": "1.0.0"
"bytes": "~3.1.2",
"http-errors": "~2.0.1",
"iconv-lite": "~0.4.24",
"unpipe": "~1.0.0"
},
"engines": {
"node": ">= 0.8"
@@ -12144,24 +12288,25 @@
"dev": true
},
"node_modules/send": {
"version": "0.18.0",
"resolved": "https://registry.npmjs.org/send/-/send-0.18.0.tgz",
"integrity": "sha512-qqWzuOjSFOuqPjFe4NOsMLafToQQwBSOEpS+FwEt3A2V3vKubTquT3vmLTQpFgMXp8AlFWFuP1qKaJZOtPpVXg==",
"version": "0.19.2",
"resolved": "https://registry.npmjs.org/send/-/send-0.19.2.tgz",
"integrity": "sha512-VMbMxbDeehAxpOtWJXlcUS5E8iXh6QmN+BkRX1GARS3wRaXEEgzCcB10gTQazO42tpNIya8xIyNx8fll1OFPrg==",
"dev": true,
"license": "MIT",
"dependencies": {
"debug": "2.6.9",
"depd": "2.0.0",
"destroy": "1.2.0",
"encodeurl": "~1.0.2",
"encodeurl": "~2.0.0",
"escape-html": "~1.0.3",
"etag": "~1.8.1",
"fresh": "0.5.2",
"http-errors": "2.0.0",
"fresh": "~0.5.2",
"http-errors": "~2.0.1",
"mime": "1.6.0",
"ms": "2.1.3",
"on-finished": "2.4.1",
"on-finished": "~2.4.1",
"range-parser": "~1.2.1",
"statuses": "2.0.1"
"statuses": "~2.0.2"
},
"engines": {
"node": ">= 0.8.0"
@@ -12172,6 +12317,7 @@
"resolved": "https://registry.npmjs.org/debug/-/debug-2.6.9.tgz",
"integrity": "sha512-bC7ElrdJaJnPbAP+1EotYvqZsb3ecl5wi6Bfi6BJTUcNowp6cvspg0jXznRTKDjm/E7AdgFBVeAPVMNcKGsHMA==",
"dev": true,
"license": "MIT",
"dependencies": {
"ms": "2.0.0"
}
@@ -12180,13 +12326,25 @@
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.0.0.tgz",
"integrity": "sha512-Tpp60P6IUJDTuOq/5Z8cdskzJujfwqfOTkrwIwj7IRISpnkJnT6SyJ4PCPnGMoFjC9ddhal5KVIYtAt97ix05A==",
"dev": true
"dev": true,
"license": "MIT"
},
"node_modules/send/node_modules/encodeurl": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz",
"integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.8"
}
},
"node_modules/send/node_modules/mime": {
"version": "1.6.0",
"resolved": "https://registry.npmjs.org/mime/-/mime-1.6.0.tgz",
"integrity": "sha512-x0Vn8spI+wuJ1O6S7gnbaQg8Pxh4NNHb7KSINmEWKiPE4RKOplvijn+NkmYmmRgP68mc70j2EbeTFRsrswaQeg==",
"dev": true,
"license": "MIT",
"bin": {
"mime": "cli.js"
},
@@ -12198,13 +12356,15 @@
"version": "2.1.3",
"resolved": "https://registry.npmjs.org/ms/-/ms-2.1.3.tgz",
"integrity": "sha512-6FlzubTLZG3J2a/NVCAleEhjzq5oxgHyaCU9yYXvcLsvoVaHJq/s5xXI6/XXP6tz7R9xAOtHnSO/tXtF3WRTlA==",
"dev": true
"dev": true,
"license": "MIT"
},
"node_modules/send/node_modules/statuses": {
"version": "2.0.1",
"resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.1.tgz",
"integrity": "sha512-RwNA9Z/7PrK06rYLIzFMlaF+l73iwpzsqRIFgbMLbTcLD6cOao82TaWefPXQvB2fOC4AjuYSEndS7N/mTCbkdQ==",
"version": "2.0.2",
"resolved": "https://registry.npmjs.org/statuses/-/statuses-2.0.2.tgz",
"integrity": "sha512-DvEy55V3DB7uknRo+4iOGT5fP1slR8wQohVdknigZPMpMstaKJQWhwiYBACJE3Ul2pTnATihhBYnRhZQHGBiRw==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.8"
}
@@ -12288,20 +12448,31 @@
"dev": true
},
"node_modules/serve-static": {
"version": "1.15.0",
"resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.15.0.tgz",
"integrity": "sha512-XGuRDNjXUijsUL0vl6nSD7cwURuzEgglbOaFuZM9g3kwDXOWVTck0jLzjPzGD+TazWbboZYu52/9/XPdUgne9g==",
"version": "1.16.3",
"resolved": "https://registry.npmjs.org/serve-static/-/serve-static-1.16.3.tgz",
"integrity": "sha512-x0RTqQel6g5SY7Lg6ZreMmsOzncHFU7nhnRWkKgWuMTu5NN0DR5oruckMqRvacAN9d5w6ARnRBXl9xhDCgfMeA==",
"dev": true,
"license": "MIT",
"dependencies": {
"encodeurl": "~1.0.2",
"encodeurl": "~2.0.0",
"escape-html": "~1.0.3",
"parseurl": "~1.3.3",
"send": "0.18.0"
"send": "~0.19.1"
},
"engines": {
"node": ">= 0.8.0"
}
},
"node_modules/serve-static/node_modules/encodeurl": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/encodeurl/-/encodeurl-2.0.0.tgz",
"integrity": "sha512-Q0n9HRi4m6JuGIV1eFlmvJB7ZEVxu93IrMyiMsGC0lrMJMWzRgx6WGquyfQgZVb31vhGgXnfmPNNXmxnOkRBrg==",
"dev": true,
"license": "MIT",
"engines": {
"node": ">= 0.8"
}
},
"node_modules/set-blocking": {
"version": "2.0.0",
"resolved": "https://registry.npmjs.org/set-blocking/-/set-blocking-2.0.0.tgz",
@@ -12354,14 +12525,76 @@
}
},
"node_modules/side-channel": {
"version": "1.0.4",
"resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.0.4.tgz",
"integrity": "sha512-q5XPytqFEIKHkGdiMIrY10mvLRvnQh42/+GoBlFW3b2LXLE2xxJpZFdm94we0BaoV3RwJyGqg5wS7epxTv0Zvw==",
"version": "1.1.0",
"resolved": "https://registry.npmjs.org/side-channel/-/side-channel-1.1.0.tgz",
"integrity": "sha512-ZX99e6tRweoUXqR+VBrslhda51Nh5MTQwou5tnUDgbtyM0dBgmhEDtWGP/xbKn6hqfPRHujUNwz5fy/wbbhnpw==",
"dev": true,
"license": "MIT",
"dependencies": {
"call-bind": "^1.0.0",
"get-intrinsic": "^1.0.2",
"object-inspect": "^1.9.0"
"es-errors": "^1.3.0",
"object-inspect": "^1.13.3",
"side-channel-list": "^1.0.0",
"side-channel-map": "^1.0.1",
"side-channel-weakmap": "^1.0.2"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/side-channel-list": {
"version": "1.0.0",
"resolved": "https://registry.npmjs.org/side-channel-list/-/side-channel-list-1.0.0.tgz",
"integrity": "sha512-FCLHtRD/gnpCiCHEiJLOwdmFP+wzCmDEkc9y7NsYxeF4u7Btsn1ZuwgwJGxImImHicJArLP4R0yX4c2KCrMrTA==",
"dev": true,
"license": "MIT",
"dependencies": {
"es-errors": "^1.3.0",
"object-inspect": "^1.13.3"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/side-channel-map": {
"version": "1.0.1",
"resolved": "https://registry.npmjs.org/side-channel-map/-/side-channel-map-1.0.1.tgz",
"integrity": "sha512-VCjCNfgMsby3tTdo02nbjtM/ewra6jPHmpThenkTYh8pG9ucZ/1P8So4u4FGBek/BjpOVsDCMoLA/iuBKIFXRA==",
"dev": true,
"license": "MIT",
"dependencies": {
"call-bound": "^1.0.2",
"es-errors": "^1.3.0",
"get-intrinsic": "^1.2.5",
"object-inspect": "^1.13.3"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
}
},
"node_modules/side-channel-weakmap": {
"version": "1.0.2",
"resolved": "https://registry.npmjs.org/side-channel-weakmap/-/side-channel-weakmap-1.0.2.tgz",
"integrity": "sha512-WPS/HvHQTYnHisLo9McqBHOJk2FkHO/tlpvldyrnem4aeQp4hai3gythswg6p01oSoTl58rcpiFAjF2br2Ak2A==",
"dev": true,
"license": "MIT",
"dependencies": {
"call-bound": "^1.0.2",
"es-errors": "^1.3.0",
"get-intrinsic": "^1.2.5",
"object-inspect": "^1.13.3",
"side-channel-map": "^1.0.1"
},
"engines": {
"node": ">= 0.4"
},
"funding": {
"url": "https://github.com/sponsors/ljharb"
+1 -1
View File
@@ -35,7 +35,7 @@
"crypto-js": "^4.1.1",
"highlight.js": "^11.6.0",
"humanize-duration": "^3.27.3",
"lodash": "4.17.21",
"lodash": "4.17.23",
"moment": "^2.29.4",
"ng-apexcharts": "^1.7.4",
"ngx-markdown": "^13.1.0",
@@ -96,6 +96,7 @@
<button (click)="changeSummaryTempDuration('year')" mat-menu-item>year</button>
<button (click)="changeSummaryTempDuration('month')" mat-menu-item>month</button>
<button (click)="changeSummaryTempDuration('week')" mat-menu-item>week</button>
<button (click)="changeSummaryTempDuration('day')" mat-menu-item>day</button>
</mat-menu>
</div>
</div>
@@ -32,7 +32,7 @@ export class DashboardComponent implements OnInit, AfterViewInit, OnDestroy
summaryData: { [key: string]: DeviceSummaryModel };
hostGroups: { [hostId: string]: string[] } = {}
temperatureOptions: ApexOptions;
tempDurationKey = 'forever'
tempDurationKey = 'week'
config: AppConfig;
showArchived: boolean;
@@ -272,11 +272,11 @@ export class DashboardComponent implements OnInit, AfterViewInit, OnDestroy
}
/*
DURATION_KEY_DAY = "day"
DURATION_KEY_WEEK = "week"
DURATION_KEY_MONTH = "month"
DURATION_KEY_YEAR = "year"
DURATION_KEY_FOREVER = "forever"
DURATION_KEY_MONTH = "month"
DURATION_KEY_YEAR = "year"
DURATION_KEY_FOREVER = "forever"
*/
changeSummaryTempDuration(durationKey: string): void {