Compare commits
84 Commits
| Author | SHA1 | Date | |
|---|---|---|---|
| c34ee85e48 | |||
| 91e8eb1def | |||
| a01b8fe083 | |||
| 550fb542d4 | |||
| 7841063783 | |||
| 8e05b2e2f8 | |||
| 64e1c93d16 | |||
| b227054b52 | |||
| 66bd6f99c5 | |||
| c6579864b8 | |||
| 2361c329e2 | |||
| 5ea149d878 | |||
| 30bd18f816 | |||
| 0f0efac866 | |||
| 04563c0d0d | |||
| 9316eccabe | |||
| b71d6660a6 | |||
| 0e2fec4e93 | |||
| ff171282cc | |||
| ea8fe208d0 | |||
| 9ae9c387cc | |||
| 772b4f6528 | |||
| 4a16ca0d5a | |||
| 316ce856f7 | |||
| 6e0321f488 | |||
| 338d2ae04e | |||
| 4419f7f429 | |||
| 797a6b0429 | |||
| d0b545dfb7 | |||
| b0bff53bbd | |||
| b4adf3d88d | |||
| eefdc548b2 | |||
| fb918e2d6e | |||
| 3d9001a5e4 | |||
| fbe7d63a24 | |||
| d718b0898b | |||
| 44c7211b5f | |||
| 157c93b967 | |||
| 7babc280a0 | |||
| e364e480e8 | |||
| bfefe7e98a | |||
| 831cca7853 | |||
| 46f3b1c02c | |||
| 8a1ae2ffa0 | |||
| 145c819fc1 | |||
| a9ea231de0 | |||
| c2488af1c3 | |||
| ecf7a447a7 | |||
| f8e61af2f9 | |||
| ee61d986d8 | |||
| 8fe8cec09a | |||
| b953456d6b | |||
| 4057699cad | |||
| d3e7fc6067 | |||
| 09a8574d83 | |||
| 7695cc185f | |||
| fc7208020e | |||
| 75d5930835 | |||
| 3c9e16169e | |||
| 9e1076f302 | |||
| 75ab87e109 | |||
| 0b8251fce2 | |||
| f57b71ae96 | |||
| ce324c3de1 | |||
| 281b56d287 | |||
| cbd23e334b | |||
| 7a0b9c9e0d | |||
| 44b3d982dd | |||
| 769f253e7d | |||
| fbd5bb57ac | |||
| b9eb5687cd | |||
| cbd230a7e0 | |||
| 892e9685f3 | |||
| 7ba7b6efda | |||
| 453069deec | |||
| de5f2c3324 | |||
| d486f14433 | |||
| cb47dd7185 | |||
| 6ae4d233cd | |||
| f8bb185854 | |||
| 1da07caaa6 | |||
| fe96c27732 | |||
| 7287775cca | |||
| 28ac3ac7ec |
@@ -37,4 +37,6 @@ ghcr.io/analogj/scrutiny:master-omnibus
|
||||
docker exec scrutiny scrutiny-collector-metrics run
|
||||
```
|
||||
|
||||
The log files will be available on your host in the `config` directory. Please attach them to this issue.
|
||||
The log files will be available on your host in the `config` directory. Please attach them to this issue.
|
||||
|
||||
Please also provide the output of `docker info`
|
||||
@@ -1,85 +0,0 @@
|
||||
name: CI
|
||||
# This workflow is triggered on pushes & pull requests
|
||||
on: [pull_request]
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Build
|
||||
runs-on: ubuntu-latest
|
||||
container: techknowlogick/xgo:go-1.17.x
|
||||
|
||||
# Service containers to run with `build` (Required for end-to-end testing)
|
||||
services:
|
||||
influxdb:
|
||||
image: influxdb:2.2
|
||||
env:
|
||||
DOCKER_INFLUXDB_INIT_MODE: setup
|
||||
DOCKER_INFLUXDB_INIT_USERNAME: admin
|
||||
DOCKER_INFLUXDB_INIT_PASSWORD: password12345
|
||||
DOCKER_INFLUXDB_INIT_ORG: scrutiny
|
||||
DOCKER_INFLUXDB_INIT_BUCKET: metrics
|
||||
DOCKER_INFLUXDB_INIT_ADMIN_TOKEN: my-super-secret-auth-token
|
||||
ports:
|
||||
- 8086:8086
|
||||
env:
|
||||
PROJECT_PATH: /go/src/github.com/analogj/scrutiny
|
||||
CGO_ENABLED: 1
|
||||
steps:
|
||||
- name: Git
|
||||
run: |
|
||||
apt-get update && apt-get install -y software-properties-common
|
||||
add-apt-repository ppa:git-core/ppa && apt-get update && apt-get install -y git
|
||||
git --version
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
- name: Test
|
||||
run: |
|
||||
mkdir -p $(dirname "$PROJECT_PATH")
|
||||
cp -a $GITHUB_WORKSPACE $PROJECT_PATH
|
||||
cd $PROJECT_PATH
|
||||
|
||||
go mod vendor
|
||||
go test -race -coverprofile=coverage.txt -covermode=atomic -v -tags "static" $(go list ./... | grep -v /vendor/)
|
||||
- name: Generate coverage report
|
||||
uses: codecov/codecov-action@v2
|
||||
with:
|
||||
files: ${{ env.PROJECT_PATH }}/coverage.txt
|
||||
flags: unittests
|
||||
fail_ci_if_error: true
|
||||
verbose: true
|
||||
- name: Build Binaries
|
||||
run: |
|
||||
|
||||
cd $PROJECT_PATH
|
||||
make all
|
||||
|
||||
- name: Archive
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: binaries.zip
|
||||
path: |
|
||||
/build/scrutiny-web-linux-amd64
|
||||
/build/scrutiny-collector-metrics-linux-amd64
|
||||
/build/scrutiny-web-linux-arm64
|
||||
/build/scrutiny-collector-metrics-linux-arm64
|
||||
/build/scrutiny-web-linux-arm-5
|
||||
/build/scrutiny-collector-metrics-linux-arm-5
|
||||
/build/scrutiny-web-linux-arm-6
|
||||
/build/scrutiny-collector-metrics-linux-arm-6
|
||||
/build/scrutiny-web-linux-arm-7
|
||||
/build/scrutiny-collector-metrics-linux-arm-7
|
||||
/build/scrutiny-web-windows-4.0-amd64.exe
|
||||
/build/scrutiny-collector-metrics-windows-4.0-amd64.exe
|
||||
# /build/scrutiny-web-darwin-arm64
|
||||
# /build/scrutiny-collector-metrics-darwin-arm64
|
||||
# /build/scrutiny-web-darwin-amd64
|
||||
# /build/scrutiny-collector-metrics-darwin-amd64
|
||||
# /build/scrutiny-web-freebsd-amd64
|
||||
# /build/scrutiny-collector-metrics-freebsd-amd64
|
||||
- uses: codecov/codecov-action@v2
|
||||
with:
|
||||
file: ${{ env.PROJECT_PATH }}/coverage.txt
|
||||
flags: unittests
|
||||
fail_ci_if_error: false
|
||||
|
||||
|
||||
@@ -0,0 +1,114 @@
|
||||
name: CI
|
||||
# This workflow is triggered on pushes & pull requests
|
||||
on: [pull_request]
|
||||
|
||||
jobs:
|
||||
test-frontend:
|
||||
name: Test Frontend
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
- name: Test Frontend
|
||||
run: |
|
||||
make binary-frontend-test-coverage
|
||||
- name: Upload coverage
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: coverage
|
||||
path: ${{ github.workspace }}/webapp/frontend/coverage/lcov.info
|
||||
retention-days: 1
|
||||
test-backend:
|
||||
name: Test Backend
|
||||
runs-on: ubuntu-latest
|
||||
container: ghcr.io/packagrio/packagr:latest-golang
|
||||
# Service containers to run with `build` (Required for end-to-end testing)
|
||||
services:
|
||||
influxdb:
|
||||
image: influxdb:2.2
|
||||
env:
|
||||
DOCKER_INFLUXDB_INIT_MODE: setup
|
||||
DOCKER_INFLUXDB_INIT_USERNAME: admin
|
||||
DOCKER_INFLUXDB_INIT_PASSWORD: password12345
|
||||
DOCKER_INFLUXDB_INIT_ORG: scrutiny
|
||||
DOCKER_INFLUXDB_INIT_BUCKET: metrics
|
||||
DOCKER_INFLUXDB_INIT_ADMIN_TOKEN: my-super-secret-auth-token
|
||||
ports:
|
||||
- 8086:8086
|
||||
env:
|
||||
STATIC: true
|
||||
steps:
|
||||
- name: Git
|
||||
run: |
|
||||
apt-get update && apt-get install -y software-properties-common
|
||||
add-apt-repository ppa:git-core/ppa && apt-get update && apt-get install -y git
|
||||
git --version
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
- name: Test Backend
|
||||
run: |
|
||||
make binary-clean binary-test-coverage
|
||||
- name: Upload coverage
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: coverage
|
||||
path: ${{ github.workspace }}/coverage.txt
|
||||
retention-days: 1
|
||||
test-coverage:
|
||||
name: Test Coverage Upload
|
||||
needs:
|
||||
- test-backend
|
||||
- test-frontend
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
- name: Download coverage reports
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: coverage
|
||||
- name: Upload coverage reports
|
||||
uses: codecov/codecov-action@v2
|
||||
with:
|
||||
files: ${{ github.workspace }}/coverage.txt,${{ github.workspace }}/lcov.info
|
||||
flags: unittests
|
||||
fail_ci_if_error: true
|
||||
verbose: true
|
||||
|
||||
build:
|
||||
name: Build ${{ matrix.cfg.goos }}/${{ matrix.cfg.goarch }}
|
||||
runs-on: ${{ matrix.cfg.on }}
|
||||
env:
|
||||
GOOS: ${{ matrix.cfg.goos }}
|
||||
GOARCH: ${{ matrix.cfg.goarch }}
|
||||
GOARM: ${{ matrix.cfg.goarm }}
|
||||
STATIC: true
|
||||
strategy:
|
||||
matrix:
|
||||
cfg:
|
||||
- { on: ubuntu-latest, goos: linux, goarch: amd64 }
|
||||
- { on: ubuntu-latest, goos: linux, goarch: arm, goarm: 5 }
|
||||
- { on: ubuntu-latest, goos: linux, goarch: arm, goarm: 6 }
|
||||
- { on: ubuntu-latest, goos: linux, goarch: arm, goarm: 7 }
|
||||
- { on: ubuntu-latest, goos: linux, goarch: arm64 }
|
||||
- { on: macos-latest, goos: darwin, goarch: amd64 }
|
||||
- { on: macos-latest, goos: darwin, goarch: arm64 }
|
||||
- { on: macos-latest, goos: freebsd, goarch: amd64 }
|
||||
- { on: windows-latest, goos: windows, goarch: amd64 }
|
||||
- { on: windows-latest, goos: windows, goarch: arm64 }
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
- uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: '^1.18.3'
|
||||
- name: Build Binaries
|
||||
run: |
|
||||
make binary-clean binary-all
|
||||
- name: Archive
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: binaries.zip
|
||||
path: |
|
||||
scrutiny-web-*
|
||||
scrutiny-collector-metrics-*
|
||||
@@ -1,7 +1,5 @@
|
||||
name: Docker
|
||||
on:
|
||||
schedule:
|
||||
- cron: '36 12 * * *'
|
||||
push:
|
||||
branches: [ master, beta ]
|
||||
# Publish semver tags as releases.
|
||||
@@ -83,8 +81,7 @@ jobs:
|
||||
options: -v ${{ github.workspace }}:/work
|
||||
run: |
|
||||
cd /work
|
||||
make frontend
|
||||
ls -alt /work
|
||||
make binary-frontend && echo "print contents of /work/dist" && ls -alt /work/dist
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v2
|
||||
@@ -144,8 +141,8 @@ jobs:
|
||||
options: -v ${{ github.workspace }}:/work
|
||||
run: |
|
||||
cd /work
|
||||
make frontend
|
||||
ls -alt /work
|
||||
make binary-frontend && echo "print contents of /work/dist" && ls -alt /work/dist
|
||||
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v2
|
||||
|
||||
@@ -0,0 +1,69 @@
|
||||
name: Docker - Nightly
|
||||
on:
|
||||
schedule:
|
||||
- cron: '36 12 * * *'
|
||||
|
||||
env:
|
||||
REGISTRY: ghcr.io
|
||||
IMAGE_NAME: ${{ github.repository }}
|
||||
|
||||
jobs:
|
||||
omnibus:
|
||||
runs-on: ubuntu-latest
|
||||
permissions:
|
||||
contents: read
|
||||
packages: write
|
||||
|
||||
steps:
|
||||
- name: Checkout repository
|
||||
uses: actions/checkout@v2
|
||||
- name: "Populate frontend version information"
|
||||
run: "cd webapp/frontend && ./git.version.sh"
|
||||
- name: "Generate frontend & version information"
|
||||
uses: addnab/docker-run-action@v3
|
||||
with:
|
||||
image: node:lts
|
||||
options: -v ${{ github.workspace }}:/work
|
||||
run: |
|
||||
cd /work
|
||||
make binary-frontend && echo "print contents of /work/dist" && ls -alt /work/dist
|
||||
|
||||
|
||||
- name: Set up QEMU
|
||||
uses: docker/setup-qemu-action@v2
|
||||
with:
|
||||
platforms: 'arm64,arm'
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v2
|
||||
# Login against a Docker registry except on PR
|
||||
# https://github.com/docker/login-action
|
||||
- name: Log into registry ${{ env.REGISTRY }}
|
||||
if: github.event_name != 'pull_request'
|
||||
uses: docker/login-action@v2
|
||||
with:
|
||||
registry: ${{ env.REGISTRY }}
|
||||
username: ${{ github.actor }}
|
||||
password: ${{ secrets.GITHUB_TOKEN }}
|
||||
# Extract metadata (tags, labels) for Docker
|
||||
# https://github.com/docker/metadata-action
|
||||
- name: Extract Docker metadata
|
||||
id: meta
|
||||
uses: docker/metadata-action@v4
|
||||
with:
|
||||
tags: |
|
||||
type=ref,enable=true,event=branch,suffix=-omnibus-nightly
|
||||
type=ref,enable=true,event=tag,suffix=-omnibus-nightly
|
||||
images: ${{ env.REGISTRY }}/${{ env.IMAGE_NAME }}
|
||||
# Build and push Docker image with Buildx (don't push on PR)
|
||||
# https://github.com/docker/build-push-action
|
||||
- name: Build and push Docker image
|
||||
uses: docker/build-push-action@v3
|
||||
with:
|
||||
platforms: linux/amd64,linux/arm64
|
||||
context: .
|
||||
file: docker/Dockerfile
|
||||
push: false
|
||||
tags: ${{ steps.meta.outputs.tags }}
|
||||
labels: ${{ steps.meta.outputs.labels }}
|
||||
# cache-from: type=gha
|
||||
# cache-to: type=gha,mode=max
|
||||
@@ -1,83 +0,0 @@
|
||||
# compiles FreeBSD artifacts and attaches them to build
|
||||
name: Release FreeBSD
|
||||
|
||||
on:
|
||||
release:
|
||||
# Only use the types keyword to narrow down the activity types that will trigger your workflow.
|
||||
types: [published]
|
||||
workflow_dispatch:
|
||||
inputs:
|
||||
tag_name:
|
||||
description: 'tag to build artifacts for'
|
||||
required: true
|
||||
default: 'v0.0.0'
|
||||
jobs:
|
||||
|
||||
release-freebsd:
|
||||
name: Release FreeBSD
|
||||
runs-on: macos-10.15
|
||||
env:
|
||||
PROJECT_PATH: /go/src/github.com/analogj/scrutiny
|
||||
GOPATH: /go
|
||||
GOOS: freebsd
|
||||
GOARCH: amd64
|
||||
steps:
|
||||
- name: Checkout
|
||||
uses: actions/checkout@v2
|
||||
with:
|
||||
ref: ${{github.event.release.tag_name || github.event.inputs.tag_name }}
|
||||
- name: Build Binaries
|
||||
uses: vmactions/freebsd-vm@v0.1.5
|
||||
with:
|
||||
envs: 'PROJECT_PATH GOPATH GOOS GOARCH'
|
||||
usesh: true
|
||||
#TODO: lock go version using https://www.jeremymorgan.com/tutorials/golang/how-to-install-go-freebsd/
|
||||
prepare: pkg install -y curl go gmake
|
||||
run: |
|
||||
pwd
|
||||
ls -lah
|
||||
whoami
|
||||
freebsd-version
|
||||
|
||||
mkdir -p $(dirname "$PROJECT_PATH")
|
||||
cp -R $GITHUB_WORKSPACE $PROJECT_PATH
|
||||
cd $PROJECT_PATH
|
||||
|
||||
mkdir -p $GITHUB_WORKSPACE/dist
|
||||
|
||||
echo "building web binary (OS = ${GOOS}, ARCH = ${GOARCH})"
|
||||
go build -ldflags "-extldflags=-static -X main.goos=${GOOS} -X main.goarch=${GOARCH}" -o $GITHUB_WORKSPACE/dist/scrutiny-web-${GOOS}-${GOARCH} -tags "static netgo sqlite_omit_load_extension" webapp/backend/cmd/scrutiny/scrutiny.go
|
||||
|
||||
chmod +x "$GITHUB_WORKSPACE/dist/scrutiny-web-${GOOS}-${GOARCH}"
|
||||
file "$GITHUB_WORKSPACE/dist/scrutiny-web-${GOOS}-${GOARCH}" || true
|
||||
ldd "$GITHUB_WORKSPACE/dist/scrutiny-web-${GOOS}-${GOARCH}" || true
|
||||
|
||||
echo "building collector binary (OS = ${GOOS}, ARCH = ${GOARCH})"
|
||||
go build -ldflags "-extldflags=-static -X main.goos=${GOOS} -X main.goarch=${GOARCH}" -o $GITHUB_WORKSPACE/dist/scrutiny-collector-metrics-${GOOS}-${GOARCH} -tags "static netgo" collector/cmd/collector-metrics/collector-metrics.go
|
||||
|
||||
chmod +x "$GITHUB_WORKSPACE/dist/scrutiny-collector-metrics-${GOOS}-${GOARCH}"
|
||||
file "$GITHUB_WORKSPACE/dist/scrutiny-collector-metrics-${GOOS}-${GOARCH}" || true
|
||||
ldd "$GITHUB_WORKSPACE/dist/scrutiny-collector-metrics-${GOOS}-${GOARCH}" || true
|
||||
|
||||
- name: Release Asset - Collector - freebsd-amd64
|
||||
id: upload-release-asset2
|
||||
uses: actions/upload-release-asset@v1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.SCRUTINY_GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ github.event.release.upload_url }} # This pulls from the CREATE RELEASE step above, referencing it's ID to get its outputs object, which include a `upload_url`. See this blog post for more info: https://jasonet.co/posts/new-features-of-github-actions/#passing-data-to-future-steps
|
||||
asset_path: './dist/scrutiny-collector-metrics-freebsd-amd64'
|
||||
asset_name: scrutiny-collector-metrics-freebsd-amd64
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
- name: Release Asset - Web - freebsd-amd64
|
||||
id: upload-release-asset1
|
||||
uses: actions/upload-release-asset@v1
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.SCRUTINY_GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ github.event.release.upload_url }} # This pulls from the CREATE RELEASE step above, referencing it's ID to get its outputs object, which include a `upload_url`. See this blog post for more info: https://jasonet.co/posts/new-features-of-github-actions/#passing-data-to-future-steps
|
||||
asset_path: './dist/scrutiny-web-freebsd-amd64'
|
||||
asset_name: scrutiny-web-freebsd-amd64
|
||||
asset_content_type: application/octet-stream
|
||||
|
||||
@@ -19,11 +19,8 @@ jobs:
|
||||
run: "cd webapp/frontend && ./git.version.sh"
|
||||
- name: Build Frontend
|
||||
run: |
|
||||
cd webapp/frontend
|
||||
npm install -g @angular/cli@9.1.4
|
||||
npm install
|
||||
mkdir -p dist
|
||||
npm run build:prod -- --output-path=dist
|
||||
apt-get update && apt-get install -y make
|
||||
make binary-frontend
|
||||
tar -czf scrutiny-web-frontend.tar.gz dist
|
||||
- name: Upload Frontend Asset
|
||||
id: upload-release-asset3
|
||||
@@ -32,6 +29,6 @@ jobs:
|
||||
GITHUB_TOKEN: ${{ secrets.SCRUTINY_GITHUB_TOKEN }}
|
||||
with:
|
||||
upload_url: ${{ github.event.release.upload_url }} # This pulls from the CREATE RELEASE step above, referencing it's ID to get its outputs object, which include a `upload_url`. See this blog post for more info: https://jasonet.co/posts/new-features-of-github-actions/#passing-data-to-future-steps
|
||||
asset_path: './webapp/frontend/scrutiny-web-frontend.tar.gz'
|
||||
asset_path: './scrutiny-web-frontend.tar.gz'
|
||||
asset_name: scrutiny-web-frontend.tar.gz
|
||||
asset_content_type: application/gzip
|
||||
|
||||
@@ -13,10 +13,10 @@ on:
|
||||
default: 'webapp/backend/pkg/version/version.go'
|
||||
|
||||
jobs:
|
||||
build:
|
||||
name: Build
|
||||
release:
|
||||
name: Create Release Commit
|
||||
runs-on: ubuntu-latest
|
||||
container: techknowlogick/xgo:go-1.17.x
|
||||
container: ghcr.io/packagrio/packagr:latest-golang
|
||||
# Service containers to run with `build` (Required for end-to-end testing)
|
||||
services:
|
||||
influxdb:
|
||||
@@ -31,8 +31,7 @@ jobs:
|
||||
ports:
|
||||
- 8086:8086
|
||||
env:
|
||||
PROJECT_PATH: /go/src/github.com/analogj/scrutiny
|
||||
CGO_ENABLED: 1
|
||||
STATIC: true
|
||||
steps:
|
||||
- name: Git
|
||||
run: |
|
||||
@@ -53,34 +52,80 @@ jobs:
|
||||
GITHUB_TOKEN: ${{ secrets.SCRUTINY_GITHUB_TOKEN }} # Leave this line unchanged
|
||||
- name: Test
|
||||
run: |
|
||||
mkdir -p $(dirname "$PROJECT_PATH")
|
||||
cp -a $GITHUB_WORKSPACE $PROJECT_PATH
|
||||
cd $PROJECT_PATH
|
||||
|
||||
go mod vendor
|
||||
go test -v -tags "static" $(go list ./... | grep -v /vendor/)
|
||||
|
||||
- name: Build Binaries
|
||||
run: |
|
||||
|
||||
cd $PROJECT_PATH
|
||||
make all
|
||||
|
||||
# restore modified dir to GH workspace.
|
||||
cp -arf $PROJECT_PATH/. $GITHUB_WORKSPACE/
|
||||
|
||||
# copy all the build artifacts to the GH workspace
|
||||
cp -arf /build/. $GITHUB_WORKSPACE/
|
||||
|
||||
- name: Commit Changes
|
||||
make binary-clean binary-test-coverage
|
||||
- name: Commit Changes Locally
|
||||
id: commit
|
||||
uses: packagrio/action-releasr-go@master
|
||||
env:
|
||||
# This is necessary in order to push a commit to the repo
|
||||
GITHUB_TOKEN: ${{ secrets.SCRUTINY_GITHUB_TOKEN }} # Leave this line unchanged
|
||||
with:
|
||||
version_metadata_path: ${{ github.event.inputs.version_metadata_path }}
|
||||
- name: Publish Release
|
||||
- name: Upload workspace
|
||||
uses: actions/upload-artifact@v3
|
||||
with:
|
||||
name: workspace
|
||||
path: ${{ github.workspace }}/**/*
|
||||
retention-days: 1
|
||||
|
||||
build:
|
||||
name: Build ${{ matrix.cfg.goos }}/${{ matrix.cfg.goarch }}${{ matrix.cfg.goarm }}
|
||||
needs: release
|
||||
runs-on: ${{ matrix.cfg.on }}
|
||||
env:
|
||||
GOOS: ${{ matrix.cfg.goos }}
|
||||
GOARCH: ${{ matrix.cfg.goarch }}
|
||||
GOARM: ${{ matrix.cfg.goarm }}
|
||||
STATIC: true
|
||||
strategy:
|
||||
matrix:
|
||||
cfg:
|
||||
- { on: ubuntu-latest, goos: linux, goarch: amd64 }
|
||||
- { on: ubuntu-latest, goos: linux, goarch: arm, goarm: 5 }
|
||||
- { on: ubuntu-latest, goos: linux, goarch: arm, goarm: 6 }
|
||||
- { on: ubuntu-latest, goos: linux, goarch: arm, goarm: 7 }
|
||||
- { on: ubuntu-latest, goos: linux, goarch: arm64 }
|
||||
- { on: macos-latest, goos: darwin, goarch: amd64 }
|
||||
- { on: macos-latest, goos: darwin, goarch: arm64 }
|
||||
- { on: macos-latest, goos: freebsd, goarch: amd64 }
|
||||
- { on: windows-latest, goos: windows, goarch: amd64 }
|
||||
- { on: windows-latest, goos: windows, goarch: arm64 }
|
||||
steps:
|
||||
- name: Download workspace
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: workspace
|
||||
- uses: actions/setup-go@v3
|
||||
with:
|
||||
go-version: '1.18.3' # The Go version to download (if necessary) and use.
|
||||
- name: Build Binaries
|
||||
run: |
|
||||
make binary-clean binary-all
|
||||
- name: Archive
|
||||
uses: actions/upload-artifact@v2
|
||||
with:
|
||||
name: binaries.zip
|
||||
path: |
|
||||
scrutiny-web-*
|
||||
scrutiny-collector-metrics-*
|
||||
|
||||
release-publish:
|
||||
name: Publish Release
|
||||
needs: build
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- name: Download workspace
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: workspace
|
||||
- name: Download binaries
|
||||
uses: actions/download-artifact@v3
|
||||
with:
|
||||
name: binaries.zip
|
||||
- name: List
|
||||
shell: bash
|
||||
run: |
|
||||
ls -alt
|
||||
- name: Publish Release & Assets
|
||||
id: publish
|
||||
uses: packagrio/action-publishr-go@master
|
||||
env:
|
||||
@@ -89,15 +134,23 @@ jobs:
|
||||
with:
|
||||
version_metadata_path: ${{ github.event.inputs.version_metadata_path }}
|
||||
upload_assets:
|
||||
scrutiny-web-linux-amd64
|
||||
scrutiny-collector-metrics-darwin-amd64
|
||||
scrutiny-collector-metrics-darwin-arm64
|
||||
scrutiny-collector-metrics-freebsd-amd64
|
||||
scrutiny-collector-metrics-linux-amd64
|
||||
scrutiny-web-linux-arm64
|
||||
scrutiny-collector-metrics-linux-arm64
|
||||
scrutiny-web-linux-arm-5
|
||||
scrutiny-collector-metrics-linux-arm-5
|
||||
scrutiny-web-linux-arm-6
|
||||
scrutiny-collector-metrics-linux-arm-6
|
||||
scrutiny-web-linux-arm-7
|
||||
scrutiny-collector-metrics-linux-arm-7
|
||||
scrutiny-web-windows-4.0-amd64.exe
|
||||
scrutiny-collector-metrics-windows-4.0-amd64.exe
|
||||
scrutiny-collector-metrics-linux-arm64
|
||||
scrutiny-collector-metrics-windows-amd64.exe
|
||||
scrutiny-collector-metrics-windows-arm64.exe
|
||||
scrutiny-web-darwin-amd64
|
||||
scrutiny-web-darwin-arm64
|
||||
scrutiny-web-freebsd-amd64
|
||||
scrutiny-web-linux-amd64
|
||||
scrutiny-web-linux-arm-5
|
||||
scrutiny-web-linux-arm-6
|
||||
scrutiny-web-linux-arm-7
|
||||
scrutiny-web-linux-arm64
|
||||
scrutiny-web-windows-amd64.exe
|
||||
scrutiny-web-windows-arm64.exe
|
||||
|
||||
@@ -1,19 +0,0 @@
|
||||
name: Cleanup Artifacts
|
||||
|
||||
on:
|
||||
schedule:
|
||||
# Every day at 1am
|
||||
- cron: '0 1 * * *'
|
||||
|
||||
jobs:
|
||||
remove-old-artifacts:
|
||||
runs-on: ubuntu-latest
|
||||
timeout-minutes: 10
|
||||
|
||||
steps:
|
||||
- name: Remove old artifacts
|
||||
uses: c-hive/gha-remove-artifacts@v1
|
||||
with:
|
||||
age: '1 day'
|
||||
skip-tags: true
|
||||
skip-recent: 5
|
||||
+4
-3
@@ -9,8 +9,9 @@ Depending on the functionality you are adding, you may need to setup a developme
|
||||
|
||||
# Modifying the Scrutiny Backend Server (API)
|
||||
|
||||
1. install the [Go runtime](https://go.dev/doc/install) (v1.17+)
|
||||
2. download the `scrutiny-web-frontend.tar.gz` for the [latest release](https://github.com/AnalogJ/scrutiny/releases/latest). Extract to a folder named `dist`
|
||||
1. install the [Go runtime](https://go.dev/doc/install) (v1.18+)
|
||||
2. download the `scrutiny-web-frontend.tar.gz` for
|
||||
the [latest release](https://github.com/AnalogJ/scrutiny/releases/latest). Extract to a folder named `dist`
|
||||
3. create a `scrutiny.yaml` config file
|
||||
```yaml
|
||||
# config file for local development. store as scrutiny.yaml
|
||||
@@ -62,7 +63,7 @@ The frontend is written in Angular. If you're working on the frontend and can us
|
||||
If you're developing a feature that requires changes to the backend and the frontend, or a frontend feature that requires real data,
|
||||
you'll need to follow the steps below:
|
||||
|
||||
1. install the [Go runtime](https://go.dev/doc/install) (v1.17+)
|
||||
1. install the [Go runtime](https://go.dev/doc/install) (v1.18+)
|
||||
2. install [NodeJS](https://nodejs.org/en/download/)
|
||||
3. create a `scrutiny.yaml` config file
|
||||
```yaml
|
||||
|
||||
@@ -1,66 +1,133 @@
|
||||
export CGO_ENABLED = 1
|
||||
.ONESHELL: # Applies to every targets in the file! .ONESHELL instructs make to invoke a single instance of the shell and provide it with the entire recipe, regardless of how many lines it contains.
|
||||
.SHELLFLAGS = -ec
|
||||
|
||||
########################################################################################################################
|
||||
# Global Env Settings
|
||||
########################################################################################################################
|
||||
|
||||
GO_WORKSPACE ?= /go/src/github.com/analogj/scrutiny
|
||||
|
||||
BINARY=\
|
||||
linux/amd64 \
|
||||
linux/arm-5 \
|
||||
linux/arm-6 \
|
||||
linux/arm-7 \
|
||||
linux/arm64 \
|
||||
COLLECTOR_BINARY_NAME = scrutiny-collector-metrics
|
||||
WEB_BINARY_NAME = scrutiny-web
|
||||
LD_FLAGS =
|
||||
|
||||
.ONESHELL: # Applies to every targets in the file! .ONESHELL instructs make to invoke a single instance of the shell and provide it with the entire recipe, regardless of how many lines it contains.
|
||||
.PHONY: all $(BINARY)
|
||||
all: $(BINARY) windows/amd64
|
||||
STATIC_TAGS =
|
||||
# enable multiarch docker image builds
|
||||
DOCKER_TARGETARCH_BUILD_ARG =
|
||||
ifdef TARGETARCH
|
||||
DOCKER_TARGETARCH_BUILD_ARG := $(DOCKER_TARGETARCH_BUILD_ARG) --build-arg TARGETARCH=$(TARGETARCH)
|
||||
endif
|
||||
|
||||
$(BINARY): OS = $(word 1,$(subst /, ,$*))
|
||||
$(BINARY): ARCH = $(word 2,$(subst /, ,$*))
|
||||
$(BINARY): build/scrutiny-web-%:
|
||||
@echo "building web binary (OS = $(OS), ARCH = $(ARCH))"
|
||||
xgo -v --targets="$(OS)/$(ARCH)" -ldflags "-extldflags=-static -X main.goos=$(OS) -X main.goarch=$(ARCH)" -out scrutiny-web -tags "static netgo sqlite_omit_load_extension" ${GO_WORKSPACE}/webapp/backend/cmd/scrutiny/
|
||||
# enable to build static binaries.
|
||||
ifdef STATIC
|
||||
export CGO_ENABLED = 0
|
||||
LD_FLAGS := $(LD_FLAGS) -extldflags=-static
|
||||
STATIC_TAGS := $(STATIC_TAGS) -tags "static netgo"
|
||||
endif
|
||||
ifdef GOOS
|
||||
COLLECTOR_BINARY_NAME := $(COLLECTOR_BINARY_NAME)-$(GOOS)
|
||||
WEB_BINARY_NAME := $(WEB_BINARY_NAME)-$(GOOS)
|
||||
LD_FLAGS := $(LD_FLAGS) -X main.goos=$(GOOS)
|
||||
endif
|
||||
ifdef GOARCH
|
||||
COLLECTOR_BINARY_NAME := $(COLLECTOR_BINARY_NAME)-$(GOARCH)
|
||||
WEB_BINARY_NAME := $(WEB_BINARY_NAME)-$(GOARCH)
|
||||
LD_FLAGS := $(LD_FLAGS) -X main.goarch=$(GOARCH)
|
||||
endif
|
||||
ifdef GOARM
|
||||
COLLECTOR_BINARY_NAME := $(COLLECTOR_BINARY_NAME)-$(GOARM)
|
||||
WEB_BINARY_NAME := $(WEB_BINARY_NAME)-$(GOARM)
|
||||
endif
|
||||
ifeq ($(OS),Windows_NT)
|
||||
COLLECTOR_BINARY_NAME := $(COLLECTOR_BINARY_NAME).exe
|
||||
WEB_BINARY_NAME := $(WEB_BINARY_NAME).exe
|
||||
endif
|
||||
|
||||
chmod +x "/build/scrutiny-web-$(OS)-$(ARCH)"
|
||||
file "/build/scrutiny-web-$(OS)-$(ARCH)" || true
|
||||
ldd "/build/scrutiny-web-$(OS)-$(ARCH)" || true
|
||||
########################################################################################################################
|
||||
# Binary
|
||||
########################################################################################################################
|
||||
.PHONY: all
|
||||
all: binary-all
|
||||
|
||||
@echo "building collector binary (OS = $(OS), ARCH = $(ARCH))"
|
||||
xgo -v --targets="$(OS)/$(ARCH)" -ldflags "-extldflags=-static -X main.goos=$(OS) -X main.goarch=$(ARCH)" -out scrutiny-collector-metrics -tags "static netgo" ${GO_WORKSPACE}/collector/cmd/collector-metrics/
|
||||
|
||||
chmod +x "/build/scrutiny-collector-metrics-$(OS)-$(ARCH)"
|
||||
file "/build/scrutiny-collector-metrics-$(OS)-$(ARCH)" || true
|
||||
ldd "/build/scrutiny-collector-metrics-$(OS)-$(ARCH)" || true
|
||||
|
||||
windows/amd64: export OS = windows
|
||||
windows/amd64: export ARCH = amd64
|
||||
windows/amd64:
|
||||
@echo "building web binary (OS = $(OS), ARCH = $(ARCH))"
|
||||
xgo -v --targets="$(OS)/$(ARCH)" -ldflags "-extldflags=-static -X main.goos=$(OS) -X main.goarch=$(ARCH)" -out scrutiny-web -tags "static netgo sqlite_omit_load_extension" ${GO_WORKSPACE}/webapp/backend/cmd/scrutiny/
|
||||
|
||||
@echo "building collector binary (OS = $(OS), ARCH = $(ARCH))"
|
||||
xgo -v --targets="$(OS)/$(ARCH)" -ldflags "-extldflags=-static -X main.goos=$(OS) -X main.goarch=$(ARCH)" -out scrutiny-collector-metrics -tags "static netgo" ${GO_WORKSPACE}/collector/cmd/collector-metrics/
|
||||
.PHONY: binary-all
|
||||
binary-all: binary-collector binary-web
|
||||
@echo "built binary-collector and binary-web targets"
|
||||
|
||||
|
||||
docker-collector:
|
||||
@echo "building collector docker image"
|
||||
docker build --build-arg TARGETARCH=amd64 -f docker/Dockerfile.collector -t analogj/scrutiny-dev:collector .
|
||||
.PHONY: binary-clean
|
||||
binary-clean:
|
||||
go clean
|
||||
|
||||
docker-web:
|
||||
@echo "building web docker image"
|
||||
docker build --build-arg TARGETARCH=amd64 -f docker/Dockerfile.web -t analogj/scrutiny-dev:web .
|
||||
.PHONY: binary-dep
|
||||
binary-dep:
|
||||
go mod vendor
|
||||
|
||||
docker-omnibus:
|
||||
@echo "building omnibus docker image"
|
||||
docker build --build-arg TARGETARCH=amd64 -f docker/Dockerfile -t analogj/scrutiny-dev:omnibus .
|
||||
.PHONY: binary-test
|
||||
binary-test: binary-dep
|
||||
go test -v $(STATIC_TAGS) ./...
|
||||
|
||||
.PHONY: binary-test-coverage
|
||||
binary-test-coverage: binary-dep
|
||||
go test -coverprofile=coverage.txt -covermode=atomic -v $(STATIC_TAGS) ./...
|
||||
|
||||
.PHONY: binary-collector
|
||||
binary-collector: binary-dep
|
||||
go build -ldflags "$(LD_FLAGS)" -o $(COLLECTOR_BINARY_NAME) $(STATIC_TAGS) ./collector/cmd/collector-metrics/
|
||||
ifneq ($(OS),Windows_NT)
|
||||
chmod +x $(COLLECTOR_BINARY_NAME)
|
||||
file $(COLLECTOR_BINARY_NAME) || true
|
||||
ldd $(COLLECTOR_BINARY_NAME) || true
|
||||
./$(COLLECTOR_BINARY_NAME) || true
|
||||
endif
|
||||
|
||||
.PHONY: binary-web
|
||||
binary-web: binary-dep
|
||||
go build -ldflags "$(LD_FLAGS)" -o $(WEB_BINARY_NAME) $(STATIC_TAGS) ./webapp/backend/cmd/scrutiny/
|
||||
ifneq ($(OS),Windows_NT)
|
||||
chmod +x $(WEB_BINARY_NAME)
|
||||
file $(WEB_BINARY_NAME) || true
|
||||
ldd $(WEB_BINARY_NAME) || true
|
||||
./$(WEB_BINARY_NAME) || true
|
||||
endif
|
||||
|
||||
########################################################################################################################
|
||||
# Binary
|
||||
########################################################################################################################
|
||||
|
||||
.PHONY: binary-frontend
|
||||
# reduce logging, disable angular-cli analytics for ci environment
|
||||
frontend: export NPM_CONFIG_LOGLEVEL = warn
|
||||
frontend: export NG_CLI_ANALYTICS = false
|
||||
frontend:
|
||||
binary-frontend: export NPM_CONFIG_LOGLEVEL = warn
|
||||
binary-frontend: export NG_CLI_ANALYTICS = false
|
||||
binary-frontend:
|
||||
cd webapp/frontend
|
||||
npm install -g @angular/cli@9.1.4
|
||||
mkdir -p $(CURDIR)/dist
|
||||
npm install
|
||||
npm ci
|
||||
npm run build:prod -- --output-path=$(CURDIR)/dist
|
||||
|
||||
# clean:
|
||||
# rm scrutiny-collector-metrics-* scrutiny-web-*
|
||||
.PHONY: binary-frontend-test-coverage
|
||||
# reduce logging, disable angular-cli analytics for ci environment
|
||||
binary-frontend-test-coverage:
|
||||
cd webapp/frontend
|
||||
npm ci
|
||||
npx ng test --watch=false --browsers=ChromeHeadless --code-coverage
|
||||
|
||||
########################################################################################################################
|
||||
# Docker
|
||||
# NOTE: these docker make targets are only used for local development (not used by Github Actions/CI)
|
||||
# NOTE: docker-web and docker-omnibus require `make binary-frontend` or frontend.tar.gz content in /dist before executing.
|
||||
########################################################################################################################
|
||||
.PHONY: docker-collector
|
||||
docker-collector:
|
||||
@echo "building collector docker image"
|
||||
docker build $(DOCKER_TARGETARCH_BUILD_ARG) -f docker/Dockerfile.collector -t analogj/scrutiny-dev:collector .
|
||||
|
||||
.PHONY: docker-web
|
||||
docker-web:
|
||||
@echo "building web docker image"
|
||||
docker build $(DOCKER_TARGETARCH_BUILD_ARG) -f docker/Dockerfile.web -t analogj/scrutiny-dev:web .
|
||||
|
||||
.PHONY: docker-omnibus
|
||||
docker-omnibus:
|
||||
@echo "building omnibus docker image"
|
||||
docker build $(DOCKER_TARGETARCH_BUILD_ARG) -f docker/Dockerfile -t analogj/scrutiny-dev:omnibus .
|
||||
|
||||
@@ -232,18 +232,18 @@ scrutiny-collector-metrics run --debug --log-file /tmp/collector.log
|
||||
|
||||
# Supported Architectures
|
||||
|
||||
|
||||
| Architecture Name | Binaries | Docker |
|
||||
| --- | --- | --- |
|
||||
| amd64 | :white_check_mark: | :white_check_mark: |
|
||||
| arm-5 | :white_check_mark: | |
|
||||
| arm-6 | :white_check_mark: | |
|
||||
| arm-7 | :white_check_mark: | web/collector only. see [#236](https://github.com/AnalogJ/scrutiny/issues/236) |
|
||||
| arm64 | :white_check_mark: | :white_check_mark: |
|
||||
| freebsd | collector only. see [#238](https://github.com/AnalogJ/scrutiny/issues/238) | |
|
||||
| linux-amd64 | :white_check_mark: | :white_check_mark: |
|
||||
| linux-arm-5 | :white_check_mark: | |
|
||||
| linux-arm-6 | :white_check_mark: | |
|
||||
| linux-arm-7 | :white_check_mark: | web/collector only. see [#236](https://github.com/AnalogJ/scrutiny/issues/236) |
|
||||
| linux-arm64 | :white_check_mark: | :white_check_mark: |
|
||||
| freebsd-amd64 | collector only. see [#238](https://github.com/AnalogJ/scrutiny/issues/238) | |
|
||||
| macos-amd64 | | :white_check_mark: |
|
||||
| macos-arm64 | | :white_check_mark: |
|
||||
| windows-amd64 | :white_check_mark: | |
|
||||
| windows-amd64 | :white_check_mark: | WIP, see [#15](https://github.com/AnalogJ/scrutiny/issues/15) |
|
||||
| windows-arm64 | :white_check_mark: | |
|
||||
|
||||
|
||||
# Contributing
|
||||
|
||||
@@ -9,6 +9,7 @@ import (
|
||||
"github.com/analogj/scrutiny/collector/pkg/detect"
|
||||
"github.com/analogj/scrutiny/collector/pkg/errors"
|
||||
"github.com/analogj/scrutiny/collector/pkg/models"
|
||||
"github.com/samber/lo"
|
||||
"github.com/sirupsen/logrus"
|
||||
"net/url"
|
||||
"os"
|
||||
@@ -56,11 +57,16 @@ func (mc *MetricsCollector) Run() error {
|
||||
Logger: mc.logger,
|
||||
Config: mc.config,
|
||||
}
|
||||
detectedStorageDevices, err := deviceDetector.Start()
|
||||
rawDetectedStorageDevices, err := deviceDetector.Start()
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
|
||||
//filter any device with empty wwn (they are invalid)
|
||||
detectedStorageDevices := lo.Filter[models.Device](rawDetectedStorageDevices, func(dev models.Device, _ int) bool {
|
||||
return len(dev.WWN) > 0
|
||||
})
|
||||
|
||||
mc.logger.Infoln("Sending detected devices to API, for filtering & validation")
|
||||
jsonObj, _ := json.Marshal(detectedStorageDevices)
|
||||
mc.logger.Debugf("Detected devices: %v", string(jsonObj))
|
||||
@@ -98,10 +104,10 @@ func (mc *MetricsCollector) Run() error {
|
||||
|
||||
func (mc *MetricsCollector) Validate() error {
|
||||
mc.logger.Infoln("Verifying required tools")
|
||||
_, lookErr := exec.LookPath("smartctl")
|
||||
_, lookErr := exec.LookPath(mc.config.GetString("commands.metrics_smartctl_bin"))
|
||||
|
||||
if lookErr != nil {
|
||||
return errors.DependencyMissingError("smartctl is missing")
|
||||
return errors.DependencyMissingError(fmt.Sprintf("%s binary is missing", mc.config.GetString("commands.metrics_smartctl_bin")))
|
||||
}
|
||||
|
||||
return nil
|
||||
@@ -124,7 +130,7 @@ func (mc *MetricsCollector) Collect(deviceWWN string, deviceName string, deviceT
|
||||
}
|
||||
args = append(args, fullDeviceName)
|
||||
|
||||
result, err := mc.shell.Command(mc.logger, "smartctl", args, "", os.Environ())
|
||||
result, err := mc.shell.Command(mc.logger, mc.config.GetString("commands.metrics_smartctl_bin"), args, "", os.Environ())
|
||||
resultBytes := []byte(result)
|
||||
if err != nil {
|
||||
if exitError, ok := err.(*exec.ExitError); ok {
|
||||
|
||||
@@ -43,6 +43,7 @@ func (c *configuration) Init() error {
|
||||
|
||||
c.SetDefault("api.endpoint", "http://localhost:8080")
|
||||
|
||||
c.SetDefault("commands.metrics_smartctl_bin", "smartctl")
|
||||
c.SetDefault("commands.metrics_scan_args", "--scan --json")
|
||||
c.SetDefault("commands.metrics_info_args", "--info --json")
|
||||
c.SetDefault("commands.metrics_smart_args", "--xall --json")
|
||||
|
||||
@@ -29,7 +29,7 @@ type Detect struct {
|
||||
func (d *Detect) SmartctlScan() ([]models.Device, error) {
|
||||
//we use smartctl to detect all the drives available.
|
||||
args := strings.Split(d.Config.GetString("commands.metrics_scan_args"), " ")
|
||||
detectedDeviceConnJson, err := d.Shell.Command(d.Logger, "smartctl", args, "", os.Environ())
|
||||
detectedDeviceConnJson, err := d.Shell.Command(d.Logger, d.Config.GetString("commands.metrics_smartctl_bin"), args, "", os.Environ())
|
||||
if err != nil {
|
||||
d.Logger.Errorf("Error scanning for devices: %v", err)
|
||||
return nil, err
|
||||
@@ -60,7 +60,7 @@ func (d *Detect) SmartCtlInfo(device *models.Device) error {
|
||||
}
|
||||
args = append(args, fullDeviceName)
|
||||
|
||||
availableDeviceInfoJson, err := d.Shell.Command(d.Logger, "smartctl", args, "", os.Environ())
|
||||
availableDeviceInfoJson, err := d.Shell.Command(d.Logger, d.Config.GetString("commands.metrics_smartctl_bin"), args, "", os.Environ())
|
||||
if err != nil {
|
||||
d.Logger.Errorf("Could not retrieve device information for %s: %v", device.DeviceName, err)
|
||||
return err
|
||||
@@ -149,10 +149,35 @@ func (d *Detect) TransformDetectedDevices(detectedDeviceConns models.Scan) []mod
|
||||
//create a new device group, and replace the one generated by smartctl --scan
|
||||
overrideDeviceGroup := []models.Device{}
|
||||
|
||||
for _, overrideDeviceType := range overrideDevice.DeviceType {
|
||||
if overrideDevice.DeviceType != nil {
|
||||
for _, overrideDeviceType := range overrideDevice.DeviceType {
|
||||
overrideDeviceGroup = append(overrideDeviceGroup, models.Device{
|
||||
HostId: d.Config.GetString("host.id"),
|
||||
DeviceType: overrideDeviceType,
|
||||
DeviceName: strings.TrimPrefix(overrideDeviceFile, DevicePrefix()),
|
||||
})
|
||||
}
|
||||
} else {
|
||||
//user may have specified device in config file without device type (default to scanned device type)
|
||||
|
||||
//check if the device file was detected by the scanner
|
||||
var deviceType string
|
||||
if scannedDevice, foundScannedDevice := groupedDevices[overrideDeviceFile]; foundScannedDevice {
|
||||
if len(scannedDevice) > 0 {
|
||||
//take the device type from the first grouped device
|
||||
deviceType = scannedDevice[0].DeviceType
|
||||
} else {
|
||||
deviceType = "ata"
|
||||
}
|
||||
|
||||
} else {
|
||||
//fallback to ata if no scanned device detected
|
||||
deviceType = "ata"
|
||||
}
|
||||
|
||||
overrideDeviceGroup = append(overrideDeviceGroup, models.Device{
|
||||
HostId: d.Config.GetString("host.id"),
|
||||
DeviceType: overrideDeviceType,
|
||||
DeviceType: deviceType,
|
||||
DeviceName: strings.TrimPrefix(overrideDeviceFile, DevicePrefix()),
|
||||
})
|
||||
}
|
||||
|
||||
@@ -19,6 +19,7 @@ func TestDetect_SmartctlScan(t *testing.T) {
|
||||
fakeConfig := mock_config.NewMockInterface(mockCtrl)
|
||||
fakeConfig.EXPECT().GetString("host.id").AnyTimes().Return("")
|
||||
fakeConfig.EXPECT().GetDeviceOverrides().AnyTimes().Return([]models.ScanOverride{})
|
||||
fakeConfig.EXPECT().GetString("commands.metrics_smartctl_bin").AnyTimes().Return("smartctl")
|
||||
fakeConfig.EXPECT().GetString("commands.metrics_scan_args").AnyTimes().Return("--scan --json")
|
||||
|
||||
fakeShell := mock_shell.NewMockInterface(mockCtrl)
|
||||
@@ -47,6 +48,7 @@ func TestDetect_SmartctlScan_Megaraid(t *testing.T) {
|
||||
fakeConfig := mock_config.NewMockInterface(mockCtrl)
|
||||
fakeConfig.EXPECT().GetString("host.id").AnyTimes().Return("")
|
||||
fakeConfig.EXPECT().GetDeviceOverrides().AnyTimes().Return([]models.ScanOverride{})
|
||||
fakeConfig.EXPECT().GetString("commands.metrics_smartctl_bin").AnyTimes().Return("smartctl")
|
||||
fakeConfig.EXPECT().GetString("commands.metrics_scan_args").AnyTimes().Return("--scan --json")
|
||||
|
||||
fakeShell := mock_shell.NewMockInterface(mockCtrl)
|
||||
@@ -78,6 +80,7 @@ func TestDetect_SmartctlScan_Nvme(t *testing.T) {
|
||||
fakeConfig := mock_config.NewMockInterface(mockCtrl)
|
||||
fakeConfig.EXPECT().GetString("host.id").AnyTimes().Return("")
|
||||
fakeConfig.EXPECT().GetDeviceOverrides().AnyTimes().Return([]models.ScanOverride{})
|
||||
fakeConfig.EXPECT().GetString("commands.metrics_smartctl_bin").AnyTimes().Return("smartctl")
|
||||
fakeConfig.EXPECT().GetString("commands.metrics_scan_args").AnyTimes().Return("--scan --json")
|
||||
|
||||
fakeShell := mock_shell.NewMockInterface(mockCtrl)
|
||||
@@ -108,6 +111,7 @@ func TestDetect_TransformDetectedDevices_Empty(t *testing.T) {
|
||||
fakeConfig := mock_config.NewMockInterface(mockCtrl)
|
||||
fakeConfig.EXPECT().GetString("host.id").AnyTimes().Return("")
|
||||
fakeConfig.EXPECT().GetDeviceOverrides().AnyTimes().Return([]models.ScanOverride{})
|
||||
fakeConfig.EXPECT().GetString("commands.metrics_smartctl_bin").AnyTimes().Return("smartctl")
|
||||
fakeConfig.EXPECT().GetString("commands.metrics_scan_args").AnyTimes().Return("--scan --json")
|
||||
|
||||
detectedDevices := models.Scan{
|
||||
@@ -140,6 +144,7 @@ func TestDetect_TransformDetectedDevices_Ignore(t *testing.T) {
|
||||
fakeConfig := mock_config.NewMockInterface(mockCtrl)
|
||||
fakeConfig.EXPECT().GetString("host.id").AnyTimes().Return("")
|
||||
fakeConfig.EXPECT().GetDeviceOverrides().AnyTimes().Return([]models.ScanOverride{{Device: "/dev/sda", DeviceType: nil, Ignore: true}})
|
||||
fakeConfig.EXPECT().GetString("commands.metrics_smartctl_bin").AnyTimes().Return("smartctl")
|
||||
fakeConfig.EXPECT().GetString("commands.metrics_scan_args").AnyTimes().Return("--scan --json")
|
||||
|
||||
detectedDevices := models.Scan{
|
||||
@@ -170,6 +175,7 @@ func TestDetect_TransformDetectedDevices_Raid(t *testing.T) {
|
||||
defer mockCtrl.Finish()
|
||||
fakeConfig := mock_config.NewMockInterface(mockCtrl)
|
||||
fakeConfig.EXPECT().GetString("host.id").AnyTimes().Return("")
|
||||
fakeConfig.EXPECT().GetString("commands.metrics_smartctl_bin").AnyTimes().Return("smartctl")
|
||||
fakeConfig.EXPECT().GetString("commands.metrics_scan_args").AnyTimes().Return("--scan --json")
|
||||
fakeConfig.EXPECT().GetDeviceOverrides().AnyTimes().Return([]models.ScanOverride{
|
||||
{
|
||||
@@ -210,6 +216,7 @@ func TestDetect_TransformDetectedDevices_Simple(t *testing.T) {
|
||||
defer mockCtrl.Finish()
|
||||
fakeConfig := mock_config.NewMockInterface(mockCtrl)
|
||||
fakeConfig.EXPECT().GetString("host.id").AnyTimes().Return("")
|
||||
fakeConfig.EXPECT().GetString("commands.metrics_smartctl_bin").AnyTimes().Return("smartctl")
|
||||
fakeConfig.EXPECT().GetString("commands.metrics_scan_args").AnyTimes().Return("--scan --json")
|
||||
fakeConfig.EXPECT().GetDeviceOverrides().AnyTimes().Return([]models.ScanOverride{{Device: "/dev/sda", DeviceType: []string{"sat+megaraid"}}})
|
||||
detectedDevices := models.Scan{
|
||||
@@ -234,3 +241,59 @@ func TestDetect_TransformDetectedDevices_Simple(t *testing.T) {
|
||||
require.Equal(t, 1, len(transformedDevices))
|
||||
require.Equal(t, "sat+megaraid", transformedDevices[0].DeviceType)
|
||||
}
|
||||
|
||||
// test https://github.com/AnalogJ/scrutiny/issues/255#issuecomment-1164024126
|
||||
func TestDetect_TransformDetectedDevices_WithoutDeviceTypeOverride(t *testing.T) {
|
||||
//setup
|
||||
mockCtrl := gomock.NewController(t)
|
||||
defer mockCtrl.Finish()
|
||||
fakeConfig := mock_config.NewMockInterface(mockCtrl)
|
||||
fakeConfig.EXPECT().GetString("host.id").AnyTimes().Return("")
|
||||
fakeConfig.EXPECT().GetString("commands.metrics_smartctl_bin").AnyTimes().Return("smartctl")
|
||||
fakeConfig.EXPECT().GetString("commands.metrics_scan_args").AnyTimes().Return("--scan --json")
|
||||
fakeConfig.EXPECT().GetDeviceOverrides().AnyTimes().Return([]models.ScanOverride{{Device: "/dev/sda"}})
|
||||
detectedDevices := models.Scan{
|
||||
Devices: []models.ScanDevice{
|
||||
{
|
||||
Name: "/dev/sda",
|
||||
InfoName: "/dev/sda",
|
||||
Protocol: "ata",
|
||||
Type: "scsi",
|
||||
},
|
||||
},
|
||||
}
|
||||
|
||||
d := detect.Detect{
|
||||
Config: fakeConfig,
|
||||
}
|
||||
|
||||
//test
|
||||
transformedDevices := d.TransformDetectedDevices(detectedDevices)
|
||||
|
||||
//assert
|
||||
require.Equal(t, 1, len(transformedDevices))
|
||||
require.Equal(t, "scsi", transformedDevices[0].DeviceType)
|
||||
}
|
||||
|
||||
func TestDetect_TransformDetectedDevices_WhenDeviceNotDetected(t *testing.T) {
|
||||
//setup
|
||||
mockCtrl := gomock.NewController(t)
|
||||
defer mockCtrl.Finish()
|
||||
fakeConfig := mock_config.NewMockInterface(mockCtrl)
|
||||
fakeConfig.EXPECT().GetString("host.id").AnyTimes().Return("")
|
||||
fakeConfig.EXPECT().GetString("commands.metrics_smartctl_bin").AnyTimes().Return("smartctl")
|
||||
fakeConfig.EXPECT().GetString("commands.metrics_scan_args").AnyTimes().Return("--scan --json")
|
||||
fakeConfig.EXPECT().GetDeviceOverrides().AnyTimes().Return([]models.ScanOverride{{Device: "/dev/sda"}})
|
||||
detectedDevices := models.Scan{}
|
||||
|
||||
d := detect.Detect{
|
||||
Config: fakeConfig,
|
||||
}
|
||||
|
||||
//test
|
||||
transformedDevices := d.TransformDetectedDevices(detectedDevices)
|
||||
|
||||
//assert
|
||||
require.Equal(t, 1, len(transformedDevices))
|
||||
require.Equal(t, "ata", transformedDevices[0].DeviceType)
|
||||
}
|
||||
|
||||
+10
-11
@@ -1,17 +1,19 @@
|
||||
########################################################################################################################
|
||||
# Omnibus Image
|
||||
# NOTE: this image requires the `make binary-frontend` target to have been run before `docker build` The `dist` directory must exist.
|
||||
########################################################################################################################
|
||||
|
||||
|
||||
########
|
||||
FROM golang:1.17.10-buster as backendbuild
|
||||
FROM golang:1.18-bullseye as backendbuild
|
||||
|
||||
WORKDIR /go/src/github.com/analogj/scrutiny
|
||||
|
||||
COPY . /go/src/github.com/analogj/scrutiny
|
||||
RUN make binary-clean binary-all WEB_BINARY_NAME=scrutiny
|
||||
|
||||
RUN go mod vendor && \
|
||||
go build -ldflags '-w -extldflags "-static"' -o scrutiny webapp/backend/cmd/scrutiny/scrutiny.go && \
|
||||
go build -ldflags '-w -extldflags "-static"' -o scrutiny-collector-selftest collector/cmd/collector-selftest/collector-selftest.go && \
|
||||
go build -ldflags '-w -extldflags "-static"' -o scrutiny-collector-metrics collector/cmd/collector-metrics/collector-metrics.go
|
||||
|
||||
########
|
||||
FROM ubuntu:latest as runtime
|
||||
FROM debian:bullseye-slim as runtime
|
||||
ARG TARGETARCH
|
||||
EXPOSE 8080
|
||||
WORKDIR /opt/scrutiny
|
||||
@@ -25,8 +27,7 @@ RUN apt-get update && apt-get install -y cron smartmontools ca-certificates curl
|
||||
"arm64") S6_ARCH=aarch64 ;; \
|
||||
esac \
|
||||
&& curl https://github.com/just-containers/s6-overlay/releases/download/v1.21.8.0/s6-overlay-${S6_ARCH}.tar.gz -L -s --output /tmp/s6-overlay-${S6_ARCH}.tar.gz \
|
||||
&& tar xzf /tmp/s6-overlay-${S6_ARCH}.tar.gz -C / --exclude="./bin" \
|
||||
&& tar xzf /tmp/s6-overlay-${S6_ARCH}.tar.gz -C /usr ./bin \
|
||||
&& tar xzf /tmp/s6-overlay-${S6_ARCH}.tar.gz -C / \
|
||||
&& rm -rf /tmp/s6-overlay-${S6_ARCH}.tar.gz \
|
||||
&& curl -L https://dl.influxdata.com/influxdb/releases/influxdb2-2.2.0-${TARGETARCH}.deb --output /tmp/influxdb2-2.2.0-${TARGETARCH}.deb \
|
||||
&& dpkg -i --force-all /tmp/influxdb2-2.2.0-${TARGETARCH}.deb
|
||||
@@ -35,11 +36,9 @@ COPY /rootfs /
|
||||
|
||||
COPY /rootfs/etc/cron.d/scrutiny /etc/cron.d/scrutiny
|
||||
COPY --from=backendbuild /go/src/github.com/analogj/scrutiny/scrutiny /opt/scrutiny/bin/
|
||||
COPY --from=backendbuild /go/src/github.com/analogj/scrutiny/scrutiny-collector-selftest /opt/scrutiny/bin/
|
||||
COPY --from=backendbuild /go/src/github.com/analogj/scrutiny/scrutiny-collector-metrics /opt/scrutiny/bin/
|
||||
COPY dist /opt/scrutiny/web
|
||||
RUN chmod +x /opt/scrutiny/bin/scrutiny && \
|
||||
chmod +x /opt/scrutiny/bin/scrutiny-collector-selftest && \
|
||||
chmod +x /opt/scrutiny/bin/scrutiny-collector-metrics && \
|
||||
chmod 0644 /etc/cron.d/scrutiny && \
|
||||
rm -f /etc/cron.daily/* && \
|
||||
|
||||
@@ -1,16 +1,19 @@
|
||||
########################################################################################################################
|
||||
# Collector Image
|
||||
########################################################################################################################
|
||||
|
||||
|
||||
########
|
||||
FROM golang:1.17.10-buster as backendbuild
|
||||
FROM golang:1.18-bullseye as backendbuild
|
||||
|
||||
WORKDIR /go/src/github.com/analogj/scrutiny
|
||||
|
||||
COPY . /go/src/github.com/analogj/scrutiny
|
||||
|
||||
RUN go mod vendor && \
|
||||
go build -ldflags '-w -extldflags "-static"' -o scrutiny-collector-selftest collector/cmd/collector-selftest/collector-selftest.go && \
|
||||
go build -ldflags '-w -extldflags "-static"' -o scrutiny-collector-metrics collector/cmd/collector-metrics/collector-metrics.go
|
||||
RUN make binary-clean binary-collector
|
||||
|
||||
########
|
||||
FROM ubuntu:latest as runtime
|
||||
FROM debian:bullseye-slim as runtime
|
||||
WORKDIR /scrutiny
|
||||
ENV PATH="/opt/scrutiny/bin:${PATH}"
|
||||
|
||||
@@ -18,10 +21,8 @@ RUN apt-get update && apt-get install -y cron smartmontools ca-certificates tzda
|
||||
|
||||
COPY /docker/entrypoint-collector.sh /entrypoint-collector.sh
|
||||
COPY /rootfs/etc/cron.d/scrutiny /etc/cron.d/scrutiny
|
||||
COPY --from=backendbuild /go/src/github.com/analogj/scrutiny/scrutiny-collector-selftest /opt/scrutiny/bin/
|
||||
COPY --from=backendbuild /go/src/github.com/analogj/scrutiny/scrutiny-collector-metrics /opt/scrutiny/bin/
|
||||
RUN chmod +x /opt/scrutiny/bin/scrutiny-collector-selftest && \
|
||||
chmod +x /opt/scrutiny/bin/scrutiny-collector-metrics && \
|
||||
RUN chmod +x /opt/scrutiny/bin/scrutiny-collector-metrics && \
|
||||
chmod +x /entrypoint-collector.sh && \
|
||||
chmod 0644 /etc/cron.d/scrutiny && \
|
||||
rm -f /etc/cron.daily/apt /etc/cron.daily/dpkg /etc/cron.daily/passwd
|
||||
|
||||
+10
-4
@@ -1,15 +1,21 @@
|
||||
########################################################################################################################
|
||||
# Web Image
|
||||
# NOTE: this image requires the `make binary-frontend` target to have been run before `docker build` The `dist` directory must exist.
|
||||
########################################################################################################################
|
||||
|
||||
|
||||
########
|
||||
FROM golang:1.17.10-buster as backendbuild
|
||||
FROM golang:1.18-bullseye as backendbuild
|
||||
|
||||
WORKDIR /go/src/github.com/analogj/scrutiny
|
||||
|
||||
COPY . /go/src/github.com/analogj/scrutiny
|
||||
|
||||
RUN go mod vendor && \
|
||||
go build -ldflags '-w -extldflags "-static"' -o scrutiny webapp/backend/cmd/scrutiny/scrutiny.go
|
||||
RUN make binary-clean binary-all WEB_BINARY_NAME=scrutiny
|
||||
|
||||
|
||||
########
|
||||
FROM ubuntu:latest as runtime
|
||||
FROM debian:bullseye-slim as runtime
|
||||
EXPOSE 8080
|
||||
WORKDIR /opt/scrutiny
|
||||
ENV PATH="/opt/scrutiny/bin:${PATH}"
|
||||
|
||||
@@ -1,7 +0,0 @@
|
||||
FROM techknowlogick/xgo:go-1.17.x
|
||||
|
||||
WORKDIR /go/src/github.com/analogj/scrutiny
|
||||
|
||||
COPY . /go/src/github.com/analogj/scrutiny
|
||||
|
||||
RUN make all
|
||||
Vendored
-18
@@ -1,18 +0,0 @@
|
||||
# This vagrant file is only used for local development & testing.
|
||||
|
||||
Vagrant.configure("2") do |config|
|
||||
config.vm.guest = :freebsd
|
||||
config.vm.synced_folder ".", "/vagrant", id: "vagrant-root", disabled: true
|
||||
config.vm.box = "freebsd/FreeBSD-11.0-CURRENT"
|
||||
config.ssh.shell = "sh"
|
||||
config.vm.base_mac = "080027D14C66"
|
||||
|
||||
config.vm.provider :virtualbox do |vb|
|
||||
vb.customize ["modifyvm", :id, "--memory", "1024"]
|
||||
vb.customize ["modifyvm", :id, "--cpus", "1"]
|
||||
vb.customize ["modifyvm", :id, "--hwvirtex", "on"]
|
||||
vb.customize ["modifyvm", :id, "--audio", "none"]
|
||||
vb.customize ["modifyvm", :id, "--nictype1", "virtio"]
|
||||
vb.customize ["modifyvm", :id, "--nictype2", "virtio"]
|
||||
end
|
||||
end
|
||||
@@ -0,0 +1,134 @@
|
||||
# Install collector on Synology
|
||||
|
||||
## Install Entware
|
||||
|
||||
This will allow you to install a newer version of smartmontools on your Synology. Follow the instructions here (This is tested on DSM7) - https://github.com/Entware/Entware/wiki/Install-on-Synology-NAS
|
||||
|
||||
**PLEASE NOTE THAT IF YOU UPDATE DSM FIRMWARE YOU MAY BORK THE EXISTING ENTWARE INSTALLATION, FOR ANYTHING THAT MAY RELATE TO ENTWARE PLEASE VISIT THEIR REPO**
|
||||
|
||||
## Collector Setup
|
||||
|
||||
**1. Run an update**
|
||||
|
||||
`sudo opkg update`
|
||||
|
||||
**2. Run an upgrade**
|
||||
|
||||
`sudo opkg upgrade`
|
||||
|
||||
**3. Install smartmontools**
|
||||
|
||||
`sudo opkg install smartmontools`
|
||||
|
||||
*It should install v7.2-2*
|
||||
|
||||
`Installing smartmontools (7.2-2) to root...`
|
||||
|
||||
**4. We will now create the directories.**
|
||||
|
||||
```
|
||||
mkdir -p /volume1/\@Entware/scrutiny/bin
|
||||
mkdir -p /volume1/\@Entware/scrutiny/conf
|
||||
```
|
||||
|
||||
**5. change into the bin directory**
|
||||
|
||||
`cd /volume1/\@Entware/scrutiny/bin`
|
||||
|
||||
**6. Download the collector binary for your architecture and make it executable**
|
||||
|
||||
`wget https://github.com/AnalogJ/scrutiny/releases/download/v0.4.12/scrutiny-collector-metrics-linux-arm64`
|
||||
|
||||
`chmod +x /volume1/\@Entware/scrutiny/bin/scrutiny-collector-metrics-linux-arm64`
|
||||
|
||||
**7. Create a config file for the collector**
|
||||
|
||||
```
|
||||
cd /volume1/\@Entware/scrutiny/conf
|
||||
wget https://raw.githubusercontent.com/AnalogJ/scrutiny/master/example.collector.yaml
|
||||
mv example.collector.yaml collector.yaml
|
||||
```
|
||||
|
||||
**8. Lets make some changes in the [collector config file](../example.collector.yaml), these are what i uncommented/added, please tweak the device paths to your needs**
|
||||
|
||||
```
|
||||
host:
|
||||
id: 'Server_Name'
|
||||
|
||||
|
||||
devices:
|
||||
# # example for forcing device type detection for a single disk
|
||||
- device: /dev/sda
|
||||
type: 'sat'
|
||||
- device: /dev/sdb
|
||||
type: 'sat'
|
||||
- device: /dev/sdc
|
||||
type: 'sat'
|
||||
- device: /dev/sdd
|
||||
type: 'sat'
|
||||
|
||||
api:
|
||||
endpoint: 'http://<url>:8080'
|
||||
```
|
||||
|
||||
**9. Let's update the smartd db**
|
||||
|
||||
```
|
||||
cd /volume1/\@Entware/scrutiny/bin/
|
||||
wget https://raw.githubusercontent.com/smartmontools/smartmontools/master/smartmontools/drivedb.h
|
||||
```
|
||||
|
||||
**10. I ran it like this but you can tweak to your liking, the most important part is the --drivedb, as this loads it into the aplication for future use**
|
||||
|
||||
`smartctl -d sat --all /dev/sda --drivedb=/volume1/\@Entware/scrutiny/bin/drivedb.h`
|
||||
|
||||
**11. Now lets create a small bash script, this will be used for the scheduled task inside Synology**
|
||||
|
||||
`vim /volume1/\@Entware/scrutiny/bin/run_collect.sh`
|
||||
|
||||
**The contents are below, copy and paste them in**
|
||||
|
||||
```
|
||||
#!/bin/bash
|
||||
|
||||
/volume1/\@Entware/scrutiny/bin/scrutiny-collector-metrics-linux-arm64 run --config /volume1/\@Entware/scrutiny/config/collector.yaml
|
||||
```
|
||||
|
||||
## Set up Synology to run a scheduled task.
|
||||
|
||||
Log in to DSM and do the following:
|
||||
|
||||
Goto: DSM > Control Panel > Task Scheduler
|
||||
|
||||
Create > Scheduled Task > User Defined Script
|
||||
|
||||
###### General
|
||||
|
||||
```
|
||||
Task: Scrutiny_Collector
|
||||
User: root
|
||||
Enabled: yes
|
||||
```
|
||||
|
||||
###### Schedule
|
||||
```
|
||||
Run on the following days: Daily
|
||||
```
|
||||
###### Time:
|
||||
|
||||
```
|
||||
Frequency: <Your desired frequency>
|
||||
```
|
||||
|
||||
###### Task Settings
|
||||
|
||||
**Run Command**
|
||||
|
||||
```
|
||||
. /opt/etc/profile; /volume1/\@Entware/scrutiny/bin/run_collect.sh
|
||||
```
|
||||
|
||||
|
||||
## Troubleshooting
|
||||
|
||||
If you have any issues with your devices being detected, or incorrect data, please take a look at [TROUBLESHOOTING_DEVICE_COLLECTOR.md](./TROUBLESHOOTING_DEVICE_COLLECTOR.md)
|
||||
@@ -1,16 +1,18 @@
|
||||
# Officially Supported NAS OS's
|
||||
# Officially Supported NAS/OS's
|
||||
|
||||
These are the officially supported NAS OS's (with documentation and setup guides).
|
||||
Once a guide is created (in `docs/guides/`) it will be linked here.
|
||||
These are the officially supported NAS OS's (with documentation and setup guides). Once a guide is created (
|
||||
in `docs/guides/` or elsewhere) it will be linked here.
|
||||
|
||||
- [ ] freenas/truenas
|
||||
- [x] [freenas/truenas](https://blog.stefandroid.com/2022/01/14/smart-scrutiny.html)
|
||||
- [x] [unraid](./INSTALL_UNRAID.md)
|
||||
- [ ] ESXI
|
||||
- [ ] Proxmox
|
||||
- [ ] Synology
|
||||
- [x] [Synology](./INSTALL_SYNOLOGY_COLLECTOR.md)
|
||||
- [ ] OMV
|
||||
- [ ] Amahi
|
||||
- [ ] Running in a LXC container
|
||||
- [x] [PFSense](./INSTALL_UNRAID.md)
|
||||
- [ ] QNAP
|
||||
|
||||
- [x] QNAP
|
||||
- [x] [RockStor](https://rockstor.com/docs/interface/docker-based-rock-ons/scrutiny.html)
|
||||
- [ ] Solaris/OmniOS CE Support
|
||||
- [ ] Kubernetes
|
||||
|
||||
@@ -0,0 +1,20 @@
|
||||
# Testers
|
||||
|
||||
Scrutiny supports many operating systems, CPU architectures and runtime environments. Unfortunately that makes it incredibly
|
||||
difficult to test.
|
||||
Thankfully the following users have been gracious enough to test/validate Scrutiny works on their system.
|
||||
|
||||
> NOTE: If you're interested in volunteering to test Scrutiny beta builds on your system, please [open an issue](https://github.com/AnalogJ/scrutiny/issues).
|
||||
|
||||
| Architecture Name | Binaries | Docker |
|
||||
| --- | --- | --- |
|
||||
| linux-amd64 | -- | @feroxy @rshxyz |
|
||||
| linux-arm-5 | -- | |
|
||||
| linux-arm-6 | -- | |
|
||||
| linux-arm-7 | @Zorlin | @martini1992 |
|
||||
| linux-arm64 | @SiM22 @Zorlin | @ViRb3 @agneevX @benamajin |
|
||||
| freebsd-amd64 | @BadCo-NZ @varunsridharan @martadinata666 @KenwoodFox @FingerlessGlov3s | |
|
||||
| macos-amd64 | -- | -- |
|
||||
| macos-arm64 | -- | -- |
|
||||
| windows-amd64 | @gabrielv33 | -- |
|
||||
| windows-arm64 | -- | -- |
|
||||
@@ -104,7 +104,7 @@ devices:
|
||||
As mentioned in the [README.md](/README.md), NVMe devices require both `--cap-add SYS_RAWIO` and `--cap-add SYS_ADMIN`
|
||||
to allow smartctl permission to query your NVMe device SMART data [#26](https://github.com/AnalogJ/scrutiny/issues/26)
|
||||
|
||||
When attaching NVMe devices using `--device=/dev/nvme..`, make sure to provide the device controller (`/dev/nvme0`)
|
||||
When attaching NVMe devices using `--device=/dev/nvme..`, make sure to provide the device controller (`/dev/nvme0`)
|
||||
instead of the block device (`/dev/nvme0n1`). See [#209](https://github.com/AnalogJ/scrutiny/issues/209).
|
||||
|
||||
> The character device /dev/nvme0 is the NVME device controller, and block devices like /dev/nvme0n1 are the NVME storage namespaces: the devices you use for actual storage, which will behave essentially as disks.
|
||||
@@ -113,15 +113,29 @@ instead of the block device (`/dev/nvme0n1`). See [#209](https://github.com/Anal
|
||||
|
||||
### ATA
|
||||
|
||||
### USB Devices
|
||||
|
||||
The following information is extracted from [#266](https://github.com/AnalogJ/scrutiny/issues/266)
|
||||
|
||||
External HDDs support two modes of operation usb-storage (old, slower, stable) and uas (new, faster, sometimes unstable)
|
||||
. On some external HDDs, uas mode does not properly pass through SMART information, or even causes hardware issues, so
|
||||
it has been disabled by the kernel. No amount of smartctl parameters will fix this, as it is being rejected by the
|
||||
kernel. This is especially true with Seagate HDDs. One solution is to force these devices into usb-storage mode, which
|
||||
will incur some performance penalty, but may work well enough for you. More info:
|
||||
|
||||
- https://smartmontools.org/wiki/Supported_USB-Devices
|
||||
- https://smartmontools.org/wiki/SAT-with-UAS-Linux
|
||||
- https://forums.raspberrypi.com/viewtopic.php?t=245931
|
||||
|
||||
### Exit Codes
|
||||
|
||||
If you see an error message similar to `smartctl returned an error code (2) while processing /dev/sda`, this means that
|
||||
`smartctl` (not Scrutiny) exited with an error code. Scrutiny will attempt to print a helpful error message to help you debug,
|
||||
but you can look at the table (and associated links) below to debug `smartctl`.
|
||||
`smartctl` (not Scrutiny) exited with an error code. Scrutiny will attempt to print a helpful error message to help you
|
||||
debug, but you can look at the table (and associated links) below to debug `smartctl`.
|
||||
|
||||
> smartctl Return Values
|
||||
> The return values of smartctl are defined by a bitmask. If all is well with the disk, the return value (exit status) of
|
||||
> smartctl is 0 (all bits turned off). If a problem occurs, or an error, potential error, or fault is detected, then
|
||||
> The return values of smartctl are defined by a bitmask. If all is well with the disk, the return value (exit status) of
|
||||
> smartctl is 0 (all bits turned off). If a problem occurs, or an error, potential error, or fault is detected, then
|
||||
> a non-zero status is returned. In this case, the eight different bits in the return value have the following meanings
|
||||
> for ATA disks; some of these values may also be returned for SCSI disks.
|
||||
>
|
||||
@@ -180,6 +194,72 @@ If Scrutiny detects that an attribute corresponds with a high rate of failure us
|
||||
This can cause some confusion when comparing Scrutiny's dashboard against other SMART analysis tools.
|
||||
If you hover over the "failed" label beside an attribute, Scrutiny will tell you if the failure was due to SMART or Scrutiny/BackBlaze data.
|
||||
|
||||
### Device failed but Smart & Scrutiny passed
|
||||
|
||||
Device SMART results are the source of truth for Scrutiny, however we don't just take into account the current SMART results, but also historical analysis of a disk.
|
||||
This means that if a device is marked as failed at any point in its history, it will continue to be stored in the database as failed until the device is removed (or status is reset -- see below).
|
||||
|
||||
In some cases, this historical failure may have been due to attribute analysis/thresholds that have since been relaxed:
|
||||
|
||||
- NVMe - Number of Error Log Entries (v0.4.7)
|
||||
- ATA - Power Cycle Count (v0.4.7)
|
||||
- ATA - Read Error Rate (v0.4.13)
|
||||
- ATA - Seek Error Rate (v0.4.13)
|
||||
|
||||
If you'd like to reset the status of a disk (to healthy) and allow the next run of the collector to determine the actual status, you can run the following command:
|
||||
|
||||
```bash
|
||||
# connect to scrutiny docker container
|
||||
docker exec -it scrutiny bash
|
||||
|
||||
# install sqlite CLI tools (inside container)
|
||||
apt update && apt install -y sqlite3
|
||||
|
||||
# connect to the scrutiny database
|
||||
sqlite3 /opt/scrutiny/config/scrutiny.db
|
||||
|
||||
# reset/update the devices table, unset the failure status.
|
||||
UPDATE devices SET device_status = null;
|
||||
|
||||
# exit sqlite CLI
|
||||
.exit
|
||||
```
|
||||
|
||||
### Seagate Drives Failing
|
||||
|
||||
As thoroughly discussed in [#255](https://github.com/AnalogJ/scrutiny/issues/255), Seagate (Ironwolf & others) drives are almost always marked as failed by Scrutiny.
|
||||
|
||||
> The `Seek Error Rate` & `Read Error Rate` attribute raw values are typically very high, and the
|
||||
> normalised values (Current / Worst / Threshold) are usually quite low. Despite this, the numbers in most cases are perfectly OK
|
||||
>
|
||||
> The anxiety arises because we intuitively expect that the normalised values should reflect a "health" score, with
|
||||
> 100 being the ideal value. Similarly, we would expect that the raw values should reflect an error count, in
|
||||
> which case a value of 0 would be most desirable. However, Seagate calculates and applies these attribute values
|
||||
> in a counterintuitive way.
|
||||
>
|
||||
> http://www.users.on.net/~fzabkar/HDD/Seagate_SER_RRER_HEC.html
|
||||
|
||||
Some analysis has been done which shows that Seagate drives break the common SMART conventions, which also causes Scrutiny's
|
||||
comparison against BackBlaze data to detect these drives as failed.
|
||||
|
||||
**So what's the Solution?**
|
||||
|
||||
After taking a look at the BackBlaze data for the relevant Attributes (`Seek Error Rate` & `Read Error Rate`), I've decided
|
||||
to disable Scrutiny analysis for them. Both are non-critical, and have low-correlation with failure.
|
||||
|
||||
> Please note: SMART failures for these attributes will still cause the drive to be marked as failed. Only BackBlaze analysis has been disabled
|
||||
|
||||
If this is affecting your drives, you'll need to do the following:
|
||||
|
||||
1. Upgrade to v0.4.13+
|
||||
2. Reset your drive status using the SQLite script in [#device-failed-but-smart--scrutiny-passed](https://github.com/AnalogJ/scrutiny/blob/master/docs/TROUBLESHOOTING_DEVICE_COLLECTOR.md#device-failed-but-smart--scrutiny-passed)
|
||||
3. Wait for (or manually start) the collector.
|
||||
|
||||
If you'd like to learn more about how the Seagate Ironwolf SMART attributes work under the hood, and how they differ from
|
||||
other drives, please read the following:
|
||||
|
||||
- http://www.users.on.net/~fzabkar/HDD/Seagate_SER_RRER_HEC.html
|
||||
- https://www.truenas.com/community/threads/seagate-ironwolf-smart-test-raw_read_error_rate-seek_error_rate.68634/
|
||||
|
||||
## Hub & Spoke model, with multiple Hosts.
|
||||
|
||||
@@ -201,4 +281,4 @@ Or if you're not using docker, you can pass CLI arguments to the collector durin
|
||||
|
||||
```bash
|
||||
scrutiny-collector-metrics run --debug --log-file /tmp/collector.log
|
||||
```
|
||||
```
|
||||
|
||||
@@ -0,0 +1,19 @@
|
||||
# Docker Images `master-omnibus` vs `latest`
|
||||
|
||||
> TL;DR; The `master-omnibus` and `latest` tags are almost semantically identical, as I follow a `golden master`
|
||||
development process. However if you want to ensure you're only using the latest release, you can change to `latest`
|
||||
|
||||
The CI script used to orchestrate the docker image builds can be found here: https://github.com/AnalogJ/scrutiny/blob/master/.github/workflows/docker-build.yaml#L166-L184
|
||||
|
||||
In general Scrutiny follows a `golden master` development process, which means that the `master` branch is not directly updated (unless its for documentation changes),
|
||||
instead development is done in a feature branch, or committed to the `beta` branch.
|
||||
|
||||
As development progresses, and we're satisfied that a feature is complete, and the quality is acceptable,
|
||||
I merge the changes to `master` and trigger the creation of a new release -- ie, when master is updated, a new release
|
||||
is almost immediately created (and tagged with `latest`)
|
||||
|
||||
So changing from `master-omnibus -> latest` will be the same thing for all intents and purposes.
|
||||
|
||||
> NOTE: Previously, there was an `automated cron build` that ran on the `master` and `beta` branches.
|
||||
They used to trigger a `nightly` build, even if nothing has changed on the branch. This has a couple of benefits, but one is to
|
||||
ensure that there are no broken external dependencies in our (unchanged) code. This `nightly` build no longer updates the `master-omnibus` tag.
|
||||
@@ -54,15 +54,331 @@ time="2022-05-13T14:38:05Z" level=info msg="Successfully connected to scrutiny s
|
||||
panic: a username and password is required for a setup
|
||||
```
|
||||
|
||||
or
|
||||
|
||||
```
|
||||
Start the scrutiny server
|
||||
time="2022-06-11T10:35:04-04:00" level=info msg="Trying to connect to scrutiny sqlite db: \n"
|
||||
time="2022-06-11T10:35:04-04:00" level=info msg="Successfully connected to scrutiny sqlite db: \n"
|
||||
panic: failed to check influxdb setup status - parse "://:": missing protocol scheme
|
||||
```
|
||||
|
||||
As discussed in [#248](https://github.com/AnalogJ/scrutiny/issues/248) and [#234](https://github.com/AnalogJ/scrutiny/issues/234),
|
||||
this is usually related to either:
|
||||
|
||||
- Upgrading from the LSIO Scrutiny image to the Official Scrutiny image, without removing LSIO specific environmental variables
|
||||
- remove the `SCRUTINY_WEB=true` and `SCRUTINY_COLLECTOR=true` environmental variables. They were used by the LSIO image, but are unnecessary and cause issues with the official Scrutiny image.
|
||||
- Updated versions of the [LSIO Scrutiny images are broken](https://github.com/linuxserver/docker-scrutiny/issues/22), as they have not installed InfluxDB which is a required dependency of Scrutiny v0.4.x
|
||||
- You can revert to an earlier version of the LSIO image (`lscr.io/linuxserver/scrutiny:060ac7b8-ls34`), or just change to the official Scrutiny image (`ghcr.io/analogj/scrutiny:master-omnibus`)
|
||||
- Upgrading from the LSIO Scrutiny image to the Official Scrutiny image, without removing LSIO specific environmental
|
||||
variables
|
||||
- remove the `SCRUTINY_WEB=true` and `SCRUTINY_COLLECTOR=true` environmental variables. They were used by the LSIO
|
||||
image, but are unnecessary and cause issues with the official Scrutiny image.
|
||||
- Updated versions of the [LSIO Scrutiny images are broken](https://github.com/linuxserver/docker-scrutiny/issues/22),
|
||||
as they have not installed InfluxDB which is a required dependency of Scrutiny v0.4.x
|
||||
- You can revert to an earlier version of the LSIO image (`lscr.io/linuxserver/scrutiny:060ac7b8-ls34`), or just
|
||||
change to the official Scrutiny image (`ghcr.io/analogj/scrutiny:master-omnibus`)
|
||||
|
||||
Here's a couple of confirmed working docker-compose files that you may want to look at:
|
||||
|
||||
- https://github.com/AnalogJ/scrutiny/blob/master/docker/example.hubspoke.docker-compose.yml
|
||||
- https://github.com/AnalogJ/scrutiny/blob/master/docker/example.omnibus.docker-compose.yml
|
||||
|
||||
## Bring your own InfluxDB
|
||||
|
||||
> WARNING: Most users should not follow these steps. This is ONLY for users who have an EXISTING InfluxDB installation which contains data from multiple services.
|
||||
> The Scrutiny Docker omnibus image includes an empty InfluxDB instance which it can configure.
|
||||
> If you're deploying manually or via Hub/Spoke, you can just follow the installation instructions, Scrutiny knows how
|
||||
> to run the first-time setup automatically.
|
||||
|
||||
The goal here is to create an InfluxDB API key with minimal permissions for use by Scrutiny.
|
||||
|
||||
- Create Scrutiny buckets (`metrics`, `metrics_weekly`, `metrics_monthly`, `metrics_yearly`) with placeholder config
|
||||
- Create Downsampling tasks (`tsk-weekly-aggr`, `tsk-monthly-aggr`, `tsk-yearly-aggr`) with placeholder script.
|
||||
- Create API token with restricted scope
|
||||
- NOTE: Placeholder bucket & task configuration will be replaced automatically by Scrutiny during startup
|
||||
|
||||
The placeholder buckets and tasks need to be created before the API token can be created, as the resource ID's need to
|
||||
exist for the scope restriction to work.
|
||||
|
||||
Scopes:
|
||||
|
||||
- `orgs`: read - required for scrutiny to find its configured org_id
|
||||
- `tasks`: scrutiny specific read/write access - Scrutiny only needs access to the downsampling tasks you created above
|
||||
- `buckets`: scrutiny specific read/write access - Scrutiny only needs access to the buckets you created above
|
||||
|
||||
### Setup Environmental Variables
|
||||
|
||||
```bash
|
||||
# replace the following values with correct values for your InfluxDB installation
|
||||
export INFLUXDB_ADMIN_TOKEN=pCqRq7xxxxxx-FZgNLfstIs0w==
|
||||
export INFLUXDB_ORG_ID=b2495xxxxx
|
||||
export INFLUXDB_HOSTNAME=http://localhost:8086
|
||||
|
||||
# if you want to change the bucket name prefix below, you'll also need to update the setting in the scrutiny.yaml config file.
|
||||
export INFLUXDB_SCRUTINY_BUCKET_BASENAME=metrics
|
||||
```
|
||||
|
||||
### Create placeholder buckets
|
||||
|
||||
<details>
|
||||
<summary>Click to expand!</summary>
|
||||
|
||||
```bash
|
||||
curl -sS -X POST ${INFLUXDB_HOSTNAME}/api/v2/buckets \
|
||||
-H "Content-Type: application/json" \
|
||||
-H "Authorization: Token ${INFLUXDB_ADMIN_TOKEN}" \
|
||||
--data-binary @- << EOF
|
||||
{
|
||||
"name": "${INFLUXDB_SCRUTINY_BUCKET_BASENAME}",
|
||||
"orgID": "${INFLUXDB_ORG_ID}",
|
||||
"retentionRules": []
|
||||
}
|
||||
EOF
|
||||
|
||||
curl -sS -X POST ${INFLUXDB_HOSTNAME}/api/v2/buckets \
|
||||
-H "Content-Type: application/json" \
|
||||
-H "Authorization: Token ${INFLUXDB_ADMIN_TOKEN}" \
|
||||
--data-binary @- << EOF
|
||||
{
|
||||
"name": "${INFLUXDB_SCRUTINY_BUCKET_BASENAME}_weekly",
|
||||
"orgID": "${INFLUXDB_ORG_ID}",
|
||||
"retentionRules": []
|
||||
}
|
||||
EOF
|
||||
|
||||
curl -sS -X POST ${INFLUXDB_HOSTNAME}/api/v2/buckets \
|
||||
-H "Content-Type: application/json" \
|
||||
-H "Authorization: Token ${INFLUXDB_ADMIN_TOKEN}" \
|
||||
--data-binary @- << EOF
|
||||
{
|
||||
"name": "${INFLUXDB_SCRUTINY_BUCKET_BASENAME}_monthly",
|
||||
"orgID": "${INFLUXDB_ORG_ID}",
|
||||
"retentionRules": []
|
||||
}
|
||||
EOF
|
||||
|
||||
curl -sS -X POST ${INFLUXDB_HOSTNAME}/api/v2/buckets \
|
||||
-H "Content-Type: application/json" \
|
||||
-H "Authorization: Token ${INFLUXDB_ADMIN_TOKEN}" \
|
||||
--data-binary @- << EOF
|
||||
{
|
||||
"name": "${INFLUXDB_SCRUTINY_BUCKET_BASENAME}_yearly",
|
||||
"orgID": "${INFLUXDB_ORG_ID}",
|
||||
"retentionRules": []
|
||||
}
|
||||
EOF
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
### Create placeholder tasks
|
||||
|
||||
<details>
|
||||
<summary>Click to expand!</summary>
|
||||
|
||||
```bash
|
||||
curl -sS -X POST ${INFLUXDB_HOSTNAME}/api/v2/tasks \
|
||||
-H "Content-Type: application/json" \
|
||||
-H "Authorization: Token ${INFLUXDB_ADMIN_TOKEN}" \
|
||||
--data-binary @- << EOF
|
||||
{
|
||||
"orgID": "${INFLUXDB_ORG_ID}",
|
||||
"flux": "option task = {name: \"tsk-weekly-aggr\", every: 1y} \nyield now()"
|
||||
}
|
||||
EOF
|
||||
|
||||
curl -sS -X POST ${INFLUXDB_HOSTNAME}/api/v2/tasks \
|
||||
-H "Content-Type: application/json" \
|
||||
-H "Authorization: Token ${INFLUXDB_ADMIN_TOKEN}" \
|
||||
--data-binary @- << EOF
|
||||
{
|
||||
"orgID": "${INFLUXDB_ORG_ID}",
|
||||
"flux": "option task = {name: \"tsk-monthly-aggr\", every: 1y} \nyield now()"
|
||||
}
|
||||
EOF
|
||||
|
||||
curl -sS -X POST ${INFLUXDB_HOSTNAME}/api/v2/tasks \
|
||||
-H "Content-Type: application/json" \
|
||||
-H "Authorization: Token ${INFLUXDB_ADMIN_TOKEN}" \
|
||||
--data-binary @- << EOF
|
||||
{
|
||||
"orgID": "${INFLUXDB_ORG_ID}",
|
||||
"flux": "option task = {name: \"tsk-yearly-aggr\", every: 1y} \nyield now()"
|
||||
}
|
||||
EOF
|
||||
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
### Create InfluxDB API Token
|
||||
|
||||
<details>
|
||||
<summary>Click to expand!</summary>
|
||||
|
||||
```bash
|
||||
# replace these values with placeholder bucket and task ids from your InfluxDB installation.
|
||||
export INFLUXDB_SCRUTINY_BASE_BUCKET_ID=1e0709xxxx
|
||||
export INFLUXDB_SCRUTINY_WEEKLY_BUCKET_ID=1af03dexxxxx
|
||||
export INFLUXDB_SCRUTINY_MONTHLY_BUCKET_ID=b3c59c7xxxxx
|
||||
export INFLUXDB_SCRUTINY_YEARLY_BUCKET_ID=f381d8cxxxxx
|
||||
|
||||
export INFLUXDB_SCRUTINY_WEEKLY_TASK_ID=09a64ecxxxxx
|
||||
export INFLUXDB_SCRUTINY_MONTHLY_TASK_ID=09a64xxxxx
|
||||
export INFLUXDB_SCRUTINY_YEARLY_TASK_ID=09a64ecxxxxx
|
||||
|
||||
|
||||
curl -sS -X POST ${INFLUXDB_HOSTNAME}/api/v2/authorizations \
|
||||
-H "Content-Type: application/json" \
|
||||
-H "Authorization: Token ${INFLUXDB_ADMIN_TOKEN}" \
|
||||
--data-binary @- << EOF
|
||||
{
|
||||
"description": "scrutiny - restricted scope token",
|
||||
"orgID": "${INFLUXDB_ORG_ID}",
|
||||
"permissions": [
|
||||
{
|
||||
"action": "read",
|
||||
"resource": {
|
||||
"type": "orgs"
|
||||
}
|
||||
},
|
||||
{
|
||||
"action": "read",
|
||||
"resource": {
|
||||
"type": "tasks"
|
||||
}
|
||||
},
|
||||
{
|
||||
"action": "write",
|
||||
"resource": {
|
||||
"type": "tasks",
|
||||
"id": "${INFLUXDB_SCRUTINY_WEEKLY_TASK_ID}",
|
||||
"orgID": "${INFLUXDB_ORG_ID}"
|
||||
}
|
||||
},
|
||||
{
|
||||
"action": "write",
|
||||
"resource": {
|
||||
"type": "tasks",
|
||||
"id": "${INFLUXDB_SCRUTINY_MONTHLY_TASK_ID}",
|
||||
"orgID": "${INFLUXDB_ORG_ID}"
|
||||
}
|
||||
},
|
||||
{
|
||||
"action": "write",
|
||||
"resource": {
|
||||
"type": "tasks",
|
||||
"id": "${INFLUXDB_SCRUTINY_YEARLY_TASK_ID}",
|
||||
"orgID": "${INFLUXDB_ORG_ID}"
|
||||
}
|
||||
},
|
||||
{
|
||||
"action": "read",
|
||||
"resource": {
|
||||
"type": "buckets",
|
||||
"id": "${INFLUXDB_SCRUTINY_BASE_BUCKET_ID}",
|
||||
"orgID": "${INFLUXDB_ORG_ID}"
|
||||
}
|
||||
},
|
||||
{
|
||||
"action": "write",
|
||||
"resource": {
|
||||
"type": "buckets",
|
||||
"id": "${INFLUXDB_SCRUTINY_BASE_BUCKET_ID}",
|
||||
"orgID": "${INFLUXDB_ORG_ID}"
|
||||
}
|
||||
},
|
||||
{
|
||||
"action": "read",
|
||||
"resource": {
|
||||
"type": "buckets",
|
||||
"id": "${INFLUXDB_SCRUTINY_WEEKLY_BUCKET_ID}",
|
||||
"orgID": "${INFLUXDB_ORG_ID}"
|
||||
}
|
||||
},
|
||||
{
|
||||
"action": "write",
|
||||
"resource": {
|
||||
"type": "buckets",
|
||||
"id": "${INFLUXDB_SCRUTINY_WEEKLY_BUCKET_ID}",
|
||||
"orgID": "${INFLUXDB_ORG_ID}"
|
||||
}
|
||||
},
|
||||
{
|
||||
"action": "read",
|
||||
"resource": {
|
||||
"type": "buckets",
|
||||
"id": "${INFLUXDB_SCRUTINY_MONTHLY_BUCKET_ID}",
|
||||
"orgID": "${INFLUXDB_ORG_ID}"
|
||||
}
|
||||
},
|
||||
{
|
||||
"action": "write",
|
||||
"resource": {
|
||||
"type": "buckets",
|
||||
"id": "${INFLUXDB_SCRUTINY_MONTHLY_BUCKET_ID}",
|
||||
"orgID": "${INFLUXDB_ORG_ID}"
|
||||
}
|
||||
},
|
||||
{
|
||||
"action": "read",
|
||||
"resource": {
|
||||
"type": "buckets",
|
||||
"id": "${INFLUXDB_SCRUTINY_YEARLY_BUCKET_ID}",
|
||||
"orgID": "${INFLUXDB_ORG_ID}"
|
||||
}
|
||||
},
|
||||
{
|
||||
"action": "write",
|
||||
"resource": {
|
||||
"type": "buckets",
|
||||
"id": "${INFLUXDB_SCRUTINY_YEARLY_BUCKET_ID}",
|
||||
"orgID": "${INFLUXDB_ORG_ID}"
|
||||
}
|
||||
}
|
||||
]
|
||||
}
|
||||
EOF
|
||||
```
|
||||
|
||||
</details>
|
||||
|
||||
### Save InfluxDB API Token
|
||||
|
||||
After running the Curl command above, you'll see a JSON response that looks like the following:
|
||||
|
||||
```json
|
||||
{
|
||||
"token": "ksVU2t5SkQwYkvIxxxxxxxYt2xUt0uRKSbSF1Po0UQ==",
|
||||
"status": "active",
|
||||
"description": "scrutiny - restricted scope token",
|
||||
"orgID": "b2495586xxxx",
|
||||
"org": "my-org",
|
||||
"user": "admin",
|
||||
"permissions": [
|
||||
{
|
||||
"action": "read",
|
||||
"resource": {
|
||||
"type": "orgs"
|
||||
}
|
||||
},
|
||||
{
|
||||
"action": "read",
|
||||
"resource": {
|
||||
"type": "tasks"
|
||||
}
|
||||
},
|
||||
{
|
||||
"action": "write",
|
||||
"resource": {
|
||||
"type": "tasks",
|
||||
"id": "09a64exxxxx",
|
||||
"orgID": "b24955860xxxxx",
|
||||
"org": "my-org"
|
||||
}
|
||||
},
|
||||
...
|
||||
]
|
||||
}
|
||||
```
|
||||
|
||||
You must copy the token field from the JSON response, and save it in your `scrutiny.yaml` config file. After that's
|
||||
done, you can start the Scrutiny server
|
||||
|
||||
|
||||
@@ -73,6 +73,7 @@ devices:
|
||||
|
||||
# example to show how to override the smartctl command args globally
|
||||
#commands:
|
||||
# metrics_smartctl_bin: 'smartctl' # change to provide custom `smartctl` binary path, eg. `/usr/sbin/smartctl`
|
||||
# metrics_scan_args: '--scan --json' # used to detect devices
|
||||
# metrics_info_args: '--info --json' # used to determine device unique ID & register device with Scrutiny
|
||||
# metrics_smart_args: '--xall --json' # used to retrieve smart data for each device.
|
||||
@@ -86,9 +87,6 @@ devices:
|
||||
########################################################################################################################
|
||||
|
||||
#collect:
|
||||
# metric:
|
||||
# enable: true
|
||||
# command: '-a -o on -S on'
|
||||
# long:
|
||||
# enable: false
|
||||
# command: ''
|
||||
|
||||
@@ -73,6 +73,8 @@ log:
|
||||
# - "join://shoutrrr:api-key@join/?devices=device1[,device2, ...][&icon=icon][&title=title]"
|
||||
# - "script:///file/path/on/disk"
|
||||
# - "https://www.example.com/path"
|
||||
# filter_attributes: 'all' # options: 'all' or 'critical'
|
||||
# level: 'fail' # options: 'fail', 'fail_scrutiny', 'fail_smart'
|
||||
|
||||
########################################################################################################################
|
||||
# FEATURES COMING SOON
|
||||
|
||||
@@ -1,64 +1,64 @@
|
||||
module github.com/analogj/scrutiny
|
||||
|
||||
go 1.17
|
||||
go 1.18
|
||||
|
||||
require (
|
||||
github.com/analogj/go-util v0.0.0-20190301173314-5295e364eb14
|
||||
github.com/containrrr/shoutrrr v0.4.4
|
||||
github.com/fatih/color v1.10.0
|
||||
github.com/gin-gonic/gin v1.6.3
|
||||
github.com/glebarez/sqlite v1.4.5
|
||||
github.com/go-gormigrate/gormigrate/v2 v2.0.0
|
||||
github.com/golang/mock v1.4.3
|
||||
github.com/influxdata/influxdb-client-go/v2 v2.9.0
|
||||
github.com/jaypipes/ghw v0.6.1
|
||||
github.com/jinzhu/gorm v1.9.16
|
||||
github.com/mitchellh/mapstructure v1.2.2
|
||||
github.com/samber/lo v1.25.0
|
||||
github.com/sirupsen/logrus v1.4.2
|
||||
github.com/spf13/viper v1.7.0
|
||||
github.com/stretchr/testify v1.5.1
|
||||
github.com/stretchr/testify v1.7.1
|
||||
github.com/urfave/cli/v2 v2.2.0
|
||||
golang.org/x/sync v0.0.0-20201020160332-67f06af15bc9
|
||||
gorm.io/driver/sqlite v1.1.3
|
||||
gorm.io/gorm v1.20.2
|
||||
gorm.io/gorm v1.23.5
|
||||
)
|
||||
|
||||
require (
|
||||
github.com/StackExchange/wmi v0.0.0-20190523213315-cbe66965904d // indirect
|
||||
github.com/citilinkru/libudev v1.0.0 // indirect
|
||||
github.com/cpuguy83/go-md2man/v2 v2.0.0 // indirect
|
||||
github.com/davecgh/go-spew v1.1.1 // indirect
|
||||
github.com/deepmap/oapi-codegen v1.8.2 // indirect
|
||||
github.com/fsnotify/fsnotify v1.4.9 // indirect
|
||||
github.com/ghodss/yaml v1.0.0 // indirect
|
||||
github.com/gin-contrib/sse v0.1.0 // indirect
|
||||
github.com/glebarez/go-sqlite v1.17.2 // indirect
|
||||
github.com/go-ole/go-ole v1.2.4 // indirect
|
||||
github.com/go-playground/locales v0.13.0 // indirect
|
||||
github.com/go-playground/universal-translator v0.17.0 // indirect
|
||||
github.com/go-playground/validator/v10 v10.2.0 // indirect
|
||||
github.com/golang/protobuf v1.4.2 // indirect
|
||||
github.com/google/uuid v1.2.0 // indirect
|
||||
github.com/google/uuid v1.3.0 // indirect
|
||||
github.com/hashicorp/hcl v1.0.0 // indirect
|
||||
github.com/influxdata/line-protocol v0.0.0-20200327222509-2487e7298839 // indirect
|
||||
github.com/jaypipes/pcidb v0.5.0 // indirect
|
||||
github.com/jinzhu/inflection v1.0.0 // indirect
|
||||
github.com/jinzhu/now v1.1.1 // indirect
|
||||
github.com/jinzhu/now v1.1.4 // indirect
|
||||
github.com/json-iterator/go v1.1.9 // indirect
|
||||
github.com/klauspost/compress v1.12.1 // indirect
|
||||
github.com/klauspost/compress v1.11.7 // indirect
|
||||
github.com/konsorten/go-windows-terminal-sequences v1.0.2 // indirect
|
||||
github.com/kvz/logstreamer v0.0.0-20150507115422-a635b98146f0 // indirect
|
||||
github.com/kvz/logstreamer v0.0.0-20201023134116-02d20f4338f5 // indirect
|
||||
github.com/leodido/go-urn v1.2.0 // indirect
|
||||
github.com/magiconair/properties v1.8.1 // indirect
|
||||
github.com/mattn/go-colorable v0.1.8 // indirect
|
||||
github.com/mattn/go-isatty v0.0.12 // indirect
|
||||
github.com/mattn/go-sqlite3 v1.14.4 // indirect
|
||||
github.com/mattn/go-isatty v0.0.14 // indirect
|
||||
github.com/mitchellh/go-homedir v1.1.0 // indirect
|
||||
github.com/modern-go/concurrent v0.0.0-20180306012644-bacd9c7ef1dd // indirect
|
||||
github.com/modern-go/reflect2 v1.0.1 // indirect
|
||||
github.com/nxadm/tail v1.4.8 // indirect
|
||||
github.com/onsi/ginkgo v1.16.1 // indirect
|
||||
github.com/nxadm/tail v1.4.6 // indirect
|
||||
github.com/onsi/ginkgo v1.14.2 // indirect
|
||||
github.com/pelletier/go-toml v1.7.0 // indirect
|
||||
github.com/pkg/errors v0.9.1 // indirect
|
||||
github.com/pmezard/go-difflib v1.0.0 // indirect
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0 // indirect
|
||||
github.com/russross/blackfriday/v2 v2.0.1 // indirect
|
||||
github.com/shurcooL/sanitized_anchor_name v1.0.0 // indirect
|
||||
github.com/spf13/afero v1.2.2 // indirect
|
||||
@@ -68,8 +68,9 @@ require (
|
||||
github.com/subosito/gotenv v1.2.0 // indirect
|
||||
github.com/ugorji/go/codec v1.1.7 // indirect
|
||||
golang.org/x/crypto v0.0.0-20201221181555-eec23a3978ad // indirect
|
||||
golang.org/x/exp v0.0.0-20220303212507-bbda1eaf7a17 // indirect
|
||||
golang.org/x/net v0.0.0-20210119194325-5f4716e94777 // indirect
|
||||
golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7 // indirect
|
||||
golang.org/x/sys v0.0.0-20220405052023-b1e9470b6e64 // indirect
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1 // indirect
|
||||
golang.org/x/text v0.3.5 // indirect
|
||||
golang.org/x/xerrors v0.0.0-20200804184101-5ec99f83aff1 // indirect
|
||||
@@ -77,7 +78,12 @@ require (
|
||||
gopkg.in/ini.v1 v1.55.0 // indirect
|
||||
gopkg.in/tomb.v1 v1.0.0-20141024135613-dd632973f1e7 // indirect
|
||||
gopkg.in/yaml.v2 v2.3.0 // indirect
|
||||
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b // indirect
|
||||
gosrc.io/xmpp v0.5.1 // indirect
|
||||
howett.net/plist v0.0.0-20181124034731-591f970eefbb // indirect
|
||||
nhooyr.io/websocket v1.8.7 // indirect
|
||||
modernc.org/libc v1.16.8 // indirect
|
||||
modernc.org/mathutil v1.4.1 // indirect
|
||||
modernc.org/memory v1.1.1 // indirect
|
||||
modernc.org/sqlite v1.17.2 // indirect
|
||||
nhooyr.io/websocket v1.8.6 // indirect
|
||||
)
|
||||
|
||||
@@ -39,8 +39,6 @@ github.com/chromedp/cdproto v0.0.0-20190812224334-39ef923dcb8d/go.mod h1:0YChpVz
|
||||
github.com/chromedp/cdproto v0.0.0-20190926234355-1b4886c6fad6/go.mod h1:0YChpVzuLJC5CPr+x3xkHN6Z8KOSXjNbL7qV8Wc4GW0=
|
||||
github.com/chromedp/chromedp v0.3.1-0.20190619195644-fd957a4d2901/go.mod h1:mJdvfrVn594N9tfiPecUidF6W5jPRKHymqHfzbobPsM=
|
||||
github.com/chromedp/chromedp v0.4.0/go.mod h1:DC3QUn4mJ24dwjcaGQLoZrhm4X/uPHZ6spDbS2uFhm4=
|
||||
github.com/citilinkru/libudev v1.0.0 h1:upErSdhsJGdiKxwxPmvcz43fwJJD9R+y1j8BqU4wHog=
|
||||
github.com/citilinkru/libudev v1.0.0/go.mod h1:yaNdhdtfJMs5flqeXzUOMO0mT9QnyNh/U/jdY4WhA/I=
|
||||
github.com/client9/misspell v0.3.4/go.mod h1:qj6jICC3Q7zFZvVWo7KLAzC3yx5G7kyvSDkc90ppPyw=
|
||||
github.com/cockroachdb/apd v1.1.0/go.mod h1:8Sl8LxpKi29FqWXR16WEFZRNSz3SoPzUzeMeY4+DwBQ=
|
||||
github.com/containrrr/shoutrrr v0.4.4 h1:vHZ4E/76pKVY+Jyn/qhBz3X540Bn8NI5ppPHK4PyILY=
|
||||
@@ -63,13 +61,12 @@ github.com/davecgh/go-spew v1.1.1 h1:vj9j/u1bqnvCEfJOwUhtlOARqs3+rkHYY13jYWTU97c
|
||||
github.com/davecgh/go-spew v1.1.1/go.mod h1:J7Y8YcW2NihsgmVo/mv3lAwl/skON4iLHjSsI+c5H38=
|
||||
github.com/deepmap/oapi-codegen v1.8.2 h1:SegyeYGcdi0jLLrpbCMoJxnUUn8GBXHsvr4rbzjuhfU=
|
||||
github.com/deepmap/oapi-codegen v1.8.2/go.mod h1:YLgSKSDv/bZQB7N4ws6luhozi3cEdRktEqrX88CvjIw=
|
||||
github.com/denisenkom/go-mssqldb v0.0.0-20191124224453-732737034ffd/go.mod h1:xbL0rPBG9cCiLr28tMa8zpbdarY27NDyej4t/EjAShU=
|
||||
github.com/denisenkom/go-mssqldb v0.0.0-20200428022330-06a60b6afbbc h1:VRRKCwnzqk8QCaRC4os14xoKDdbHqqlJtJA0oc1ZAjg=
|
||||
github.com/denisenkom/go-mssqldb v0.0.0-20200428022330-06a60b6afbbc/go.mod h1:xbL0rPBG9cCiLr28tMa8zpbdarY27NDyej4t/EjAShU=
|
||||
github.com/dgrijalva/jwt-go v3.2.0+incompatible/go.mod h1:E3ru+11k8xSBh+hMPgOLZmtrrCbhqsmaPHjLKYnJCaQ=
|
||||
github.com/dgryski/go-sip13 v0.0.0-20181026042036-e10d5fee7954/go.mod h1:vAd38F8PWV+bWy6jNmig1y/TA+kYO4g3RSRF0IAv0no=
|
||||
github.com/dustin/go-humanize v1.0.0/go.mod h1:HtrtbFcZ19U5GC7JDqmcUSB87Iq5E25KnS6fMYU6eOk=
|
||||
github.com/edsrzf/mmap-go v1.0.0/go.mod h1:YO35OhQPt3KJa3ryjFM5Bs14WD66h8eGKpfaBNrHW5M=
|
||||
github.com/erikstmartin/go-testdb v0.0.0-20160219214506-8d10e4a1bae5/go.mod h1:a2zkGnVExMxdzMo3M0Hi/3sEU+cWnZpSni0O6/Yb/P0=
|
||||
github.com/fatih/color v1.6.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
|
||||
github.com/fatih/color v1.7.0/go.mod h1:Zm6kSWBoL9eyXnKyktHP6abPY2pDugNf5KwzbycvMj4=
|
||||
github.com/fatih/color v1.10.0 h1:s36xzo75JdqLaaWoiEHk767eHiwo0598uUxyfiPkDsg=
|
||||
@@ -84,6 +81,10 @@ github.com/gin-contrib/sse v0.1.0 h1:Y/yl/+YNO8GZSjAhjMsSuLt29uWRFHdHYUb5lYOV9qE
|
||||
github.com/gin-contrib/sse v0.1.0/go.mod h1:RHrZQHXnP2xjPF+u1gW/2HnVO7nvIa9PG3Gm+fLHvGI=
|
||||
github.com/gin-gonic/gin v1.6.3 h1:ahKqKTFpO5KTPHxWZjEdPScmYaGtLo8Y4DMHoEsnp14=
|
||||
github.com/gin-gonic/gin v1.6.3/go.mod h1:75u5sXoLsGZoRN5Sgbi1eraJ4GU3++wFwWzhwvtwp4M=
|
||||
github.com/glebarez/go-sqlite v1.17.2 h1:gyTyFr2RFFQd2gp6fOOdfnTvUn99zwvVOrQFHA4S+DY=
|
||||
github.com/glebarez/go-sqlite v1.17.2/go.mod h1:lakPjzvnJ6uSIARV+5dPALDuSLL3879PlzHFMEpbceM=
|
||||
github.com/glebarez/sqlite v1.4.5 h1:oaJupO4X9iTn4sXRvP5Vs15BNvKh9dx5AQfciKlDvV4=
|
||||
github.com/glebarez/sqlite v1.4.5/go.mod h1:6D+bB+DdXlEC4mO+pUFJWixVcnrHTIAJ9U6Ynnn4Lxk=
|
||||
github.com/go-chi/chi/v5 v5.0.0/go.mod h1:BBug9lr0cqtdAhsu6R4AAdvufI0/XBzAQSsUqJpoZOs=
|
||||
github.com/go-gl/glfw v0.0.0-20190409004039-e6da0acd62b1/go.mod h1:vR7hzQXu2zJy9AVAgeJqvqgH9Q5CA+iKCZ2gyEVpxRU=
|
||||
github.com/go-gormigrate/gormigrate/v2 v2.0.0 h1:e2A3Uznk4viUC4UuemuVgsNnvYZyOA8B3awlYk3UioU=
|
||||
@@ -108,7 +109,6 @@ github.com/go-playground/validator/v10 v10.2.0/go.mod h1:uOYAAleCW8F/7oMFd6aG0GO
|
||||
github.com/go-sql-driver/mysql v1.5.0 h1:ozyZYNQW3x3HtqT1jira07DN2PArx2v7/mN66gGcHOs=
|
||||
github.com/go-sql-driver/mysql v1.5.0/go.mod h1:DCzpHaOWr8IXmIStZouvnhqoel9Qv2LBy8hT2VhHyBg=
|
||||
github.com/go-stack/stack v1.8.0/go.mod h1:v0f6uXyyMGvRgIKkXu+yp6POWl0qKG85gN/melR3HDY=
|
||||
github.com/go-task/slim-sprig v0.0.0-20210107165309-348f09dbbbc0/go.mod h1:fyg7847qk6SyHyPtNmDHnmrv/HOrqktSC+C9fM+CJOE=
|
||||
github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee h1:s+21KNqlpePfkah2I+gwHF8xmJWRjooY+5248k6m4A0=
|
||||
github.com/gobwas/httphead v0.0.0-20180130184737-2c6c146eadee/go.mod h1:L0fX3K22YWvt/FAX9NnzrNzcI4wNYi9Yku4O0LKYflo=
|
||||
github.com/gobwas/pool v0.2.0 h1:QEmUOlnSjWtnpRGHF3SauEiOsy82Cup83Vf2LcMlnc8=
|
||||
@@ -139,15 +139,15 @@ github.com/golang/protobuf v1.4.0-rc.4.0.20200313231945-b860323f09d0/go.mod h1:W
|
||||
github.com/golang/protobuf v1.4.0/go.mod h1:jodUvKwWbYaEsadDk5Fwe5c77LiNKVO9IDvqG2KuDX0=
|
||||
github.com/golang/protobuf v1.4.2 h1:+Z5KGCizgyZCbGh1KZqA0fcLLkwbsjIzS4aV2v7wJX0=
|
||||
github.com/golang/protobuf v1.4.2/go.mod h1:oDoupMAO8OvCJWAcko0GGGIgR6R6ocIYbsSw735rRwI=
|
||||
github.com/golang/snappy v0.0.3/go.mod h1:/XxbfmMg8lxefKM7IXC3fBNl/7bRcc72aCRzEWrmP2Q=
|
||||
github.com/golangci/lint-1 v0.0.0-20181222135242-d2cdd8c08219/go.mod h1:/X8TswGSh1pIozq4ZwCfxS0WA5JGXguxk94ar/4c87Y=
|
||||
github.com/google/btree v0.0.0-20180813153112-4030bb1f1f0c/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
|
||||
github.com/google/btree v1.0.0/go.mod h1:lNA+9X1NB3Zf8V7Ke586lFgjr2dZNuvo3lPJSGZ5JPQ=
|
||||
github.com/google/go-cmp v0.2.0/go.mod h1:oXzfMopK8JAjlY9xF4vHSVASa0yLyX7SntLO5aqRK0M=
|
||||
github.com/google/go-cmp v0.3.0/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
|
||||
github.com/google/go-cmp v0.3.1/go.mod h1:8QqcDgzrUqlUb/G2PQTWiueGozuR1884gddMywk6iLU=
|
||||
github.com/google/go-cmp v0.4.0 h1:xsAVV57WRhGj6kEIi8ReJzQlHHqcBYCElAvkovg3B/4=
|
||||
github.com/google/go-cmp v0.4.0/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/go-cmp v0.5.3 h1:x95R7cp+rSeeqAMI2knLtQ0DKlaBhv2NrtrOvafPHRo=
|
||||
github.com/google/go-cmp v0.5.3/go.mod h1:v8dTdLbMG2kIc/vJvl+f65V22dbkXbowE6jgT/gNBxE=
|
||||
github.com/google/gofuzz v1.0.0/go.mod h1:dBl0BpW6vV/+mYPU4Po3pmUjxk6FQPldtuIdl/M65Eg=
|
||||
github.com/google/martian v2.1.0+incompatible/go.mod h1:9I4somxYTbIHy5NJKHRl3wXiIaQGbYVAs8BPL6v8lEs=
|
||||
github.com/google/pprof v0.0.0-20181206194817-3ea8567a2e57/go.mod h1:zfwlbNMJ+OItoe0UupaVj+oy1omPYYDuagoSzA8v9mc=
|
||||
@@ -156,8 +156,8 @@ github.com/google/pprof v0.0.0-20190908185732-236ed259b199/go.mod h1:zfwlbNMJ+OI
|
||||
github.com/google/renameio v0.1.0/go.mod h1:KWCgfxg9yswjAJkECMjeO8J8rahYeXnNhOm40UhjYkI=
|
||||
github.com/google/uuid v1.1.1/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/google/uuid v1.1.5/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/google/uuid v1.2.0 h1:qJYtXnJRWmpe7m/3XlyhrsLrEURqHRM2kxzoxXqyUDs=
|
||||
github.com/google/uuid v1.2.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/google/uuid v1.3.0 h1:t6JiXgmwXMjEs8VusXIJk2BXHsn+wx8BZdTaoZ5fu7I=
|
||||
github.com/google/uuid v1.3.0/go.mod h1:TIyPZe4MgqvfeYDBFedMoGGpEw/LqOeaOT+nhxU+yHo=
|
||||
github.com/googleapis/gax-go/v2 v2.0.4/go.mod h1:0Wqv26UfaUD9n4G6kQubkQ+KchISgw+vpHVxEJEs9eg=
|
||||
github.com/googleapis/gax-go/v2 v2.0.5/go.mod h1:DWXyrwAJ9X0FpwwEdw+IPEYBICEFu5mhpdKc/us6bOk=
|
||||
github.com/gopherjs/gopherjs v0.0.0-20181017120253-0766667cb4d1 h1:EGx4pi6eqNxGaHF6qqu48+N2wcFQ5qg5FXgOdqsJ5d8=
|
||||
@@ -254,13 +254,11 @@ github.com/jaypipes/ghw v0.6.1/go.mod h1:QOXppNRCLGYR1H+hu09FxZPqjNt09bqUZUnOL3R
|
||||
github.com/jaypipes/pcidb v0.5.0 h1:4W5gZ+G7QxydevI8/MmmKdnIPJpURqJ2JNXTzfLxF5c=
|
||||
github.com/jaypipes/pcidb v0.5.0/go.mod h1:L2RGk04sfRhp5wvHO0gfRAMoLY/F3PKv/nwJeVoho0o=
|
||||
github.com/jessevdk/go-flags v1.4.0/go.mod h1:4FA24M0QyGHXBuZZK/XkWh8h0e1EYbRYJSGM75WSRxI=
|
||||
github.com/jinzhu/gorm v1.9.16 h1:+IyIjPEABKRpsu/F8OvDPy9fyQlgsg2luMV2ZIH5i5o=
|
||||
github.com/jinzhu/gorm v1.9.16/go.mod h1:G3LB3wezTOWM2ITLzPxEXgSkOXAntiLHS7UdBefADcs=
|
||||
github.com/jinzhu/inflection v1.0.0 h1:K317FqzuhWc8YvSVlFMCCUb36O/S9MCKRDI7QkRKD/E=
|
||||
github.com/jinzhu/inflection v1.0.0/go.mod h1:h+uFLlag+Qp1Va5pdKtLDYj+kHp5pxUVkryuEj+Srlc=
|
||||
github.com/jinzhu/now v1.0.1/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
|
||||
github.com/jinzhu/now v1.1.1 h1:g39TucaRWyV3dwDO++eEc6qf8TVIQ/Da48WmqjZ3i7E=
|
||||
github.com/jinzhu/now v1.1.1/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
|
||||
github.com/jinzhu/now v1.1.4 h1:tHnRBy1i5F2Dh8BAFxqFzxKqqvezXrL2OW1TnX+Mlas=
|
||||
github.com/jinzhu/now v1.1.4/go.mod h1:d3SSVoowX0Lcu0IBviAWJpolVfI5UJVZZ7cO71lE/z8=
|
||||
github.com/joho/godotenv v1.3.0 h1:Zjp+RcGpHhGlrMbJzXTrZZPrWj+1vfm90La1wgB6Bhc=
|
||||
github.com/joho/godotenv v1.3.0/go.mod h1:7hK45KPybAkOC6peb+G5yklZfMxEjkZhHbwpqxOKXbg=
|
||||
github.com/jonboulle/clockwork v0.1.0/go.mod h1:Ii8DK3G1RaLaWxj9trq07+26W01tbo22gdxWY5EU2bo=
|
||||
@@ -271,32 +269,30 @@ github.com/jstemmer/go-junit-report v0.0.0-20190106144839-af01ea7f8024/go.mod h1
|
||||
github.com/jtolds/gls v4.20.0+incompatible h1:xdiiI2gbIgH/gLH7ADydsJ1uDOEzR8yvV7C0MuV77Wo=
|
||||
github.com/jtolds/gls v4.20.0+incompatible/go.mod h1:QJZ7F/aHp+rZTRtaJ1ow/lLfFfVYBRgL+9YlvaHOwJU=
|
||||
github.com/julienschmidt/httprouter v1.2.0/go.mod h1:SYymIcj16QtmaHHD7aYtjjsJG7VTCxuUUipMqKk8s4w=
|
||||
github.com/kballard/go-shellquote v0.0.0-20180428030007-95032a82bc51/go.mod h1:CzGEWj7cYgsdH8dAjBGEr58BoE7ScuLd+fwFZ44+/x8=
|
||||
github.com/kisielk/errcheck v1.1.0/go.mod h1:EZBBE59ingxPouuu3KfxchcWSUPOHkagtvWXihfKN4Q=
|
||||
github.com/kisielk/gotool v1.0.0/go.mod h1:XhKaO+MFFWcvkIS/tQcRk01m1F5IRFswLeQ+oQHNcck=
|
||||
github.com/klauspost/compress v1.10.3/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs=
|
||||
github.com/klauspost/compress v1.11.7 h1:0hzRabrMN4tSTvMfnL3SCv1ZGeAP23ynzodBgaHeMeg=
|
||||
github.com/klauspost/compress v1.11.7/go.mod h1:aoV0uJVorq1K+umq18yTdKaF57EivdYsUV+/s2qKfXs=
|
||||
github.com/klauspost/compress v1.12.1 h1:/+xsCsk06wE38cyiqOR/o7U2fSftcH72xD+BQXmja/g=
|
||||
github.com/klauspost/compress v1.12.1/go.mod h1:8dP1Hq4DHOhN9w426knH3Rhby4rFm6D8eO+e+Dq5Gzg=
|
||||
github.com/knq/sysutil v0.0.0-20181215143952-f05b59f0f307/go.mod h1:BjPj+aVjl9FW/cCGiF3nGh5v+9Gd3VCgBQbod/GlMaQ=
|
||||
github.com/konsorten/go-windows-terminal-sequences v1.0.1/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
||||
github.com/konsorten/go-windows-terminal-sequences v1.0.2 h1:DB17ag19krx9CFsz4o3enTrPXyIXCl+2iCXH/aMAp9s=
|
||||
github.com/konsorten/go-windows-terminal-sequences v1.0.2/go.mod h1:T0+1ngSBFLxvqU3pZ+m/2kptfBszLMUkC4ZK/EgS/cQ=
|
||||
github.com/kr/logfmt v0.0.0-20140226030751-b84e30acd515/go.mod h1:+0opPa2QZZtGFBFZlji/RkVcI2GknAs/DXo4wKdlNEc=
|
||||
github.com/kr/pretty v0.1.0 h1:L/CwN0zerZDmRFUapSPitk6f+Q3+0za1rQkzVuMiMFI=
|
||||
github.com/kr/pretty v0.1.0/go.mod h1:dAy3ld7l9f0ibDNOQOHHMYYIIbhfbHSm3C4ZsoJORNo=
|
||||
github.com/kr/pty v1.1.1/go.mod h1:pFQYn66WHrOpPYNljwOMqo10TkYh1fy3cYio2l3bCsQ=
|
||||
github.com/kr/pty v1.1.8/go.mod h1:O1sed60cT9XZ5uDucP5qwvh+TE3NnUj51EiZO/lmSfw=
|
||||
github.com/kr/text v0.1.0 h1:45sCR5RtlFHMR4UwH9sdQ5TC8v0qDQCHnXt+kaKSTVE=
|
||||
github.com/kr/text v0.1.0/go.mod h1:4Jbv+DJW3UT/LiOwJeYQe1efqtUx/iVham/4vfdArNI=
|
||||
github.com/kvz/logstreamer v0.0.0-20150507115422-a635b98146f0 h1:3tLzEnUizyN9YLWFTT9loC30lSBvh2y70LTDcZOTs1s=
|
||||
github.com/kvz/logstreamer v0.0.0-20150507115422-a635b98146f0/go.mod h1:8/LTPeDLaklcUjgSQBHbhBF1ibKAFxzS5o+H7USfMSA=
|
||||
github.com/kr/text v0.2.0 h1:5Nx0Ya0ZqY2ygV366QzturHI13Jq95ApcVaJBhpS+AY=
|
||||
github.com/kvz/logstreamer v0.0.0-20201023134116-02d20f4338f5 h1:dkCjlgGN81ahDFtM9R1x16gFGTa7ZvgZfdtAfM9lWOs=
|
||||
github.com/kvz/logstreamer v0.0.0-20201023134116-02d20f4338f5/go.mod h1:8/LTPeDLaklcUjgSQBHbhBF1ibKAFxzS5o+H7USfMSA=
|
||||
github.com/labstack/echo/v4 v4.2.1/go.mod h1:AA49e0DZ8kk5jTOOCKNuPR6oTnBS0dYiM4FW1e6jwpg=
|
||||
github.com/labstack/gommon v0.3.0/go.mod h1:MULnywXg0yavhxWKc+lOruYdAhDwPK9wf0OL7NoOu+k=
|
||||
github.com/leodido/go-urn v1.2.0 h1:hpXL4XnriNwQ/ABnpepYM/1vCLWNDfUNts8dX3xTG6Y=
|
||||
github.com/leodido/go-urn v1.2.0/go.mod h1:+8+nEpDfqqsY+g338gtMEUOtuK+4dEMhiQEgxpxOKII=
|
||||
github.com/lib/pq v1.0.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
|
||||
github.com/lib/pq v1.1.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
|
||||
github.com/lib/pq v1.1.1/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
|
||||
github.com/lib/pq v1.2.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
|
||||
github.com/lib/pq v1.3.0/go.mod h1:5WUZQaWbwv1U+lTReE5YruASi9Al49XbQIvNi/34Woo=
|
||||
github.com/magiconair/properties v1.8.0/go.mod h1:PppfXfuXeibc/6YijjN8zIbojt8czPbwD3XqdrwzmxQ=
|
||||
@@ -320,12 +316,12 @@ github.com/mattn/go-isatty v0.0.5/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hd
|
||||
github.com/mattn/go-isatty v0.0.7/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
|
||||
github.com/mattn/go-isatty v0.0.8/go.mod h1:Iq45c/XA43vh69/j3iqttzPXn0bhXyGjM0Hdxcsrc5s=
|
||||
github.com/mattn/go-isatty v0.0.9/go.mod h1:YNRxwqDuOph6SZLI9vUUz6OYw3QyUt7WiY2yME+cCiQ=
|
||||
github.com/mattn/go-isatty v0.0.12 h1:wuysRhFDzyxgEmMf5xjvJ2M9dZoWAXNNr5LSBS7uHXY=
|
||||
github.com/mattn/go-isatty v0.0.12/go.mod h1:cbi8OIDigv2wuxKPP5vlRcQ1OAZbq2CE4Kysco4FUpU=
|
||||
github.com/mattn/go-isatty v0.0.14 h1:yVuAays6BHfxijgZPzw+3Zlu5yQgKGP2/hcQbHb7S9Y=
|
||||
github.com/mattn/go-isatty v0.0.14/go.mod h1:7GGIvUiUoEMVVmxf/4nioHXj79iQHKdU27kJ6hsGG94=
|
||||
github.com/mattn/go-sqlite3 v1.14.0/go.mod h1:JIl7NbARA7phWnGvh0LKTyg7S9BA+6gx71ShQilpsus=
|
||||
github.com/mattn/go-sqlite3 v1.14.3/go.mod h1:WVKg1VTActs4Qso6iwGbiFih2UIHo0ENGwNd0Lj+XmI=
|
||||
github.com/mattn/go-sqlite3 v1.14.4 h1:4rQjbDxdu9fSgI/r3KN72G3c2goxknAqHHgPWWs8UlI=
|
||||
github.com/mattn/go-sqlite3 v1.14.4/go.mod h1:WVKg1VTActs4Qso6iwGbiFih2UIHo0ENGwNd0Lj+XmI=
|
||||
github.com/mattn/go-sqlite3 v1.14.12 h1:TJ1bhYJPV44phC+IMu1u2K/i5RriLTPe+yc68XDJ1Z0=
|
||||
github.com/mattn/go-sqlite3 v1.14.12/go.mod h1:NyWgC/yNuGj7Q9rpYnZvas74GogHl5/Z4A/KQRfk6bU=
|
||||
github.com/matttproud/golang_protobuf_extensions v1.0.1/go.mod h1:D8He9yQNgCq6Z5Ld7szi9bcBfOoFv/3dc6xSMkL2PC0=
|
||||
github.com/miekg/dns v1.0.14/go.mod h1:W1PPwlIAgtquWBMBEV9nkV9Cazfe8ScdGz/Lj7v3Nrg=
|
||||
github.com/mitchellh/cli v1.0.0/go.mod h1:hNIlj7HEI86fIcpObd7a0FcrxTWetlwJDGcceTlRvqc=
|
||||
@@ -346,17 +342,16 @@ github.com/modern-go/reflect2 v0.0.0-20180701023420-4b7aa43c6742/go.mod h1:bx2lN
|
||||
github.com/modern-go/reflect2 v1.0.1 h1:9f412s+6RmYXLWZSEzVVgPGK7C2PphHj5RJrvfx9AWI=
|
||||
github.com/modern-go/reflect2 v1.0.1/go.mod h1:bx2lNnkwVCuqBIxFjflWJWanXIb3RllmbCylyMrvgv0=
|
||||
github.com/mwitkow/go-conntrack v0.0.0-20161129095857-cc309e4a2223/go.mod h1:qRWi+5nqEBWmkhHvq77mSJWrCKwh8bxhgT7d/eI7P4U=
|
||||
github.com/niemeyer/pretty v0.0.0-20200227124842-a10e7caefd8e h1:fD57ERR4JtEqsWbfPhv4DMiApHyliiK5xCTNVSPiaAs=
|
||||
github.com/nxadm/tail v1.4.4/go.mod h1:kenIhsEOeOJmVchQTgglprH7qJGnHDVpk1VPCcaMI8A=
|
||||
github.com/nxadm/tail v1.4.6 h1:11TGpSHY7Esh/i/qnq02Jo5oVrI1Gue8Slbq0ujPZFQ=
|
||||
github.com/nxadm/tail v1.4.6/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU=
|
||||
github.com/nxadm/tail v1.4.8 h1:nPr65rt6Y5JFSKQO7qToXr7pePgD6Gwiw05lkbyAQTE=
|
||||
github.com/nxadm/tail v1.4.8/go.mod h1:+ncqLTQzXmGhMZNUePPaPqPvBxHAIsmXswZKocGu+AU=
|
||||
github.com/oklog/ulid v1.3.1/go.mod h1:CirwcVhetQ6Lv90oh/F+FBtV6XMibvdAFo93nm5qn4U=
|
||||
github.com/onsi/ginkgo v1.6.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
||||
github.com/onsi/ginkgo v1.8.0/go.mod h1:lLunBs/Ym6LB5Z9jYTR76FiuTmxDTDusOGeTQH+WWjE=
|
||||
github.com/onsi/ginkgo v1.12.1/go.mod h1:zj2OWP4+oCPe1qIXoGWkgMRwljMUYCdkwsT2108oapk=
|
||||
github.com/onsi/ginkgo v1.14.2 h1:8mVmC9kjFFmA8H4pKMUhcblgifdkOIXPvbhN1T36q1M=
|
||||
github.com/onsi/ginkgo v1.14.2/go.mod h1:iSB4RoI2tjJc9BBv4NKIKWKya62Rps+oPG/Lv9klQyY=
|
||||
github.com/onsi/ginkgo v1.16.1 h1:foqVmeWDD6yYpK+Yz3fHyNIxFYNxswxqNFjSKe+vI54=
|
||||
github.com/onsi/ginkgo v1.16.1/go.mod h1:CObGmKUOKaSC0RjmoAK7tKyn4Azo5P2IWuoMnvwxz1E=
|
||||
github.com/onsi/gomega v1.4.3/go.mod h1:ex+gbHU/CVuBBDIJjb2X0qEXbFg53c61hWP/1CpauHY=
|
||||
github.com/onsi/gomega v1.7.1/go.mod h1:XdKZgCCFLUoM/7CFJVPcG8C1xQ1AJ0vpAezJrB7JYyY=
|
||||
github.com/onsi/gomega v1.10.1 h1:o0+MgICZLuZ7xjH7Vx6zS/zcu93/BEp1VwkIW1mEXCE=
|
||||
@@ -381,6 +376,8 @@ github.com/prometheus/common v0.4.0/go.mod h1:TNfzLD0ON7rHzMJeJkieUDPYmFC7Snx/y8
|
||||
github.com/prometheus/procfs v0.0.0-20181005140218-185b4288413d/go.mod h1:c3At6R/oaqEKCNdg8wHV1ftS6bRYblBhIjjI8uT2IGk=
|
||||
github.com/prometheus/procfs v0.0.0-20190507164030-5867b95ac084/go.mod h1:TjEm7ze935MbeOT/UhFTIMYKhuLP4wbCsTZCD3I8kEA=
|
||||
github.com/prometheus/tsdb v0.7.1/go.mod h1:qhTCs0VvXwvX/y3TZrWD7rabWM+ijKTux40TwIPHuXU=
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0 h1:OdAsTTz6OkFY5QxjkYwrChwuRruF69c169dPK26NUlk=
|
||||
github.com/remyoudompheng/bigfft v0.0.0-20200410134404-eec4a21b6bb0/go.mod h1:qqbHyh8v60DhA7CoWK5oRCqLrMHRGoxYCSS9EjAz6Eo=
|
||||
github.com/rogpeppe/fastuuid v0.0.0-20150106093220-6724a57986af/go.mod h1:XWv6SoW27p1b0cqNHllgS5HIMJraePCO15w5zCzIWYg=
|
||||
github.com/rogpeppe/go-internal v1.3.0/go.mod h1:M8bDsm7K2OlrFYOpmOWEs/qY81heoFRclV5y23lUDJ4=
|
||||
github.com/rs/xid v1.2.1/go.mod h1:+uKXf+4Djp6Md1KODXJxgGQPKngRmWyn10oCKFzNHOQ=
|
||||
@@ -389,6 +386,8 @@ github.com/rs/zerolog v1.15.0/go.mod h1:xYTKnLHcpfU2225ny5qZjxnj9NvkumZYjJHlAThC
|
||||
github.com/russross/blackfriday/v2 v2.0.1 h1:lPqVAte+HuHNfhJ/0LC98ESWRz8afy9tM/0RK8m9o+Q=
|
||||
github.com/russross/blackfriday/v2 v2.0.1/go.mod h1:+Rmxgy9KzJVeS9/2gXHxylqXiyQDYRxCVz55jmeOWTM=
|
||||
github.com/ryanuber/columnize v0.0.0-20160712163229-9b3edd62028f/go.mod h1:sm1tb6uqfes/u+d4ooFouqFdy9/2g9QGwK3SQygK0Ts=
|
||||
github.com/samber/lo v1.25.0 h1:H8F6cB0RotRdgcRCivTByAQePaYhGMdOTJIj2QFS2I0=
|
||||
github.com/samber/lo v1.25.0/go.mod h1:2I7tgIv8Q1SG2xEIkRq0F2i2zgxVpnyPOP0d3Gj2r+A=
|
||||
github.com/satori/go.uuid v1.2.0/go.mod h1:dA0hQrYB0VpLJoorglMZABFdXlWrHn1NEOzdhQKdks0=
|
||||
github.com/sean-/seed v0.0.0-20170313163322-e2103e2c3529/go.mod h1:DxrIzT+xaE7yg65j358z/aeFdxmN0P9QXhEzd20vsDc=
|
||||
github.com/shopspring/decimal v0.0.0-20180709203117-cd690d0c9e24/go.mod h1:M+9NzErvs504Cn4c5DxATwIqPbtswREoFCre64PpcG4=
|
||||
@@ -432,14 +431,15 @@ github.com/stretchr/objx v0.2.0/go.mod h1:qt09Ya8vawLte6SNmTgCsAVtYtaKzEcn8ATUoH
|
||||
github.com/stretchr/testify v1.2.2/go.mod h1:a8OnRcib4nhh0OaRAV+Yts87kKdq0PP7pXfy6kDkUVs=
|
||||
github.com/stretchr/testify v1.3.0/go.mod h1:M5WIy9Dh21IEIfnGCwXGc5bZfKNJtfHm1UVUgZn+9EI=
|
||||
github.com/stretchr/testify v1.4.0/go.mod h1:j7eGeouHqKxXV5pUuKE4zz7dFj8WfuZ+81PSLYec5m4=
|
||||
github.com/stretchr/testify v1.5.1 h1:nOGnQDM7FYENwehXlg/kFVnos3rEvtKTjRvOWSzb6H4=
|
||||
github.com/stretchr/testify v1.5.1/go.mod h1:5W2xD1RspED5o8YsWQXVCued0rvSQ+mT+I5cxcmMvtA=
|
||||
github.com/stretchr/testify v1.7.1 h1:5TQK59W5E3v0r2duFAb7P95B6hEeOyEnHRa8MjYSMTY=
|
||||
github.com/stretchr/testify v1.7.1/go.mod h1:6Fq8oRcR53rry900zMqJjRRixrwX3KX962/h/Wwjteg=
|
||||
github.com/subosito/gotenv v1.2.0 h1:Slr1R9HxAlEKefgq5jn9U+DnETlIUa6HfgEzj0g5d7s=
|
||||
github.com/subosito/gotenv v1.2.0/go.mod h1:N0PQaV/YGNqwC0u51sEeR/aUtSLEXKX9iv69rRypqCw=
|
||||
github.com/thoas/go-funk v0.9.1 h1:O549iLZqPpTUQ10ykd26sZhzD+rmR5pWhuElrhbC20M=
|
||||
github.com/tmc/grpc-websocket-proxy v0.0.0-20190109142713-0ad062ec5ee5/go.mod h1:ncp9v5uamzpCO7NfCPTXjqaC+bZgJeR0sMTm6dMHP7U=
|
||||
github.com/twitchyliquid64/golang-asm v0.0.0-20190126203739-365674df15fc/go.mod h1:NoCfSFWosfqMqmmD7hApkirIK9ozpHjxRnRxs1l413A=
|
||||
github.com/ugorji/go v1.1.4/go.mod h1:uQMGLiO92mf5W77hV/PUCpI3pbzQx3CRekS0kk+RGrc=
|
||||
github.com/ugorji/go v1.1.7 h1:/68gy2h+1mWMrwZFeD1kQialdSzAb432dtpeJ42ovdo=
|
||||
github.com/ugorji/go v1.1.7/go.mod h1:kZn38zHttfInRq0xu/PH0az30d+z6vm202qpg1oXVMw=
|
||||
github.com/ugorji/go/codec v1.1.7 h1:2SvQaVZ1ouYrrKKwoSk2pzd4A9evlKJb9oTL+OaLUSs=
|
||||
github.com/ugorji/go/codec v1.1.7/go.mod h1:Ax+UKWsSmolVDwsd+7N3ZtXu+yMGCf907BLYF3GoBXY=
|
||||
@@ -475,7 +475,6 @@ golang.org/x/crypto v0.0.0-20190605123033-f99c8df09eb5/go.mod h1:yigFU9vqHzYiE8U
|
||||
golang.org/x/crypto v0.0.0-20190820162420-60c769a6c586/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20190911031432-227b76d455e7/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20191011191535-87dc89f01550/go.mod h1:yigFU9vqHzYiE8UmvKecakEJjdnWj3jj499lnFckfCI=
|
||||
golang.org/x/crypto v0.0.0-20191205180655-e7c4368fe9dd/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20200323165209-0ec3e9974c59/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20200622213623-75b288015ac9/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
golang.org/x/crypto v0.0.0-20200820211705-5c72a883971a/go.mod h1:LzIPMQfyMNhhGPhUkYOs5KpL4U8rLKemX1yGLhDgUto=
|
||||
@@ -486,6 +485,8 @@ golang.org/x/exp v0.0.0-20190306152737-a1d7652674e8/go.mod h1:CJ0aWSM057203Lf6IL
|
||||
golang.org/x/exp v0.0.0-20190510132918-efd6b22b2522/go.mod h1:ZjyILWgesfNpC6sMxTJOJm9Kp84zZh5NQWvqDGG3Qr8=
|
||||
golang.org/x/exp v0.0.0-20190829153037-c13cbed26979/go.mod h1:86+5VVa7VpoJ4kLfm080zCjGlMRFzhUhsZKEZO7MGek=
|
||||
golang.org/x/exp v0.0.0-20191030013958-a1ab85dbe136/go.mod h1:JXzH8nQsPlswgeRAPE3MuO9GYsAcnJvJ4vnMwN/5qkY=
|
||||
golang.org/x/exp v0.0.0-20220303212507-bbda1eaf7a17 h1:3MTrJm4PyNL9NBqvYDSj3DHl46qQakyfqfWo4jgfaEM=
|
||||
golang.org/x/exp v0.0.0-20220303212507-bbda1eaf7a17/go.mod h1:lgLbSvA5ygNOMpwM/9anMpWVlVJ7Z+cHWq/eFuinpGE=
|
||||
golang.org/x/image v0.0.0-20190227222117-0694c2d4d067/go.mod h1:kZ7UVZpmo3dzQBMxlp+ypCbDeSB+sBbTgSJuh5dn5js=
|
||||
golang.org/x/image v0.0.0-20190802002840-cff245a6509b/go.mod h1:FeLwcggjj3mMvU+oOTbSwawSJRM1uh48EjtB4UJZlP0=
|
||||
golang.org/x/lint v0.0.0-20181026193005-c67002cb31c3/go.mod h1:UVdnD1Gm6xHRNCYTkRU2/jEulfH38KcIWyp/GAMgvoE=
|
||||
@@ -572,11 +573,12 @@ golang.org/x/sys v0.0.0-20200519105757-fe76b779f299/go.mod h1:h1NjWce9XRLGQEsW7w
|
||||
golang.org/x/sys v0.0.0-20200826173525-f9321e4c35a6/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20200930185726-fdedc70b468f/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20201119102817-f84b799fce68/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210112080510-489259a85091/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210113181707-4bcb84eeeb78/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210124154548-22da62e12c0c/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7 h1:iGu644GcxtEcrInvDsQRCwJjtCIOlT2V7IRt6ah2Whw=
|
||||
golang.org/x/sys v0.0.0-20210423185535-09eb48e85fd7/go.mod h1:h1NjWce9XRLGQEsW7wpKNCjG9DtNlClVuFLEZdDNbEs=
|
||||
golang.org/x/sys v0.0.0-20210630005230-0f9fa26af87c/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20211007075335-d3039528d8ac/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/sys v0.0.0-20220405052023-b1e9470b6e64 h1:D1v9ucDTYBtbz5vNuBbAhIMAGhQhJ6Ym5ah3maMVNX4=
|
||||
golang.org/x/sys v0.0.0-20220405052023-b1e9470b6e64/go.mod h1:oPkhp1MJrh7nUepCBck5+mAzfO9JrbApNNgaTdGDITg=
|
||||
golang.org/x/term v0.0.0-20201117132131-f5c789dd3221/go.mod h1:Nr5EML6q2oocZ2LXRh80K7BxOlk5/8JxuGnuhpl+muw=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1 h1:v+OssWQX+hTHEmOBgwxdZxK4zHq3yOs8F9J7mk0PY8E=
|
||||
golang.org/x/term v0.0.0-20201126162022-7de9c90e9dd1/go.mod h1:bj7SfCRtBDWHUb9snDiAeCFNEtKQo2Wmx5Cou7ajbmo=
|
||||
@@ -616,7 +618,7 @@ golang.org/x/tools v0.0.0-20191029190741-b9c20aec41a5/go.mod h1:b+2E5dAYhXwXZwtn
|
||||
golang.org/x/tools v0.0.0-20191112195655-aa38f8e97acc/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20191119224855-298f0cb1881e/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20191125144606-a911d9008d1f/go.mod h1:b+2E5dAYhXwXZwtnZ6UAqBI28+e2cm9otk0dWdXHAEo=
|
||||
golang.org/x/tools v0.0.0-20201224043029-2b0845dc783e/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
|
||||
golang.org/x/tools v0.0.0-20201124115921-2c860bdd6e78/go.mod h1:emZCQorbCU4vsT4fOWvOPXz4eW1wZW4PmDk9uLelYpA=
|
||||
golang.org/x/xerrors v0.0.0-20190410155217-1f06c39b4373/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20190513163551-3ee3066db522/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
golang.org/x/xerrors v0.0.0-20190717185122-a985d3407aa7/go.mod h1:I/5z698sn9Ka8TeJc9MKroUUfqBBauWjQqLJ2OPfmY0=
|
||||
@@ -656,8 +658,8 @@ google.golang.org/protobuf v1.23.0/go.mod h1:EGpADcykh3NcUnDUJcl1+ZksZNG86OlYog2
|
||||
gopkg.in/airbrake/gobrake.v2 v2.0.9/go.mod h1:/h5ZAUhDkGaJfjzjKLSjv6zCL6O0LLBxU4K+aSYdM/U=
|
||||
gopkg.in/alecthomas/kingpin.v2 v2.2.6/go.mod h1:FMv+mEhP44yOT+4EoQTLFTRgOQ1FBLkstjWtayDeSgw=
|
||||
gopkg.in/check.v1 v0.0.0-20161208181325-20d25e280405/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127 h1:qIbj1fsPNlZgppZ+VLlY7N33q108Sa+fhmuc+sWQYwY=
|
||||
gopkg.in/check.v1 v1.0.0-20180628173108-788fd7840127/go.mod h1:Co6ibVJAznAaIkqp8huTwlJQCZ016jof/cbN4VW5Yz0=
|
||||
gopkg.in/check.v1 v1.0.0-20200227125254-8fa46927fb4f h1:BLraFXnmrev5lT+xlilqcH8XK9/i0At2xKjWk4p6zsU=
|
||||
gopkg.in/errgo.v2 v2.1.0/go.mod h1:hNsd1EY+bozCKY1Ytp96fpM3vjJbqLJn88ws8XvfDNI=
|
||||
gopkg.in/fsnotify.v1 v1.4.7/go.mod h1:Tz8NjZHkW78fSQdbUxIjBTcgA1z1m8ZHf0WmKUhAMys=
|
||||
gopkg.in/gemnasium/logrus-airbrake-hook.v2 v2.1.2/go.mod h1:Xk6kEKp8OKb+X14hQBKWaSkCsqBpgog8nAV2xsGOxlo=
|
||||
@@ -675,20 +677,21 @@ gopkg.in/yaml.v2 v2.2.4/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.2.8/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v2 v2.3.0 h1:clyUAQHOM3G0M3f5vQj7LuJrETvjVot3Z5el9nffUtU=
|
||||
gopkg.in/yaml.v2 v2.3.0/go.mod h1:hI93XBmqTisBFMUTm0b8Fm+jr3Dg1NNxqwp+5A1VGuI=
|
||||
gopkg.in/yaml.v3 v3.0.0-20200313102051-9f266ea9e77c/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b h1:h8qDotaEPuJATrMmW04NCwg7v22aHH28wwpauUhK9Oo=
|
||||
gopkg.in/yaml.v3 v3.0.0-20210107192922-496545a6307b/go.mod h1:K4uyk7z7BCEPqu6E+C64Yfv1cQ7kz7rIZviUmN+EgEM=
|
||||
gorm.io/driver/mysql v1.0.1 h1:omJoilUzyrAp0xNoio88lGJCroGdIOen9hq2A/+3ifw=
|
||||
gorm.io/driver/mysql v1.0.1/go.mod h1:KtqSthtg55lFp3S5kUXqlGaelnWpKitn4k1xZTnoiPw=
|
||||
gorm.io/driver/postgres v1.0.0 h1:Yh4jyFQ0a7F+JPU0Gtiam/eKmpT/XFc1FKxotGqc6FM=
|
||||
gorm.io/driver/postgres v1.0.0/go.mod h1:wtMFcOzmuA5QigNsgEIb7O5lhvH1tHAF1RbWmLWV4to=
|
||||
gorm.io/driver/sqlite v1.1.1 h1:qtWqNAEUyi7gYSUAJXeiAMz0lUOdakZF5ia9Fqnp5G4=
|
||||
gorm.io/driver/sqlite v1.1.1/go.mod h1:hm2olEcl8Tmsc6eZyxYSeznnsDaMqamBvEXLNtBg4cI=
|
||||
gorm.io/driver/sqlite v1.1.3 h1:BYfdVuZB5He/u9dt4qDpZqiqDJ6KhPqs5QUqsr/Eeuc=
|
||||
gorm.io/driver/sqlite v1.1.3/go.mod h1:AKDgRWk8lcSQSw+9kxCJnX/yySj8G3rdwYlU57cB45c=
|
||||
gorm.io/driver/sqlserver v1.0.2 h1:FzxAlw0/7hntMzSiNfotpYCo9Lz8dqWQGdmCGqIiFGo=
|
||||
gorm.io/driver/sqlserver v1.0.2/go.mod h1:gb0Y9QePGgqjzrVyTQUZeh9zkd5v0iz71cM1B4ZycEY=
|
||||
gorm.io/gorm v1.9.19/go.mod h1:0HFTzE/SqkGTzK6TlDPPQbAYCluiVvhzoA1+aVyzenw=
|
||||
gorm.io/gorm v1.20.0/go.mod h1:0HFTzE/SqkGTzK6TlDPPQbAYCluiVvhzoA1+aVyzenw=
|
||||
gorm.io/gorm v1.20.1/go.mod h1:0HFTzE/SqkGTzK6TlDPPQbAYCluiVvhzoA1+aVyzenw=
|
||||
gorm.io/gorm v1.20.2 h1:bZzSEnq7NDGsrd+n3evOOedDrY5oLM5QPlCjZJUK2ro=
|
||||
gorm.io/gorm v1.20.2/go.mod h1:0HFTzE/SqkGTzK6TlDPPQbAYCluiVvhzoA1+aVyzenw=
|
||||
gorm.io/gorm v1.23.5 h1:TnlF26wScKSvknUC/Rn8t0NLLM22fypYBlvj1+aH6dM=
|
||||
gorm.io/gorm v1.23.5/go.mod h1:l2lP/RyAtc1ynaTjFksBde/O8v9oOGIApu2/xRitmZk=
|
||||
gosrc.io/xmpp v0.5.1 h1:Rgrm5s2rt+npGggJH3HakQxQXR8ZZz3+QRzakRQqaq4=
|
||||
gosrc.io/xmpp v0.5.1/go.mod h1:L3NFMqYOxyLz3JGmgFyWf7r9htE91zVGiK40oW4RwdY=
|
||||
gotest.tools v2.1.0+incompatible/go.mod h1:DsYFclhRJ6vuDpmuTbkuFWG+y2sxOXAzmJt81HFBacw=
|
||||
@@ -699,11 +702,36 @@ honnef.co/go/tools v0.0.0-20190418001031-e561f6794a2a/go.mod h1:rf3lG4BRIbNafJWh
|
||||
honnef.co/go/tools v0.0.1-2019.2.3/go.mod h1:a3bituU0lyd329TUQxRnasdCoJDkEUEAqEt0JzvZhAg=
|
||||
howett.net/plist v0.0.0-20181124034731-591f970eefbb h1:jhnBjNi9UFpfpl8YZhA9CrOqpnJdvzuiHsl/dnxl11M=
|
||||
howett.net/plist v0.0.0-20181124034731-591f970eefbb/go.mod h1:vMygbs4qMhSZSc4lCUl2OEE+rDiIIJAIdR4m7MiMcm0=
|
||||
lukechampine.com/uint128 v1.1.1/go.mod h1:c4eWIwlEGaxC/+H1VguhU4PHXNWDCDMUlWdIWl2j1gk=
|
||||
modernc.org/cc/v3 v3.36.0/go.mod h1:NFUHyPn4ekoC/JHeZFfZurN6ixxawE1BnVonP/oahEI=
|
||||
modernc.org/ccgo/v3 v3.0.0-20220428102840-41399a37e894/go.mod h1:eI31LL8EwEBKPpNpA4bU1/i+sKOwOrQy8D87zWUcRZc=
|
||||
modernc.org/ccgo/v3 v3.0.0-20220430103911-bc99d88307be/go.mod h1:bwdAnOoaIt8Ax9YdWGjxWsdkPcZyRPHqrOvJxaKAKGw=
|
||||
modernc.org/ccgo/v3 v3.16.4/go.mod h1:tGtX0gE9Jn7hdZFeU88slbTh1UtCYKusWOoCJuvkWsQ=
|
||||
modernc.org/ccgo/v3 v3.16.6/go.mod h1:tGtX0gE9Jn7hdZFeU88slbTh1UtCYKusWOoCJuvkWsQ=
|
||||
modernc.org/ccorpus v1.11.6/go.mod h1:2gEUTrWqdpH2pXsmTM1ZkjeSrUWDpjMu2T6m29L/ErQ=
|
||||
modernc.org/httpfs v1.0.6/go.mod h1:7dosgurJGp0sPaRanU53W4xZYKh14wfzX420oZADeHM=
|
||||
modernc.org/libc v0.0.0-20220428101251-2d5f3daf273b/go.mod h1:p7Mg4+koNjc8jkqwcoFBJx7tXkpj00G77X7A72jXPXA=
|
||||
modernc.org/libc v1.16.0/go.mod h1:N4LD6DBE9cf+Dzf9buBlzVJndKr/iJHG97vGLHYnb5A=
|
||||
modernc.org/libc v1.16.1/go.mod h1:JjJE0eu4yeK7tab2n4S1w8tlWd9MxXLRzheaRnAKymU=
|
||||
modernc.org/libc v1.16.7/go.mod h1:hYIV5VZczAmGZAnG15Vdngn5HSF5cSkbvfz2B7GRuVU=
|
||||
modernc.org/libc v1.16.8 h1:Ux98PaOMvolgoFX/YwusFOHBnanXdGRmWgI8ciI2z4o=
|
||||
modernc.org/libc v1.16.8/go.mod h1:hYIV5VZczAmGZAnG15Vdngn5HSF5cSkbvfz2B7GRuVU=
|
||||
modernc.org/mathutil v1.2.2/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E=
|
||||
modernc.org/mathutil v1.4.1 h1:ij3fYGe8zBF4Vu+g0oT7mB06r8sqGWKuJu1yXeR4by8=
|
||||
modernc.org/mathutil v1.4.1/go.mod h1:mZW8CKdRPY1v87qxC/wUdX5O1qDzXMP5TH3wjfpga6E=
|
||||
modernc.org/memory v1.1.1 h1:bDOL0DIDLQv7bWhP3gMvIrnoFw+Eo6F7a2QK9HPDiFU=
|
||||
modernc.org/memory v1.1.1/go.mod h1:/0wo5ibyrQiaoUoH7f9D8dnglAmILJ5/cxZlRECf+Nw=
|
||||
modernc.org/opt v0.1.1/go.mod h1:WdSiB5evDcignE70guQKxYUl14mgWtbClRi5wmkkTX0=
|
||||
modernc.org/sqlite v1.17.2 h1:TjmF36Wi5QcPYqRoAacV1cAyJ7xB/CD0ExpVUEMebnw=
|
||||
modernc.org/sqlite v1.17.2/go.mod h1:GOQmuiXd6pTTes1Fi2s9apiCcD/wbKQtBZ0Nw6/etjM=
|
||||
modernc.org/strutil v1.1.1/go.mod h1:DE+MQQ/hjKBZS2zNInV5hhcipt5rLPWkmpbGeW5mmdw=
|
||||
modernc.org/tcl v1.13.1/go.mod h1:XOLfOwzhkljL4itZkK6T72ckMgvj0BDsnKNdZVUOecw=
|
||||
modernc.org/token v1.0.0/go.mod h1:UGzOrNV1mAFSEB63lOFHIpNRUVMvYTc6yu1SMY/XTDM=
|
||||
modernc.org/z v1.5.1/go.mod h1:eWFB510QWW5Th9YGZT81s+LwvaAs3Q2yr4sP0rmLkv8=
|
||||
mvdan.cc/sh v2.6.4+incompatible/go.mod h1:IeeQbZq+x2SUGBensq/jge5lLQbS3XT2ktyp3wrt4x8=
|
||||
nhooyr.io/websocket v1.6.5/go.mod h1:F259lAzPRAH0htX2y3ehpJe09ih1aSHN7udWki1defY=
|
||||
nhooyr.io/websocket v1.8.6 h1:s+C3xAMLwGmlI31Nyn/eAehUlZPwfYZu2JXM621Q5/k=
|
||||
nhooyr.io/websocket v1.8.6/go.mod h1:B70DZP8IakI65RVQ51MsWP/8jndNma26DVA/nFSCgW0=
|
||||
nhooyr.io/websocket v1.8.7 h1:usjR2uOr/zjjkVMy0lW+PPohFok7PCow5sDjLgX4P4g=
|
||||
nhooyr.io/websocket v1.8.7/go.mod h1:B70DZP8IakI65RVQ51MsWP/8jndNma26DVA/nFSCgW0=
|
||||
rsc.io/binaryregexp v0.2.0/go.mod h1:qTv7/COck+e2FymRvadv62gMdZztPaShugOCi3I+8D8=
|
||||
rsc.io/quote/v3 v3.1.0/go.mod h1:yEA65RcK8LyAZtP9Kv3t0HmxON59tX3rD+tICJqUlj0=
|
||||
rsc.io/sampler v1.3.0/go.mod h1:T1hPZKmBbMNahiBKFy5HrXp6adAjACjK9JXDnKaTXpA=
|
||||
|
||||
@@ -2,6 +2,7 @@ package config
|
||||
|
||||
import (
|
||||
"github.com/analogj/go-util/utils"
|
||||
"github.com/analogj/scrutiny/webapp/backend/pkg"
|
||||
"github.com/analogj/scrutiny/webapp/backend/pkg/errors"
|
||||
"github.com/spf13/viper"
|
||||
"log"
|
||||
@@ -38,6 +39,8 @@ func (c *configuration) Init() error {
|
||||
c.SetDefault("log.file", "")
|
||||
|
||||
c.SetDefault("notify.urls", []string{})
|
||||
c.SetDefault("notify.filter_attributes", pkg.NotifyFilterAttributesAll)
|
||||
c.SetDefault("notify.level", pkg.NotifyLevelFail)
|
||||
|
||||
c.SetDefault("web.influxdb.scheme", "http")
|
||||
c.SetDefault("web.influxdb.host", "localhost")
|
||||
|
||||
@@ -4,8 +4,15 @@ const DeviceProtocolAta = "ATA"
|
||||
const DeviceProtocolScsi = "SCSI"
|
||||
const DeviceProtocolNvme = "NVMe"
|
||||
|
||||
type AttributeStatus uint8
|
||||
const NotifyFilterAttributesAll = "all"
|
||||
const NotifyFilterAttributesCritical = "critical"
|
||||
|
||||
const NotifyLevelFail = "fail"
|
||||
const NotifyLevelFailScrutiny = "fail_scrutiny"
|
||||
const NotifyLevelFailSmart = "fail_smart"
|
||||
|
||||
//go:generate stringer -type=AttributeStatus
|
||||
type AttributeStatus uint8
|
||||
const (
|
||||
// AttributeStatusPassed binary, 1,2,4,8,16,32,etc
|
||||
AttributeStatusPassed AttributeStatus = 0
|
||||
@@ -22,8 +29,8 @@ func AttributeStatusClear(b, flag AttributeStatus) AttributeStatus { return b &
|
||||
func AttributeStatusToggle(b, flag AttributeStatus) AttributeStatus { return b ^ flag }
|
||||
func AttributeStatusHas(b, flag AttributeStatus) bool { return b&flag != 0 }
|
||||
|
||||
//go:generate stringer -type=DeviceStatus
|
||||
type DeviceStatus uint8
|
||||
|
||||
const (
|
||||
// DeviceStatusPassed binary, 1,2,4,8,16,32,etc
|
||||
DeviceStatusPassed DeviceStatus = 0
|
||||
|
||||
@@ -0,0 +1,12 @@
|
||||
package database
|
||||
|
||||
import (
|
||||
"github.com/analogj/scrutiny/webapp/backend/pkg/models/measurements"
|
||||
"sort"
|
||||
)
|
||||
|
||||
func sortSmartMeasurementsDesc(smartResults []measurements.Smart) {
|
||||
sort.SliceStable(smartResults, func(i, j int) bool {
|
||||
return smartResults[i].Date.After(smartResults[j].Date)
|
||||
})
|
||||
}
|
||||
@@ -0,0 +1,30 @@
|
||||
package database
|
||||
|
||||
import (
|
||||
"github.com/analogj/scrutiny/webapp/backend/pkg/models/measurements"
|
||||
"github.com/stretchr/testify/require"
|
||||
"testing"
|
||||
"time"
|
||||
)
|
||||
|
||||
func Test_sortSmartMeasurementsDesc_LatestFirst(t *testing.T) {
|
||||
//setup
|
||||
timeNow := time.Now()
|
||||
smartResults := []measurements.Smart{
|
||||
{
|
||||
Date: timeNow.AddDate(0, 0, -2),
|
||||
},
|
||||
{
|
||||
Date: timeNow,
|
||||
},
|
||||
{
|
||||
Date: timeNow.AddDate(0, 0, -1),
|
||||
},
|
||||
}
|
||||
|
||||
//test
|
||||
sortSmartMeasurementsDesc(smartResults)
|
||||
|
||||
//assert
|
||||
require.Equal(t, smartResults[0].Date, timeNow)
|
||||
}
|
||||
@@ -6,11 +6,11 @@ import (
|
||||
"fmt"
|
||||
"github.com/analogj/scrutiny/webapp/backend/pkg/config"
|
||||
"github.com/analogj/scrutiny/webapp/backend/pkg/models"
|
||||
"github.com/glebarez/sqlite"
|
||||
influxdb2 "github.com/influxdata/influxdb-client-go/v2"
|
||||
"github.com/influxdata/influxdb-client-go/v2/api"
|
||||
"github.com/influxdata/influxdb-client-go/v2/domain"
|
||||
"github.com/sirupsen/logrus"
|
||||
"gorm.io/driver/sqlite"
|
||||
"gorm.io/gorm"
|
||||
"io/ioutil"
|
||||
"net/http"
|
||||
@@ -242,21 +242,29 @@ func (sr *scrutinyRepository) EnsureBuckets(ctx context.Context, org *domain.Org
|
||||
|
||||
//create buckets (used for downsampling)
|
||||
weeklyBucket := fmt.Sprintf("%s_weekly", sr.appConfig.GetString("web.influxdb.bucket"))
|
||||
if _, foundErr := sr.influxClient.BucketsAPI().FindBucketByName(ctx, weeklyBucket); foundErr != nil {
|
||||
if foundWeeklyBucket, foundErr := sr.influxClient.BucketsAPI().FindBucketByName(ctx, weeklyBucket); foundErr != nil {
|
||||
// metrics_weekly bucket will have a retention period of 8+1 weeks (since it will be down-sampled once a month)
|
||||
_, err := sr.influxClient.BucketsAPI().CreateBucketWithName(ctx, org, weeklyBucket, weeklyBucketRetentionRule)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
} else if sr.appConfig.GetBool("web.influxdb.retention_policy") {
|
||||
//correctly set the retention period for the bucket (may not be able to do it during setup/creation)
|
||||
foundWeeklyBucket.RetentionRules = domain.RetentionRules{weeklyBucketRetentionRule}
|
||||
sr.influxClient.BucketsAPI().UpdateBucket(ctx, foundWeeklyBucket)
|
||||
}
|
||||
|
||||
monthlyBucket := fmt.Sprintf("%s_monthly", sr.appConfig.GetString("web.influxdb.bucket"))
|
||||
if _, foundErr := sr.influxClient.BucketsAPI().FindBucketByName(ctx, monthlyBucket); foundErr != nil {
|
||||
if foundMonthlyBucket, foundErr := sr.influxClient.BucketsAPI().FindBucketByName(ctx, monthlyBucket); foundErr != nil {
|
||||
// metrics_monthly bucket will have a retention period of 24+1 months (since it will be down-sampled once a year)
|
||||
_, err := sr.influxClient.BucketsAPI().CreateBucketWithName(ctx, org, monthlyBucket, monthlyBucketRetentionRule)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
} else if sr.appConfig.GetBool("web.influxdb.retention_policy") {
|
||||
//correctly set the retention period for the bucket (may not be able to do it during setup/creation)
|
||||
foundMonthlyBucket.RetentionRules = domain.RetentionRules{monthlyBucketRetentionRule}
|
||||
sr.influxClient.BucketsAPI().UpdateBucket(ctx, foundMonthlyBucket)
|
||||
}
|
||||
|
||||
yearlyBucket := fmt.Sprintf("%s_yearly", sr.appConfig.GetString("web.influxdb.bucket"))
|
||||
|
||||
@@ -29,6 +29,7 @@ func (sr *scrutinyRepository) SaveSmartAttributes(ctx context.Context, wwn strin
|
||||
return deviceSmartData, sr.saveDatapoint(sr.influxWriteApi, "smart", tags, fields, deviceSmartData.Date, ctx)
|
||||
}
|
||||
|
||||
// GetSmartAttributeHistory MUST return in sorted order, where newest entries are at the beginning of the list, and oldest are at the end.
|
||||
func (sr *scrutinyRepository) GetSmartAttributeHistory(ctx context.Context, wwn string, durationKey string, attributes []string) ([]measurements.Smart, error) {
|
||||
// Get SMartResults from InfluxDB
|
||||
|
||||
@@ -64,6 +65,9 @@ func (sr *scrutinyRepository) GetSmartAttributeHistory(ctx context.Context, wwn
|
||||
return nil, err
|
||||
}
|
||||
|
||||
//we have to sort the smartResults again, because the `union` command will return multiple 'tables' and only sort the records in each table.
|
||||
sortSmartMeasurementsDesc(smartResults)
|
||||
|
||||
return smartResults, nil
|
||||
|
||||
//if err := device.SquashHistory(); err != nil {
|
||||
|
||||
@@ -10,9 +10,9 @@ import (
|
||||
"github.com/analogj/scrutiny/webapp/backend/pkg/models"
|
||||
"github.com/analogj/scrutiny/webapp/backend/pkg/models/collector"
|
||||
"github.com/analogj/scrutiny/webapp/backend/pkg/models/measurements"
|
||||
_ "github.com/glebarez/sqlite"
|
||||
"github.com/go-gormigrate/gormigrate/v2"
|
||||
"github.com/influxdata/influxdb-client-go/v2/api/http"
|
||||
_ "github.com/jinzhu/gorm/dialects/sqlite"
|
||||
log "github.com/sirupsen/logrus"
|
||||
"gorm.io/gorm"
|
||||
"strconv"
|
||||
@@ -267,6 +267,14 @@ func (sr *scrutinyRepository) Migrate(ctx context.Context) error {
|
||||
return tx.AutoMigrate(m20220509170100.Device{})
|
||||
},
|
||||
},
|
||||
{
|
||||
ID: "m20220709181300",
|
||||
Migrate: func(tx *gorm.DB) error {
|
||||
|
||||
// delete devices with empty `wwn` field (they are impossible to delete manually), and are invalid.
|
||||
return tx.Where("wwn = ?", "").Delete(&models.Device{}).Error
|
||||
},
|
||||
},
|
||||
})
|
||||
|
||||
if err := m.Migrate(); err != nil {
|
||||
|
||||
@@ -11,35 +11,71 @@ import (
|
||||
////////////////////////////////////////////////////////////////////////////////////////////////////////////////////
|
||||
func (sr *scrutinyRepository) EnsureTasks(ctx context.Context, orgID string) error {
|
||||
weeklyTaskName := "tsk-weekly-aggr"
|
||||
weeklyTaskScript := sr.DownsampleScript("weekly", weeklyTaskName, "0 1 * * 0")
|
||||
if found, findErr := sr.influxTaskApi.FindTasks(ctx, &api.TaskFilter{Name: weeklyTaskName}); findErr == nil && len(found) == 0 {
|
||||
//weekly on Sunday at 1:00am
|
||||
_, err := sr.influxTaskApi.CreateTaskWithCron(ctx, weeklyTaskName, sr.DownsampleScript("weekly"), "0 1 * * 0", orgID)
|
||||
_, err := sr.influxTaskApi.CreateTaskByFlux(ctx, weeklyTaskScript, orgID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
} else if len(found) == 1 {
|
||||
//check if we should update
|
||||
task := &found[0]
|
||||
if weeklyTaskScript != task.Flux {
|
||||
sr.logger.Infoln("updating weekly task script")
|
||||
task.Flux = weeklyTaskScript
|
||||
_, err := sr.influxTaskApi.UpdateTask(ctx, task)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
monthlyTaskName := "tsk-monthly-aggr"
|
||||
monthlyTaskScript := sr.DownsampleScript("monthly", monthlyTaskName, "30 1 1 * *")
|
||||
if found, findErr := sr.influxTaskApi.FindTasks(ctx, &api.TaskFilter{Name: monthlyTaskName}); findErr == nil && len(found) == 0 {
|
||||
//monthly on first day of the month at 1:30am
|
||||
_, err := sr.influxTaskApi.CreateTaskWithCron(ctx, monthlyTaskName, sr.DownsampleScript("monthly"), "30 1 1 * *", orgID)
|
||||
_, err := sr.influxTaskApi.CreateTaskByFlux(ctx, monthlyTaskScript, orgID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
} else if len(found) == 1 {
|
||||
//check if we should update
|
||||
task := &found[0]
|
||||
if monthlyTaskScript != task.Flux {
|
||||
sr.logger.Infoln("updating monthly task script")
|
||||
task.Flux = monthlyTaskScript
|
||||
_, err := sr.influxTaskApi.UpdateTask(ctx, task)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
|
||||
yearlyTaskName := "tsk-yearly-aggr"
|
||||
yearlyTaskScript := sr.DownsampleScript("yearly", yearlyTaskName, "0 2 1 1 *")
|
||||
if found, findErr := sr.influxTaskApi.FindTasks(ctx, &api.TaskFilter{Name: yearlyTaskName}); findErr == nil && len(found) == 0 {
|
||||
//yearly on the first day of the year at 2:00am
|
||||
_, err := sr.influxTaskApi.CreateTaskWithCron(ctx, yearlyTaskName, sr.DownsampleScript("yearly"), "0 2 1 1 *", orgID)
|
||||
_, err := sr.influxTaskApi.CreateTaskByFlux(ctx, yearlyTaskScript, orgID)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
} else if len(found) == 1 {
|
||||
//check if we should update
|
||||
task := &found[0]
|
||||
if yearlyTaskScript != task.Flux {
|
||||
sr.logger.Infoln("updating yearly task script")
|
||||
task.Flux = yearlyTaskScript
|
||||
_, err := sr.influxTaskApi.UpdateTask(ctx, task)
|
||||
if err != nil {
|
||||
return err
|
||||
}
|
||||
}
|
||||
}
|
||||
return nil
|
||||
}
|
||||
|
||||
func (sr *scrutinyRepository) DownsampleScript(aggregationType string) string {
|
||||
func (sr *scrutinyRepository) DownsampleScript(aggregationType string, name string, cron string) string {
|
||||
var sourceBucket string // the source of the data
|
||||
var destBucket string // the destination for the aggregated data
|
||||
var rangeStart string
|
||||
@@ -88,30 +124,37 @@ func (sr *scrutinyRepository) DownsampleScript(aggregationType string) string {
|
||||
*/
|
||||
|
||||
return fmt.Sprintf(`
|
||||
sourceBucket = "%s"
|
||||
rangeStart = %s
|
||||
rangeEnd = %s
|
||||
aggWindow = %s
|
||||
destBucket = "%s"
|
||||
destOrg = "%s"
|
||||
option task = {
|
||||
name: "%s",
|
||||
cron: "%s",
|
||||
}
|
||||
|
||||
from(bucket: sourceBucket)
|
||||
|> range(start: rangeStart, stop: rangeEnd)
|
||||
|> filter(fn: (r) => r["_measurement"] == "smart" )
|
||||
|> group(columns: ["device_wwn", "_field"])
|
||||
|> aggregateWindow(every: aggWindow, fn: last, createEmpty: false)
|
||||
|> to(bucket: destBucket, org: destOrg)
|
||||
sourceBucket = "%s"
|
||||
rangeStart = %s
|
||||
rangeEnd = %s
|
||||
aggWindow = %s
|
||||
destBucket = "%s"
|
||||
destOrg = "%s"
|
||||
|
||||
temp_data = from(bucket: sourceBucket)
|
||||
|> range(start: rangeStart, stop: rangeEnd)
|
||||
|> filter(fn: (r) => r["_measurement"] == "temp")
|
||||
|> group(columns: ["device_wwn"])
|
||||
|> toInt()
|
||||
from(bucket: sourceBucket)
|
||||
|> range(start: rangeStart, stop: rangeEnd)
|
||||
|> filter(fn: (r) => r["_measurement"] == "smart" )
|
||||
|> group(columns: ["device_wwn", "_field"])
|
||||
|> aggregateWindow(every: aggWindow, fn: last, createEmpty: false)
|
||||
|> to(bucket: destBucket, org: destOrg)
|
||||
|
||||
temp_data
|
||||
|> aggregateWindow(fn: mean, every: aggWindow)
|
||||
|> to(bucket: destBucket, org: destOrg)
|
||||
from(bucket: sourceBucket)
|
||||
|> range(start: rangeStart, stop: rangeEnd)
|
||||
|> filter(fn: (r) => r["_measurement"] == "temp")
|
||||
|> group(columns: ["device_wwn"])
|
||||
|> toInt()
|
||||
|> aggregateWindow(fn: mean, every: aggWindow, createEmpty: false)
|
||||
|> set(key: "_measurement", value: "temp")
|
||||
|> set(key: "_field", value: "temp")
|
||||
|> to(bucket: destBucket, org: destOrg)
|
||||
`,
|
||||
name,
|
||||
cron,
|
||||
sourceBucket,
|
||||
rangeStart,
|
||||
rangeEnd,
|
||||
|
||||
@@ -0,0 +1,164 @@
|
||||
package database
|
||||
|
||||
import (
|
||||
mock_config "github.com/analogj/scrutiny/webapp/backend/pkg/config/mock"
|
||||
"github.com/golang/mock/gomock"
|
||||
"github.com/stretchr/testify/require"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func Test_DownsampleScript_Weekly(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
//setup
|
||||
mockCtrl := gomock.NewController(t)
|
||||
defer mockCtrl.Finish()
|
||||
fakeConfig := mock_config.NewMockInterface(mockCtrl)
|
||||
fakeConfig.EXPECT().GetString("web.influxdb.bucket").Return("metrics").AnyTimes()
|
||||
fakeConfig.EXPECT().GetString("web.influxdb.org").Return("scrutiny").AnyTimes()
|
||||
|
||||
deviceRepo := scrutinyRepository{
|
||||
appConfig: fakeConfig,
|
||||
}
|
||||
|
||||
aggregationType := "weekly"
|
||||
|
||||
//test
|
||||
influxDbScript := deviceRepo.DownsampleScript(aggregationType, "tsk-weekly-aggr", "0 1 * * 0")
|
||||
|
||||
//assert
|
||||
require.Equal(t, `
|
||||
option task = {
|
||||
name: "tsk-weekly-aggr",
|
||||
cron: "0 1 * * 0",
|
||||
}
|
||||
|
||||
sourceBucket = "metrics"
|
||||
rangeStart = -2w
|
||||
rangeEnd = -1w
|
||||
aggWindow = 1w
|
||||
destBucket = "metrics_weekly"
|
||||
destOrg = "scrutiny"
|
||||
|
||||
from(bucket: sourceBucket)
|
||||
|> range(start: rangeStart, stop: rangeEnd)
|
||||
|> filter(fn: (r) => r["_measurement"] == "smart" )
|
||||
|> group(columns: ["device_wwn", "_field"])
|
||||
|> aggregateWindow(every: aggWindow, fn: last, createEmpty: false)
|
||||
|> to(bucket: destBucket, org: destOrg)
|
||||
|
||||
from(bucket: sourceBucket)
|
||||
|> range(start: rangeStart, stop: rangeEnd)
|
||||
|> filter(fn: (r) => r["_measurement"] == "temp")
|
||||
|> group(columns: ["device_wwn"])
|
||||
|> toInt()
|
||||
|> aggregateWindow(fn: mean, every: aggWindow, createEmpty: false)
|
||||
|> set(key: "_measurement", value: "temp")
|
||||
|> set(key: "_field", value: "temp")
|
||||
|> to(bucket: destBucket, org: destOrg)
|
||||
`, influxDbScript)
|
||||
}
|
||||
|
||||
func Test_DownsampleScript_Monthly(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
//setup
|
||||
mockCtrl := gomock.NewController(t)
|
||||
defer mockCtrl.Finish()
|
||||
fakeConfig := mock_config.NewMockInterface(mockCtrl)
|
||||
fakeConfig.EXPECT().GetString("web.influxdb.bucket").Return("metrics").AnyTimes()
|
||||
fakeConfig.EXPECT().GetString("web.influxdb.org").Return("scrutiny").AnyTimes()
|
||||
|
||||
deviceRepo := scrutinyRepository{
|
||||
appConfig: fakeConfig,
|
||||
}
|
||||
|
||||
aggregationType := "monthly"
|
||||
|
||||
//test
|
||||
influxDbScript := deviceRepo.DownsampleScript(aggregationType, "tsk-monthly-aggr", "30 1 1 * *")
|
||||
|
||||
//assert
|
||||
require.Equal(t, `
|
||||
option task = {
|
||||
name: "tsk-monthly-aggr",
|
||||
cron: "30 1 1 * *",
|
||||
}
|
||||
|
||||
sourceBucket = "metrics_weekly"
|
||||
rangeStart = -2mo
|
||||
rangeEnd = -1mo
|
||||
aggWindow = 1mo
|
||||
destBucket = "metrics_monthly"
|
||||
destOrg = "scrutiny"
|
||||
|
||||
from(bucket: sourceBucket)
|
||||
|> range(start: rangeStart, stop: rangeEnd)
|
||||
|> filter(fn: (r) => r["_measurement"] == "smart" )
|
||||
|> group(columns: ["device_wwn", "_field"])
|
||||
|> aggregateWindow(every: aggWindow, fn: last, createEmpty: false)
|
||||
|> to(bucket: destBucket, org: destOrg)
|
||||
|
||||
from(bucket: sourceBucket)
|
||||
|> range(start: rangeStart, stop: rangeEnd)
|
||||
|> filter(fn: (r) => r["_measurement"] == "temp")
|
||||
|> group(columns: ["device_wwn"])
|
||||
|> toInt()
|
||||
|> aggregateWindow(fn: mean, every: aggWindow, createEmpty: false)
|
||||
|> set(key: "_measurement", value: "temp")
|
||||
|> set(key: "_field", value: "temp")
|
||||
|> to(bucket: destBucket, org: destOrg)
|
||||
`, influxDbScript)
|
||||
}
|
||||
|
||||
func Test_DownsampleScript_Yearly(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
//setup
|
||||
mockCtrl := gomock.NewController(t)
|
||||
defer mockCtrl.Finish()
|
||||
fakeConfig := mock_config.NewMockInterface(mockCtrl)
|
||||
fakeConfig.EXPECT().GetString("web.influxdb.bucket").Return("metrics").AnyTimes()
|
||||
fakeConfig.EXPECT().GetString("web.influxdb.org").Return("scrutiny").AnyTimes()
|
||||
|
||||
deviceRepo := scrutinyRepository{
|
||||
appConfig: fakeConfig,
|
||||
}
|
||||
|
||||
aggregationType := "yearly"
|
||||
|
||||
//test
|
||||
influxDbScript := deviceRepo.DownsampleScript(aggregationType, "tsk-yearly-aggr", "0 2 1 1 *")
|
||||
|
||||
//assert
|
||||
require.Equal(t, `
|
||||
option task = {
|
||||
name: "tsk-yearly-aggr",
|
||||
cron: "0 2 1 1 *",
|
||||
}
|
||||
|
||||
sourceBucket = "metrics_monthly"
|
||||
rangeStart = -2y
|
||||
rangeEnd = -1y
|
||||
aggWindow = 1y
|
||||
destBucket = "metrics_yearly"
|
||||
destOrg = "scrutiny"
|
||||
|
||||
from(bucket: sourceBucket)
|
||||
|> range(start: rangeStart, stop: rangeEnd)
|
||||
|> filter(fn: (r) => r["_measurement"] == "smart" )
|
||||
|> group(columns: ["device_wwn", "_field"])
|
||||
|> aggregateWindow(every: aggWindow, fn: last, createEmpty: false)
|
||||
|> to(bucket: destBucket, org: destOrg)
|
||||
|
||||
from(bucket: sourceBucket)
|
||||
|> range(start: rangeStart, stop: rangeEnd)
|
||||
|> filter(fn: (r) => r["_measurement"] == "temp")
|
||||
|> group(columns: ["device_wwn"])
|
||||
|> toInt()
|
||||
|> aggregateWindow(fn: mean, every: aggWindow, createEmpty: false)
|
||||
|> set(key: "_measurement", value: "temp")
|
||||
|> set(key: "_field", value: "temp")
|
||||
|> to(bucket: destBucket, org: destOrg)
|
||||
`, influxDbScript)
|
||||
}
|
||||
@@ -0,0 +1,185 @@
|
||||
package database
|
||||
|
||||
import (
|
||||
mock_config "github.com/analogj/scrutiny/webapp/backend/pkg/config/mock"
|
||||
"github.com/golang/mock/gomock"
|
||||
"github.com/stretchr/testify/require"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func Test_aggregateTempQuery_Week(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
//setup
|
||||
mockCtrl := gomock.NewController(t)
|
||||
defer mockCtrl.Finish()
|
||||
fakeConfig := mock_config.NewMockInterface(mockCtrl)
|
||||
fakeConfig.EXPECT().GetString("web.influxdb.bucket").Return("metrics").AnyTimes()
|
||||
fakeConfig.EXPECT().GetString("web.influxdb.org").Return("scrutiny").AnyTimes()
|
||||
|
||||
deviceRepo := scrutinyRepository{
|
||||
appConfig: fakeConfig,
|
||||
}
|
||||
|
||||
aggregationType := DURATION_KEY_WEEK
|
||||
|
||||
//test
|
||||
influxDbScript := deviceRepo.aggregateTempQuery(aggregationType)
|
||||
|
||||
//assert
|
||||
require.Equal(t, `import "influxdata/influxdb/schema"
|
||||
weekData = from(bucket: "metrics")
|
||||
|> range(start: -1w, stop: now())
|
||||
|> filter(fn: (r) => r["_measurement"] == "temp" )
|
||||
|> aggregateWindow(every: 1h, fn: mean, createEmpty: false)
|
||||
|> group(columns: ["device_wwn"])
|
||||
|> toInt()
|
||||
|
||||
weekData
|
||||
|> schema.fieldsAsCols()
|
||||
|> yield()`, influxDbScript)
|
||||
}
|
||||
|
||||
func Test_aggregateTempQuery_Month(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
//setup
|
||||
mockCtrl := gomock.NewController(t)
|
||||
defer mockCtrl.Finish()
|
||||
fakeConfig := mock_config.NewMockInterface(mockCtrl)
|
||||
fakeConfig.EXPECT().GetString("web.influxdb.bucket").Return("metrics").AnyTimes()
|
||||
fakeConfig.EXPECT().GetString("web.influxdb.org").Return("scrutiny").AnyTimes()
|
||||
|
||||
deviceRepo := scrutinyRepository{
|
||||
appConfig: fakeConfig,
|
||||
}
|
||||
|
||||
aggregationType := DURATION_KEY_MONTH
|
||||
|
||||
//test
|
||||
influxDbScript := deviceRepo.aggregateTempQuery(aggregationType)
|
||||
|
||||
//assert
|
||||
require.Equal(t, `import "influxdata/influxdb/schema"
|
||||
weekData = from(bucket: "metrics")
|
||||
|> range(start: -1w, stop: now())
|
||||
|> filter(fn: (r) => r["_measurement"] == "temp" )
|
||||
|> aggregateWindow(every: 1h, fn: mean, createEmpty: false)
|
||||
|> group(columns: ["device_wwn"])
|
||||
|> toInt()
|
||||
|
||||
monthData = from(bucket: "metrics_weekly")
|
||||
|> range(start: -1mo, stop: -1w)
|
||||
|> filter(fn: (r) => r["_measurement"] == "temp" )
|
||||
|> aggregateWindow(every: 1h, fn: mean, createEmpty: false)
|
||||
|> group(columns: ["device_wwn"])
|
||||
|> toInt()
|
||||
|
||||
union(tables: [weekData, monthData])
|
||||
|> group(columns: ["device_wwn"])
|
||||
|> sort(columns: ["_time"], desc: false)
|
||||
|> schema.fieldsAsCols()`, influxDbScript)
|
||||
}
|
||||
|
||||
func Test_aggregateTempQuery_Year(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
//setup
|
||||
mockCtrl := gomock.NewController(t)
|
||||
defer mockCtrl.Finish()
|
||||
fakeConfig := mock_config.NewMockInterface(mockCtrl)
|
||||
fakeConfig.EXPECT().GetString("web.influxdb.bucket").Return("metrics").AnyTimes()
|
||||
fakeConfig.EXPECT().GetString("web.influxdb.org").Return("scrutiny").AnyTimes()
|
||||
|
||||
deviceRepo := scrutinyRepository{
|
||||
appConfig: fakeConfig,
|
||||
}
|
||||
|
||||
aggregationType := DURATION_KEY_YEAR
|
||||
|
||||
//test
|
||||
influxDbScript := deviceRepo.aggregateTempQuery(aggregationType)
|
||||
|
||||
//assert
|
||||
require.Equal(t, `import "influxdata/influxdb/schema"
|
||||
weekData = from(bucket: "metrics")
|
||||
|> range(start: -1w, stop: now())
|
||||
|> filter(fn: (r) => r["_measurement"] == "temp" )
|
||||
|> aggregateWindow(every: 1h, fn: mean, createEmpty: false)
|
||||
|> group(columns: ["device_wwn"])
|
||||
|> toInt()
|
||||
|
||||
monthData = from(bucket: "metrics_weekly")
|
||||
|> range(start: -1mo, stop: -1w)
|
||||
|> filter(fn: (r) => r["_measurement"] == "temp" )
|
||||
|> aggregateWindow(every: 1h, fn: mean, createEmpty: false)
|
||||
|> group(columns: ["device_wwn"])
|
||||
|> toInt()
|
||||
|
||||
yearData = from(bucket: "metrics_monthly")
|
||||
|> range(start: -1y, stop: -1mo)
|
||||
|> filter(fn: (r) => r["_measurement"] == "temp" )
|
||||
|> aggregateWindow(every: 1h, fn: mean, createEmpty: false)
|
||||
|> group(columns: ["device_wwn"])
|
||||
|> toInt()
|
||||
|
||||
union(tables: [weekData, monthData, yearData])
|
||||
|> group(columns: ["device_wwn"])
|
||||
|> sort(columns: ["_time"], desc: false)
|
||||
|> schema.fieldsAsCols()`, influxDbScript)
|
||||
}
|
||||
|
||||
func Test_aggregateTempQuery_Forever(t *testing.T) {
|
||||
t.Parallel()
|
||||
|
||||
//setup
|
||||
mockCtrl := gomock.NewController(t)
|
||||
defer mockCtrl.Finish()
|
||||
fakeConfig := mock_config.NewMockInterface(mockCtrl)
|
||||
fakeConfig.EXPECT().GetString("web.influxdb.bucket").Return("metrics").AnyTimes()
|
||||
fakeConfig.EXPECT().GetString("web.influxdb.org").Return("scrutiny").AnyTimes()
|
||||
|
||||
deviceRepo := scrutinyRepository{
|
||||
appConfig: fakeConfig,
|
||||
}
|
||||
|
||||
aggregationType := DURATION_KEY_FOREVER
|
||||
|
||||
//test
|
||||
influxDbScript := deviceRepo.aggregateTempQuery(aggregationType)
|
||||
|
||||
//assert
|
||||
require.Equal(t, `import "influxdata/influxdb/schema"
|
||||
weekData = from(bucket: "metrics")
|
||||
|> range(start: -1w, stop: now())
|
||||
|> filter(fn: (r) => r["_measurement"] == "temp" )
|
||||
|> aggregateWindow(every: 1h, fn: mean, createEmpty: false)
|
||||
|> group(columns: ["device_wwn"])
|
||||
|> toInt()
|
||||
|
||||
monthData = from(bucket: "metrics_weekly")
|
||||
|> range(start: -1mo, stop: -1w)
|
||||
|> filter(fn: (r) => r["_measurement"] == "temp" )
|
||||
|> aggregateWindow(every: 1h, fn: mean, createEmpty: false)
|
||||
|> group(columns: ["device_wwn"])
|
||||
|> toInt()
|
||||
|
||||
yearData = from(bucket: "metrics_monthly")
|
||||
|> range(start: -1y, stop: -1mo)
|
||||
|> filter(fn: (r) => r["_measurement"] == "temp" )
|
||||
|> aggregateWindow(every: 1h, fn: mean, createEmpty: false)
|
||||
|> group(columns: ["device_wwn"])
|
||||
|> toInt()
|
||||
|
||||
foreverData = from(bucket: "metrics_yearly")
|
||||
|> range(start: -10y, stop: -1y)
|
||||
|> filter(fn: (r) => r["_measurement"] == "temp" )
|
||||
|> aggregateWindow(every: 1h, fn: mean, createEmpty: false)
|
||||
|> group(columns: ["device_wwn"])
|
||||
|> toInt()
|
||||
|
||||
union(tables: [weekData, monthData, yearData, foreverData])
|
||||
|> group(columns: ["device_wwn"])
|
||||
|> sort(columns: ["_time"], desc: false)
|
||||
|> schema.fieldsAsCols()`, influxDbScript)
|
||||
}
|
||||
@@ -6,7 +6,11 @@ import (
|
||||
"errors"
|
||||
"fmt"
|
||||
"github.com/analogj/go-util/utils"
|
||||
"github.com/analogj/scrutiny/webapp/backend/pkg"
|
||||
"github.com/analogj/scrutiny/webapp/backend/pkg/config"
|
||||
"github.com/analogj/scrutiny/webapp/backend/pkg/models"
|
||||
"github.com/analogj/scrutiny/webapp/backend/pkg/models/measurements"
|
||||
"github.com/analogj/scrutiny/webapp/backend/pkg/thresholds"
|
||||
"github.com/containrrr/shoutrrr"
|
||||
shoutrrrTypes "github.com/containrrr/shoutrrr/pkg/types"
|
||||
"github.com/sirupsen/logrus"
|
||||
@@ -14,28 +18,130 @@ import (
|
||||
"net/http"
|
||||
"net/url"
|
||||
"os"
|
||||
"strconv"
|
||||
"strings"
|
||||
"time"
|
||||
)
|
||||
|
||||
const NotifyFailureTypeEmailTest = "EmailTest"
|
||||
const NotifyFailureTypeSmartPrefail = "SmartPreFailure"
|
||||
const NotifyFailureTypeBothFailure = "SmartFailure" //SmartFailure always takes precedence when Scrutiny & Smart failed.
|
||||
const NotifyFailureTypeSmartFailure = "SmartFailure"
|
||||
const NotifyFailureTypeSmartErrorLog = "SmartErrorLog"
|
||||
const NotifyFailureTypeSmartSelfTest = "SmartSelfTestLog"
|
||||
const NotifyFailureTypeScrutinyFailure = "ScrutinyFailure"
|
||||
|
||||
// ShouldNotify check if the error Message should be filtered (level mismatch or filtered_attributes)
|
||||
func ShouldNotify(device models.Device, smartAttrs measurements.Smart, notifyLevel string, notifyFilterAttributes string) bool {
|
||||
// 1. check if the device is healthy
|
||||
if device.DeviceStatus == pkg.DeviceStatusPassed {
|
||||
return false
|
||||
}
|
||||
|
||||
// setup constants for comparison
|
||||
var requiredDeviceStatus pkg.DeviceStatus
|
||||
var requiredAttrStatus pkg.AttributeStatus
|
||||
if notifyLevel == pkg.NotifyLevelFail {
|
||||
// either scrutiny or smart failures should trigger an email
|
||||
requiredDeviceStatus = pkg.DeviceStatusSet(pkg.DeviceStatusFailedSmart, pkg.DeviceStatusFailedScrutiny)
|
||||
requiredAttrStatus = pkg.AttributeStatusSet(pkg.AttributeStatusFailedSmart, pkg.AttributeStatusFailedScrutiny)
|
||||
} else if notifyLevel == pkg.NotifyLevelFailSmart {
|
||||
//only smart failures
|
||||
requiredDeviceStatus = pkg.DeviceStatusFailedSmart
|
||||
requiredAttrStatus = pkg.AttributeStatusFailedSmart
|
||||
} else {
|
||||
requiredDeviceStatus = pkg.DeviceStatusFailedScrutiny
|
||||
requiredAttrStatus = pkg.AttributeStatusFailedScrutiny
|
||||
}
|
||||
|
||||
// 2. check if the attributes that are failing should be filtered (non-critical)
|
||||
// 3. for any unfiltered attribute, store the failure reason (Smart or Scrutiny)
|
||||
if notifyFilterAttributes == pkg.NotifyFilterAttributesCritical {
|
||||
hasFailingCriticalAttr := false
|
||||
var statusFailingCrtiticalAttr pkg.AttributeStatus
|
||||
|
||||
for attrId, attrData := range smartAttrs.Attributes {
|
||||
//find failing attribute
|
||||
if attrData.GetStatus() == pkg.AttributeStatusPassed {
|
||||
continue //skip all passing attributes
|
||||
}
|
||||
|
||||
// merge the status's of all critical attributes
|
||||
statusFailingCrtiticalAttr = pkg.AttributeStatusSet(statusFailingCrtiticalAttr, attrData.GetStatus())
|
||||
|
||||
//found a failing attribute, see if its critical
|
||||
if device.IsScsi() && thresholds.ScsiMetadata[attrId].Critical {
|
||||
hasFailingCriticalAttr = true
|
||||
} else if device.IsNvme() && thresholds.NmveMetadata[attrId].Critical {
|
||||
hasFailingCriticalAttr = true
|
||||
} else {
|
||||
//this is ATA
|
||||
attrIdInt, err := strconv.Atoi(attrId)
|
||||
if err != nil {
|
||||
continue
|
||||
}
|
||||
if thresholds.AtaMetadata[attrIdInt].Critical {
|
||||
hasFailingCriticalAttr = true
|
||||
}
|
||||
}
|
||||
|
||||
}
|
||||
|
||||
if !hasFailingCriticalAttr {
|
||||
//no critical attributes are failing, and notifyFilterAttributes == "critical"
|
||||
return false
|
||||
} else {
|
||||
// check if any of the critical attributes have a status that we're looking for
|
||||
return pkg.AttributeStatusHas(statusFailingCrtiticalAttr, requiredAttrStatus)
|
||||
}
|
||||
|
||||
} else {
|
||||
// 2. SKIP - we are processing every attribute.
|
||||
// 3. check if the device failure level matches the wanted failure level.
|
||||
return pkg.DeviceStatusHas(device.DeviceStatus, requiredDeviceStatus)
|
||||
}
|
||||
}
|
||||
|
||||
// TODO: include host and/or user label for device.
|
||||
type Payload struct {
|
||||
Date string `json:"date"` //populated by Send function.
|
||||
FailureType string `json:"failure_type"` //EmailTest, SmartFail, ScrutinyFail
|
||||
DeviceType string `json:"device_type"` //ATA/SCSI/NVMe
|
||||
DeviceName string `json:"device_name"` //dev/sda
|
||||
DeviceSerial string `json:"device_serial"` //WDDJ324KSO
|
||||
Test bool `json:"test"` // false
|
||||
|
||||
//should not be populated
|
||||
Subject string `json:"subject"`
|
||||
Message string `json:"message"`
|
||||
//private, populated during init (marked as Public for JSON serialization)
|
||||
Date string `json:"date"` //populated by Send function.
|
||||
FailureType string `json:"failure_type"` //EmailTest, BothFail, SmartFail, ScrutinyFail
|
||||
Subject string `json:"subject"`
|
||||
Message string `json:"message"`
|
||||
}
|
||||
|
||||
func NewPayload(device models.Device, test bool) Payload {
|
||||
payload := Payload{
|
||||
DeviceType: device.DeviceType,
|
||||
DeviceName: device.DeviceName,
|
||||
DeviceSerial: device.SerialNumber,
|
||||
Test: test,
|
||||
}
|
||||
|
||||
//validate that the Payload is populated
|
||||
sendDate := time.Now()
|
||||
payload.Date = sendDate.Format(time.RFC3339)
|
||||
payload.FailureType = payload.GenerateFailureType(device.DeviceStatus)
|
||||
payload.Subject = payload.GenerateSubject()
|
||||
payload.Message = payload.GenerateMessage()
|
||||
return payload
|
||||
}
|
||||
|
||||
func (p *Payload) GenerateFailureType(deviceStatus pkg.DeviceStatus) string {
|
||||
//generate a failure type, given Test and DeviceStatus
|
||||
if p.Test {
|
||||
return NotifyFailureTypeEmailTest // must be an email test if "Test" is true
|
||||
}
|
||||
if pkg.DeviceStatusHas(deviceStatus, pkg.DeviceStatusFailedSmart) && pkg.DeviceStatusHas(deviceStatus, pkg.DeviceStatusFailedScrutiny) {
|
||||
return NotifyFailureTypeBothFailure //both failed
|
||||
} else if pkg.DeviceStatusHas(deviceStatus, pkg.DeviceStatusFailedSmart) {
|
||||
return NotifyFailureTypeSmartFailure //only SMART failed
|
||||
} else {
|
||||
return NotifyFailureTypeScrutinyFailure //only Scrutiny failed
|
||||
}
|
||||
}
|
||||
|
||||
func (p *Payload) GenerateSubject() string {
|
||||
@@ -61,6 +167,14 @@ Date: %s`, p.DeviceName, p.FailureType, p.DeviceName, p.DeviceSerial, p.DeviceTy
|
||||
return message
|
||||
}
|
||||
|
||||
func New(logger logrus.FieldLogger, appconfig config.Interface, device models.Device, test bool) Notify {
|
||||
return Notify{
|
||||
Logger: logger,
|
||||
Config: appconfig,
|
||||
Payload: NewPayload(device, test),
|
||||
}
|
||||
}
|
||||
|
||||
type Notify struct {
|
||||
Logger logrus.FieldLogger
|
||||
Config config.Interface
|
||||
@@ -68,11 +182,6 @@ type Notify struct {
|
||||
}
|
||||
|
||||
func (n *Notify) Send() error {
|
||||
//validate that the Payload is populated
|
||||
sendDate := time.Now()
|
||||
n.Payload.Date = sendDate.Format(time.RFC3339)
|
||||
n.Payload.Subject = n.Payload.GenerateSubject()
|
||||
n.Payload.Message = n.Payload.GenerateMessage()
|
||||
|
||||
//retrieve list of notification endpoints from config file
|
||||
configUrls := n.Config.GetStringSlice("notify.urls")
|
||||
|
||||
@@ -0,0 +1,161 @@
|
||||
package notify
|
||||
|
||||
import (
|
||||
"github.com/analogj/scrutiny/webapp/backend/pkg"
|
||||
"github.com/analogj/scrutiny/webapp/backend/pkg/models"
|
||||
"github.com/analogj/scrutiny/webapp/backend/pkg/models/measurements"
|
||||
"github.com/stretchr/testify/require"
|
||||
"testing"
|
||||
)
|
||||
|
||||
func TestShouldNotify_MustSkipPassingDevices(t *testing.T) {
|
||||
t.Parallel()
|
||||
//setup
|
||||
device := models.Device{
|
||||
DeviceStatus: pkg.DeviceStatusPassed,
|
||||
}
|
||||
smartAttrs := measurements.Smart{}
|
||||
notifyLevel := pkg.NotifyLevelFail
|
||||
notifyFilterAttributes := pkg.NotifyFilterAttributesAll
|
||||
|
||||
//assert
|
||||
require.False(t, ShouldNotify(device, smartAttrs, notifyLevel, notifyFilterAttributes))
|
||||
}
|
||||
|
||||
func TestShouldNotify_NotifyLevelFail_FailingSmartDevice(t *testing.T) {
|
||||
t.Parallel()
|
||||
//setup
|
||||
device := models.Device{
|
||||
DeviceStatus: pkg.DeviceStatusFailedSmart,
|
||||
}
|
||||
smartAttrs := measurements.Smart{}
|
||||
notifyLevel := pkg.NotifyLevelFail
|
||||
notifyFilterAttributes := pkg.NotifyFilterAttributesAll
|
||||
|
||||
//assert
|
||||
require.True(t, ShouldNotify(device, smartAttrs, notifyLevel, notifyFilterAttributes))
|
||||
}
|
||||
|
||||
func TestShouldNotify_NotifyLevelFailSmart_FailingSmartDevice(t *testing.T) {
|
||||
t.Parallel()
|
||||
//setup
|
||||
device := models.Device{
|
||||
DeviceStatus: pkg.DeviceStatusFailedSmart,
|
||||
}
|
||||
smartAttrs := measurements.Smart{}
|
||||
notifyLevel := pkg.NotifyLevelFailSmart
|
||||
notifyFilterAttributes := pkg.NotifyFilterAttributesAll
|
||||
|
||||
//assert
|
||||
require.True(t, ShouldNotify(device, smartAttrs, notifyLevel, notifyFilterAttributes))
|
||||
}
|
||||
|
||||
func TestShouldNotify_NotifyLevelFailScrutiny_FailingSmartDevice(t *testing.T) {
|
||||
t.Parallel()
|
||||
//setup
|
||||
device := models.Device{
|
||||
DeviceStatus: pkg.DeviceStatusFailedSmart,
|
||||
}
|
||||
smartAttrs := measurements.Smart{}
|
||||
notifyLevel := pkg.NotifyLevelFailScrutiny
|
||||
notifyFilterAttributes := pkg.NotifyFilterAttributesAll
|
||||
|
||||
//assert
|
||||
require.False(t, ShouldNotify(device, smartAttrs, notifyLevel, notifyFilterAttributes))
|
||||
}
|
||||
|
||||
func TestShouldNotify_NotifyFilterAttributesCritical_WithCriticalAttrs(t *testing.T) {
|
||||
t.Parallel()
|
||||
//setup
|
||||
device := models.Device{
|
||||
DeviceStatus: pkg.DeviceStatusFailedSmart,
|
||||
}
|
||||
smartAttrs := measurements.Smart{Attributes: map[string]measurements.SmartAttribute{
|
||||
"5": &measurements.SmartAtaAttribute{
|
||||
Status: pkg.AttributeStatusFailedSmart,
|
||||
},
|
||||
}}
|
||||
notifyLevel := pkg.NotifyLevelFail
|
||||
notifyFilterAttributes := pkg.NotifyFilterAttributesCritical
|
||||
|
||||
//assert
|
||||
require.True(t, ShouldNotify(device, smartAttrs, notifyLevel, notifyFilterAttributes))
|
||||
}
|
||||
|
||||
func TestShouldNotify_NotifyFilterAttributesCritical_WithMultipleCriticalAttrs(t *testing.T) {
|
||||
t.Parallel()
|
||||
//setup
|
||||
device := models.Device{
|
||||
DeviceStatus: pkg.DeviceStatusFailedSmart,
|
||||
}
|
||||
smartAttrs := measurements.Smart{Attributes: map[string]measurements.SmartAttribute{
|
||||
"5": &measurements.SmartAtaAttribute{
|
||||
Status: pkg.AttributeStatusPassed,
|
||||
},
|
||||
"10": &measurements.SmartAtaAttribute{
|
||||
Status: pkg.AttributeStatusFailedScrutiny,
|
||||
},
|
||||
}}
|
||||
notifyLevel := pkg.NotifyLevelFail
|
||||
notifyFilterAttributes := pkg.NotifyFilterAttributesCritical
|
||||
|
||||
//assert
|
||||
require.True(t, ShouldNotify(device, smartAttrs, notifyLevel, notifyFilterAttributes))
|
||||
}
|
||||
|
||||
func TestShouldNotify_NotifyFilterAttributesCritical_WithNoCriticalAttrs(t *testing.T) {
|
||||
t.Parallel()
|
||||
//setup
|
||||
device := models.Device{
|
||||
DeviceStatus: pkg.DeviceStatusFailedSmart,
|
||||
}
|
||||
smartAttrs := measurements.Smart{Attributes: map[string]measurements.SmartAttribute{
|
||||
"1": &measurements.SmartAtaAttribute{
|
||||
Status: pkg.AttributeStatusFailedSmart,
|
||||
},
|
||||
}}
|
||||
notifyLevel := pkg.NotifyLevelFail
|
||||
notifyFilterAttributes := pkg.NotifyFilterAttributesCritical
|
||||
|
||||
//assert
|
||||
require.False(t, ShouldNotify(device, smartAttrs, notifyLevel, notifyFilterAttributes))
|
||||
}
|
||||
|
||||
func TestShouldNotify_NotifyFilterAttributesCritical_WithNoFailingCriticalAttrs(t *testing.T) {
|
||||
t.Parallel()
|
||||
//setup
|
||||
device := models.Device{
|
||||
DeviceStatus: pkg.DeviceStatusFailedSmart,
|
||||
}
|
||||
smartAttrs := measurements.Smart{Attributes: map[string]measurements.SmartAttribute{
|
||||
"5": &measurements.SmartAtaAttribute{
|
||||
Status: pkg.AttributeStatusPassed,
|
||||
},
|
||||
}}
|
||||
notifyLevel := pkg.NotifyLevelFail
|
||||
notifyFilterAttributes := pkg.NotifyFilterAttributesCritical
|
||||
|
||||
//assert
|
||||
require.False(t, ShouldNotify(device, smartAttrs, notifyLevel, notifyFilterAttributes))
|
||||
}
|
||||
|
||||
func TestShouldNotify_NotifyFilterAttributesCritical_NotifyLevelFailSmart_WithCriticalAttrsFailingScrutiny(t *testing.T) {
|
||||
t.Parallel()
|
||||
//setup
|
||||
device := models.Device{
|
||||
DeviceStatus: pkg.DeviceStatusFailedSmart,
|
||||
}
|
||||
smartAttrs := measurements.Smart{Attributes: map[string]measurements.SmartAttribute{
|
||||
"5": &measurements.SmartAtaAttribute{
|
||||
Status: pkg.AttributeStatusPassed,
|
||||
},
|
||||
"10": &measurements.SmartAtaAttribute{
|
||||
Status: pkg.AttributeStatusFailedScrutiny,
|
||||
},
|
||||
}}
|
||||
notifyLevel := pkg.NotifyLevelFailSmart
|
||||
notifyFilterAttributes := pkg.NotifyFilterAttributesCritical
|
||||
|
||||
//assert
|
||||
require.False(t, ShouldNotify(device, smartAttrs, notifyLevel, notifyFilterAttributes))
|
||||
}
|
||||
@@ -36,56 +36,6 @@ var AtaMetadata = map[int]AtaAttributeMetadata{
|
||||
Ideal: ObservedThresholdIdealLow,
|
||||
Critical: false,
|
||||
Description: "(Vendor specific raw value.) Stores data related to the rate of hardware read errors that occurred when reading data from a disk surface. The raw value has different structure for different vendors and is often not meaningful as a decimal number.",
|
||||
ObservedThresholds: []ObservedThreshold{
|
||||
{
|
||||
Low: 80,
|
||||
High: 95,
|
||||
AnnualFailureRate: 0.8879749768303985,
|
||||
ErrorInterval: []float64{0.682344353388663, 1.136105732920724},
|
||||
},
|
||||
{
|
||||
Low: 95,
|
||||
High: 110,
|
||||
AnnualFailureRate: 0.034155719633986996,
|
||||
ErrorInterval: []float64{0.030188482024981093, 0.038499386872354435},
|
||||
},
|
||||
{
|
||||
Low: 110,
|
||||
High: 125,
|
||||
AnnualFailureRate: 0.06390002135229157,
|
||||
ErrorInterval: []float64{0.05852004676110847, 0.06964160930553712},
|
||||
},
|
||||
{
|
||||
Low: 125,
|
||||
High: 140,
|
||||
AnnualFailureRate: 0,
|
||||
ErrorInterval: []float64{0, 0},
|
||||
},
|
||||
{
|
||||
Low: 140,
|
||||
High: 155,
|
||||
AnnualFailureRate: 0,
|
||||
ErrorInterval: []float64{0, 0},
|
||||
},
|
||||
{
|
||||
Low: 155,
|
||||
High: 170,
|
||||
AnnualFailureRate: 0,
|
||||
ErrorInterval: []float64{0, 0},
|
||||
},
|
||||
{
|
||||
Low: 170,
|
||||
High: 185,
|
||||
AnnualFailureRate: 0,
|
||||
ErrorInterval: []float64{0, 0},
|
||||
},
|
||||
{
|
||||
Low: 185,
|
||||
High: 200,
|
||||
AnnualFailureRate: 0.044823775021490854,
|
||||
ErrorInterval: []float64{0.032022762038723306, 0.06103725943096589},
|
||||
},
|
||||
},
|
||||
},
|
||||
2: {
|
||||
ID: 2,
|
||||
@@ -290,56 +240,6 @@ var AtaMetadata = map[int]AtaAttributeMetadata{
|
||||
Ideal: "",
|
||||
Critical: false,
|
||||
Description: "(Vendor specific raw value.) Rate of seek errors of the magnetic heads. If there is a partial failure in the mechanical positioning system, then seek errors will arise. Such a failure may be due to numerous factors, such as damage to a servo, or thermal widening of the hard disk. The raw value has different structure for different vendors and is often not meaningful as a decimal number.",
|
||||
ObservedThresholds: []ObservedThreshold{
|
||||
{
|
||||
Low: 58,
|
||||
High: 76,
|
||||
AnnualFailureRate: 0.2040131025936549,
|
||||
ErrorInterval: []float64{0.17032852883286412, 0.2424096283327138},
|
||||
},
|
||||
{
|
||||
Low: 76,
|
||||
High: 94,
|
||||
AnnualFailureRate: 0.08725919610118257,
|
||||
ErrorInterval: []float64{0.08077138510999876, 0.09412943212007528},
|
||||
},
|
||||
{
|
||||
Low: 94,
|
||||
High: 112,
|
||||
AnnualFailureRate: 0.01087335627722523,
|
||||
ErrorInterval: []float64{0.008732197944943352, 0.013380600544561905},
|
||||
},
|
||||
{
|
||||
Low: 112,
|
||||
High: 130,
|
||||
AnnualFailureRate: 0,
|
||||
ErrorInterval: []float64{0, 0},
|
||||
},
|
||||
{
|
||||
Low: 130,
|
||||
High: 148,
|
||||
AnnualFailureRate: 0,
|
||||
ErrorInterval: []float64{0, 0},
|
||||
},
|
||||
{
|
||||
Low: 148,
|
||||
High: 166,
|
||||
AnnualFailureRate: 0,
|
||||
ErrorInterval: []float64{0, 0},
|
||||
},
|
||||
{
|
||||
Low: 166,
|
||||
High: 184,
|
||||
AnnualFailureRate: 0,
|
||||
ErrorInterval: []float64{0, 0},
|
||||
},
|
||||
{
|
||||
Low: 184,
|
||||
High: 202,
|
||||
AnnualFailureRate: 0.05316285755900475,
|
||||
ErrorInterval: []float64{0.03370069132942804, 0.07977038905848267},
|
||||
},
|
||||
},
|
||||
},
|
||||
8: {
|
||||
ID: 8,
|
||||
|
||||
@@ -19,7 +19,7 @@ var ScsiMetadata = map[string]ScsiAttributeMetadata{
|
||||
DisplayType: "",
|
||||
Ideal: "low",
|
||||
Critical: true,
|
||||
Description: "",
|
||||
Description: "The grown defect count shows the amount of swapped (defective) blocks since the drive was shipped by it's vendor. Each additional defective block increases the count by one.",
|
||||
},
|
||||
"read_errors_corrected_by_eccfast": {
|
||||
ID: "read_errors_corrected_by_eccfast",
|
||||
@@ -27,7 +27,7 @@ var ScsiMetadata = map[string]ScsiAttributeMetadata{
|
||||
DisplayType: "",
|
||||
Ideal: "",
|
||||
Critical: false,
|
||||
Description: "",
|
||||
Description: "An error correction was applied to get perfect data (a.k.a. ECC on-the-fly). \"Without substantial delay\" means the correction did not postpone reading of later sectors (e.g. a revolution was not lost). The counter is incremented once for each logical block that requires correction. Two different blocks corrected during the same command are counted as two events.",
|
||||
},
|
||||
"read_errors_corrected_by_eccdelayed": {
|
||||
ID: "read_errors_corrected_by_eccdelayed",
|
||||
@@ -35,7 +35,7 @@ var ScsiMetadata = map[string]ScsiAttributeMetadata{
|
||||
DisplayType: "",
|
||||
Ideal: "",
|
||||
Critical: false,
|
||||
Description: "",
|
||||
Description: "An error code or algorithm (e.g. ECC, checksum) is applied in order to get perfect data with substantial delay. \"With possible delay\" means the correction took longer than a sector time so that reading/writing of subsequent sectors was delayed (e.g. a lost revolution). The counter is incremented once for each logical block that requires correction. A block with a double error that is correctable counts as one event and two different blocks corrected during the same command count as two events. ",
|
||||
},
|
||||
"read_errors_corrected_by_rereads_rewrites": {
|
||||
ID: "read_errors_corrected_by_rereads_rewrites",
|
||||
@@ -43,7 +43,7 @@ var ScsiMetadata = map[string]ScsiAttributeMetadata{
|
||||
DisplayType: "",
|
||||
Ideal: "low",
|
||||
Critical: true,
|
||||
Description: "",
|
||||
Description: "This parameter code specifies the counter counting the number of errors that are corrected by applying retries. This counts errors recovered, not the number of retries. If five retries were required to recover one block of data, the counter increments by one, not five. The counter is incremented once for each logical block that is recovered using retries. If an error is not recoverable while applying retries and is recovered by ECC, it isn't counted by this counter; it will be counted by the counter specified by parameter code 01h - Errors Corrected With Possible Delays. ",
|
||||
},
|
||||
"read_total_errors_corrected": {
|
||||
ID: "read_total_errors_corrected",
|
||||
@@ -51,7 +51,7 @@ var ScsiMetadata = map[string]ScsiAttributeMetadata{
|
||||
DisplayType: "",
|
||||
Ideal: "",
|
||||
Critical: false,
|
||||
Description: "",
|
||||
Description: "This counter counts the total of parameter code errors 00h, 01h and 02h (i.e. error corrected by ECC: fast and delayed plus errors corrected by rereads and rewrites). There is no \"double counting\" of data errors among these three counters. The sum of all correctable errors can be reached by adding parameter code 01h and 02h errors, not by using this total.",
|
||||
},
|
||||
"read_correction_algorithm_invocations": {
|
||||
ID: "read_correction_algorithm_invocations",
|
||||
@@ -59,7 +59,7 @@ var ScsiMetadata = map[string]ScsiAttributeMetadata{
|
||||
DisplayType: "",
|
||||
Ideal: "",
|
||||
Critical: false,
|
||||
Description: "",
|
||||
Description: "This parameter code specifies the counter that counts the total number of retries, or \"times the retry algorithm is invoked\". If after five attempts a counter 02h type error is recovered, then five is added to this counter. If three retries are required to get stable ECC syndrome before a counter 01h type error is corrected, then those three retries are also counted here. The number of retries applied to unsuccessfully recover an error (counter 06h type error) are also counted by this counter. ",
|
||||
},
|
||||
"read_total_uncorrected_errors": {
|
||||
ID: "read_total_uncorrected_errors",
|
||||
@@ -67,7 +67,7 @@ var ScsiMetadata = map[string]ScsiAttributeMetadata{
|
||||
DisplayType: "",
|
||||
Ideal: "low",
|
||||
Critical: true,
|
||||
Description: "",
|
||||
Description: "This parameter code specifies the counter that contains the total number of blocks for which an uncorrected data error has occurred. ",
|
||||
},
|
||||
"write_errors_corrected_by_eccfast": {
|
||||
ID: "write_errors_corrected_by_eccfast",
|
||||
@@ -75,7 +75,7 @@ var ScsiMetadata = map[string]ScsiAttributeMetadata{
|
||||
DisplayType: "",
|
||||
Ideal: "",
|
||||
Critical: false,
|
||||
Description: "",
|
||||
Description: "An error correction was applied to get perfect data (a.k.a. ECC on-the-fly). \"Without substantial delay\" means the correction did not postpone reading of later sectors (e.g. a revolution was not lost). The counter is incremented once for each logical block that requires correction. Two different blocks corrected during the same command are counted as two events. ",
|
||||
},
|
||||
"write_errors_corrected_by_eccdelayed": {
|
||||
ID: "write_errors_corrected_by_eccdelayed",
|
||||
@@ -83,7 +83,7 @@ var ScsiMetadata = map[string]ScsiAttributeMetadata{
|
||||
DisplayType: "",
|
||||
Ideal: "",
|
||||
Critical: false,
|
||||
Description: "",
|
||||
Description: "An error code or algorithm (e.g. ECC, checksum) is applied in order to get perfect data with substantial delay. \"With possible delay\" means the correction took longer than a sector time so that reading/writing of subsequent sectors was delayed (e.g. a lost revolution). The counter is incremented once for each logical block that requires correction. A block with a double error that is correctable counts as one event and two different blocks corrected during the same command count as two events. ",
|
||||
},
|
||||
"write_errors_corrected_by_rereads_rewrites": {
|
||||
ID: "write_errors_corrected_by_rereads_rewrites",
|
||||
@@ -91,7 +91,7 @@ var ScsiMetadata = map[string]ScsiAttributeMetadata{
|
||||
DisplayType: "",
|
||||
Ideal: "low",
|
||||
Critical: true,
|
||||
Description: "",
|
||||
Description: "This parameter code specifies the counter counting the number of errors that are corrected by applying retries. This counts errors recovered, not the number of retries. If five retries were required to recover one block of data, the counter increments by one, not five. The counter is incremented once for each logical block that is recovered using retries. If an error is not recoverable while applying retries and is recovered by ECC, it isn't counted by this counter; it will be counted by the counter specified by parameter code 01h - Errors Corrected With Possible Delays.",
|
||||
},
|
||||
"write_total_errors_corrected": {
|
||||
ID: "write_total_errors_corrected",
|
||||
@@ -99,7 +99,7 @@ var ScsiMetadata = map[string]ScsiAttributeMetadata{
|
||||
DisplayType: "",
|
||||
Ideal: "",
|
||||
Critical: false,
|
||||
Description: "",
|
||||
Description: "This counter counts the total of parameter code errors 00h, 01h and 02h (i.e. error corrected by ECC: fast and delayed plus errors corrected by rereads and rewrites). There is no \"double counting\" of data errors among these three counters. The sum of all correctable errors can be reached by adding parameter code 01h and 02h errors, not by using this total.",
|
||||
},
|
||||
"write_correction_algorithm_invocations": {
|
||||
ID: "write_correction_algorithm_invocations",
|
||||
@@ -107,7 +107,7 @@ var ScsiMetadata = map[string]ScsiAttributeMetadata{
|
||||
DisplayType: "",
|
||||
Ideal: "",
|
||||
Critical: false,
|
||||
Description: "",
|
||||
Description: "This parameter code specifies the counter that counts the total number of retries, or \"times the retry algorithm is invoked\". If after five attempts a counter 02h type error is recovered, then five is added to this counter. If three retries are required to get stable ECC syndrome before a counter 01h type error is corrected, then those three retries are also counted here. The number of retries applied to unsuccessfully recover an error (counter 06h type error) are also counted by this counter. ",
|
||||
},
|
||||
"write_total_uncorrected_errors": {
|
||||
ID: "write_total_uncorrected_errors",
|
||||
@@ -115,6 +115,6 @@ var ScsiMetadata = map[string]ScsiAttributeMetadata{
|
||||
DisplayType: "",
|
||||
Ideal: "low",
|
||||
Critical: true,
|
||||
Description: "",
|
||||
Description: " This parameter code specifies the counter that contains the total number of blocks for which an uncorrected data error has occurred.",
|
||||
},
|
||||
}
|
||||
|
||||
@@ -2,4 +2,4 @@ package version
|
||||
|
||||
// VERSION is the app-global version string, which will be replaced with a
|
||||
// new value during packaging
|
||||
const VERSION = "0.4.9"
|
||||
const VERSION = "0.4.16"
|
||||
|
||||
@@ -18,6 +18,7 @@ func GetDevicesSummary(c *gin.Context) {
|
||||
return
|
||||
}
|
||||
|
||||
//this must match DeviceSummaryWrapper (webapp/backend/pkg/models/device_summary.go)
|
||||
c.JSON(http.StatusOK, gin.H{
|
||||
"success": true,
|
||||
"data": map[string]interface{}{
|
||||
|
||||
@@ -4,6 +4,7 @@ import (
|
||||
"github.com/analogj/scrutiny/webapp/backend/pkg/database"
|
||||
"github.com/analogj/scrutiny/webapp/backend/pkg/models"
|
||||
"github.com/gin-gonic/gin"
|
||||
"github.com/samber/lo"
|
||||
"github.com/sirupsen/logrus"
|
||||
"net/http"
|
||||
)
|
||||
@@ -22,8 +23,13 @@ func RegisterDevices(c *gin.Context) {
|
||||
return
|
||||
}
|
||||
|
||||
//filter any device with empty wwn (they are invalid)
|
||||
detectedStorageDevices := lo.Filter[models.Device](collectorDeviceWrapper.Data, func(dev models.Device, _ int) bool {
|
||||
return len(dev.WWN) > 0
|
||||
})
|
||||
|
||||
errs := []error{}
|
||||
for _, dev := range collectorDeviceWrapper.Data {
|
||||
for _, dev := range detectedStorageDevices {
|
||||
//insert devices into DB (and update specified columns if device is already registered)
|
||||
// update device fields that may change: (DeviceType, HostID)
|
||||
if err := deviceRepo.RegisterDevice(c, dev); err != nil {
|
||||
@@ -40,7 +46,7 @@ func RegisterDevices(c *gin.Context) {
|
||||
} else {
|
||||
c.JSON(http.StatusOK, models.DeviceWrapper{
|
||||
Success: true,
|
||||
Data: collectorDeviceWrapper.Data,
|
||||
Data: detectedStorageDevices,
|
||||
})
|
||||
return
|
||||
}
|
||||
|
||||
@@ -15,17 +15,16 @@ func SendTestNotification(c *gin.Context) {
|
||||
appConfig := c.MustGet("CONFIG").(config.Interface)
|
||||
logger := c.MustGet("LOGGER").(logrus.FieldLogger)
|
||||
|
||||
testNotify := notify.Notify{
|
||||
Logger: logger,
|
||||
Config: appConfig,
|
||||
Payload: notify.Payload{
|
||||
FailureType: "EmailTest",
|
||||
DeviceSerial: "FAKEWDDJ324KSO",
|
||||
testNotify := notify.New(
|
||||
logger,
|
||||
appConfig,
|
||||
models.Device{
|
||||
SerialNumber: "FAKEWDDJ324KSO",
|
||||
DeviceType: pkg.DeviceProtocolAta,
|
||||
DeviceName: "/dev/sda",
|
||||
Test: true,
|
||||
},
|
||||
}
|
||||
true,
|
||||
)
|
||||
err := testNotify.Send()
|
||||
if err != nil {
|
||||
logger.Errorln("An error occurred while sending test notification", err)
|
||||
|
||||
@@ -20,6 +20,10 @@ func UploadDeviceMetrics(c *gin.Context) {
|
||||
|
||||
//appConfig := c.MustGet("CONFIG").(config.Interface)
|
||||
|
||||
if c.Param("wwn") == "" {
|
||||
c.JSON(http.StatusBadRequest, gin.H{"success": false})
|
||||
}
|
||||
|
||||
var collectorSmartData collector.SmartInfo
|
||||
err := c.BindJSON(&collectorSmartData)
|
||||
if err != nil {
|
||||
@@ -63,20 +67,16 @@ func UploadDeviceMetrics(c *gin.Context) {
|
||||
}
|
||||
|
||||
//check for error
|
||||
if updatedDevice.DeviceStatus != pkg.DeviceStatusPassed {
|
||||
if notify.ShouldNotify(updatedDevice, smartData, appConfig.GetString("notify.level"), appConfig.GetString("notify.filter_attributes")) {
|
||||
//send notifications
|
||||
testNotify := notify.Notify{
|
||||
Config: appConfig,
|
||||
Payload: notify.Payload{
|
||||
FailureType: notify.NotifyFailureTypeSmartFailure,
|
||||
DeviceName: updatedDevice.DeviceName,
|
||||
DeviceType: updatedDevice.DeviceProtocol,
|
||||
DeviceSerial: updatedDevice.SerialNumber,
|
||||
Test: false,
|
||||
},
|
||||
Logger: logger,
|
||||
}
|
||||
_ = testNotify.Send() //we ignore error message when sending notifications.
|
||||
|
||||
liveNotify := notify.New(
|
||||
logger,
|
||||
appConfig,
|
||||
updatedDevice,
|
||||
false,
|
||||
)
|
||||
_ = liveNotify.Send() //we ignore error message when sending notifications.
|
||||
}
|
||||
|
||||
c.JSON(http.StatusOK, gin.H{"success": true})
|
||||
|
||||
@@ -186,6 +186,8 @@ func (suite *ServerTestSuite) TestUploadDeviceMetricsRoute() {
|
||||
} else {
|
||||
fakeConfig.EXPECT().GetString("web.influxdb.host").Return("localhost").AnyTimes()
|
||||
}
|
||||
fakeConfig.EXPECT().GetString("notify.level").AnyTimes().Return(pkg.NotifyLevelFail)
|
||||
fakeConfig.EXPECT().GetString("notify.filter_attributes").AnyTimes().Return(pkg.NotifyFilterAttributesAll)
|
||||
|
||||
ae := web.AppEngine{
|
||||
Config: fakeConfig,
|
||||
@@ -219,6 +221,8 @@ func (suite *ServerTestSuite) TestPopulateMultiple() {
|
||||
fakeConfig := mock_config.NewMockInterface(mockCtrl)
|
||||
//fakeConfig.EXPECT().GetString("web.database.location").AnyTimes().Return("testdata/scrutiny_test.db")
|
||||
fakeConfig.EXPECT().GetStringSlice("notify.urls").Return([]string{}).AnyTimes()
|
||||
fakeConfig.EXPECT().GetString("notify.level").AnyTimes().Return(pkg.NotifyLevelFail)
|
||||
fakeConfig.EXPECT().GetString("notify.filter_attributes").AnyTimes().Return(pkg.NotifyFilterAttributesAll)
|
||||
fakeConfig.EXPECT().GetString("web.database.location").AnyTimes().Return(path.Join(parentPath, "scrutiny_test.db"))
|
||||
fakeConfig.EXPECT().GetString("web.src.frontend.path").AnyTimes().Return(parentPath)
|
||||
fakeConfig.EXPECT().GetString("web.listen.basepath").Return(suite.Basepath).AnyTimes()
|
||||
@@ -326,6 +330,9 @@ func (suite *ServerTestSuite) TestSendTestNotificationRoute_WebhookFailure() {
|
||||
fakeConfig.EXPECT().GetString("web.influxdb.bucket").Return("metrics").AnyTimes()
|
||||
fakeConfig.EXPECT().GetBool("web.influxdb.retention_policy").Return(false).AnyTimes()
|
||||
fakeConfig.EXPECT().GetStringSlice("notify.urls").AnyTimes().Return([]string{"https://unroutable.domain.example.asdfghj"})
|
||||
fakeConfig.EXPECT().GetString("notify.level").AnyTimes().Return(pkg.NotifyLevelFail)
|
||||
fakeConfig.EXPECT().GetString("notify.filter_attributes").AnyTimes().Return(pkg.NotifyFilterAttributesAll)
|
||||
|
||||
if _, isGithubActions := os.LookupEnv("GITHUB_ACTIONS"); isGithubActions {
|
||||
// when running test suite in github actions, we run an influxdb service as a sidecar.
|
||||
fakeConfig.EXPECT().GetString("web.influxdb.host").Return("influxdb").AnyTimes()
|
||||
@@ -365,6 +372,9 @@ func (suite *ServerTestSuite) TestSendTestNotificationRoute_ScriptFailure() {
|
||||
fakeConfig.EXPECT().GetString("web.influxdb.bucket").Return("metrics").AnyTimes()
|
||||
fakeConfig.EXPECT().GetBool("web.influxdb.retention_policy").Return(false).AnyTimes()
|
||||
fakeConfig.EXPECT().GetStringSlice("notify.urls").AnyTimes().Return([]string{"script:///missing/path/on/disk"})
|
||||
fakeConfig.EXPECT().GetString("notify.level").AnyTimes().Return(pkg.NotifyLevelFail)
|
||||
fakeConfig.EXPECT().GetString("notify.filter_attributes").AnyTimes().Return(pkg.NotifyFilterAttributesAll)
|
||||
|
||||
if _, isGithubActions := os.LookupEnv("GITHUB_ACTIONS"); isGithubActions {
|
||||
// when running test suite in github actions, we run an influxdb service as a sidecar.
|
||||
fakeConfig.EXPECT().GetString("web.influxdb.host").Return("influxdb").AnyTimes()
|
||||
@@ -404,6 +414,9 @@ func (suite *ServerTestSuite) TestSendTestNotificationRoute_ScriptSuccess() {
|
||||
fakeConfig.EXPECT().GetString("web.influxdb.bucket").Return("metrics").AnyTimes()
|
||||
fakeConfig.EXPECT().GetBool("web.influxdb.retention_policy").Return(false).AnyTimes()
|
||||
fakeConfig.EXPECT().GetStringSlice("notify.urls").AnyTimes().Return([]string{"script:///usr/bin/env"})
|
||||
fakeConfig.EXPECT().GetString("notify.level").AnyTimes().Return(pkg.NotifyLevelFail)
|
||||
fakeConfig.EXPECT().GetString("notify.filter_attributes").AnyTimes().Return(pkg.NotifyFilterAttributesAll)
|
||||
|
||||
if _, isGithubActions := os.LookupEnv("GITHUB_ACTIONS"); isGithubActions {
|
||||
// when running test suite in github actions, we run an influxdb service as a sidecar.
|
||||
fakeConfig.EXPECT().GetString("web.influxdb.host").Return("influxdb").AnyTimes()
|
||||
@@ -443,6 +456,9 @@ func (suite *ServerTestSuite) TestSendTestNotificationRoute_ShoutrrrFailure() {
|
||||
fakeConfig.EXPECT().GetString("web.influxdb.bucket").Return("metrics").AnyTimes()
|
||||
fakeConfig.EXPECT().GetBool("web.influxdb.retention_policy").Return(false).AnyTimes()
|
||||
fakeConfig.EXPECT().GetStringSlice("notify.urls").AnyTimes().Return([]string{"discord://invalidtoken@channel"})
|
||||
fakeConfig.EXPECT().GetString("notify.level").AnyTimes().Return(pkg.NotifyLevelFail)
|
||||
fakeConfig.EXPECT().GetString("notify.filter_attributes").AnyTimes().Return(pkg.NotifyFilterAttributesAll)
|
||||
|
||||
if _, isGithubActions := os.LookupEnv("GITHUB_ACTIONS"); isGithubActions {
|
||||
// when running test suite in github actions, we run an influxdb service as a sidecar.
|
||||
fakeConfig.EXPECT().GetString("web.influxdb.host").Return("influxdb").AnyTimes()
|
||||
@@ -481,6 +497,8 @@ func (suite *ServerTestSuite) TestGetDevicesSummaryRoute_Nvme() {
|
||||
fakeConfig.EXPECT().GetString("web.influxdb.bucket").Return("metrics").AnyTimes()
|
||||
fakeConfig.EXPECT().GetBool("web.influxdb.retention_policy").Return(false).AnyTimes()
|
||||
fakeConfig.EXPECT().GetStringSlice("notify.urls").AnyTimes().Return([]string{})
|
||||
fakeConfig.EXPECT().GetString("notify.level").AnyTimes().Return(pkg.NotifyLevelFail)
|
||||
fakeConfig.EXPECT().GetString("notify.filter_attributes").AnyTimes().Return(pkg.NotifyFilterAttributesAll)
|
||||
if _, isGithubActions := os.LookupEnv("GITHUB_ACTIONS"); isGithubActions {
|
||||
// when running test suite in github actions, we run an influxdb service as a sidecar.
|
||||
fakeConfig.EXPECT().GetString("web.influxdb.host").Return("influxdb").AnyTimes()
|
||||
|
||||
@@ -46,3 +46,5 @@ testem.log
|
||||
Thumbs.db
|
||||
|
||||
/dist
|
||||
|
||||
/coverage
|
||||
|
||||
@@ -91,6 +91,7 @@
|
||||
},
|
||||
"test": {
|
||||
"builder": "@angular-devkit/build-angular:karma",
|
||||
"defaultConfiguration": "production",
|
||||
"options": {
|
||||
"main": "src/test.ts",
|
||||
"polyfills": "src/polyfills.ts",
|
||||
@@ -101,10 +102,22 @@
|
||||
"src/favicon-32x32.png",
|
||||
"src/assets"
|
||||
],
|
||||
"stylePreprocessorOptions": {
|
||||
"includePaths": [
|
||||
"src/@treo/styles"
|
||||
]
|
||||
},
|
||||
"styles": [
|
||||
"src/styles.scss"
|
||||
"src/styles/vendors.scss",
|
||||
"src/@treo/styles/main.scss",
|
||||
"src/styles/styles.scss",
|
||||
"src/styles/tailwind.scss"
|
||||
],
|
||||
"scripts": []
|
||||
"scripts": [],
|
||||
"fileReplacements": [{
|
||||
"replace": "src/environments/environment.ts",
|
||||
"with": "src/environments/environment.prod.ts"
|
||||
}]
|
||||
}
|
||||
},
|
||||
"lint": {
|
||||
|
||||
@@ -17,8 +17,8 @@ module.exports = function (config)
|
||||
clearContext: false // leave Jasmine Spec Runner output visible in browser
|
||||
},
|
||||
coverageIstanbulReporter: {
|
||||
dir : require('path').join(__dirname, './coverage/treo'),
|
||||
reports : ['html', 'lcovonly', 'text-summary'],
|
||||
dir: require('path').join(__dirname, './coverage'),
|
||||
reports: ['html', 'lcovonly', 'text-summary'],
|
||||
fixWebpackSourcePaths: true
|
||||
},
|
||||
reporters : ['progress', 'kjhtml'],
|
||||
|
||||
Generated
+20
-20
@@ -29,13 +29,13 @@
|
||||
"@fullcalendar/rrule": "4.4.0",
|
||||
"@fullcalendar/timegrid": "4.4.0",
|
||||
"@types/humanize-duration": "^3.18.1",
|
||||
"apexcharts": "3.19.0",
|
||||
"apexcharts": "3.19.2",
|
||||
"crypto-js": "3.3.0",
|
||||
"highlight.js": "10.0.1",
|
||||
"humanize-duration": "^3.24.0",
|
||||
"lodash": "4.17.15",
|
||||
"moment": "2.24.0",
|
||||
"ng-apexcharts": "1.2.3",
|
||||
"ng-apexcharts": "1.5.12",
|
||||
"ngx-markdown": "9.0.0",
|
||||
"ngx-quill": "9.1.0",
|
||||
"perfect-scrollbar": "1.5.0",
|
||||
@@ -2964,9 +2964,9 @@
|
||||
}
|
||||
},
|
||||
"node_modules/apexcharts": {
|
||||
"version": "3.19.0",
|
||||
"resolved": "https://registry.npmjs.org/apexcharts/-/apexcharts-3.19.0.tgz",
|
||||
"integrity": "sha512-fzupCGVDvOoU6kEzguLAfgRgrlHynHM5fnkkyCL85tYf9U8bw1hCijs4A+kWXurC/SNytJrArBc21kA/2wuHYg==",
|
||||
"version": "3.19.2",
|
||||
"resolved": "https://registry.npmjs.org/apexcharts/-/apexcharts-3.19.2.tgz",
|
||||
"integrity": "sha512-hMFLRE2Lyx4WrN9pYfQLvBDcn+HOodZrqRwc+kucxM+hcUmI2NHY4z+GI14+VcSFmD4aKiMbS3z3Q2jiBxUrcg==",
|
||||
"dependencies": {
|
||||
"svg.draggable.js": "^2.2.2",
|
||||
"svg.easing.js": "^2.0.0",
|
||||
@@ -9958,17 +9958,17 @@
|
||||
"dev": true
|
||||
},
|
||||
"node_modules/ng-apexcharts": {
|
||||
"version": "1.2.3",
|
||||
"resolved": "https://registry.npmjs.org/ng-apexcharts/-/ng-apexcharts-1.2.3.tgz",
|
||||
"integrity": "sha512-4G+JRCWp8uSSBJKvYP9vKHEZIC0w6YuRLasumZS35fCCc7bzLY+L907n8khG9Xeoo4LBt7pVbmjb9P+lSWs/5g==",
|
||||
"version": "1.5.12",
|
||||
"resolved": "https://registry.npmjs.org/ng-apexcharts/-/ng-apexcharts-1.5.12.tgz",
|
||||
"integrity": "sha512-k82AdWNbZs5yqGCjiX7PGS11Cy1+1Oo/RGt2lT89xReD9N9Vvo1t34p1dmzS+U6W5wOFlLEKKVLGNQqENW8cTQ==",
|
||||
"dependencies": {
|
||||
"tslib": "^1.9.0"
|
||||
"tslib": "^1.10.0"
|
||||
},
|
||||
"peerDependencies": {
|
||||
"@angular/common": "^8.0.0",
|
||||
"@angular/core": "^8.0.0",
|
||||
"apexcharts": "^3.11.2",
|
||||
"rxjs": "^6.4.0"
|
||||
"@angular/common": ">=9.0.0 <13.0.0",
|
||||
"@angular/core": ">=9.0.0 <13.0.0",
|
||||
"apexcharts": "^3.19.2",
|
||||
"rxjs": "^6.5.5"
|
||||
}
|
||||
},
|
||||
"node_modules/ngx-markdown": {
|
||||
@@ -20708,9 +20708,9 @@
|
||||
}
|
||||
},
|
||||
"apexcharts": {
|
||||
"version": "3.19.0",
|
||||
"resolved": "https://registry.npmjs.org/apexcharts/-/apexcharts-3.19.0.tgz",
|
||||
"integrity": "sha512-fzupCGVDvOoU6kEzguLAfgRgrlHynHM5fnkkyCL85tYf9U8bw1hCijs4A+kWXurC/SNytJrArBc21kA/2wuHYg==",
|
||||
"version": "3.19.2",
|
||||
"resolved": "https://registry.npmjs.org/apexcharts/-/apexcharts-3.19.2.tgz",
|
||||
"integrity": "sha512-hMFLRE2Lyx4WrN9pYfQLvBDcn+HOodZrqRwc+kucxM+hcUmI2NHY4z+GI14+VcSFmD4aKiMbS3z3Q2jiBxUrcg==",
|
||||
"requires": {
|
||||
"svg.draggable.js": "^2.2.2",
|
||||
"svg.easing.js": "^2.0.0",
|
||||
@@ -26486,11 +26486,11 @@
|
||||
"dev": true
|
||||
},
|
||||
"ng-apexcharts": {
|
||||
"version": "1.2.3",
|
||||
"resolved": "https://registry.npmjs.org/ng-apexcharts/-/ng-apexcharts-1.2.3.tgz",
|
||||
"integrity": "sha512-4G+JRCWp8uSSBJKvYP9vKHEZIC0w6YuRLasumZS35fCCc7bzLY+L907n8khG9Xeoo4LBt7pVbmjb9P+lSWs/5g==",
|
||||
"version": "1.5.12",
|
||||
"resolved": "https://registry.npmjs.org/ng-apexcharts/-/ng-apexcharts-1.5.12.tgz",
|
||||
"integrity": "sha512-k82AdWNbZs5yqGCjiX7PGS11Cy1+1Oo/RGt2lT89xReD9N9Vvo1t34p1dmzS+U6W5wOFlLEKKVLGNQqENW8cTQ==",
|
||||
"requires": {
|
||||
"tslib": "^1.9.0"
|
||||
"tslib": "^1.10.0"
|
||||
}
|
||||
},
|
||||
"ngx-markdown": {
|
||||
|
||||
@@ -40,13 +40,13 @@
|
||||
"@fullcalendar/rrule": "4.4.0",
|
||||
"@fullcalendar/timegrid": "4.4.0",
|
||||
"@types/humanize-duration": "^3.18.1",
|
||||
"apexcharts": "3.19.0",
|
||||
"apexcharts": "3.19.2",
|
||||
"crypto-js": "3.3.0",
|
||||
"highlight.js": "10.0.1",
|
||||
"humanize-duration": "^3.24.0",
|
||||
"lodash": "4.17.15",
|
||||
"moment": "2.24.0",
|
||||
"ng-apexcharts": "1.2.3",
|
||||
"ng-apexcharts": "1.5.12",
|
||||
"ngx-markdown": "9.0.0",
|
||||
"ngx-quill": "9.1.0",
|
||||
"perfect-scrollbar": "1.5.0",
|
||||
|
||||
@@ -21,11 +21,11 @@ export class TreoConfigService
|
||||
{
|
||||
let currentScrutinyConfig = defaultConfig
|
||||
|
||||
let localConfigStr = localStorage.getItem(SCRUTINY_CONFIG_LOCAL_STORAGE_KEY)
|
||||
const localConfigStr = localStorage.getItem(SCRUTINY_CONFIG_LOCAL_STORAGE_KEY)
|
||||
if (localConfigStr){
|
||||
//check localstorage for a value
|
||||
let localConfig = JSON.parse(localConfigStr)
|
||||
currentScrutinyConfig = Object.assign({}, localConfig, currentScrutinyConfig) // make sure defaults are available if missing from localStorage.
|
||||
// check localstorage for a value
|
||||
const localConfig = JSON.parse(localConfigStr)
|
||||
currentScrutinyConfig = Object.assign({}, currentScrutinyConfig, localConfig) // make sure defaults are available if missing from localStorage.
|
||||
}
|
||||
// Set the private defaults
|
||||
this._config = new BehaviorSubject(currentScrutinyConfig);
|
||||
@@ -38,20 +38,20 @@ export class TreoConfigService
|
||||
/**
|
||||
* Setter and getter for config
|
||||
*/
|
||||
//Setter
|
||||
// Setter
|
||||
set config(value: any)
|
||||
{
|
||||
// Merge the new config over to the current config
|
||||
let config = _.merge({}, this._config.getValue(), value);
|
||||
const config = _.merge({}, this._config.getValue(), value);
|
||||
|
||||
//Store the config in localstorage
|
||||
// Store the config in localstorage
|
||||
localStorage.setItem(SCRUTINY_CONFIG_LOCAL_STORAGE_KEY, JSON.stringify(config));
|
||||
|
||||
// Execute the observable
|
||||
this._config.next(config);
|
||||
}
|
||||
|
||||
//Getter
|
||||
// Getter
|
||||
get config$(): Observable<any>
|
||||
{
|
||||
return this._config.asObservable();
|
||||
|
||||
@@ -1,22 +1,28 @@
|
||||
import { Layout } from "app/layout/layout.types";
|
||||
import {Layout} from 'app/layout/layout.types';
|
||||
|
||||
// Theme type
|
||||
export type Theme = "light" | "dark" | "system";
|
||||
export type Theme = 'light' | 'dark' | 'system';
|
||||
|
||||
// Device title to display on the dashboard
|
||||
export type DashboardDisplay = 'name' | 'serial_id' | 'uuid' | 'label'
|
||||
|
||||
export type DashboardSort = 'status' | 'title' | 'age'
|
||||
|
||||
export type TemperatureUnit = 'celsius' | 'fahrenheit'
|
||||
|
||||
/**
|
||||
* AppConfig interface. Update this interface to strictly type your config
|
||||
* object.
|
||||
*/
|
||||
export interface AppConfig
|
||||
{
|
||||
export interface AppConfig {
|
||||
theme: Theme;
|
||||
layout: Layout;
|
||||
|
||||
// Dashboard options
|
||||
dashboardDisplay: string;
|
||||
dashboardSort: string;
|
||||
dashboardDisplay: DashboardDisplay;
|
||||
dashboardSort: DashboardSort;
|
||||
|
||||
temperatureUnit: string;
|
||||
temperatureUnit: TemperatureUnit;
|
||||
}
|
||||
|
||||
/**
|
||||
@@ -28,12 +34,12 @@ export interface AppConfig
|
||||
* "ConfigService".
|
||||
*/
|
||||
export const appConfig: AppConfig = {
|
||||
theme : "light",
|
||||
layout: "material",
|
||||
theme : 'light',
|
||||
layout: 'material',
|
||||
|
||||
dashboardDisplay: "name",
|
||||
dashboardSort: "status",
|
||||
dashboardDisplay: 'name',
|
||||
dashboardSort: 'status',
|
||||
|
||||
temperatureUnit: "celsius",
|
||||
temperatureUnit: 'celsius',
|
||||
};
|
||||
|
||||
|
||||
@@ -0,0 +1,14 @@
|
||||
import {DeviceModel} from 'app/core/models/device-model';
|
||||
import {SmartModel} from 'app/core/models/measurements/smart-model';
|
||||
import {AttributeMetadataModel} from 'app/core/models/thresholds/attribute-metadata-model';
|
||||
|
||||
// maps to webapp/backend/pkg/models/device_summary.go
|
||||
export interface DeviceDetailsResponseWrapper {
|
||||
success: boolean;
|
||||
errors?: any[];
|
||||
data: {
|
||||
device: DeviceModel;
|
||||
smart_results: SmartModel[];
|
||||
},
|
||||
metadata: { [key: string]: AttributeMetadataModel } | { [key: number]: AttributeMetadataModel };
|
||||
}
|
||||
@@ -0,0 +1,26 @@
|
||||
// maps to webapp/backend/pkg/models/device.go
|
||||
export interface DeviceModel {
|
||||
wwn: string;
|
||||
device_name?: string;
|
||||
device_uuid?: string;
|
||||
device_serial_id?: string;
|
||||
device_label?: string;
|
||||
|
||||
manufacturer: string;
|
||||
model_name: string;
|
||||
interface_type: string;
|
||||
interface_speed: string;
|
||||
serial_number: string;
|
||||
firmware: string;
|
||||
rotational_speed: number;
|
||||
capacity: number;
|
||||
form_factor: string;
|
||||
smart_support: boolean;
|
||||
device_protocol: string;
|
||||
device_type: string;
|
||||
|
||||
label: string;
|
||||
host_id: string;
|
||||
|
||||
device_status: number;
|
||||
}
|
||||
@@ -0,0 +1,16 @@
|
||||
import {DeviceModel} from 'app/core/models/device-model';
|
||||
import {SmartTemperatureModel} from 'app/core/models/measurements/smart-temperature-model';
|
||||
|
||||
// maps to webapp/backend/pkg/models/device_summary.go
|
||||
export interface DeviceSummaryModel {
|
||||
device: DeviceModel;
|
||||
smart?: SmartSummary;
|
||||
temp_history?: SmartTemperatureModel[];
|
||||
}
|
||||
|
||||
export interface SmartSummary {
|
||||
collector_date?: string,
|
||||
temp?: number
|
||||
power_on_hours?: number
|
||||
}
|
||||
|
||||
@@ -0,0 +1,10 @@
|
||||
import {DeviceSummaryModel} from 'app/core/models/device-summary-model';
|
||||
|
||||
// maps to webapp/backend/pkg/models/device_summary.go
|
||||
export interface DeviceSummaryResponseWrapper {
|
||||
success: boolean;
|
||||
errors: any[];
|
||||
data: {
|
||||
summary: { [key: string]: DeviceSummaryModel }
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,9 @@
|
||||
import {SmartTemperatureModel} from './measurements/smart-temperature-model';
|
||||
|
||||
export interface DeviceSummaryTempResponseWrapper {
|
||||
success: boolean;
|
||||
errors: any[];
|
||||
data: {
|
||||
temp_history: { [key: string]: SmartTemperatureModel[]; }
|
||||
}
|
||||
}
|
||||
@@ -0,0 +1,19 @@
|
||||
// maps to webapp/backend/pkg/models/measurements/smart_ata_attribute.go
|
||||
// maps to webapp/backend/pkg/models/measurements/smart_nvme_attribute.go
|
||||
// maps to webapp/backend/pkg/models/measurements/smart_scsi_attribute.go
|
||||
export interface SmartAttributeModel {
|
||||
attribute_id: number | string
|
||||
value: number
|
||||
thresh: number
|
||||
worst?: number
|
||||
raw_value?: number
|
||||
raw_string?: string
|
||||
when_failed?: string
|
||||
|
||||
transformed_value: number
|
||||
status: number
|
||||
status_reason?: string
|
||||
failure_rate?: number
|
||||
|
||||
chartData?: any[]
|
||||
}
|
||||
@@ -0,0 +1,13 @@
|
||||
// maps to webapp/backend/pkg/models/measurements/smart.go
|
||||
import {SmartAttributeModel} from './smart-attribute-model';
|
||||
|
||||
export interface SmartModel {
|
||||
date: string;
|
||||
device_wwn: string;
|
||||
device_protocol: string;
|
||||
|
||||
temp: number;
|
||||
power_on_hours: number;
|
||||
power_cycle_count: number
|
||||
attrs: { [key: string]: SmartAttributeModel }
|
||||
}
|
||||
@@ -0,0 +1,6 @@
|
||||
// maps to webapp/backend/pkg/models/measurements/smart_temperature.go
|
||||
export interface SmartTemperatureModel {
|
||||
date: string;
|
||||
temp: number;
|
||||
}
|
||||
|
||||
@@ -0,0 +1,13 @@
|
||||
// map to webapp/backend/pkg/thresholds/ata_attribute_metadata.go
|
||||
// map to webapp/backend/pkg/thresholds/nvme_attribute_metadata.go
|
||||
// map to webapp/backend/pkg/thresholds/scsi_attribute_metadata.go
|
||||
export interface AttributeMetadataModel {
|
||||
display_name: string
|
||||
ideal: string
|
||||
critical: boolean
|
||||
description: string
|
||||
|
||||
transform_value_unit?: string
|
||||
observed_thresholds?: any[]
|
||||
display_type: string
|
||||
}
|
||||
@@ -1,264 +1,264 @@
|
||||
export const sda = {
|
||||
"data": {
|
||||
"device": {
|
||||
"CreatedAt": "2021-06-24T21:17:31.301226-07:00",
|
||||
"UpdatedAt": "2021-10-24T16:37:56.981833-07:00",
|
||||
"DeletedAt": null,
|
||||
"wwn": "0x5002538e40a22954",
|
||||
"device_name": "sda",
|
||||
"manufacturer": "ATA",
|
||||
"model_name": "Samsung_SSD_860_EVO_500GB",
|
||||
"interface_type": "SCSI",
|
||||
"interface_speed": "",
|
||||
"serial_number": "S3YZNB0KBXXXXXX",
|
||||
"firmware": "002C",
|
||||
"rotational_speed": 0,
|
||||
"capacity": 500107862016,
|
||||
"form_factor": "",
|
||||
"smart_support": false,
|
||||
"device_protocol": "NVMe",
|
||||
"device_type": "",
|
||||
"label": "",
|
||||
"host_id": "",
|
||||
"device_status": 0
|
||||
'data': {
|
||||
'device': {
|
||||
'CreatedAt': '2021-06-24T21:17:31.301226-07:00',
|
||||
'UpdatedAt': '2021-10-24T16:37:56.981833-07:00',
|
||||
'DeletedAt': null,
|
||||
'wwn': '0x5002538e40a22954',
|
||||
'device_name': 'sda',
|
||||
'manufacturer': 'ATA',
|
||||
'model_name': 'Samsung_SSD_860_EVO_500GB',
|
||||
'interface_type': 'SCSI',
|
||||
'interface_speed': '',
|
||||
'serial_number': 'S3YZNB0KBXXXXXX',
|
||||
'firmware': '002C',
|
||||
'rotational_speed': 0,
|
||||
'capacity': 500107862016,
|
||||
'form_factor': '',
|
||||
'smart_support': false,
|
||||
'device_protocol': 'NVMe',
|
||||
'device_type': '',
|
||||
'label': '',
|
||||
'host_id': '',
|
||||
'device_status': 0
|
||||
},
|
||||
"smart_results": [{
|
||||
"date": "2021-10-24T23:20:44Z",
|
||||
"device_wwn": "0x5002538e40a22954",
|
||||
"device_protocol": "NVMe",
|
||||
"temp": 36,
|
||||
"power_on_hours": 2401,
|
||||
"power_cycle_count": 266,
|
||||
"attrs": {
|
||||
"available_spare": {
|
||||
"attribute_id": "available_spare",
|
||||
"value": 100,
|
||||
"thresh": 10,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'smart_results': [{
|
||||
'date': '2021-10-24T23:20:44Z',
|
||||
'device_wwn': '0x5002538e40a22954',
|
||||
'device_protocol': 'NVMe',
|
||||
'temp': 36,
|
||||
'power_on_hours': 2401,
|
||||
'power_cycle_count': 266,
|
||||
'attrs': {
|
||||
'available_spare': {
|
||||
'attribute_id': 'available_spare',
|
||||
'value': 100,
|
||||
'thresh': 10,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"controller_busy_time": {
|
||||
"attribute_id": "controller_busy_time",
|
||||
"value": 3060,
|
||||
"thresh": -1,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'controller_busy_time': {
|
||||
'attribute_id': 'controller_busy_time',
|
||||
'value': 3060,
|
||||
'thresh': -1,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"critical_comp_time": {
|
||||
"attribute_id": "critical_comp_time",
|
||||
"value": 0,
|
||||
"thresh": -1,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'critical_comp_time': {
|
||||
'attribute_id': 'critical_comp_time',
|
||||
'value': 0,
|
||||
'thresh': -1,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"critical_warning": {
|
||||
"attribute_id": "critical_warning",
|
||||
"value": 0,
|
||||
"thresh": 0,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'critical_warning': {
|
||||
'attribute_id': 'critical_warning',
|
||||
'value': 0,
|
||||
'thresh': 0,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"data_units_read": {
|
||||
"attribute_id": "data_units_read",
|
||||
"value": 9511859,
|
||||
"thresh": -1,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'data_units_read': {
|
||||
'attribute_id': 'data_units_read',
|
||||
'value': 9511859,
|
||||
'thresh': -1,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"data_units_written": {
|
||||
"attribute_id": "data_units_written",
|
||||
"value": 7773431,
|
||||
"thresh": -1,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'data_units_written': {
|
||||
'attribute_id': 'data_units_written',
|
||||
'value': 7773431,
|
||||
'thresh': -1,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"host_reads": {
|
||||
"attribute_id": "host_reads",
|
||||
"value": 111303174,
|
||||
"thresh": -1,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'host_reads': {
|
||||
'attribute_id': 'host_reads',
|
||||
'value': 111303174,
|
||||
'thresh': -1,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"host_writes": {
|
||||
"attribute_id": "host_writes",
|
||||
"value": 83170961,
|
||||
"thresh": -1,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'host_writes': {
|
||||
'attribute_id': 'host_writes',
|
||||
'value': 83170961,
|
||||
'thresh': -1,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"media_errors": {
|
||||
"attribute_id": "media_errors",
|
||||
"value": 0,
|
||||
"thresh": 0,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'media_errors': {
|
||||
'attribute_id': 'media_errors',
|
||||
'value': 0,
|
||||
'thresh': 0,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"num_err_log_entries": {
|
||||
"attribute_id": "num_err_log_entries",
|
||||
"value": 0,
|
||||
"thresh": 0,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'num_err_log_entries': {
|
||||
'attribute_id': 'num_err_log_entries',
|
||||
'value': 0,
|
||||
'thresh': 0,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"percentage_used": {
|
||||
"attribute_id": "percentage_used",
|
||||
"value": 0,
|
||||
"thresh": 100,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'percentage_used': {
|
||||
'attribute_id': 'percentage_used',
|
||||
'value': 0,
|
||||
'thresh': 100,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"power_cycles": {
|
||||
"attribute_id": "power_cycles",
|
||||
"value": 266,
|
||||
"thresh": -1,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'power_cycles': {
|
||||
'attribute_id': 'power_cycles',
|
||||
'value': 266,
|
||||
'thresh': -1,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"power_on_hours": {
|
||||
"attribute_id": "power_on_hours",
|
||||
"value": 2401,
|
||||
"thresh": -1,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'power_on_hours': {
|
||||
'attribute_id': 'power_on_hours',
|
||||
'value': 2401,
|
||||
'thresh': -1,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"temperature": {
|
||||
"attribute_id": "temperature",
|
||||
"value": 36,
|
||||
"thresh": -1,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'temperature': {
|
||||
'attribute_id': 'temperature',
|
||||
'value': 36,
|
||||
'thresh': -1,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"unsafe_shutdowns": {
|
||||
"attribute_id": "unsafe_shutdowns",
|
||||
"value": 43,
|
||||
"thresh": -1,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'unsafe_shutdowns': {
|
||||
'attribute_id': 'unsafe_shutdowns',
|
||||
'value': 43,
|
||||
'thresh': -1,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"warning_temp_time": {
|
||||
"attribute_id": "warning_temp_time",
|
||||
"value": 0,
|
||||
"thresh": -1,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'warning_temp_time': {
|
||||
'attribute_id': 'warning_temp_time',
|
||||
'value': 0,
|
||||
'thresh': -1,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
}
|
||||
},
|
||||
"Status": 0
|
||||
'Status': 0
|
||||
}]
|
||||
},
|
||||
"metadata": {
|
||||
"available_spare": {
|
||||
"display_name": "Available Spare",
|
||||
"ideal": "high",
|
||||
"critical": true,
|
||||
"description": "Contains a normalized percentage (0 to 100%) of the remaining spare capacity available.",
|
||||
"display_type": ""
|
||||
'metadata': {
|
||||
'available_spare': {
|
||||
'display_name': 'Available Spare',
|
||||
'ideal': 'high',
|
||||
'critical': true,
|
||||
'description': 'Contains a normalized percentage (0 to 100%) of the remaining spare capacity available.',
|
||||
'display_type': ''
|
||||
},
|
||||
"controller_busy_time": {
|
||||
"display_name": "Controller Busy Time",
|
||||
"ideal": "",
|
||||
"critical": false,
|
||||
"description": "Contains the amount of time the controller is busy with I/O commands. The controller is busy when there is a command outstanding to an I/O Queue (specifically, a command was issued via an I/O Submission Queue Tail doorbell write and the corresponding completion queue entry has not been posted yet to the associated I/O Completion Queue). This value is reported in minutes.",
|
||||
"display_type": ""
|
||||
'controller_busy_time': {
|
||||
'display_name': 'Controller Busy Time',
|
||||
'ideal': '',
|
||||
'critical': false,
|
||||
'description': 'Contains the amount of time the controller is busy with I/O commands. The controller is busy when there is a command outstanding to an I/O Queue (specifically, a command was issued via an I/O Submission Queue Tail doorbell write and the corresponding completion queue entry has not been posted yet to the associated I/O Completion Queue). This value is reported in minutes.',
|
||||
'display_type': ''
|
||||
},
|
||||
"critical_comp_time": {
|
||||
"display_name": "Critical CompTime",
|
||||
"ideal": "",
|
||||
"critical": false,
|
||||
"description": "Contains the amount of time in minutes that the controller is operational and the Composite Temperature is greater the Critical Composite Temperature Threshold (CCTEMP) field in the Identify Controller data structure.",
|
||||
"display_type": ""
|
||||
'critical_comp_time': {
|
||||
'display_name': 'Critical CompTime',
|
||||
'ideal': '',
|
||||
'critical': false,
|
||||
'description': 'Contains the amount of time in minutes that the controller is operational and the Composite Temperature is greater the Critical Composite Temperature Threshold (CCTEMP) field in the Identify Controller data structure.',
|
||||
'display_type': ''
|
||||
},
|
||||
"critical_warning": {
|
||||
"display_name": "Critical Warning",
|
||||
"ideal": "low",
|
||||
"critical": true,
|
||||
"description": "This field indicates critical warnings for the state of the controller. Each bit corresponds to a critical warning type; multiple bits may be set. If a bit is cleared to ‘0’, then that critical warning does not apply. Critical warnings may result in an asynchronous event notification to the host. Bits in this field represent the current associated state and are not persistent.",
|
||||
"display_type": ""
|
||||
'critical_warning': {
|
||||
'display_name': 'Critical Warning',
|
||||
'ideal': 'low',
|
||||
'critical': true,
|
||||
'description': 'This field indicates critical warnings for the state of the controller. Each bit corresponds to a critical warning type; multiple bits may be set. If a bit is cleared to ‘0’, then that critical warning does not apply. Critical warnings may result in an asynchronous event notification to the host. Bits in this field represent the current associated state and are not persistent.',
|
||||
'display_type': ''
|
||||
},
|
||||
"data_units_read": {
|
||||
"display_name": "Data Units Read",
|
||||
"ideal": "",
|
||||
"critical": false,
|
||||
"description": "Contains the number of 512 byte data units the host has read from the controller; this value does not include metadata. This value is reported in thousands (i.e., a value of 1 corresponds to 1000 units of 512 bytes read) and is rounded up. When the LBA size is a value other than 512 bytes, the controller shall convert the amount of data read to 512 byte units.",
|
||||
"display_type": ""
|
||||
'data_units_read': {
|
||||
'display_name': 'Data Units Read',
|
||||
'ideal': '',
|
||||
'critical': false,
|
||||
'description': 'Contains the number of 512 byte data units the host has read from the controller; this value does not include metadata. This value is reported in thousands (i.e., a value of 1 corresponds to 1000 units of 512 bytes read) and is rounded up. When the LBA size is a value other than 512 bytes, the controller shall convert the amount of data read to 512 byte units.',
|
||||
'display_type': ''
|
||||
},
|
||||
"data_units_written": {
|
||||
"display_name": "Data Units Written",
|
||||
"ideal": "",
|
||||
"critical": false,
|
||||
"description": "Contains the number of 512 byte data units the host has written to the controller; this value does not include metadata. This value is reported in thousands (i.e., a value of 1 corresponds to 1000 units of 512 bytes written) and is rounded up. When the LBA size is a value other than 512 bytes, the controller shall convert the amount of data written to 512 byte units.",
|
||||
"display_type": ""
|
||||
'data_units_written': {
|
||||
'display_name': 'Data Units Written',
|
||||
'ideal': '',
|
||||
'critical': false,
|
||||
'description': 'Contains the number of 512 byte data units the host has written to the controller; this value does not include metadata. This value is reported in thousands (i.e., a value of 1 corresponds to 1000 units of 512 bytes written) and is rounded up. When the LBA size is a value other than 512 bytes, the controller shall convert the amount of data written to 512 byte units.',
|
||||
'display_type': ''
|
||||
},
|
||||
"host_reads": {
|
||||
"display_name": "Host Reads",
|
||||
"ideal": "",
|
||||
"critical": false,
|
||||
"description": "Contains the number of read commands completed by the controller",
|
||||
"display_type": ""
|
||||
'host_reads': {
|
||||
'display_name': 'Host Reads',
|
||||
'ideal': '',
|
||||
'critical': false,
|
||||
'description': 'Contains the number of read commands completed by the controller',
|
||||
'display_type': ''
|
||||
},
|
||||
"host_writes": {
|
||||
"display_name": "Host Writes",
|
||||
"ideal": "",
|
||||
"critical": false,
|
||||
"description": "Contains the number of write commands completed by the controller",
|
||||
"display_type": ""
|
||||
'host_writes': {
|
||||
'display_name': 'Host Writes',
|
||||
'ideal': '',
|
||||
'critical': false,
|
||||
'description': 'Contains the number of write commands completed by the controller',
|
||||
'display_type': ''
|
||||
},
|
||||
"media_errors": {
|
||||
"display_name": "Media Errors",
|
||||
"ideal": "low",
|
||||
"critical": true,
|
||||
"description": "Contains the number of occurrences where the controller detected an unrecovered data integrity error. Errors such as uncorrectable ECC, CRC checksum failure, or LBA tag mismatch are included in this field.",
|
||||
"display_type": ""
|
||||
'media_errors': {
|
||||
'display_name': 'Media Errors',
|
||||
'ideal': 'low',
|
||||
'critical': true,
|
||||
'description': 'Contains the number of occurrences where the controller detected an unrecovered data integrity error. Errors such as uncorrectable ECC, CRC checksum failure, or LBA tag mismatch are included in this field.',
|
||||
'display_type': ''
|
||||
},
|
||||
"num_err_log_entries": {
|
||||
"display_name": "Numb Err Log Entries",
|
||||
"ideal": "low",
|
||||
"critical": true,
|
||||
"description": "Contains the number of Error Information log entries over the life of the controller.",
|
||||
"display_type": ""
|
||||
'num_err_log_entries': {
|
||||
'display_name': 'Numb Err Log Entries',
|
||||
'ideal': 'low',
|
||||
'critical': true,
|
||||
'description': 'Contains the number of Error Information log entries over the life of the controller.',
|
||||
'display_type': ''
|
||||
},
|
||||
"percentage_used": {
|
||||
"display_name": "Percentage Used",
|
||||
"ideal": "low",
|
||||
"critical": true,
|
||||
"description": "Contains a vendor specific estimate of the percentage of NVM subsystem life used based on the actual usage and the manufacturer’s prediction of NVM life. A value of 100 indicates that the estimated endurance of the NVM in the NVM subsystem has been consumed, but may not indicate an NVM subsystem failure. The value is allowed to exceed 100. Percentages greater than 254 shall be represented as 255. This value shall be updated once per power-on hour (when the controller is not in a sleep state).",
|
||||
"display_type": ""
|
||||
'percentage_used': {
|
||||
'display_name': 'Percentage Used',
|
||||
'ideal': 'low',
|
||||
'critical': true,
|
||||
'description': 'Contains a vendor specific estimate of the percentage of NVM subsystem life used based on the actual usage and the manufacturer’s prediction of NVM life. A value of 100 indicates that the estimated endurance of the NVM in the NVM subsystem has been consumed, but may not indicate an NVM subsystem failure. The value is allowed to exceed 100. Percentages greater than 254 shall be represented as 255. This value shall be updated once per power-on hour (when the controller is not in a sleep state).',
|
||||
'display_type': ''
|
||||
},
|
||||
"power_cycles": {
|
||||
"display_name": "Power Cycles",
|
||||
"ideal": "",
|
||||
"critical": false,
|
||||
"description": "Contains the number of power cycles.",
|
||||
"display_type": ""
|
||||
'power_cycles': {
|
||||
'display_name': 'Power Cycles',
|
||||
'ideal': '',
|
||||
'critical': false,
|
||||
'description': 'Contains the number of power cycles.',
|
||||
'display_type': ''
|
||||
},
|
||||
"power_on_hours": {
|
||||
"display_name": "Power on Hours",
|
||||
"ideal": "",
|
||||
"critical": false,
|
||||
"description": "Contains the number of power-on hours. Power on hours is always logging, even when in low power mode.",
|
||||
"display_type": ""
|
||||
'power_on_hours': {
|
||||
'display_name': 'Power on Hours',
|
||||
'ideal': '',
|
||||
'critical': false,
|
||||
'description': 'Contains the number of power-on hours. Power on hours is always logging, even when in low power mode.',
|
||||
'display_type': ''
|
||||
},
|
||||
"temperature": {
|
||||
"display_name": "Temperature",
|
||||
"ideal": "",
|
||||
"critical": false,
|
||||
"description": "",
|
||||
"display_type": ""
|
||||
'temperature': {
|
||||
'display_name': 'Temperature',
|
||||
'ideal': '',
|
||||
'critical': false,
|
||||
'description': '',
|
||||
'display_type': ''
|
||||
},
|
||||
"unsafe_shutdowns": {
|
||||
"display_name": "Unsafe Shutdowns",
|
||||
"ideal": "",
|
||||
"critical": false,
|
||||
"description": "Contains the number of unsafe shutdowns. This count is incremented when a shutdown notification (CC.SHN) is not received prior to loss of power.",
|
||||
"display_type": ""
|
||||
'unsafe_shutdowns': {
|
||||
'display_name': 'Unsafe Shutdowns',
|
||||
'ideal': '',
|
||||
'critical': false,
|
||||
'description': 'Contains the number of unsafe shutdowns. This count is incremented when a shutdown notification (CC.SHN) is not received prior to loss of power.',
|
||||
'display_type': ''
|
||||
},
|
||||
"warning_temp_time": {
|
||||
"display_name": "Warning Temp Time",
|
||||
"ideal": "",
|
||||
"critical": false,
|
||||
"description": "Contains the amount of time in minutes that the controller is operational and the Composite Temperature is greater than or equal to the Warning Composite Temperature Threshold (WCTEMP) field and less than the Critical Composite Temperature Threshold (CCTEMP) field in the Identify Controller data structure.",
|
||||
"display_type": ""
|
||||
'warning_temp_time': {
|
||||
'display_name': 'Warning Temp Time',
|
||||
'ideal': '',
|
||||
'critical': false,
|
||||
'description': 'Contains the amount of time in minutes that the controller is operational and the Composite Temperature is greater than or equal to the Warning Composite Temperature Threshold (WCTEMP) field and less than the Critical Composite Temperature Threshold (CCTEMP) field in the Identify Controller data structure.',
|
||||
'display_type': ''
|
||||
}
|
||||
},
|
||||
"success": true
|
||||
'success': true
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
@@ -1,222 +1,222 @@
|
||||
export const sdd = {
|
||||
"data": {
|
||||
"device": {
|
||||
"CreatedAt": "2021-06-24T21:17:31.30374-07:00",
|
||||
"UpdatedAt": "2021-10-24T16:37:57.013758-07:00",
|
||||
"DeletedAt": null,
|
||||
"wwn": "0x5000cca252c859cc",
|
||||
"device_name": "sdd",
|
||||
"manufacturer": "ATA",
|
||||
"model_name": "WDC_WD80EFAX-68LHPN0",
|
||||
"interface_type": "SCSI",
|
||||
"interface_speed": "",
|
||||
"serial_number": "7SGLXXXXX",
|
||||
"firmware": "",
|
||||
"rotational_speed": 0,
|
||||
"capacity": 8001563222016,
|
||||
"form_factor": "",
|
||||
"smart_support": false,
|
||||
"device_protocol": "SCSI",
|
||||
"device_type": "",
|
||||
"label": "",
|
||||
"host_id": "",
|
||||
"device_status": 0
|
||||
'data': {
|
||||
'device': {
|
||||
'CreatedAt': '2021-06-24T21:17:31.30374-07:00',
|
||||
'UpdatedAt': '2021-10-24T16:37:57.013758-07:00',
|
||||
'DeletedAt': null,
|
||||
'wwn': '0x5000cca252c859cc',
|
||||
'device_name': 'sdd',
|
||||
'manufacturer': 'ATA',
|
||||
'model_name': 'WDC_WD80EFAX-68LHPN0',
|
||||
'interface_type': 'SCSI',
|
||||
'interface_speed': '',
|
||||
'serial_number': '7SGLXXXXX',
|
||||
'firmware': '',
|
||||
'rotational_speed': 0,
|
||||
'capacity': 8001563222016,
|
||||
'form_factor': '',
|
||||
'smart_support': false,
|
||||
'device_protocol': 'SCSI',
|
||||
'device_type': '',
|
||||
'label': '',
|
||||
'host_id': '',
|
||||
'device_status': 0
|
||||
},
|
||||
"smart_results": [{
|
||||
"date": "2021-10-24T23:20:44Z",
|
||||
"device_wwn": "0x5000cca252c859cc",
|
||||
"device_protocol": "SCSI",
|
||||
"temp": 34,
|
||||
"power_on_hours": 43549,
|
||||
"power_cycle_count": 0,
|
||||
"attrs": {
|
||||
"read_correction_algorithm_invocations": {
|
||||
"attribute_id": "read_correction_algorithm_invocations",
|
||||
"value": 0,
|
||||
"thresh": -1,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'smart_results': [{
|
||||
'date': '2021-10-24T23:20:44Z',
|
||||
'device_wwn': '0x5000cca252c859cc',
|
||||
'device_protocol': 'SCSI',
|
||||
'temp': 34,
|
||||
'power_on_hours': 43549,
|
||||
'power_cycle_count': 0,
|
||||
'attrs': {
|
||||
'read_correction_algorithm_invocations': {
|
||||
'attribute_id': 'read_correction_algorithm_invocations',
|
||||
'value': 0,
|
||||
'thresh': -1,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"read_errors_corrected_by_eccdelayed": {
|
||||
"attribute_id": "read_errors_corrected_by_eccdelayed",
|
||||
"value": 0,
|
||||
"thresh": -1,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'read_errors_corrected_by_eccdelayed': {
|
||||
'attribute_id': 'read_errors_corrected_by_eccdelayed',
|
||||
'value': 0,
|
||||
'thresh': -1,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"read_errors_corrected_by_eccfast": {
|
||||
"attribute_id": "read_errors_corrected_by_eccfast",
|
||||
"value": 300357663,
|
||||
"thresh": -1,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'read_errors_corrected_by_eccfast': {
|
||||
'attribute_id': 'read_errors_corrected_by_eccfast',
|
||||
'value': 300357663,
|
||||
'thresh': -1,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"read_errors_corrected_by_rereads_rewrites": {
|
||||
"attribute_id": "read_errors_corrected_by_rereads_rewrites",
|
||||
"value": 0,
|
||||
"thresh": 0,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'read_errors_corrected_by_rereads_rewrites': {
|
||||
'attribute_id': 'read_errors_corrected_by_rereads_rewrites',
|
||||
'value': 0,
|
||||
'thresh': 0,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"read_total_errors_corrected": {
|
||||
"attribute_id": "read_total_errors_corrected",
|
||||
"value": 300357663,
|
||||
"thresh": -1,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'read_total_errors_corrected': {
|
||||
'attribute_id': 'read_total_errors_corrected',
|
||||
'value': 300357663,
|
||||
'thresh': -1,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"read_total_uncorrected_errors": {
|
||||
"attribute_id": "read_total_uncorrected_errors",
|
||||
"value": 0,
|
||||
"thresh": 0,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'read_total_uncorrected_errors': {
|
||||
'attribute_id': 'read_total_uncorrected_errors',
|
||||
'value': 0,
|
||||
'thresh': 0,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"scsi_grown_defect_list": {
|
||||
"attribute_id": "scsi_grown_defect_list",
|
||||
"value": 56,
|
||||
"thresh": 0,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'scsi_grown_defect_list': {
|
||||
'attribute_id': 'scsi_grown_defect_list',
|
||||
'value': 56,
|
||||
'thresh': 0,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"write_correction_algorithm_invocations": {
|
||||
"attribute_id": "write_correction_algorithm_invocations",
|
||||
"value": 0,
|
||||
"thresh": -1,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'write_correction_algorithm_invocations': {
|
||||
'attribute_id': 'write_correction_algorithm_invocations',
|
||||
'value': 0,
|
||||
'thresh': -1,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"write_errors_corrected_by_eccdelayed": {
|
||||
"attribute_id": "write_errors_corrected_by_eccdelayed",
|
||||
"value": 0,
|
||||
"thresh": -1,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'write_errors_corrected_by_eccdelayed': {
|
||||
'attribute_id': 'write_errors_corrected_by_eccdelayed',
|
||||
'value': 0,
|
||||
'thresh': -1,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"write_errors_corrected_by_eccfast": {
|
||||
"attribute_id": "write_errors_corrected_by_eccfast",
|
||||
"value": 0,
|
||||
"thresh": -1,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'write_errors_corrected_by_eccfast': {
|
||||
'attribute_id': 'write_errors_corrected_by_eccfast',
|
||||
'value': 0,
|
||||
'thresh': -1,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"write_errors_corrected_by_rereads_rewrites": {
|
||||
"attribute_id": "write_errors_corrected_by_rereads_rewrites",
|
||||
"value": 0,
|
||||
"thresh": 0,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'write_errors_corrected_by_rereads_rewrites': {
|
||||
'attribute_id': 'write_errors_corrected_by_rereads_rewrites',
|
||||
'value': 0,
|
||||
'thresh': 0,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"write_total_errors_corrected": {
|
||||
"attribute_id": "write_total_errors_corrected",
|
||||
"value": 0,
|
||||
"thresh": -1,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'write_total_errors_corrected': {
|
||||
'attribute_id': 'write_total_errors_corrected',
|
||||
'value': 0,
|
||||
'thresh': -1,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"write_total_uncorrected_errors": {
|
||||
"attribute_id": "write_total_uncorrected_errors",
|
||||
"value": 0,
|
||||
"thresh": 0,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'write_total_uncorrected_errors': {
|
||||
'attribute_id': 'write_total_uncorrected_errors',
|
||||
'value': 0,
|
||||
'thresh': 0,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
}
|
||||
},
|
||||
"Status": 0
|
||||
'Status': 0
|
||||
}]
|
||||
},
|
||||
"metadata": {
|
||||
"read_correction_algorithm_invocations": {
|
||||
"display_name": "Read Correction Algorithm Invocations",
|
||||
"ideal": "",
|
||||
"critical": false,
|
||||
"description": "",
|
||||
"display_type": ""
|
||||
'metadata': {
|
||||
'read_correction_algorithm_invocations': {
|
||||
'display_name': 'Read Correction Algorithm Invocations',
|
||||
'ideal': '',
|
||||
'critical': false,
|
||||
'description': '',
|
||||
'display_type': ''
|
||||
},
|
||||
"read_errors_corrected_by_eccdelayed": {
|
||||
"display_name": "Read Errors Corrected by ECC Delayed",
|
||||
"ideal": "",
|
||||
"critical": false,
|
||||
"description": "",
|
||||
"display_type": ""
|
||||
'read_errors_corrected_by_eccdelayed': {
|
||||
'display_name': 'Read Errors Corrected by ECC Delayed',
|
||||
'ideal': '',
|
||||
'critical': false,
|
||||
'description': '',
|
||||
'display_type': ''
|
||||
},
|
||||
"read_errors_corrected_by_eccfast": {
|
||||
"display_name": "Read Errors Corrected by ECC Fast",
|
||||
"ideal": "",
|
||||
"critical": false,
|
||||
"description": "",
|
||||
"display_type": ""
|
||||
'read_errors_corrected_by_eccfast': {
|
||||
'display_name': 'Read Errors Corrected by ECC Fast',
|
||||
'ideal': '',
|
||||
'critical': false,
|
||||
'description': '',
|
||||
'display_type': ''
|
||||
},
|
||||
"read_errors_corrected_by_rereads_rewrites": {
|
||||
"display_name": "Read Errors Corrected by ReReads/ReWrites",
|
||||
"ideal": "low",
|
||||
"critical": true,
|
||||
"description": "",
|
||||
"display_type": ""
|
||||
'read_errors_corrected_by_rereads_rewrites': {
|
||||
'display_name': 'Read Errors Corrected by ReReads/ReWrites',
|
||||
'ideal': 'low',
|
||||
'critical': true,
|
||||
'description': '',
|
||||
'display_type': ''
|
||||
},
|
||||
"read_total_errors_corrected": {
|
||||
"display_name": "Read Total Errors Corrected",
|
||||
"ideal": "",
|
||||
"critical": false,
|
||||
"description": "",
|
||||
"display_type": ""
|
||||
'read_total_errors_corrected': {
|
||||
'display_name': 'Read Total Errors Corrected',
|
||||
'ideal': '',
|
||||
'critical': false,
|
||||
'description': '',
|
||||
'display_type': ''
|
||||
},
|
||||
"read_total_uncorrected_errors": {
|
||||
"display_name": "Read Total Uncorrected Errors",
|
||||
"ideal": "low",
|
||||
"critical": true,
|
||||
"description": "",
|
||||
"display_type": ""
|
||||
'read_total_uncorrected_errors': {
|
||||
'display_name': 'Read Total Uncorrected Errors',
|
||||
'ideal': 'low',
|
||||
'critical': true,
|
||||
'description': '',
|
||||
'display_type': ''
|
||||
},
|
||||
"scsi_grown_defect_list": {
|
||||
"display_name": "Grown Defect List",
|
||||
"ideal": "low",
|
||||
"critical": true,
|
||||
"description": "",
|
||||
"display_type": ""
|
||||
'scsi_grown_defect_list': {
|
||||
'display_name': 'Grown Defect List',
|
||||
'ideal': 'low',
|
||||
'critical': true,
|
||||
'description': '',
|
||||
'display_type': ''
|
||||
},
|
||||
"write_correction_algorithm_invocations": {
|
||||
"display_name": "Write Correction Algorithm Invocations",
|
||||
"ideal": "",
|
||||
"critical": false,
|
||||
"description": "",
|
||||
"display_type": ""
|
||||
'write_correction_algorithm_invocations': {
|
||||
'display_name': 'Write Correction Algorithm Invocations',
|
||||
'ideal': '',
|
||||
'critical': false,
|
||||
'description': '',
|
||||
'display_type': ''
|
||||
},
|
||||
"write_errors_corrected_by_eccdelayed": {
|
||||
"display_name": "Write Errors Corrected by ECC Delayed",
|
||||
"ideal": "",
|
||||
"critical": false,
|
||||
"description": "",
|
||||
"display_type": ""
|
||||
'write_errors_corrected_by_eccdelayed': {
|
||||
'display_name': 'Write Errors Corrected by ECC Delayed',
|
||||
'ideal': '',
|
||||
'critical': false,
|
||||
'description': '',
|
||||
'display_type': ''
|
||||
},
|
||||
"write_errors_corrected_by_eccfast": {
|
||||
"display_name": "Write Errors Corrected by ECC Fast",
|
||||
"ideal": "",
|
||||
"critical": false,
|
||||
"description": "",
|
||||
"display_type": ""
|
||||
'write_errors_corrected_by_eccfast': {
|
||||
'display_name': 'Write Errors Corrected by ECC Fast',
|
||||
'ideal': '',
|
||||
'critical': false,
|
||||
'description': '',
|
||||
'display_type': ''
|
||||
},
|
||||
"write_errors_corrected_by_rereads_rewrites": {
|
||||
"display_name": "Write Errors Corrected by ReReads/ReWrites",
|
||||
"ideal": "low",
|
||||
"critical": true,
|
||||
"description": "",
|
||||
"display_type": ""
|
||||
'write_errors_corrected_by_rereads_rewrites': {
|
||||
'display_name': 'Write Errors Corrected by ReReads/ReWrites',
|
||||
'ideal': 'low',
|
||||
'critical': true,
|
||||
'description': '',
|
||||
'display_type': ''
|
||||
},
|
||||
"write_total_errors_corrected": {
|
||||
"display_name": "Write Total Errors Corrected",
|
||||
"ideal": "",
|
||||
"critical": false,
|
||||
"description": "",
|
||||
"display_type": ""
|
||||
'write_total_errors_corrected': {
|
||||
'display_name': 'Write Total Errors Corrected',
|
||||
'ideal': '',
|
||||
'critical': false,
|
||||
'description': '',
|
||||
'display_type': ''
|
||||
},
|
||||
"write_total_uncorrected_errors": {
|
||||
"display_name": "Write Total Uncorrected Errors",
|
||||
"ideal": "low",
|
||||
"critical": true,
|
||||
"description": "",
|
||||
"display_type": ""
|
||||
'write_total_uncorrected_errors': {
|
||||
'display_name': 'Write Total Uncorrected Errors',
|
||||
'ideal': 'low',
|
||||
'critical': true,
|
||||
'description': '',
|
||||
'display_type': ''
|
||||
}
|
||||
},
|
||||
"success": true
|
||||
'success': true
|
||||
}
|
||||
|
||||
@@ -1,222 +1,222 @@
|
||||
export const sde = {
|
||||
"data": {
|
||||
"device": {
|
||||
"CreatedAt": "2021-06-24T21:17:31.304461-07:00",
|
||||
"UpdatedAt": "2021-10-24T16:40:16.495248-07:00",
|
||||
"DeletedAt": null,
|
||||
"wwn": "0x5000cca264ebc248",
|
||||
"device_name": "sde",
|
||||
"manufacturer": "ATA",
|
||||
"model_name": "WDC_WD140EDFZ-11A0VA0",
|
||||
"interface_type": "SCSI",
|
||||
"interface_speed": "",
|
||||
"serial_number": "9RK3XXXXX",
|
||||
"firmware": "",
|
||||
"rotational_speed": 0,
|
||||
"capacity": 14000519643136,
|
||||
"form_factor": "",
|
||||
"smart_support": false,
|
||||
"device_protocol": "SCSI",
|
||||
"device_type": "",
|
||||
"label": "",
|
||||
"host_id": "",
|
||||
"device_status": 0
|
||||
'data': {
|
||||
'device': {
|
||||
'CreatedAt': '2021-06-24T21:17:31.304461-07:00',
|
||||
'UpdatedAt': '2021-10-24T16:40:16.495248-07:00',
|
||||
'DeletedAt': null,
|
||||
'wwn': '0x5000cca264ebc248',
|
||||
'device_name': 'sde',
|
||||
'manufacturer': 'ATA',
|
||||
'model_name': 'WDC_WD140EDFZ-11A0VA0',
|
||||
'interface_type': 'SCSI',
|
||||
'interface_speed': '',
|
||||
'serial_number': '9RK3XXXXX',
|
||||
'firmware': '',
|
||||
'rotational_speed': 0,
|
||||
'capacity': 14000519643136,
|
||||
'form_factor': '',
|
||||
'smart_support': false,
|
||||
'device_protocol': 'SCSI',
|
||||
'device_type': '',
|
||||
'label': '',
|
||||
'host_id': '',
|
||||
'device_status': 0
|
||||
},
|
||||
"smart_results": [{
|
||||
"date": "2021-10-24T23:20:44Z",
|
||||
"device_wwn": "0x5000cca264ebc248",
|
||||
"device_protocol": "SCSI",
|
||||
"temp": 31,
|
||||
"power_on_hours": 5675,
|
||||
"power_cycle_count": 0,
|
||||
"attrs": {
|
||||
"read_correction_algorithm_invocations": {
|
||||
"attribute_id": "read_correction_algorithm_invocations",
|
||||
"value": 0,
|
||||
"thresh": -1,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'smart_results': [{
|
||||
'date': '2021-10-24T23:20:44Z',
|
||||
'device_wwn': '0x5000cca264ebc248',
|
||||
'device_protocol': 'SCSI',
|
||||
'temp': 31,
|
||||
'power_on_hours': 5675,
|
||||
'power_cycle_count': 0,
|
||||
'attrs': {
|
||||
'read_correction_algorithm_invocations': {
|
||||
'attribute_id': 'read_correction_algorithm_invocations',
|
||||
'value': 0,
|
||||
'thresh': -1,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"read_errors_corrected_by_eccdelayed": {
|
||||
"attribute_id": "read_errors_corrected_by_eccdelayed",
|
||||
"value": 0,
|
||||
"thresh": -1,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'read_errors_corrected_by_eccdelayed': {
|
||||
'attribute_id': 'read_errors_corrected_by_eccdelayed',
|
||||
'value': 0,
|
||||
'thresh': -1,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"read_errors_corrected_by_eccfast": {
|
||||
"attribute_id": "read_errors_corrected_by_eccfast",
|
||||
"value": 1410362924,
|
||||
"thresh": -1,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'read_errors_corrected_by_eccfast': {
|
||||
'attribute_id': 'read_errors_corrected_by_eccfast',
|
||||
'value': 1410362924,
|
||||
'thresh': -1,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"read_errors_corrected_by_rereads_rewrites": {
|
||||
"attribute_id": "read_errors_corrected_by_rereads_rewrites",
|
||||
"value": 0,
|
||||
"thresh": 0,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'read_errors_corrected_by_rereads_rewrites': {
|
||||
'attribute_id': 'read_errors_corrected_by_rereads_rewrites',
|
||||
'value': 0,
|
||||
'thresh': 0,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"read_total_errors_corrected": {
|
||||
"attribute_id": "read_total_errors_corrected",
|
||||
"value": 1410362924,
|
||||
"thresh": -1,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'read_total_errors_corrected': {
|
||||
'attribute_id': 'read_total_errors_corrected',
|
||||
'value': 1410362924,
|
||||
'thresh': -1,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"read_total_uncorrected_errors": {
|
||||
"attribute_id": "read_total_uncorrected_errors",
|
||||
"value": 0,
|
||||
"thresh": 0,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'read_total_uncorrected_errors': {
|
||||
'attribute_id': 'read_total_uncorrected_errors',
|
||||
'value': 0,
|
||||
'thresh': 0,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"scsi_grown_defect_list": {
|
||||
"attribute_id": "scsi_grown_defect_list",
|
||||
"value": 0,
|
||||
"thresh": 0,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'scsi_grown_defect_list': {
|
||||
'attribute_id': 'scsi_grown_defect_list',
|
||||
'value': 0,
|
||||
'thresh': 0,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"write_correction_algorithm_invocations": {
|
||||
"attribute_id": "write_correction_algorithm_invocations",
|
||||
"value": 0,
|
||||
"thresh": -1,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'write_correction_algorithm_invocations': {
|
||||
'attribute_id': 'write_correction_algorithm_invocations',
|
||||
'value': 0,
|
||||
'thresh': -1,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"write_errors_corrected_by_eccdelayed": {
|
||||
"attribute_id": "write_errors_corrected_by_eccdelayed",
|
||||
"value": 0,
|
||||
"thresh": -1,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'write_errors_corrected_by_eccdelayed': {
|
||||
'attribute_id': 'write_errors_corrected_by_eccdelayed',
|
||||
'value': 0,
|
||||
'thresh': -1,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"write_errors_corrected_by_eccfast": {
|
||||
"attribute_id": "write_errors_corrected_by_eccfast",
|
||||
"value": 0,
|
||||
"thresh": -1,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'write_errors_corrected_by_eccfast': {
|
||||
'attribute_id': 'write_errors_corrected_by_eccfast',
|
||||
'value': 0,
|
||||
'thresh': -1,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"write_errors_corrected_by_rereads_rewrites": {
|
||||
"attribute_id": "write_errors_corrected_by_rereads_rewrites",
|
||||
"value": 0,
|
||||
"thresh": 0,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'write_errors_corrected_by_rereads_rewrites': {
|
||||
'attribute_id': 'write_errors_corrected_by_rereads_rewrites',
|
||||
'value': 0,
|
||||
'thresh': 0,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"write_total_errors_corrected": {
|
||||
"attribute_id": "write_total_errors_corrected",
|
||||
"value": 0,
|
||||
"thresh": -1,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'write_total_errors_corrected': {
|
||||
'attribute_id': 'write_total_errors_corrected',
|
||||
'value': 0,
|
||||
'thresh': -1,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
},
|
||||
"write_total_uncorrected_errors": {
|
||||
"attribute_id": "write_total_uncorrected_errors",
|
||||
"value": 0,
|
||||
"thresh": 0,
|
||||
"transformed_value": 0,
|
||||
"status": 0
|
||||
'write_total_uncorrected_errors': {
|
||||
'attribute_id': 'write_total_uncorrected_errors',
|
||||
'value': 0,
|
||||
'thresh': 0,
|
||||
'transformed_value': 0,
|
||||
'status': 0
|
||||
}
|
||||
},
|
||||
"Status": 0
|
||||
'Status': 0
|
||||
}]
|
||||
},
|
||||
"metadata": {
|
||||
"read_correction_algorithm_invocations": {
|
||||
"display_name": "Read Correction Algorithm Invocations",
|
||||
"ideal": "",
|
||||
"critical": false,
|
||||
"description": "",
|
||||
"display_type": ""
|
||||
'metadata': {
|
||||
'read_correction_algorithm_invocations': {
|
||||
'display_name': 'Read Correction Algorithm Invocations',
|
||||
'ideal': '',
|
||||
'critical': false,
|
||||
'description': '',
|
||||
'display_type': ''
|
||||
},
|
||||
"read_errors_corrected_by_eccdelayed": {
|
||||
"display_name": "Read Errors Corrected by ECC Delayed",
|
||||
"ideal": "",
|
||||
"critical": false,
|
||||
"description": "",
|
||||
"display_type": ""
|
||||
'read_errors_corrected_by_eccdelayed': {
|
||||
'display_name': 'Read Errors Corrected by ECC Delayed',
|
||||
'ideal': '',
|
||||
'critical': false,
|
||||
'description': '',
|
||||
'display_type': ''
|
||||
},
|
||||
"read_errors_corrected_by_eccfast": {
|
||||
"display_name": "Read Errors Corrected by ECC Fast",
|
||||
"ideal": "",
|
||||
"critical": false,
|
||||
"description": "",
|
||||
"display_type": ""
|
||||
'read_errors_corrected_by_eccfast': {
|
||||
'display_name': 'Read Errors Corrected by ECC Fast',
|
||||
'ideal': '',
|
||||
'critical': false,
|
||||
'description': '',
|
||||
'display_type': ''
|
||||
},
|
||||
"read_errors_corrected_by_rereads_rewrites": {
|
||||
"display_name": "Read Errors Corrected by ReReads/ReWrites",
|
||||
"ideal": "low",
|
||||
"critical": true,
|
||||
"description": "",
|
||||
"display_type": ""
|
||||
'read_errors_corrected_by_rereads_rewrites': {
|
||||
'display_name': 'Read Errors Corrected by ReReads/ReWrites',
|
||||
'ideal': 'low',
|
||||
'critical': true,
|
||||
'description': '',
|
||||
'display_type': ''
|
||||
},
|
||||
"read_total_errors_corrected": {
|
||||
"display_name": "Read Total Errors Corrected",
|
||||
"ideal": "",
|
||||
"critical": false,
|
||||
"description": "",
|
||||
"display_type": ""
|
||||
'read_total_errors_corrected': {
|
||||
'display_name': 'Read Total Errors Corrected',
|
||||
'ideal': '',
|
||||
'critical': false,
|
||||
'description': '',
|
||||
'display_type': ''
|
||||
},
|
||||
"read_total_uncorrected_errors": {
|
||||
"display_name": "Read Total Uncorrected Errors",
|
||||
"ideal": "low",
|
||||
"critical": true,
|
||||
"description": "",
|
||||
"display_type": ""
|
||||
'read_total_uncorrected_errors': {
|
||||
'display_name': 'Read Total Uncorrected Errors',
|
||||
'ideal': 'low',
|
||||
'critical': true,
|
||||
'description': '',
|
||||
'display_type': ''
|
||||
},
|
||||
"scsi_grown_defect_list": {
|
||||
"display_name": "Grown Defect List",
|
||||
"ideal": "low",
|
||||
"critical": true,
|
||||
"description": "",
|
||||
"display_type": ""
|
||||
'scsi_grown_defect_list': {
|
||||
'display_name': 'Grown Defect List',
|
||||
'ideal': 'low',
|
||||
'critical': true,
|
||||
'description': '',
|
||||
'display_type': ''
|
||||
},
|
||||
"write_correction_algorithm_invocations": {
|
||||
"display_name": "Write Correction Algorithm Invocations",
|
||||
"ideal": "",
|
||||
"critical": false,
|
||||
"description": "",
|
||||
"display_type": ""
|
||||
'write_correction_algorithm_invocations': {
|
||||
'display_name': 'Write Correction Algorithm Invocations',
|
||||
'ideal': '',
|
||||
'critical': false,
|
||||
'description': '',
|
||||
'display_type': ''
|
||||
},
|
||||
"write_errors_corrected_by_eccdelayed": {
|
||||
"display_name": "Write Errors Corrected by ECC Delayed",
|
||||
"ideal": "",
|
||||
"critical": false,
|
||||
"description": "",
|
||||
"display_type": ""
|
||||
'write_errors_corrected_by_eccdelayed': {
|
||||
'display_name': 'Write Errors Corrected by ECC Delayed',
|
||||
'ideal': '',
|
||||
'critical': false,
|
||||
'description': '',
|
||||
'display_type': ''
|
||||
},
|
||||
"write_errors_corrected_by_eccfast": {
|
||||
"display_name": "Write Errors Corrected by ECC Fast",
|
||||
"ideal": "",
|
||||
"critical": false,
|
||||
"description": "",
|
||||
"display_type": ""
|
||||
'write_errors_corrected_by_eccfast': {
|
||||
'display_name': 'Write Errors Corrected by ECC Fast',
|
||||
'ideal': '',
|
||||
'critical': false,
|
||||
'description': '',
|
||||
'display_type': ''
|
||||
},
|
||||
"write_errors_corrected_by_rereads_rewrites": {
|
||||
"display_name": "Write Errors Corrected by ReReads/ReWrites",
|
||||
"ideal": "low",
|
||||
"critical": true,
|
||||
"description": "",
|
||||
"display_type": ""
|
||||
'write_errors_corrected_by_rereads_rewrites': {
|
||||
'display_name': 'Write Errors Corrected by ReReads/ReWrites',
|
||||
'ideal': 'low',
|
||||
'critical': true,
|
||||
'description': '',
|
||||
'display_type': ''
|
||||
},
|
||||
"write_total_errors_corrected": {
|
||||
"display_name": "Write Total Errors Corrected",
|
||||
"ideal": "",
|
||||
"critical": false,
|
||||
"description": "",
|
||||
"display_type": ""
|
||||
'write_total_errors_corrected': {
|
||||
'display_name': 'Write Total Errors Corrected',
|
||||
'ideal': '',
|
||||
'critical': false,
|
||||
'description': '',
|
||||
'display_type': ''
|
||||
},
|
||||
"write_total_uncorrected_errors": {
|
||||
"display_name": "Write Total Uncorrected Errors",
|
||||
"ideal": "low",
|
||||
"critical": true,
|
||||
"description": "",
|
||||
"display_type": ""
|
||||
'write_total_uncorrected_errors': {
|
||||
'display_name': 'Write Total Uncorrected Errors',
|
||||
'ideal': 'low',
|
||||
'critical': true,
|
||||
'description': '',
|
||||
'display_type': ''
|
||||
}
|
||||
},
|
||||
"success": true
|
||||
'success': true
|
||||
}
|
||||
|
||||
@@ -1,29 +1,29 @@
|
||||
export const sdf = {
|
||||
"data": {
|
||||
"device": {
|
||||
"CreatedAt": "2021-06-24T21:17:31.305246-07:00",
|
||||
"UpdatedAt": "2021-06-24T21:17:31.305246-07:00",
|
||||
"DeletedAt": null,
|
||||
"wwn": "0x50014ee20b2a72a9",
|
||||
"device_name": "sdf",
|
||||
"manufacturer": "ATA",
|
||||
"model_name": "WDC_WD60EFRX-68MYMN1",
|
||||
"interface_type": "SCSI",
|
||||
"interface_speed": "",
|
||||
"serial_number": "WD-WXL1HXXXXX",
|
||||
"firmware": "",
|
||||
"rotational_speed": 0,
|
||||
"capacity": 6001175126016,
|
||||
"form_factor": "",
|
||||
"smart_support": false,
|
||||
"device_protocol": "",
|
||||
"device_type": "",
|
||||
"label": "",
|
||||
"host_id": "",
|
||||
"device_status": 0
|
||||
'data': {
|
||||
'device': {
|
||||
'CreatedAt': '2021-06-24T21:17:31.305246-07:00',
|
||||
'UpdatedAt': '2021-06-24T21:17:31.305246-07:00',
|
||||
'DeletedAt': null,
|
||||
'wwn': '0x50014ee20b2a72a9',
|
||||
'device_name': 'sdf',
|
||||
'manufacturer': 'ATA',
|
||||
'model_name': 'WDC_WD60EFRX-68MYMN1',
|
||||
'interface_type': 'SCSI',
|
||||
'interface_speed': '',
|
||||
'serial_number': 'WD-WXL1HXXXXX',
|
||||
'firmware': '',
|
||||
'rotational_speed': 0,
|
||||
'capacity': 6001175126016,
|
||||
'form_factor': '',
|
||||
'smart_support': false,
|
||||
'device_protocol': '',
|
||||
'device_type': '',
|
||||
'label': '',
|
||||
'host_id': '',
|
||||
'device_status': 0
|
||||
},
|
||||
"smart_results": []
|
||||
'smart_results': []
|
||||
},
|
||||
"metadata": null,
|
||||
"success": true
|
||||
'metadata': null,
|
||||
'success': true
|
||||
}
|
||||
|
||||
File diff suppressed because it is too large
Load Diff
File diff suppressed because it is too large
Load Diff
+57
-18
@@ -1,25 +1,64 @@
|
||||
import { async, ComponentFixture, TestBed } from '@angular/core/testing';
|
||||
import {async, ComponentFixture, TestBed} from '@angular/core/testing';
|
||||
|
||||
import {DashboardDeviceDeleteDialogComponent} from './dashboard-device-delete-dialog.component';
|
||||
import {HttpClientModule} from '@angular/common/http';
|
||||
import {MAT_DIALOG_DATA, MatDialogModule, MatDialogRef} from '@angular/material/dialog';
|
||||
import {MatButtonModule} from '@angular/material/button';
|
||||
import {MatIconModule} from '@angular/material/icon';
|
||||
import {SharedModule} from '../../../shared/shared.module';
|
||||
import {DashboardDeviceDeleteDialogService} from './dashboard-device-delete-dialog.service';
|
||||
import {of} from 'rxjs';
|
||||
|
||||
import { DashboardDeviceDeleteDialogComponent } from './dashboard-device-delete-dialog.component';
|
||||
|
||||
describe('DashboardDeviceDeleteDialogComponent', () => {
|
||||
let component: DashboardDeviceDeleteDialogComponent;
|
||||
let fixture: ComponentFixture<DashboardDeviceDeleteDialogComponent>;
|
||||
let component: DashboardDeviceDeleteDialogComponent;
|
||||
let fixture: ComponentFixture<DashboardDeviceDeleteDialogComponent>;
|
||||
|
||||
beforeEach(async(() => {
|
||||
TestBed.configureTestingModule({
|
||||
declarations: [ DashboardDeviceDeleteDialogComponent ]
|
||||
})
|
||||
.compileComponents();
|
||||
}));
|
||||
const matDialogRefSpy = jasmine.createSpyObj('MatDialogRef', ['closeDialog', 'close']);
|
||||
const dashboardDeviceDeleteDialogServiceSpy = jasmine.createSpyObj('DashboardDeviceDeleteDialogService', ['deleteDevice']);
|
||||
|
||||
beforeEach(() => {
|
||||
fixture = TestBed.createComponent(DashboardDeviceDeleteDialogComponent);
|
||||
component = fixture.componentInstance;
|
||||
fixture.detectChanges();
|
||||
});
|
||||
beforeEach(async(() => {
|
||||
TestBed.configureTestingModule({
|
||||
imports: [
|
||||
HttpClientModule,
|
||||
MatDialogModule,
|
||||
MatButtonModule,
|
||||
MatIconModule,
|
||||
SharedModule,
|
||||
],
|
||||
providers: [
|
||||
{provide: MatDialogRef, useValue: matDialogRefSpy},
|
||||
{provide: MAT_DIALOG_DATA, useValue: {wwn: 'test-wwn', title: 'my-test-device-title'}},
|
||||
{provide: DashboardDeviceDeleteDialogService, useValue: dashboardDeviceDeleteDialogServiceSpy}
|
||||
],
|
||||
declarations: [DashboardDeviceDeleteDialogComponent]
|
||||
})
|
||||
.compileComponents()
|
||||
}));
|
||||
|
||||
it('should create', () => {
|
||||
expect(component).toBeTruthy();
|
||||
});
|
||||
beforeEach(() => {
|
||||
fixture = TestBed.createComponent(DashboardDeviceDeleteDialogComponent);
|
||||
component = fixture.componentInstance;
|
||||
fixture.detectChanges();
|
||||
});
|
||||
|
||||
it('should create', () => {
|
||||
expect(component).toBeTruthy();
|
||||
});
|
||||
|
||||
it('should close the component if cancel is clicked', () => {
|
||||
matDialogRefSpy.closeDialog.calls.reset();
|
||||
matDialogRefSpy.closeDialog()
|
||||
expect(matDialogRefSpy.closeDialog).toHaveBeenCalled();
|
||||
});
|
||||
|
||||
it('should attempt to delete device if delete is clicked', () => {
|
||||
dashboardDeviceDeleteDialogServiceSpy.deleteDevice.and.returnValue(of({'success': true}));
|
||||
|
||||
component.onDeleteClick()
|
||||
expect(dashboardDeviceDeleteDialogServiceSpy.deleteDevice).toHaveBeenCalledWith('test-wwn');
|
||||
expect(dashboardDeviceDeleteDialogServiceSpy.deleteDevice.calls.count())
|
||||
.withContext('one call')
|
||||
.toBe(1);
|
||||
});
|
||||
});
|
||||
|
||||
+2
-3
@@ -1,7 +1,6 @@
|
||||
import { Component, OnInit, Inject } from '@angular/core';
|
||||
import {Component, Inject, OnInit} from '@angular/core';
|
||||
import {MAT_DIALOG_DATA, MatDialogRef} from '@angular/material/dialog';
|
||||
import {DashboardDeviceDeleteDialogService} from "./dashboard-device-delete-dialog.service";
|
||||
import {Subject} from "rxjs";
|
||||
import {DashboardDeviceDeleteDialogService} from 'app/layout/common/dashboard-device-delete-dialog/dashboard-device-delete-dialog.service';
|
||||
|
||||
@Component({
|
||||
selector: 'app-dashboard-device-delete-dialog',
|
||||
|
||||
+8
-31
@@ -1,44 +1,21 @@
|
||||
import { NgModule } from '@angular/core';
|
||||
import { RouterModule } from '@angular/router';
|
||||
import { Overlay } from '@angular/cdk/overlay';
|
||||
import { MAT_AUTOCOMPLETE_SCROLL_STRATEGY, MatAutocompleteModule } from '@angular/material/autocomplete';
|
||||
import { MatButtonModule } from '@angular/material/button';
|
||||
import { MatSelectModule } from '@angular/material/select';
|
||||
import { MatFormFieldModule } from '@angular/material/form-field';
|
||||
import { MatIconModule } from '@angular/material/icon';
|
||||
import { MatInputModule } from '@angular/material/input';
|
||||
import { SharedModule } from 'app/shared/shared.module';
|
||||
import {NgModule} from '@angular/core';
|
||||
import {RouterModule} from '@angular/router';
|
||||
import {MatButtonModule} from '@angular/material/button';
|
||||
import {MatIconModule} from '@angular/material/icon';
|
||||
import {SharedModule} from 'app/shared/shared.module';
|
||||
import {DashboardDeviceDeleteDialogComponent} from 'app/layout/common/dashboard-device-delete-dialog/dashboard-device-delete-dialog.component'
|
||||
import { MatButtonToggleModule} from "@angular/material/button-toggle";
|
||||
import {MatTabsModule} from "@angular/material/tabs";
|
||||
import {MatSliderModule} from "@angular/material/slider";
|
||||
import {MatSlideToggleModule} from "@angular/material/slide-toggle";
|
||||
import {MatTooltipModule} from "@angular/material/tooltip";
|
||||
import {dashboardRoutes} from "../../../modules/dashboard/dashboard.routing";
|
||||
import {MatDividerModule} from "@angular/material/divider";
|
||||
import {MatMenuModule} from "@angular/material/menu";
|
||||
import {MatProgressBarModule} from "@angular/material/progress-bar";
|
||||
import {MatSortModule} from "@angular/material/sort";
|
||||
import {MatTableModule} from "@angular/material/table";
|
||||
import {NgApexchartsModule} from "ng-apexcharts";
|
||||
import { MatDialogModule } from '@angular/material/dialog';
|
||||
import {dashboardRoutes} from 'app/modules/dashboard/dashboard.routing';
|
||||
import {MatDialogModule} from '@angular/material/dialog';
|
||||
|
||||
@NgModule({
|
||||
declarations: [
|
||||
DashboardDeviceDeleteDialogComponent
|
||||
],
|
||||
imports : [
|
||||
imports: [
|
||||
RouterModule.forChild([]),
|
||||
RouterModule.forChild(dashboardRoutes),
|
||||
MatButtonModule,
|
||||
MatDividerModule,
|
||||
MatTooltipModule,
|
||||
MatIconModule,
|
||||
MatMenuModule,
|
||||
MatProgressBarModule,
|
||||
MatSortModule,
|
||||
MatTableModule,
|
||||
NgApexchartsModule,
|
||||
SharedModule,
|
||||
MatDialogModule
|
||||
],
|
||||
|
||||
+98
-18
@@ -1,25 +1,105 @@
|
||||
import { async, ComponentFixture, TestBed } from '@angular/core/testing';
|
||||
import {async, ComponentFixture, TestBed} from '@angular/core/testing';
|
||||
|
||||
import { DashboardDeviceComponent } from './dashboard-device.component';
|
||||
import {DashboardDeviceComponent} from './dashboard-device.component';
|
||||
import {MatDialog} from '@angular/material/dialog';
|
||||
import {MatButtonModule} from '@angular/material/button';
|
||||
import {MatIconModule} from '@angular/material/icon';
|
||||
import {SharedModule} from 'app/shared/shared.module';
|
||||
import {MatMenuModule} from '@angular/material/menu';
|
||||
import {TREO_APP_CONFIG} from '@treo/services/config/config.constants';
|
||||
import {DeviceSummaryModel} from 'app/core/models/device-summary-model';
|
||||
import * as moment from 'moment';
|
||||
|
||||
describe('DashboardDeviceComponent', () => {
|
||||
let component: DashboardDeviceComponent;
|
||||
let fixture: ComponentFixture<DashboardDeviceComponent>;
|
||||
let component: DashboardDeviceComponent;
|
||||
let fixture: ComponentFixture<DashboardDeviceComponent>;
|
||||
|
||||
beforeEach(async(() => {
|
||||
TestBed.configureTestingModule({
|
||||
declarations: [ DashboardDeviceComponent ]
|
||||
const matDialogSpy = jasmine.createSpyObj('MatDialog', ['open']);
|
||||
// const configServiceSpy = jasmine.createSpyObj('TreoConfigService', ['config$']);
|
||||
|
||||
|
||||
beforeEach(async(() => {
|
||||
TestBed.configureTestingModule({
|
||||
imports: [
|
||||
MatButtonModule,
|
||||
MatIconModule,
|
||||
MatMenuModule,
|
||||
SharedModule,
|
||||
],
|
||||
providers: [
|
||||
{provide: MatDialog, useValue: matDialogSpy},
|
||||
{provide: TREO_APP_CONFIG, useValue: {dashboardDisplay: 'name'}}
|
||||
],
|
||||
declarations: [DashboardDeviceComponent]
|
||||
})
|
||||
.compileComponents();
|
||||
}));
|
||||
|
||||
beforeEach(() => {
|
||||
// configServiceSpy.config$.and.returnValue(of({'success': true}));
|
||||
fixture = TestBed.createComponent(DashboardDeviceComponent);
|
||||
component = fixture.componentInstance;
|
||||
});
|
||||
|
||||
it('should create', () => {
|
||||
expect(component).toBeTruthy();
|
||||
});
|
||||
|
||||
describe('#classDeviceLastUpdatedOn()', () => {
|
||||
|
||||
it('if non-zero device status, should be red', () => {
|
||||
// component.deviceSummary = summary.data.summary['0x5000c500673e6b5f'] as DeviceSummaryModel
|
||||
expect(component.classDeviceLastUpdatedOn({
|
||||
device: {
|
||||
device_status: 2
|
||||
}
|
||||
} as DeviceSummaryModel)).toBe('text-red')
|
||||
});
|
||||
|
||||
it('if non-zero device status, should be red', () => {
|
||||
// component.deviceSummary = summary.data.summary['0x5000c500673e6b5f'] as DeviceSummaryModel
|
||||
expect(component.classDeviceLastUpdatedOn({
|
||||
device: {
|
||||
device_status: 2
|
||||
}
|
||||
} as DeviceSummaryModel)).toBe('text-red')
|
||||
});
|
||||
|
||||
it('if healthy device status and updated in the last two weeks, should be green', () => {
|
||||
// component.deviceSummary = summary.data.summary['0x5000c500673e6b5f'] as DeviceSummaryModel
|
||||
expect(component.classDeviceLastUpdatedOn({
|
||||
device: {
|
||||
device_status: 0
|
||||
},
|
||||
smart: {
|
||||
collector_date: moment().subtract(13, 'days').toISOString()
|
||||
}
|
||||
} as DeviceSummaryModel)).toBe('text-green')
|
||||
});
|
||||
|
||||
it('if healthy device status and updated more than two weeks ago, but less than 1 month, should be yellow', () => {
|
||||
// component.deviceSummary = summary.data.summary['0x5000c500673e6b5f'] as DeviceSummaryModel
|
||||
expect(component.classDeviceLastUpdatedOn({
|
||||
device: {
|
||||
device_status: 0
|
||||
},
|
||||
smart: {
|
||||
collector_date: moment().subtract(3, 'weeks').toISOString()
|
||||
}
|
||||
} as DeviceSummaryModel)).toBe('text-yellow')
|
||||
});
|
||||
|
||||
it('if healthy device status and updated more 1 month ago, should be red', () => {
|
||||
// component.deviceSummary = summary.data.summary['0x5000c500673e6b5f'] as DeviceSummaryModel
|
||||
expect(component.classDeviceLastUpdatedOn({
|
||||
device: {
|
||||
device_status: 0
|
||||
},
|
||||
smart: {
|
||||
collector_date: moment().subtract(5, 'weeks').toISOString()
|
||||
}
|
||||
} as DeviceSummaryModel)).toBe('text-red')
|
||||
});
|
||||
})
|
||||
.compileComponents();
|
||||
}));
|
||||
|
||||
beforeEach(() => {
|
||||
fixture = TestBed.createComponent(DashboardDeviceComponent);
|
||||
component = fixture.componentInstance;
|
||||
fixture.detectChanges();
|
||||
});
|
||||
|
||||
it('should create', () => {
|
||||
expect(component).toBeTruthy();
|
||||
});
|
||||
});
|
||||
|
||||
+37
-34
@@ -1,27 +1,21 @@
|
||||
import { Component, Input, Output, OnInit, EventEmitter} from '@angular/core';
|
||||
import * as moment from "moment";
|
||||
import {takeUntil} from "rxjs/operators";
|
||||
import {AppConfig} from "app/core/config/app.config";
|
||||
import {TreoConfigService} from "@treo/services/config";
|
||||
import {Subject} from "rxjs";
|
||||
import humanizeDuration from 'humanize-duration'
|
||||
import {Component, EventEmitter, Input, OnInit, Output} from '@angular/core';
|
||||
import * as moment from 'moment';
|
||||
import {takeUntil} from 'rxjs/operators';
|
||||
import {AppConfig} from 'app/core/config/app.config';
|
||||
import {TreoConfigService} from '@treo/services/config';
|
||||
import {Subject} from 'rxjs';
|
||||
import humanizeDuration from 'humanize-duration'
|
||||
import {MatDialog} from '@angular/material/dialog';
|
||||
import {DashboardDeviceDeleteDialogComponent} from "app/layout/common/dashboard-device-delete-dialog/dashboard-device-delete-dialog.component";
|
||||
import {DeviceTitlePipe} from "app/shared/device-title.pipe";
|
||||
import {DashboardDeviceDeleteDialogComponent} from 'app/layout/common/dashboard-device-delete-dialog/dashboard-device-delete-dialog.component';
|
||||
import {DeviceTitlePipe} from 'app/shared/device-title.pipe';
|
||||
import {DeviceSummaryModel} from 'app/core/models/device-summary-model';
|
||||
|
||||
@Component({
|
||||
selector: 'app-dashboard-device',
|
||||
templateUrl: './dashboard-device.component.html',
|
||||
styleUrls: ['./dashboard-device.component.scss']
|
||||
selector: 'app-dashboard-device',
|
||||
templateUrl: './dashboard-device.component.html',
|
||||
styleUrls: ['./dashboard-device.component.scss']
|
||||
})
|
||||
export class DashboardDeviceComponent implements OnInit {
|
||||
@Input() deviceSummary: any;
|
||||
@Input() deviceWWN: string;
|
||||
@Output() deviceDeleted = new EventEmitter<string>();
|
||||
|
||||
config: AppConfig;
|
||||
|
||||
private _unsubscribeAll: Subject<any>;
|
||||
|
||||
constructor(
|
||||
private _configService: TreoConfigService,
|
||||
@@ -31,6 +25,16 @@ export class DashboardDeviceComponent implements OnInit {
|
||||
this._unsubscribeAll = new Subject();
|
||||
}
|
||||
|
||||
@Input() deviceSummary: DeviceSummaryModel;
|
||||
@Input() deviceWWN: string;
|
||||
@Output() deviceDeleted = new EventEmitter<string>();
|
||||
|
||||
config: AppConfig;
|
||||
|
||||
private _unsubscribeAll: Subject<any>;
|
||||
|
||||
readonly humanizeDuration = humanizeDuration;
|
||||
|
||||
ngOnInit(): void {
|
||||
// Subscribe to config changes
|
||||
this._configService.config$
|
||||
@@ -45,47 +49,46 @@ export class DashboardDeviceComponent implements OnInit {
|
||||
// @ Public methods
|
||||
// -----------------------------------------------------------------------------------------------------
|
||||
|
||||
classDeviceLastUpdatedOn(deviceSummary){
|
||||
classDeviceLastUpdatedOn(deviceSummary: DeviceSummaryModel): string {
|
||||
if (deviceSummary.device.device_status !== 0) {
|
||||
return 'text-red' // if the device has failed, always highlight in red
|
||||
} else if(deviceSummary.device.device_status === 0 && deviceSummary.smart){
|
||||
if(moment().subtract(14, 'd').isBefore(deviceSummary.smart.collector_date)){
|
||||
} else if (deviceSummary.device.device_status === 0 && deviceSummary.smart) {
|
||||
if (moment().subtract(14, 'days').isBefore(deviceSummary.smart.collector_date)) {
|
||||
// this device was updated in the last 2 weeks.
|
||||
return 'text-green'
|
||||
} else if(moment().subtract(1, 'm').isBefore(deviceSummary.smart.collector_date)){
|
||||
} else if (moment().subtract(1, 'months').isBefore(deviceSummary.smart.collector_date)) {
|
||||
// this device was updated in the last month
|
||||
return 'text-yellow'
|
||||
} else{
|
||||
} else {
|
||||
// last updated more than a month ago.
|
||||
return 'text-red'
|
||||
}
|
||||
|
||||
} else {
|
||||
return ''
|
||||
}
|
||||
}
|
||||
|
||||
deviceStatusString(deviceStatus){
|
||||
if(deviceStatus == 0){
|
||||
return "passed"
|
||||
deviceStatusString(deviceStatus: number): string {
|
||||
if (deviceStatus === 0) {
|
||||
return 'passed'
|
||||
} else {
|
||||
return "failed"
|
||||
return 'failed'
|
||||
}
|
||||
}
|
||||
|
||||
readonly humanizeDuration = humanizeDuration;
|
||||
|
||||
|
||||
|
||||
openDeleteDialog(): void {
|
||||
const dialogRef = this.dialog.open(DashboardDeviceDeleteDialogComponent, {
|
||||
// width: '250px',
|
||||
data: {wwn: this.deviceWWN, title: DeviceTitlePipe.deviceTitleWithFallback(this.deviceSummary.device, this.config.dashboardDisplay)}
|
||||
data: {
|
||||
wwn: this.deviceWWN,
|
||||
title: DeviceTitlePipe.deviceTitleWithFallback(this.deviceSummary.device, this.config.dashboardDisplay)
|
||||
}
|
||||
});
|
||||
|
||||
dialogRef.afterClosed().subscribe(result => {
|
||||
console.log('The dialog was closed', result);
|
||||
if(result.success){
|
||||
if (result.success) {
|
||||
this.deviceDeleted.emit(this.deviceWWN)
|
||||
}
|
||||
});
|
||||
|
||||
@@ -1,53 +1,30 @@
|
||||
import { NgModule } from '@angular/core';
|
||||
import { RouterModule } from '@angular/router';
|
||||
import { Overlay } from '@angular/cdk/overlay';
|
||||
import { MAT_AUTOCOMPLETE_SCROLL_STRATEGY, MatAutocompleteModule } from '@angular/material/autocomplete';
|
||||
import { MatButtonModule } from '@angular/material/button';
|
||||
import { MatSelectModule } from '@angular/material/select';
|
||||
import { MatFormFieldModule } from '@angular/material/form-field';
|
||||
import { MatIconModule } from '@angular/material/icon';
|
||||
import { MatInputModule } from '@angular/material/input';
|
||||
import { SharedModule } from 'app/shared/shared.module';
|
||||
import {NgModule} from '@angular/core';
|
||||
import {RouterModule} from '@angular/router';
|
||||
import {MatButtonModule} from '@angular/material/button';
|
||||
import {MatIconModule} from '@angular/material/icon';
|
||||
import {SharedModule} from 'app/shared/shared.module';
|
||||
import {DashboardDeviceComponent} from 'app/layout/common/dashboard-device/dashboard-device.component'
|
||||
import { MatDialogModule } from "@angular/material/dialog";
|
||||
import { MatButtonToggleModule} from "@angular/material/button-toggle";
|
||||
import {MatTabsModule} from "@angular/material/tabs";
|
||||
import {MatSliderModule} from "@angular/material/slider";
|
||||
import {MatSlideToggleModule} from "@angular/material/slide-toggle";
|
||||
import {MatTooltipModule} from "@angular/material/tooltip";
|
||||
import {dashboardRoutes} from "../../../modules/dashboard/dashboard.routing";
|
||||
import {MatDividerModule} from "@angular/material/divider";
|
||||
import {MatMenuModule} from "@angular/material/menu";
|
||||
import {MatProgressBarModule} from "@angular/material/progress-bar";
|
||||
import {MatSortModule} from "@angular/material/sort";
|
||||
import {MatTableModule} from "@angular/material/table";
|
||||
import {NgApexchartsModule} from "ng-apexcharts";
|
||||
import {DashboardDeviceDeleteDialogModule} from "../dashboard-device-delete-dialog/dashboard-device-delete-dialog.module";
|
||||
import {dashboardRoutes} from '../../../modules/dashboard/dashboard.routing';
|
||||
import {MatMenuModule} from '@angular/material/menu';
|
||||
import {DashboardDeviceDeleteDialogModule} from 'app/layout/common/dashboard-device-delete-dialog/dashboard-device-delete-dialog.module';
|
||||
|
||||
@NgModule({
|
||||
declarations: [
|
||||
DashboardDeviceComponent
|
||||
],
|
||||
imports : [
|
||||
imports: [
|
||||
RouterModule.forChild([]),
|
||||
RouterModule.forChild(dashboardRoutes),
|
||||
MatButtonModule,
|
||||
MatDividerModule,
|
||||
MatTooltipModule,
|
||||
MatIconModule,
|
||||
MatMenuModule,
|
||||
MatProgressBarModule,
|
||||
MatSortModule,
|
||||
MatTableModule,
|
||||
NgApexchartsModule,
|
||||
SharedModule,
|
||||
DashboardDeviceDeleteDialogModule
|
||||
],
|
||||
exports : [
|
||||
exports: [
|
||||
DashboardDeviceComponent,
|
||||
],
|
||||
providers : []
|
||||
providers: []
|
||||
})
|
||||
export class DashboardDeviceModule
|
||||
{
|
||||
export class DashboardDeviceModule {
|
||||
}
|
||||
|
||||
-25
@@ -1,25 +0,0 @@
|
||||
import { async, ComponentFixture, TestBed } from '@angular/core/testing';
|
||||
|
||||
import { DashboardSettingsComponent } from './dashboard-settings.component';
|
||||
|
||||
describe('DashboardSettingsComponent', () => {
|
||||
let component: DashboardSettingsComponent;
|
||||
let fixture: ComponentFixture<DashboardSettingsComponent>;
|
||||
|
||||
beforeEach(async(() => {
|
||||
TestBed.configureTestingModule({
|
||||
declarations: [ DashboardSettingsComponent ]
|
||||
})
|
||||
.compileComponents();
|
||||
}));
|
||||
|
||||
beforeEach(() => {
|
||||
fixture = TestBed.createComponent(DashboardSettingsComponent);
|
||||
component = fixture.componentInstance;
|
||||
fixture.detectChanges();
|
||||
});
|
||||
|
||||
it('should create', () => {
|
||||
expect(component).toBeTruthy();
|
||||
});
|
||||
});
|
||||
+21
-23
@@ -1,13 +1,13 @@
|
||||
import { Component, OnInit } from '@angular/core';
|
||||
import {Component, OnInit} from '@angular/core';
|
||||
import {AppConfig} from 'app/core/config/app.config';
|
||||
import { TreoConfigService } from '@treo/services/config';
|
||||
import {Subject} from "rxjs";
|
||||
import {takeUntil} from "rxjs/operators";
|
||||
import {TreoConfigService} from '@treo/services/config';
|
||||
import {Subject} from 'rxjs';
|
||||
import {takeUntil} from 'rxjs/operators';
|
||||
|
||||
@Component({
|
||||
selector: 'app-dashboard-settings',
|
||||
templateUrl: './dashboard-settings.component.html',
|
||||
styleUrls: ['./dashboard-settings.component.scss']
|
||||
selector: 'app-dashboard-settings',
|
||||
templateUrl: './dashboard-settings.component.html',
|
||||
styleUrls: ['./dashboard-settings.component.scss']
|
||||
})
|
||||
export class DashboardSettingsComponent implements OnInit {
|
||||
|
||||
@@ -26,25 +26,23 @@ export class DashboardSettingsComponent implements OnInit {
|
||||
this._unsubscribeAll = new Subject();
|
||||
}
|
||||
|
||||
ngOnInit(): void {
|
||||
// Subscribe to config changes
|
||||
this._configService.config$
|
||||
.pipe(takeUntil(this._unsubscribeAll))
|
||||
.subscribe((config: AppConfig) => {
|
||||
ngOnInit(): void {
|
||||
// Subscribe to config changes
|
||||
this._configService.config$
|
||||
.pipe(takeUntil(this._unsubscribeAll))
|
||||
.subscribe((config: AppConfig) => {
|
||||
|
||||
// Store the config
|
||||
this.dashboardDisplay = config.dashboardDisplay;
|
||||
this.dashboardSort = config.dashboardSort;
|
||||
this.temperatureUnit = config.temperatureUnit;
|
||||
this.theme = config.theme;
|
||||
// Store the config
|
||||
this.dashboardDisplay = config.dashboardDisplay;
|
||||
this.dashboardSort = config.dashboardSort;
|
||||
this.temperatureUnit = config.temperatureUnit;
|
||||
this.theme = config.theme;
|
||||
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
saveSettings(): void {
|
||||
});
|
||||
|
||||
}
|
||||
|
||||
saveSettings(): void {
|
||||
const newSettings = {
|
||||
dashboardDisplay: this.dashboardDisplay,
|
||||
dashboardSort: this.dashboardSort,
|
||||
@@ -53,7 +51,7 @@ export class DashboardSettingsComponent implements OnInit {
|
||||
}
|
||||
this._configService.config = newSettings
|
||||
console.log(`Saved Settings: ${JSON.stringify(newSettings)}`)
|
||||
}
|
||||
}
|
||||
|
||||
formatLabel(value: number): number {
|
||||
return value;
|
||||
|
||||
+6
-6
@@ -9,12 +9,12 @@ import { MatIconModule } from '@angular/material/icon';
|
||||
import { MatInputModule } from '@angular/material/input';
|
||||
import { SharedModule } from 'app/shared/shared.module';
|
||||
import {DashboardSettingsComponent} from 'app/layout/common/dashboard-settings/dashboard-settings.component'
|
||||
import { MatDialogModule } from "@angular/material/dialog";
|
||||
import { MatButtonToggleModule} from "@angular/material/button-toggle";
|
||||
import {MatTabsModule} from "@angular/material/tabs";
|
||||
import {MatSliderModule} from "@angular/material/slider";
|
||||
import {MatSlideToggleModule} from "@angular/material/slide-toggle";
|
||||
import {MatTooltipModule} from "@angular/material/tooltip";
|
||||
import { MatDialogModule } from '@angular/material/dialog';
|
||||
import { MatButtonToggleModule} from '@angular/material/button-toggle';
|
||||
import {MatTabsModule} from '@angular/material/tabs';
|
||||
import {MatSliderModule} from '@angular/material/slider';
|
||||
import {MatSlideToggleModule} from '@angular/material/slide-toggle';
|
||||
import {MatTooltipModule} from '@angular/material/tooltip';
|
||||
|
||||
@NgModule({
|
||||
declarations: [
|
||||
|
||||
@@ -1,26 +1,25 @@
|
||||
import { NgModule } from '@angular/core';
|
||||
import { RouterModule } from '@angular/router';
|
||||
import { Overlay } from '@angular/cdk/overlay';
|
||||
import { MAT_AUTOCOMPLETE_SCROLL_STRATEGY, MatAutocompleteModule } from '@angular/material/autocomplete';
|
||||
import { MatButtonModule } from '@angular/material/button';
|
||||
import { MatSelectModule } from '@angular/material/select';
|
||||
import { MatFormFieldModule } from '@angular/material/form-field';
|
||||
import { MatIconModule } from '@angular/material/icon';
|
||||
import { MatInputModule } from '@angular/material/input';
|
||||
import { SharedModule } from 'app/shared/shared.module';
|
||||
import {NgModule} from '@angular/core';
|
||||
import {RouterModule} from '@angular/router';
|
||||
import {MatAutocompleteModule} from '@angular/material/autocomplete';
|
||||
import {MatButtonModule} from '@angular/material/button';
|
||||
import {MatSelectModule} from '@angular/material/select';
|
||||
import {MatFormFieldModule} from '@angular/material/form-field';
|
||||
import {MatIconModule} from '@angular/material/icon';
|
||||
import {MatInputModule} from '@angular/material/input';
|
||||
import {SharedModule} from 'app/shared/shared.module';
|
||||
import {DetailSettingsComponent} from 'app/layout/common/detail-settings/detail-settings.component'
|
||||
import { MatDialogModule } from "@angular/material/dialog";
|
||||
import { MatButtonToggleModule} from "@angular/material/button-toggle";
|
||||
import {MatTabsModule} from "@angular/material/tabs";
|
||||
import {MatSliderModule} from "@angular/material/slider";
|
||||
import {MatSlideToggleModule} from "@angular/material/slide-toggle";
|
||||
import {MatTooltipModule} from "@angular/material/tooltip";
|
||||
import {MatDialogModule} from '@angular/material/dialog';
|
||||
import {MatButtonToggleModule} from '@angular/material/button-toggle';
|
||||
import {MatTabsModule} from '@angular/material/tabs';
|
||||
import {MatSliderModule} from '@angular/material/slider';
|
||||
import {MatSlideToggleModule} from '@angular/material/slide-toggle';
|
||||
import {MatTooltipModule} from '@angular/material/tooltip';
|
||||
|
||||
@NgModule({
|
||||
declarations: [
|
||||
DetailSettingsComponent
|
||||
],
|
||||
imports : [
|
||||
imports: [
|
||||
RouterModule.forChild([]),
|
||||
MatAutocompleteModule,
|
||||
MatDialogModule,
|
||||
@@ -36,11 +35,10 @@ import {MatTooltipModule} from "@angular/material/tooltip";
|
||||
MatSlideToggleModule,
|
||||
SharedModule
|
||||
],
|
||||
exports : [
|
||||
exports: [
|
||||
DetailSettingsComponent
|
||||
],
|
||||
providers : []
|
||||
providers: []
|
||||
})
|
||||
export class DetailSettingsModule
|
||||
{
|
||||
export class DetailSettingsModule {
|
||||
}
|
||||
|
||||
@@ -45,7 +45,7 @@ export class LayoutComponent implements OnInit, OnDestroy
|
||||
// Set the private defaults
|
||||
this._unsubscribeAll = new Subject();
|
||||
|
||||
this.systemPrefersDark = window.matchMedia && window.matchMedia("(prefers-color-scheme: dark)").matches;
|
||||
this.systemPrefersDark = window.matchMedia && window.matchMedia('(prefers-color-scheme: dark)').matches;
|
||||
|
||||
}
|
||||
|
||||
|
||||
@@ -1,5 +1,5 @@
|
||||
|
||||
<div *ngIf="data && data.data && data.data.summary; else emptyDashboard">
|
||||
<div *ngIf="summaryData; else emptyDashboard">
|
||||
<div class="flex flex-col flex-auto w-full p-8 xs:p-2">
|
||||
|
||||
<div class="flex flex-wrap w-full">
|
||||
|
||||
@@ -1,17 +1,24 @@
|
||||
import { AfterViewInit, ChangeDetectionStrategy, Component, OnDestroy, OnInit, ViewChild, ViewEncapsulation } from '@angular/core';
|
||||
import { MatSort } from '@angular/material/sort';
|
||||
import { MatTableDataSource } from '@angular/material/table';
|
||||
import { Subject } from 'rxjs';
|
||||
import { takeUntil } from 'rxjs/operators';
|
||||
import {
|
||||
AfterViewInit,
|
||||
ChangeDetectionStrategy,
|
||||
Component,
|
||||
OnDestroy,
|
||||
OnInit,
|
||||
ViewChild,
|
||||
ViewEncapsulation
|
||||
} from '@angular/core';
|
||||
import {Subject} from 'rxjs';
|
||||
import {takeUntil} from 'rxjs/operators';
|
||||
import {ApexOptions, ChartComponent} from 'ng-apexcharts';
|
||||
import { DashboardService } from 'app/modules/dashboard/dashboard.service';
|
||||
import {DashboardService} from 'app/modules/dashboard/dashboard.service';
|
||||
import {MatDialog} from '@angular/material/dialog';
|
||||
import { DashboardSettingsComponent } from 'app/layout/common/dashboard-settings/dashboard-settings.component';
|
||||
import {AppConfig} from "app/core/config/app.config";
|
||||
import {TreoConfigService} from "@treo/services/config";
|
||||
import {Router} from "@angular/router";
|
||||
import {TemperaturePipe} from "app/shared/temperature.pipe";
|
||||
import {DeviceTitlePipe} from "app/shared/device-title.pipe";
|
||||
import {DashboardSettingsComponent} from 'app/layout/common/dashboard-settings/dashboard-settings.component';
|
||||
import {AppConfig} from 'app/core/config/app.config';
|
||||
import {TreoConfigService} from '@treo/services/config';
|
||||
import {Router} from '@angular/router';
|
||||
import {TemperaturePipe} from 'app/shared/temperature.pipe';
|
||||
import {DeviceTitlePipe} from 'app/shared/device-title.pipe';
|
||||
import {DeviceSummaryModel} from 'app/core/models/device-summary-model';
|
||||
|
||||
@Component({
|
||||
selector : 'example',
|
||||
@@ -22,23 +29,26 @@ import {DeviceTitlePipe} from "app/shared/device-title.pipe";
|
||||
})
|
||||
export class DashboardComponent implements OnInit, AfterViewInit, OnDestroy
|
||||
{
|
||||
data: any;
|
||||
summaryData: { [key: string]: DeviceSummaryModel };
|
||||
hostGroups: { [hostId: string]: string[] } = {}
|
||||
temperatureOptions: ApexOptions;
|
||||
tempDurationKey: string = "forever"
|
||||
tempDurationKey = 'forever'
|
||||
config: AppConfig;
|
||||
|
||||
// Private
|
||||
private _unsubscribeAll: Subject<any>;
|
||||
@ViewChild("tempChart", { static: false }) tempChart: ChartComponent;
|
||||
@ViewChild('tempChart', { static: false }) tempChart: ChartComponent;
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
*
|
||||
* @param {SmartService} _smartService
|
||||
* @param {DashboardService} _dashboardService
|
||||
* @param {TreoConfigService} _configService
|
||||
* @param {MatDialog} dialog
|
||||
* @param {Router} router
|
||||
*/
|
||||
constructor(
|
||||
private _smartService: DashboardService,
|
||||
private _dashboardService: DashboardService,
|
||||
private _configService: TreoConfigService,
|
||||
public dialog: MatDialog,
|
||||
private router: Router,
|
||||
@@ -64,34 +74,34 @@ export class DashboardComponent implements OnInit, AfterViewInit, OnDestroy
|
||||
.pipe(takeUntil(this._unsubscribeAll))
|
||||
.subscribe((config: AppConfig) => {
|
||||
|
||||
//check if the old config and the new config do not match.
|
||||
let oldConfig = JSON.stringify(this.config)
|
||||
let newConfig = JSON.stringify(config)
|
||||
// check if the old config and the new config do not match.
|
||||
const oldConfig = JSON.stringify(this.config)
|
||||
const newConfig = JSON.stringify(config)
|
||||
|
||||
if(oldConfig != newConfig){
|
||||
if(oldConfig !== newConfig){
|
||||
console.log(`Configuration updated: ${newConfig} vs ${oldConfig}`)
|
||||
// Store the config
|
||||
this.config = config;
|
||||
|
||||
if(oldConfig){
|
||||
console.log("reloading component...")
|
||||
console.log('reloading component...')
|
||||
this.refreshComponent()
|
||||
}
|
||||
}
|
||||
});
|
||||
|
||||
// Get the data
|
||||
this._smartService.data$
|
||||
this._dashboardService.data$
|
||||
.pipe(takeUntil(this._unsubscribeAll))
|
||||
.subscribe((data) => {
|
||||
|
||||
// Store the data
|
||||
this.data = data;
|
||||
this.summaryData = data;
|
||||
|
||||
//generate group data.
|
||||
for(let wwn in this.data.data.summary){
|
||||
let hostid = this.data.data.summary[wwn].device.host_id
|
||||
let hostDeviceList = this.hostGroups[hostid] || []
|
||||
// generate group data.
|
||||
for (const wwn in this.summaryData) {
|
||||
const hostid = this.summaryData[wwn].device.host_id
|
||||
const hostDeviceList = this.hostGroups[hostid] || []
|
||||
hostDeviceList.push(wwn)
|
||||
this.hostGroups[hostid] = hostDeviceList
|
||||
}
|
||||
@@ -121,34 +131,34 @@ export class DashboardComponent implements OnInit, AfterViewInit, OnDestroy
|
||||
// -----------------------------------------------------------------------------------------------------
|
||||
// @ Private methods
|
||||
// -----------------------------------------------------------------------------------------------------
|
||||
private refreshComponent(){
|
||||
private refreshComponent(): void {
|
||||
|
||||
let currentUrl = this.router.url;
|
||||
const currentUrl = this.router.url;
|
||||
this.router.routeReuseStrategy.shouldReuseRoute = () => false;
|
||||
this.router.onSameUrlNavigation = 'reload';
|
||||
this.router.navigate([currentUrl]);
|
||||
}
|
||||
|
||||
private _deviceDataTemperatureSeries() {
|
||||
var deviceTemperatureSeries = []
|
||||
private _deviceDataTemperatureSeries(): any[] {
|
||||
const deviceTemperatureSeries = []
|
||||
|
||||
console.log("DEVICE DATA SUMMARY", this.data)
|
||||
console.log('DEVICE DATA SUMMARY', this.summaryData)
|
||||
|
||||
for(const wwn in this.data.data.summary){
|
||||
var deviceSummary = this.data.data.summary[wwn]
|
||||
if (!deviceSummary.temp_history){
|
||||
for (const wwn in this.summaryData) {
|
||||
const deviceSummary = this.summaryData[wwn]
|
||||
if (!deviceSummary.temp_history) {
|
||||
continue
|
||||
}
|
||||
|
||||
let deviceName = DeviceTitlePipe.deviceTitleWithFallback(deviceSummary.device, this.config.dashboardDisplay)
|
||||
const deviceName = DeviceTitlePipe.deviceTitleWithFallback(deviceSummary.device, this.config.dashboardDisplay)
|
||||
|
||||
var deviceSeriesMetadata = {
|
||||
const deviceSeriesMetadata = {
|
||||
name: deviceName,
|
||||
data: []
|
||||
}
|
||||
|
||||
for(let tempHistory of deviceSummary.temp_history){
|
||||
let newDate = new Date(tempHistory.date);
|
||||
for(const tempHistory of deviceSummary.temp_history){
|
||||
const newDate = new Date(tempHistory.date);
|
||||
deviceSeriesMetadata.data.push({
|
||||
x: newDate,
|
||||
y: TemperaturePipe.formatTemperature(tempHistory.temp, this.config.temperatureUnit, false)
|
||||
@@ -206,7 +216,7 @@ export class DashboardComponent implements OnInit, AfterViewInit, OnDestroy
|
||||
}
|
||||
}
|
||||
},
|
||||
xaxis : {
|
||||
xaxis: {
|
||||
type: 'datetime'
|
||||
}
|
||||
};
|
||||
@@ -216,17 +226,17 @@ export class DashboardComponent implements OnInit, AfterViewInit, OnDestroy
|
||||
// @ Public methods
|
||||
// -----------------------------------------------------------------------------------------------------
|
||||
|
||||
deviceSummariesForHostGroup(hostGroupWWNs: string[]) {
|
||||
let deviceSummaries = []
|
||||
for(let wwn of hostGroupWWNs){
|
||||
if(this.data.data.summary[wwn]){
|
||||
deviceSummaries.push(this.data.data.summary[wwn])
|
||||
deviceSummariesForHostGroup(hostGroupWWNs: string[]): DeviceSummaryModel[] {
|
||||
const deviceSummaries: DeviceSummaryModel[] = []
|
||||
for (const wwn of hostGroupWWNs) {
|
||||
if (this.summaryData[wwn]) {
|
||||
deviceSummaries.push(this.summaryData[wwn])
|
||||
}
|
||||
}
|
||||
return deviceSummaries
|
||||
}
|
||||
|
||||
openDialog() {
|
||||
openDialog(): void {
|
||||
const dialogRef = this.dialog.open(DashboardSettingsComponent);
|
||||
|
||||
dialogRef.afterClosed().subscribe(result => {
|
||||
@@ -234,8 +244,8 @@ export class DashboardComponent implements OnInit, AfterViewInit, OnDestroy
|
||||
});
|
||||
}
|
||||
|
||||
onDeviceDeleted(wwn: string) {
|
||||
delete this.data.data.summary[wwn] // remove the device from the summary list.
|
||||
onDeviceDeleted(wwn: string): void {
|
||||
delete this.summaryData[wwn] // remove the device from the summary list.
|
||||
}
|
||||
|
||||
/*
|
||||
@@ -246,16 +256,16 @@ export class DashboardComponent implements OnInit, AfterViewInit, OnDestroy
|
||||
DURATION_KEY_FOREVER = "forever"
|
||||
*/
|
||||
|
||||
changeSummaryTempDuration(durationKey: string){
|
||||
changeSummaryTempDuration(durationKey: string): void {
|
||||
this.tempDurationKey = durationKey
|
||||
|
||||
this._smartService.getSummaryTempData(durationKey)
|
||||
.subscribe((data) => {
|
||||
this._dashboardService.getSummaryTempData(durationKey)
|
||||
.subscribe((tempHistoryData) => {
|
||||
|
||||
// given a list of device temp history, override the data in the "summary" object.
|
||||
for(const wwn in this.data.data.summary) {
|
||||
for (const wwn in this.summaryData) {
|
||||
// console.log(`Updating ${wwn}, length: ${this.data.data.summary[wwn].temp_history.length}`)
|
||||
this.data.data.summary[wwn].temp_history = data.data.temp_history[wwn] || []
|
||||
this.summaryData[wwn].temp_history = tempHistoryData[wwn] || []
|
||||
}
|
||||
|
||||
// Prepare the chart series data
|
||||
|
||||
@@ -12,8 +12,8 @@ import { MatSortModule } from '@angular/material/sort';
|
||||
import { MatTableModule } from '@angular/material/table';
|
||||
import { NgApexchartsModule } from 'ng-apexcharts';
|
||||
import { MatTooltipModule } from '@angular/material/tooltip'
|
||||
import { DashboardSettingsModule } from "app/layout/common/dashboard-settings/dashboard-settings.module";
|
||||
import { DashboardDeviceModule } from "app/layout/common/dashboard-device/dashboard-device.module";
|
||||
import { DashboardSettingsModule } from 'app/layout/common/dashboard-settings/dashboard-settings.module';
|
||||
import { DashboardDeviceModule } from 'app/layout/common/dashboard-device/dashboard-device.module';
|
||||
|
||||
@NgModule({
|
||||
declarations: [
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
import { Injectable } from '@angular/core';
|
||||
import { ActivatedRouteSnapshot, Resolve, RouterStateSnapshot } from '@angular/router';
|
||||
import { Observable } from 'rxjs';
|
||||
import { DashboardService } from 'app/modules/dashboard/dashboard.service';
|
||||
import {Injectable} from '@angular/core';
|
||||
import {ActivatedRouteSnapshot, Resolve, RouterStateSnapshot} from '@angular/router';
|
||||
import {Observable} from 'rxjs';
|
||||
import {DashboardService} from 'app/modules/dashboard/dashboard.service';
|
||||
import {DeviceSummaryModel} from 'app/core/models/device-summary-model';
|
||||
|
||||
@Injectable({
|
||||
providedIn: 'root'
|
||||
})
|
||||
export class DashboardResolver implements Resolve<any>
|
||||
{
|
||||
export class DashboardResolver implements Resolve<any> {
|
||||
/**
|
||||
* Constructor
|
||||
*
|
||||
@@ -29,8 +29,7 @@ export class DashboardResolver implements Resolve<any>
|
||||
* @param route
|
||||
* @param state
|
||||
*/
|
||||
resolve(route: ActivatedRouteSnapshot, state: RouterStateSnapshot): Observable<any>
|
||||
{
|
||||
resolve(route: ActivatedRouteSnapshot, state: RouterStateSnapshot): Observable<{ [p: string]: DeviceSummaryModel }> {
|
||||
return this._dashboardService.getSummaryData();
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { Route } from '@angular/router';
|
||||
import { DashboardComponent } from 'app/modules/dashboard/dashboard.component';
|
||||
import {DashboardResolver} from "./dashboard.resolvers";
|
||||
import {DashboardResolver} from 'app/modules/dashboard/dashboard.resolvers';
|
||||
|
||||
export const dashboardRoutes: Route[] = [
|
||||
{
|
||||
|
||||
@@ -0,0 +1,44 @@
|
||||
import {HttpClient} from '@angular/common/http';
|
||||
import {DashboardService} from './dashboard.service';
|
||||
import {of} from 'rxjs';
|
||||
import {summary} from 'app/data/mock/summary/data'
|
||||
import {temp_history} from 'app/data/mock/summary/temp_history'
|
||||
import {DeviceSummaryModel} from 'app/core/models/device-summary-model';
|
||||
import {SmartTemperatureModel} from 'app/core/models/measurements/smart-temperature-model';
|
||||
|
||||
describe('DashboardService', () => {
|
||||
let service: DashboardService;
|
||||
let httpClientSpy: jasmine.SpyObj<HttpClient>;
|
||||
|
||||
beforeEach(() => {
|
||||
httpClientSpy = jasmine.createSpyObj('HttpClient', ['get']);
|
||||
service = new DashboardService(httpClientSpy);
|
||||
});
|
||||
|
||||
it('should unwrap and return getSummaryData() (HttpClient called once)', (done: DoneFn) => {
|
||||
httpClientSpy.get.and.returnValue(of(summary));
|
||||
|
||||
service.getSummaryData().subscribe(value => {
|
||||
expect(value).toBe(summary.data.summary as { [key: string]: DeviceSummaryModel });
|
||||
done();
|
||||
});
|
||||
expect(httpClientSpy.get.calls.count())
|
||||
.withContext('one call')
|
||||
.toBe(1);
|
||||
});
|
||||
|
||||
it('should unwrap and return getSummaryTempData() (HttpClient called once)', (done: DoneFn) => {
|
||||
// const expectedHeroes: any[] =
|
||||
// [{ id: 1, name: 'A' }, { id: 2, name: 'B' }];
|
||||
|
||||
httpClientSpy.get.and.returnValue(of(temp_history));
|
||||
|
||||
service.getSummaryTempData('weekly').subscribe(value => {
|
||||
expect(value).toBe(temp_history.data.temp_history as { [key: string]: SmartTemperatureModel[] });
|
||||
done();
|
||||
});
|
||||
expect(httpClientSpy.get.calls.count())
|
||||
.withContext('one call')
|
||||
.toBe(1);
|
||||
});
|
||||
});
|
||||
@@ -1,16 +1,19 @@
|
||||
import { Injectable } from '@angular/core';
|
||||
import { HttpClient } from '@angular/common/http';
|
||||
import { BehaviorSubject, Observable } from 'rxjs';
|
||||
import { tap } from 'rxjs/operators';
|
||||
import { getBasePath } from 'app/app.routing';
|
||||
import {Injectable} from '@angular/core';
|
||||
import {HttpClient} from '@angular/common/http';
|
||||
import {BehaviorSubject, Observable} from 'rxjs';
|
||||
import {map, tap} from 'rxjs/operators';
|
||||
import {getBasePath} from 'app/app.routing';
|
||||
import {DeviceSummaryResponseWrapper} from 'app/core/models/device-summary-response-wrapper';
|
||||
import {DeviceSummaryModel} from 'app/core/models/device-summary-model';
|
||||
import {SmartTemperatureModel} from 'app/core/models/measurements/smart-temperature-model';
|
||||
import {DeviceSummaryTempResponseWrapper} from 'app/core/models/device-summary-temp-response-wrapper';
|
||||
|
||||
@Injectable({
|
||||
providedIn: 'root'
|
||||
})
|
||||
export class DashboardService
|
||||
{
|
||||
export class DashboardService {
|
||||
// Observables
|
||||
private _data: BehaviorSubject<any>;
|
||||
private _data: BehaviorSubject<{ [p: string]: DeviceSummaryModel }>;
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
@@ -32,8 +35,7 @@ export class DashboardService
|
||||
/**
|
||||
* Getter for data
|
||||
*/
|
||||
get data$(): Observable<any>
|
||||
{
|
||||
get data$(): Observable<{ [p: string]: DeviceSummaryModel }> {
|
||||
return this._data.asObservable();
|
||||
}
|
||||
|
||||
@@ -44,22 +46,28 @@ export class DashboardService
|
||||
/**
|
||||
* Get data
|
||||
*/
|
||||
getSummaryData(): Observable<any>
|
||||
{
|
||||
getSummaryData(): Observable<{ [key: string]: DeviceSummaryModel }> {
|
||||
return this._httpClient.get(getBasePath() + '/api/summary').pipe(
|
||||
tap((response: any) => {
|
||||
map((response: DeviceSummaryResponseWrapper) => {
|
||||
// console.log("FILTERING=----", response.data.summary)
|
||||
return response.data.summary
|
||||
}),
|
||||
tap((response: { [key: string]: DeviceSummaryModel }) => {
|
||||
this._data.next(response);
|
||||
})
|
||||
);
|
||||
}
|
||||
|
||||
getSummaryTempData(durationKey: string): Observable<any>
|
||||
{
|
||||
let params = {}
|
||||
if(durationKey){
|
||||
params["duration_key"] = durationKey
|
||||
getSummaryTempData(durationKey: string): Observable<{ [key: string]: SmartTemperatureModel[] }> {
|
||||
const params = {}
|
||||
if (durationKey) {
|
||||
params['duration_key'] = durationKey
|
||||
}
|
||||
|
||||
return this._httpClient.get(getBasePath() + '/api/summary/temp', {params: params});
|
||||
return this._httpClient.get(getBasePath() + '/api/summary/temp', {params: params}).pipe(
|
||||
map((response: DeviceSummaryTempResponseWrapper) => {
|
||||
return response.data.temp_history
|
||||
})
|
||||
);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -143,6 +143,7 @@
|
||||
<table class="w-full bg-transparent"
|
||||
mat-table
|
||||
matSort
|
||||
multiTemplateDataRows
|
||||
[dataSource]="smartAttributeDataSource"
|
||||
[trackBy]="trackByFn"
|
||||
#smartAttributeTable>
|
||||
@@ -203,7 +204,7 @@
|
||||
</th>
|
||||
<td mat-cell
|
||||
*matCellDef="let attribute">
|
||||
<span class="pr-6 whitespace-no-wrap" matTooltip="{{getAttributeDescription(attribute)}}">
|
||||
<span class="pr-6 whitespace-no-wrap" matTooltip="click for more details.">
|
||||
{{getAttributeName(attribute)}} <mat-icon *ngIf="getAttributeDescription(attribute)" class="icon-size-10" [svgIcon]="'info'"></mat-icon>
|
||||
</span>
|
||||
</td>
|
||||
@@ -324,6 +325,72 @@
|
||||
</td>
|
||||
</ng-container>
|
||||
|
||||
|
||||
<!-- Expanded Content Column - The detail row is made up of this one column that spans across all columns -->
|
||||
<ng-container matColumnDef="expandedDetail">
|
||||
<td mat-cell *matCellDef="let attribute" [attr.colspan]="smartAttributeTableColumns.length">
|
||||
|
||||
|
||||
|
||||
<div class="attribute-detail"
|
||||
[@detailExpand]="attribute == expandedAttribute ? 'expanded' : 'collapsed'">
|
||||
|
||||
<div class="flex flex-auto w-1/3 min-w-80 py-4">
|
||||
<div class="flex flex-col flex-auto justify-end text-md pb-3">
|
||||
{{getAttributeDescription(attribute)}}
|
||||
</div>
|
||||
</div>
|
||||
<div class="flex flex-auto w-2/3 min-w-80">
|
||||
<div class="flex flex-col flex-auto justify-end text-md px-6 pb-3">
|
||||
<div class="flex items-center justify-between py-3 border-b last:border-b-0 ng-star-inserted">
|
||||
<div class="flex items-center w-1/4">Type</div>
|
||||
<div class="flex items-center w-1/4">Value</div>
|
||||
<div class="flex items-center w-1/4">Worst/Thresh</div>
|
||||
<div class="flex items-center w-1/4">Failure %</div>
|
||||
</div>
|
||||
|
||||
<div class="flex items-center justify-between py-3 border-b last:border-b-0 ng-star-inserted">
|
||||
<div class="flex items-center w-1/4">
|
||||
<div class="flex-shrink-0 w-2 h-2 mr-3 rounded-full"
|
||||
[ngClass]="{'bg-red': getAttributeScrutinyStatusName(attribute.status) === 'failed',
|
||||
'bg-green': getAttributeScrutinyStatusName(attribute.status) === 'passed',
|
||||
'bg-yellow': getAttributeScrutinyStatusName(attribute.status) === 'warn'}"></div>
|
||||
<div class="truncate">Scrutiny</div>
|
||||
</div>
|
||||
<div class="w-1/4 items-center font-medium">{{getAttributeValue(attribute)}}</div>
|
||||
<div class="w-1/4 items-center text-secondary">--</div>
|
||||
<div class="w-1/4 items-center text-secondary">{{(attribute.failure_rate | percent) || '--'}}</div>
|
||||
</div>
|
||||
|
||||
<div class="flex items-center justify-between py-3 border-b last:border-b-0 ng-star-inserted">
|
||||
<div class="flex items-center w-1/4">
|
||||
<div class="flex-shrink-0 w-2 h-2 mr-3 rounded-full"
|
||||
[ngClass]="{'bg-red': getAttributeSmartStatusName(attribute.status) === 'failed',
|
||||
'bg-green': getAttributeSmartStatusName(attribute.status) === 'passed'}"
|
||||
></div>
|
||||
<div class="truncate">Normalized</div>
|
||||
</div>
|
||||
<div class="w-1/4 items-center font-medium">{{attribute.value}}</div>
|
||||
<div class="w-1/4 items-center text-secondary">{{getAttributeWorst(attribute) || '--' }}/{{getAttributeThreshold(attribute)}}</div>
|
||||
<div class="w-1/4 items-center text-secondary">--</div>
|
||||
</div>
|
||||
|
||||
<div class="flex items-center justify-between py-3 border-b last:border-b-0 ng-star-inserted">
|
||||
<div class="flex items-center w-1/4">
|
||||
<div class="flex-shrink-0 w-2 h-2 mr-3 rounded-full"></div>
|
||||
<div class="truncate">Raw</div>
|
||||
</div>
|
||||
<div class="w-1/4 items-center font-medium">{{attribute.raw_value}}</div>
|
||||
<div class="w-1/4 items-center text-secondary">--</div>
|
||||
<div class="w-1/4 items-center text-secondary">--</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</div>
|
||||
</td>
|
||||
</ng-container>
|
||||
|
||||
|
||||
<!-- Footer -->
|
||||
<ng-container matColumnDef="recentOrdersTableFooter">
|
||||
<td class="px-3 border-none"
|
||||
@@ -344,7 +411,10 @@
|
||||
<tr class="attribute-row h-16"
|
||||
mat-row
|
||||
[ngClass]="{'yellow-50': getAttributeCritical(row)}"
|
||||
[class.attribute-expanded-row]="expandedAttribute === row"
|
||||
(click)="expandedAttribute = expandedAttribute === row ? null : row"
|
||||
*matRowDef="let row; columns: smartAttributeTableColumns;"></tr>
|
||||
<tr mat-row *matRowDef="let row; columns: ['expandedDetail']" class="attribute-detail-row"></tr>
|
||||
<tr class="h-16"
|
||||
mat-footer-row
|
||||
*matFooterRowDef="['recentOrdersTableFooter']"></tr>
|
||||
|
||||
@@ -1,7 +1,6 @@
|
||||
@import 'treo';
|
||||
|
||||
detail {
|
||||
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------------------------------------
|
||||
@@ -20,5 +19,35 @@ detail {
|
||||
}
|
||||
|
||||
|
||||
|
||||
}
|
||||
|
||||
//table {
|
||||
// width: 100%;
|
||||
//}
|
||||
|
||||
$primary: map-get($theme, primary);
|
||||
$is-dark: map-get($theme, is-dark);
|
||||
tr.attribute-detail-row {
|
||||
height: 0;
|
||||
}
|
||||
|
||||
//tr.attribute-row:not(.attribute-expanded-row):hover {
|
||||
// @if ($is-dark) {
|
||||
// background: rgba(0, 0, 0, 0.05);
|
||||
// } @else {
|
||||
// background: map-get($primary, 50);
|
||||
// }
|
||||
//}
|
||||
|
||||
tr.attribute-row:not(.attribute-expanded-row):active {
|
||||
background: #efefef;
|
||||
}
|
||||
|
||||
.attribute-row td {
|
||||
border-bottom-width: 0;
|
||||
}
|
||||
|
||||
.attribute-detail {
|
||||
overflow: hidden;
|
||||
display: flex;
|
||||
}
|
||||
|
||||
@@ -1,25 +0,0 @@
|
||||
import { async, ComponentFixture, TestBed } from '@angular/core/testing';
|
||||
|
||||
import { DetailComponent } from './detail.component';
|
||||
|
||||
describe('DetailComponent', () => {
|
||||
let component: DetailComponent;
|
||||
let fixture: ComponentFixture<DetailComponent>;
|
||||
|
||||
beforeEach(async(() => {
|
||||
TestBed.configureTestingModule({
|
||||
declarations: [ DetailComponent ]
|
||||
})
|
||||
.compileComponents();
|
||||
}));
|
||||
|
||||
beforeEach(() => {
|
||||
fixture = TestBed.createComponent(DetailComponent);
|
||||
component = fixture.componentInstance;
|
||||
fixture.detectChanges();
|
||||
});
|
||||
|
||||
it('should create', () => {
|
||||
expect(component).toBeTruthy();
|
||||
});
|
||||
});
|
||||
@@ -1,67 +1,94 @@
|
||||
import {AfterViewInit, Component, OnDestroy, OnInit, ViewChild} from '@angular/core';
|
||||
import {ApexOptions} from "ng-apexcharts";
|
||||
import {MatTableDataSource} from "@angular/material/table";
|
||||
import {MatSort} from "@angular/material/sort";
|
||||
import {Subject} from "rxjs";
|
||||
import {DetailService} from "./detail.service";
|
||||
import {takeUntil} from "rxjs/operators";
|
||||
import {fadeOut} from "../../../@treo/animations/fade";
|
||||
import {DetailSettingsComponent} from "app/layout/common/detail-settings/detail-settings.component";
|
||||
import {MatDialog} from "@angular/material/dialog";
|
||||
import humanizeDuration from 'humanize-duration';
|
||||
import {TreoConfigService} from "../../../@treo/services/config";
|
||||
import {AppConfig} from "../../core/config/app.config";
|
||||
import {AfterViewInit, Component, Inject, LOCALE_ID, OnDestroy, OnInit, ViewChild} from '@angular/core';
|
||||
import {ApexOptions} from 'ng-apexcharts';
|
||||
import {AppConfig} from 'app/core/config/app.config';
|
||||
import {DetailService} from './detail.service';
|
||||
import {DetailSettingsComponent} from 'app/layout/common/detail-settings/detail-settings.component';
|
||||
import {MatDialog} from '@angular/material/dialog';
|
||||
import {MatSort} from '@angular/material/sort';
|
||||
import {MatTableDataSource} from '@angular/material/table';
|
||||
import {Subject} from 'rxjs';
|
||||
import {TreoConfigService} from '@treo/services/config';
|
||||
import {animate, state, style, transition, trigger} from '@angular/animations';
|
||||
import {formatDate} from '@angular/common';
|
||||
import {takeUntil} from 'rxjs/operators';
|
||||
import {DeviceModel} from 'app/core/models/device-model';
|
||||
import {SmartModel} from 'app/core/models/measurements/smart-model';
|
||||
import {SmartAttributeModel} from 'app/core/models/measurements/smart-attribute-model';
|
||||
import {AttributeMetadataModel} from 'app/core/models/thresholds/attribute-metadata-model';
|
||||
|
||||
// from Constants.go - these must match
|
||||
const AttributeStatusPassed = 0
|
||||
const AttributeStatusFailedSmart = 1
|
||||
const AttributeStatusWarningScrutiny = 2
|
||||
const AttributeStatusFailedScrutiny = 4
|
||||
|
||||
|
||||
@Component({
|
||||
selector: 'detail',
|
||||
templateUrl: './detail.component.html',
|
||||
styleUrls: ['./detail.component.scss']
|
||||
selector: 'detail',
|
||||
templateUrl: './detail.component.html',
|
||||
styleUrls: ['./detail.component.scss'],
|
||||
animations: [
|
||||
trigger('detailExpand', [
|
||||
state('collapsed', style({height: '0px', minHeight: '0'})),
|
||||
state('expanded', style({height: '*'})),
|
||||
transition('expanded <=> collapsed', animate('225ms cubic-bezier(0.4, 0.0, 0.2, 1)')),
|
||||
]),
|
||||
],
|
||||
})
|
||||
|
||||
export class DetailComponent implements OnInit, AfterViewInit, OnDestroy {
|
||||
|
||||
config: AppConfig;
|
||||
|
||||
onlyCritical: boolean = true;
|
||||
// data: any;
|
||||
|
||||
metadata: any;
|
||||
device: any;
|
||||
smart_results: any[];
|
||||
|
||||
commonSparklineOptions: Partial<ApexOptions>;
|
||||
smartAttributeDataSource: MatTableDataSource<any>;
|
||||
smartAttributeTableColumns: string[];
|
||||
|
||||
|
||||
@ViewChild('smartAttributeTable', {read: MatSort})
|
||||
smartAttributeTableMatSort: MatSort;
|
||||
|
||||
// Private
|
||||
private _unsubscribeAll: Subject<any>;
|
||||
|
||||
/**
|
||||
* Constructor
|
||||
*
|
||||
* @param {DetailService} _detailService
|
||||
* @param {MatDialog} dialog
|
||||
* @param {TreoConfigService} _configService
|
||||
* @param {string} locale
|
||||
*/
|
||||
constructor(
|
||||
private _detailService: DetailService,
|
||||
public dialog: MatDialog,
|
||||
private _configService: TreoConfigService,
|
||||
|
||||
|
||||
)
|
||||
{
|
||||
@Inject(LOCALE_ID) public locale: string
|
||||
) {
|
||||
// Set the private defaults
|
||||
this._unsubscribeAll = new Subject();
|
||||
|
||||
// Set the defaults
|
||||
this.smartAttributeDataSource = new MatTableDataSource();
|
||||
// this.recentTransactionsTableColumns = ['status', 'id', 'name', 'value', 'worst', 'thresh'];
|
||||
this.smartAttributeTableColumns = ['status', 'id', 'name', 'value', 'worst', 'thresh','ideal', 'failure', 'history'];
|
||||
this.smartAttributeTableColumns = ['status', 'id', 'name', 'value', 'worst', 'thresh', 'ideal', 'failure', 'history'];
|
||||
|
||||
this.systemPrefersDark = window.matchMedia && window.matchMedia('(prefers-color-scheme: dark)').matches;
|
||||
|
||||
}
|
||||
|
||||
config: AppConfig;
|
||||
|
||||
onlyCritical = true;
|
||||
// data: any;
|
||||
expandedAttribute: SmartAttributeModel | null;
|
||||
|
||||
metadata: { [p: string]: AttributeMetadataModel } | { [p: number]: AttributeMetadataModel };
|
||||
device: DeviceModel;
|
||||
// tslint:disable-next-line:variable-name
|
||||
smart_results: SmartModel[];
|
||||
|
||||
commonSparklineOptions: Partial<ApexOptions>;
|
||||
smartAttributeDataSource: MatTableDataSource<SmartAttributeModel>;
|
||||
smartAttributeTableColumns: string[];
|
||||
|
||||
@ViewChild('smartAttributeTable', {read: MatSort})
|
||||
smartAttributeTableMatSort: MatSort;
|
||||
|
||||
// Private
|
||||
private _unsubscribeAll: Subject<any>;
|
||||
private systemPrefersDark: boolean;
|
||||
|
||||
readonly humanizeDuration = humanizeDuration;
|
||||
|
||||
// -----------------------------------------------------------------------------------------------------
|
||||
// @ Lifecycle hooks
|
||||
// -----------------------------------------------------------------------------------------------------
|
||||
@@ -69,8 +96,7 @@ export class DetailComponent implements OnInit, AfterViewInit, OnDestroy {
|
||||
/**
|
||||
* On init
|
||||
*/
|
||||
ngOnInit(): void
|
||||
{
|
||||
ngOnInit(): void {
|
||||
// Subscribe to config changes
|
||||
this._configService.config$
|
||||
.pipe(takeUntil(this._unsubscribeAll))
|
||||
@@ -82,13 +108,13 @@ export class DetailComponent implements OnInit, AfterViewInit, OnDestroy {
|
||||
// Get the data
|
||||
this._detailService.data$
|
||||
.pipe(takeUntil(this._unsubscribeAll))
|
||||
.subscribe((data) => {
|
||||
.subscribe((respWrapper) => {
|
||||
|
||||
// Store the data
|
||||
// this.data = data;
|
||||
this.device = data.data.device;
|
||||
this.smart_results = data.data.smart_results
|
||||
this.metadata = data.metadata;
|
||||
this.device = respWrapper.data.device;
|
||||
this.smart_results = respWrapper.data.smart_results
|
||||
this.metadata = respWrapper.metadata;
|
||||
|
||||
|
||||
// Store the table data
|
||||
@@ -102,8 +128,7 @@ export class DetailComponent implements OnInit, AfterViewInit, OnDestroy {
|
||||
/**
|
||||
* After view init
|
||||
*/
|
||||
ngAfterViewInit(): void
|
||||
{
|
||||
ngAfterViewInit(): void {
|
||||
// Make the data source sortable
|
||||
this.smartAttributeDataSource.sort = this.smartAttributeTableMatSort;
|
||||
}
|
||||
@@ -111,8 +136,7 @@ export class DetailComponent implements OnInit, AfterViewInit, OnDestroy {
|
||||
/**
|
||||
* On destroy
|
||||
*/
|
||||
ngOnDestroy(): void
|
||||
{
|
||||
ngOnDestroy(): void {
|
||||
// Unsubscribe from all subscriptions
|
||||
this._unsubscribeAll.next();
|
||||
this._unsubscribeAll.complete();
|
||||
@@ -121,168 +145,202 @@ export class DetailComponent implements OnInit, AfterViewInit, OnDestroy {
|
||||
// -----------------------------------------------------------------------------------------------------
|
||||
// @ Private methods
|
||||
// -----------------------------------------------------------------------------------------------------
|
||||
|
||||
getAttributeStatusName(attributeStatus: number): string {
|
||||
// tslint:disable:no-bitwise
|
||||
|
||||
// from Constants.go
|
||||
// AttributeStatusPassed AttributeStatus = 0
|
||||
// AttributeStatusFailedSmart AttributeStatus = 1
|
||||
// AttributeStatusWarningScrutiny AttributeStatus = 2
|
||||
// AttributeStatusFailedScrutiny AttributeStatus = 4
|
||||
|
||||
if(attributeStatus === 0){
|
||||
if (attributeStatus === AttributeStatusPassed) {
|
||||
return 'passed'
|
||||
|
||||
} else if ((attributeStatus & 1) !== 0 || (attributeStatus & 4) !== 0 ){
|
||||
} else if ((attributeStatus & AttributeStatusFailedScrutiny) !== 0 || (attributeStatus & AttributeStatusFailedSmart) !== 0) {
|
||||
return 'failed'
|
||||
} else if ((attributeStatus & 2) !== 0){
|
||||
} else if ((attributeStatus & AttributeStatusWarningScrutiny) !== 0) {
|
||||
return 'warn'
|
||||
}
|
||||
return ''
|
||||
// tslint:enable:no-bitwise
|
||||
}
|
||||
|
||||
getAttributeName(attribute_data): string {
|
||||
let attribute_metadata = this.metadata[attribute_data.attribute_id]
|
||||
if(!attribute_metadata){
|
||||
getAttributeScrutinyStatusName(attributeStatus: number): string {
|
||||
// tslint:disable:no-bitwise
|
||||
if ((attributeStatus & AttributeStatusFailedScrutiny) !== 0) {
|
||||
return 'failed'
|
||||
} else if ((attributeStatus & AttributeStatusWarningScrutiny) !== 0) {
|
||||
return 'warn'
|
||||
} else {
|
||||
return 'passed'
|
||||
}
|
||||
// tslint:enable:no-bitwise
|
||||
}
|
||||
|
||||
getAttributeSmartStatusName(attributeStatus: number): string {
|
||||
// tslint:disable:no-bitwise
|
||||
if ((attributeStatus & AttributeStatusFailedSmart) !== 0) {
|
||||
return 'failed'
|
||||
} else {
|
||||
return 'passed'
|
||||
}
|
||||
// tslint:enable:no-bitwise
|
||||
}
|
||||
|
||||
|
||||
getAttributeName(attributeData: SmartAttributeModel): string {
|
||||
const attributeMetadata = this.metadata[attributeData.attribute_id]
|
||||
if (!attributeMetadata) {
|
||||
return 'Unknown Attribute Name'
|
||||
} else {
|
||||
return attribute_metadata.display_name
|
||||
return attributeMetadata.display_name
|
||||
}
|
||||
}
|
||||
getAttributeDescription(attribute_data){
|
||||
let attribute_metadata = this.metadata[attribute_data.attribute_id]
|
||||
if(!attribute_metadata){
|
||||
|
||||
getAttributeDescription(attributeData: SmartAttributeModel): string {
|
||||
const attributeMetadata = this.metadata[attributeData.attribute_id]
|
||||
if (!attributeMetadata) {
|
||||
return 'Unknown'
|
||||
} else {
|
||||
return attribute_metadata.description
|
||||
return attributeMetadata.description
|
||||
}
|
||||
return
|
||||
}
|
||||
|
||||
getAttributeValue(attribute_data){
|
||||
if(this.isAta()) {
|
||||
let attribute_metadata = this.metadata[attribute_data.attribute_id]
|
||||
if(!attribute_metadata){
|
||||
return attribute_data.value
|
||||
} else if (attribute_metadata.display_type == "raw") {
|
||||
return attribute_data.raw_value
|
||||
} else if (attribute_metadata.display_type == "transformed" && attribute_data.transformed_value) {
|
||||
return attribute_data.transformed_value
|
||||
getAttributeValue(attributeData: SmartAttributeModel): number {
|
||||
if (this.isAta()) {
|
||||
const attributeMetadata = this.metadata[attributeData.attribute_id]
|
||||
if (!attributeMetadata) {
|
||||
return attributeData.value
|
||||
} else if (attributeMetadata.display_type === 'raw') {
|
||||
return attributeData.raw_value
|
||||
} else if (attributeMetadata.display_type === 'transformed' && attributeData.transformed_value) {
|
||||
return attributeData.transformed_value
|
||||
} else {
|
||||
return attribute_data.value
|
||||
return attributeData.value
|
||||
}
|
||||
}
|
||||
else{
|
||||
return attribute_data.value
|
||||
} else {
|
||||
return attributeData.value
|
||||
}
|
||||
}
|
||||
|
||||
getAttributeValueType(attribute_data){
|
||||
if(this.isAta()) {
|
||||
let attribute_metadata = this.metadata[attribute_data.attribute_id]
|
||||
if(!attribute_metadata){
|
||||
getAttributeValueType(attributeData: SmartAttributeModel): string {
|
||||
if (this.isAta()) {
|
||||
const attributeMetadata = this.metadata[attributeData.attribute_id]
|
||||
if (!attributeMetadata) {
|
||||
return ''
|
||||
} else {
|
||||
return attribute_metadata.display_type
|
||||
return attributeMetadata.display_type
|
||||
}
|
||||
} else {
|
||||
return ''
|
||||
}
|
||||
}
|
||||
|
||||
getAttributeIdeal(attribute_data){
|
||||
if(this.isAta()){
|
||||
return this.metadata[attribute_data.attribute_id]?.display_type == "raw" ? this.metadata[attribute_data.attribute_id]?.ideal : ''
|
||||
getAttributeIdeal(attributeData: SmartAttributeModel): string {
|
||||
if (this.isAta()) {
|
||||
return this.metadata[attributeData.attribute_id]?.display_type === 'raw' ? this.metadata[attributeData.attribute_id]?.ideal : ''
|
||||
} else {
|
||||
return this.metadata[attribute_data.attribute_id]?.ideal
|
||||
return this.metadata[attributeData.attribute_id]?.ideal
|
||||
}
|
||||
}
|
||||
|
||||
getAttributeWorst(attribute_data){
|
||||
let attribute_metadata = this.metadata[attribute_data.attribute_id]
|
||||
if(!attribute_metadata){
|
||||
return attribute_data.worst
|
||||
getAttributeWorst(attributeData: SmartAttributeModel): number | string {
|
||||
const attributeMetadata = this.metadata[attributeData.attribute_id]
|
||||
if (!attributeMetadata) {
|
||||
return attributeData.worst
|
||||
} else {
|
||||
return attribute_metadata?.display_type == "normalized" ? attribute_data.worst : ''
|
||||
return attributeMetadata?.display_type === 'normalized' ? attributeData.worst : ''
|
||||
}
|
||||
}
|
||||
|
||||
getAttributeThreshold(attribute_data){
|
||||
if(this.isAta()){
|
||||
let attribute_metadata = this.metadata[attribute_data.attribute_id]
|
||||
if(!attribute_metadata || attribute_metadata.display_type == "normalized"){
|
||||
return attribute_data.thresh
|
||||
getAttributeThreshold(attributeData: SmartAttributeModel): number | string {
|
||||
if (this.isAta()) {
|
||||
const attributeMetadata = this.metadata[attributeData.attribute_id]
|
||||
if (!attributeMetadata || attributeMetadata.display_type === 'normalized') {
|
||||
return attributeData.thresh
|
||||
} else {
|
||||
// if(this.data.metadata[attribute_data.attribute_id].observed_thresholds){
|
||||
//
|
||||
// } else {
|
||||
// }
|
||||
// return ''
|
||||
return attribute_data.thresh
|
||||
return attributeData.thresh
|
||||
}
|
||||
} else {
|
||||
return (attribute_data.thresh == -1 ? '' : attribute_data.thresh )
|
||||
return (attributeData.thresh === -1 ? '' : attributeData.thresh)
|
||||
}
|
||||
}
|
||||
|
||||
getAttributeCritical(attribute_data){
|
||||
return this.metadata[attribute_data.attribute_id]?.critical
|
||||
getAttributeCritical(attributeData: SmartAttributeModel): boolean {
|
||||
return this.metadata[attributeData.attribute_id]?.critical
|
||||
}
|
||||
getHiddenAttributes(){
|
||||
if (!this.smart_results || this.smart_results.length == 0) {
|
||||
|
||||
getHiddenAttributes(): number {
|
||||
if (!this.smart_results || this.smart_results.length === 0) {
|
||||
return 0
|
||||
}
|
||||
|
||||
let attributes_length = 0
|
||||
let attributes = this.smart_results[0]?.attrs
|
||||
let attributesLength = 0
|
||||
const attributes = this.smart_results[0]?.attrs
|
||||
if (attributes) {
|
||||
attributes_length = Object.keys(attributes).length
|
||||
attributesLength = Object.keys(attributes).length
|
||||
}
|
||||
|
||||
return attributes_length - this.smartAttributeDataSource.data.length
|
||||
return attributesLength - this.smartAttributeDataSource.data.length
|
||||
}
|
||||
|
||||
isAta(): boolean {
|
||||
return this.device.device_protocol == 'ATA'
|
||||
return this.device.device_protocol === 'ATA'
|
||||
}
|
||||
|
||||
isScsi(): boolean {
|
||||
return this.device.device_protocol == 'SCSI'
|
||||
return this.device.device_protocol === 'SCSI'
|
||||
}
|
||||
|
||||
isNvme(): boolean {
|
||||
return this.device.device_protocol == 'NVMe'
|
||||
return this.device.device_protocol === 'NVMe'
|
||||
}
|
||||
|
||||
private _generateSmartAttributeTableDataSource(smart_results){
|
||||
var smartAttributeDataSource = [];
|
||||
private _generateSmartAttributeTableDataSource(smartResults: SmartModel[]): SmartAttributeModel[] {
|
||||
const smartAttributeDataSource: SmartAttributeModel[] = [];
|
||||
|
||||
if(smart_results.length == 0){
|
||||
if (smartResults.length === 0) {
|
||||
return smartAttributeDataSource
|
||||
}
|
||||
var latest_smart_result = smart_results[0];
|
||||
let attributes = {}
|
||||
if(this.isScsi()) {
|
||||
const latestSmartResult = smartResults[0];
|
||||
let attributes: { [p: string]: SmartAttributeModel } = {}
|
||||
if (this.isScsi()) {
|
||||
this.smartAttributeTableColumns = ['status', 'name', 'value', 'thresh', 'history'];
|
||||
attributes = latest_smart_result.attrs
|
||||
} else if(this.isNvme()){
|
||||
attributes = latestSmartResult.attrs
|
||||
} else if (this.isNvme()) {
|
||||
this.smartAttributeTableColumns = ['status', 'name', 'value', 'thresh', 'ideal', 'history'];
|
||||
attributes = latest_smart_result.attrs
|
||||
attributes = latestSmartResult.attrs
|
||||
} else {
|
||||
//ATA
|
||||
attributes = latest_smart_result.attrs
|
||||
this.smartAttributeTableColumns = ['status', 'id', 'name', 'value', 'worst', 'thresh','ideal', 'failure', 'history'];
|
||||
// ATA
|
||||
attributes = latestSmartResult.attrs
|
||||
this.smartAttributeTableColumns = ['status', 'id', 'name', 'value', 'thresh', 'ideal', 'failure', 'history'];
|
||||
}
|
||||
|
||||
for(const attrId in attributes){
|
||||
var attr = attributes[attrId]
|
||||
for (const attrId in attributes) {
|
||||
const attr = attributes[attrId]
|
||||
|
||||
//chart history data
|
||||
// chart history data
|
||||
if (!attr.chartData) {
|
||||
|
||||
|
||||
var attrHistory = []
|
||||
for (let smart_result of smart_results){
|
||||
attrHistory.push(this.getAttributeValue(smart_result.attrs[attrId]))
|
||||
const attrHistory = []
|
||||
for (const smartResult of smartResults) {
|
||||
// attrHistory.push(this.getAttributeValue(smart_result.attrs[attrId]))
|
||||
|
||||
const chartDatapoint = {
|
||||
x: formatDate(smartResult.date, 'MMMM dd, yyyy - HH:mm', this.locale),
|
||||
y: this.getAttributeValue(smartResult.attrs[attrId])
|
||||
}
|
||||
const attributeStatusName = this.getAttributeStatusName(smartResult.attrs[attrId].status)
|
||||
if (attributeStatusName === 'failed') {
|
||||
chartDatapoint['strokeColor'] = '#F05252'
|
||||
chartDatapoint['fillColor'] = '#F05252'
|
||||
} else if (attributeStatusName === 'warn') {
|
||||
chartDatapoint['strokeColor'] = '#C27803'
|
||||
chartDatapoint['fillColor'] = '#C27803'
|
||||
}
|
||||
attrHistory.push(chartDatapoint)
|
||||
}
|
||||
|
||||
// var rawHistory = (attr.history || []).map(hist_attr => this.getAttributeValue(hist_attr)).reverse()
|
||||
@@ -290,14 +348,14 @@ export class DetailComponent implements OnInit, AfterViewInit, OnDestroy {
|
||||
|
||||
attributes[attrId].chartData = [
|
||||
{
|
||||
name: "chart-line-sparkline",
|
||||
name: 'chart-line-sparkline',
|
||||
data: attrHistory
|
||||
}
|
||||
]
|
||||
}
|
||||
//determine when to include the attributes in table.
|
||||
// determine when to include the attributes in table.
|
||||
|
||||
if(!this.onlyCritical || this.onlyCritical && this.metadata[attr.attribute_id]?.critical || attr.value < attr.thresh){
|
||||
if (!this.onlyCritical || this.onlyCritical && this.metadata[attr.attribute_id]?.critical || attr.value < attr.thresh) {
|
||||
smartAttributeDataSource.push(attr)
|
||||
}
|
||||
}
|
||||
@@ -309,13 +367,12 @@ export class DetailComponent implements OnInit, AfterViewInit, OnDestroy {
|
||||
*
|
||||
* @private
|
||||
*/
|
||||
private _prepareChartData(): void
|
||||
{
|
||||
private _prepareChartData(): void {
|
||||
|
||||
// Account balance
|
||||
this.commonSparklineOptions = {
|
||||
chart: {
|
||||
type: "bar",
|
||||
type: 'bar',
|
||||
width: 100,
|
||||
height: 25,
|
||||
sparkline: {
|
||||
@@ -325,23 +382,30 @@ export class DetailComponent implements OnInit, AfterViewInit, OnDestroy {
|
||||
enabled: false
|
||||
}
|
||||
},
|
||||
// theme:{
|
||||
// // @ts-ignore
|
||||
// // mode:
|
||||
// mode: 'dark',
|
||||
// },
|
||||
tooltip: {
|
||||
fixed: {
|
||||
enabled: false
|
||||
},
|
||||
x: {
|
||||
show: false
|
||||
show: true
|
||||
},
|
||||
y: {
|
||||
title: {
|
||||
formatter: function(seriesName) {
|
||||
return "";
|
||||
formatter: (seriesName) => {
|
||||
return '';
|
||||
}
|
||||
}
|
||||
},
|
||||
marker: {
|
||||
show: false
|
||||
}
|
||||
},
|
||||
theme: this.determineTheme(this.config)
|
||||
|
||||
},
|
||||
stroke: {
|
||||
width: 2,
|
||||
@@ -350,20 +414,28 @@ export class DetailComponent implements OnInit, AfterViewInit, OnDestroy {
|
||||
};
|
||||
}
|
||||
|
||||
private determineTheme(config: AppConfig): string {
|
||||
if (config.theme === 'system') {
|
||||
return this.systemPrefersDark ? 'dark' : 'light'
|
||||
} else {
|
||||
return config.theme
|
||||
}
|
||||
}
|
||||
|
||||
// -----------------------------------------------------------------------------------------------------
|
||||
// @ Public methods
|
||||
// -----------------------------------------------------------------------------------------------------
|
||||
|
||||
toHex(decimalNumb){
|
||||
return "0x" + Number(decimalNumb).toString(16).padStart(2, '0').toUpperCase()
|
||||
toHex(decimalNumb: number | string): string {
|
||||
return '0x' + Number(decimalNumb).toString(16).padStart(2, '0').toUpperCase()
|
||||
}
|
||||
toggleOnlyCritical(){
|
||||
|
||||
toggleOnlyCritical(): void {
|
||||
this.onlyCritical = !this.onlyCritical
|
||||
this.smartAttributeDataSource.data = this._generateSmartAttributeTableDataSource(this.smart_results);
|
||||
|
||||
}
|
||||
|
||||
openDialog() {
|
||||
openDialog(): void {
|
||||
const dialogRef = this.dialog.open(DetailSettingsComponent);
|
||||
|
||||
dialogRef.afterClosed().subscribe(result => {
|
||||
@@ -377,12 +449,9 @@ export class DetailComponent implements OnInit, AfterViewInit, OnDestroy {
|
||||
* @param index
|
||||
* @param item
|
||||
*/
|
||||
trackByFn(index: number, item: any): any
|
||||
{
|
||||
trackByFn(index: number, item: any): any {
|
||||
return index;
|
||||
// return item.id || index;
|
||||
}
|
||||
|
||||
readonly humanizeDuration = humanizeDuration;
|
||||
|
||||
}
|
||||
|
||||
@@ -13,7 +13,7 @@ import { MatTableModule } from '@angular/material/table';
|
||||
import { MatTooltipModule } from '@angular/material/tooltip'
|
||||
import { NgApexchartsModule } from 'ng-apexcharts';
|
||||
import { TreoCardModule } from '@treo/components/card';
|
||||
import {DetailSettingsModule} from "app/layout/common/detail-settings/detail-settings.module";
|
||||
import {DetailSettingsModule} from 'app/layout/common/detail-settings/detail-settings.module';
|
||||
|
||||
@NgModule({
|
||||
declarations: [
|
||||
|
||||
@@ -1,13 +1,13 @@
|
||||
import { Injectable } from '@angular/core';
|
||||
import { ActivatedRouteSnapshot, Resolve, RouterStateSnapshot } from '@angular/router';
|
||||
import { Observable } from 'rxjs';
|
||||
import { DetailService } from 'app/modules/detail/detail.service';
|
||||
import {Injectable} from '@angular/core';
|
||||
import {ActivatedRouteSnapshot, Resolve, RouterStateSnapshot} from '@angular/router';
|
||||
import {Observable} from 'rxjs';
|
||||
import {DetailService} from 'app/modules/detail/detail.service';
|
||||
import {DeviceDetailsResponseWrapper} from 'app/core/models/device-details-response-wrapper';
|
||||
|
||||
@Injectable({
|
||||
providedIn: 'root'
|
||||
})
|
||||
export class DetailResolver implements Resolve<any>
|
||||
{
|
||||
export class DetailResolver implements Resolve<any> {
|
||||
/**
|
||||
* Constructor
|
||||
*
|
||||
@@ -29,8 +29,7 @@ export class DetailResolver implements Resolve<any>
|
||||
* @param route
|
||||
* @param state
|
||||
*/
|
||||
resolve(route: ActivatedRouteSnapshot, state: RouterStateSnapshot): Observable<any>
|
||||
{
|
||||
resolve(route: ActivatedRouteSnapshot, state: RouterStateSnapshot): Observable<DeviceDetailsResponseWrapper> {
|
||||
return this._detailService.getData(route.params.wwn);
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,6 +1,6 @@
|
||||
import { Route } from '@angular/router';
|
||||
import { DetailComponent } from 'app/modules/detail/detail.component';
|
||||
import {DetailResolver} from "./detail.resolvers";
|
||||
import {DetailResolver} from './detail.resolvers';
|
||||
|
||||
export const detailRoutes: Route[] = [
|
||||
{
|
||||
|
||||
Some files were not shown because too many files have changed in this diff Show More
Reference in New Issue
Block a user