tweaking retention policy code so we can test downsampling scripts.

pull/228/head
Jason Kulatunga 3 years ago
parent 8fb58591a6
commit b776fb8886

@@ -54,7 +54,8 @@ web:
src:
frontend:
path: ./dist
influxdb:
retention_policy: false
log:
file: 'web.log' #absolute or relative paths allowed, e.g. web.log
@@ -80,7 +81,7 @@ If you'd like to populate the database with some test data, you can run the fol
docker run -p 8086:8086 --rm influxdb:2.0
docker run -p 8086:8086 \
docker run --rm -p 8086:8086 \
-e DOCKER_INFLUXDB_INIT_USERNAME=admin \
-e DOCKER_INFLUXDB_INIT_PASSWORD=password12345 \
-e DOCKER_INFLUXDB_INIT_ORG=scrutiny \
@@ -88,15 +89,15 @@ docker run -p 8086:8086 \
influxdb:2.0
curl -X POST -H "Content-Type: application/json" -d @webapp/backend/pkg/web/testdata/register-devices-req.json localhost:8080/api/devices/register
curl -X POST -H "Content-Type: application/json" -d @webapp/backend/pkg/models/testdata/smart-ata.json localhost:8080/api/device/0x5000cca264eb01d7/smart
curl -X POST -H "Content-Type: application/json" -d @webapp/backend/pkg/models/testdata/smart-ata-date.json localhost:8080/api/device/0x5000cca264eb01d7/smart
curl -X POST -H "Content-Type: application/json" -d @webapp/backend/pkg/models/testdata/smart-ata-date2.json localhost:8080/api/device/0x5000cca264eb01d7/smart
curl -X POST -H "Content-Type: application/json" -d @webapp/backend/pkg/models/testdata/smart-fail2.json localhost:8080/api/device/0x5000cca264ec3183/smart
curl -X POST -H "Content-Type: application/json" -d @webapp/backend/pkg/models/testdata/smart-nvme.json localhost:8080/api/device/0x5002538e40a22954/smart
curl -X POST -H "Content-Type: application/json" -d @webapp/backend/pkg/models/testdata/smart-scsi.json localhost:8080/api/device/0x5000cca252c859cc/smart
curl -X POST -H "Content-Type: application/json" -d @webapp/backend/pkg/models/testdata/smart-scsi2.json localhost:8080/api/device/0x5000cca264ebc248/smart
# curl -X POST -H "Content-Type: application/json" -d @webapp/backend/pkg/web/testdata/register-devices-req.json localhost:8080/api/devices/register
# curl -X POST -H "Content-Type: application/json" -d @webapp/backend/pkg/models/testdata/smart-ata.json localhost:8080/api/device/0x5000cca264eb01d7/smart
# curl -X POST -H "Content-Type: application/json" -d @webapp/backend/pkg/models/testdata/smart-ata-date.json localhost:8080/api/device/0x5000cca264eb01d7/smart
# curl -X POST -H "Content-Type: application/json" -d @webapp/backend/pkg/models/testdata/smart-ata-date2.json localhost:8080/api/device/0x5000cca264eb01d7/smart
# curl -X POST -H "Content-Type: application/json" -d @webapp/backend/pkg/models/testdata/smart-fail2.json localhost:8080/api/device/0x5000cca264ec3183/smart
# curl -X POST -H "Content-Type: application/json" -d @webapp/backend/pkg/models/testdata/smart-nvme.json localhost:8080/api/device/0x5002538e40a22954/smart
# curl -X POST -H "Content-Type: application/json" -d @webapp/backend/pkg/models/testdata/smart-scsi.json localhost:8080/api/device/0x5000cca252c859cc/smart
# curl -X POST -H "Content-Type: application/json" -d @webapp/backend/pkg/models/testdata/smart-scsi2.json localhost:8080/api/device/0x5000cca264ebc248/smart
go run webapp/backend/pkg/models/testdata/helper.go
curl localhost:8080/api/summary

@@ -32,6 +32,7 @@ web:
# token: 'my-token'
# org: 'my-org'
# bucket: 'bucket'
retention_policy: true
log:
file: '' #absolute or relative paths allowed, e.g. web.log
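
For reference, when exercising the downsampling scripts locally the same key is simply flipped to false, as the first config hunk above already does. A minimal sketch of the relevant scrutiny.yaml section (assuming the nesting shown in the example config; not part of this commit):

web:
  influxdb:
    retention_policy: false  # skip bucket retention rules so old-timestamped test data is accepted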

@@ -43,6 +43,7 @@ func (c *configuration) Init() error {
c.SetDefault("web.influxdb.bucket", "metrics")
c.SetDefault("web.influxdb.init_username", "admin")
c.SetDefault("web.influxdb.init_password", "password12345")
c.SetDefault("web.influxdb.retention_policy", true)
//c.SetDefault("disks.include", []string{})
//c.SetDefault("disks.exclude", []string{})

@@ -18,6 +18,17 @@ import (
"time"
)
const (
// 60seconds * 60minutes * 24hours * 15 days
RETENTION_PERIOD_15_DAYS_IN_SECONDS = 1_296_000
// 60seconds * 60minutes * 24hours * 7 days * 9 weeks
RETENTION_PERIOD_9_WEEKS_IN_SECONDS = 5_443_200
// 60seconds * 60minutes * 24hours * 7 days * (52 + 52 + 4)weeks
RETENTION_PERIOD_25_MONTHS_IN_SECONDS = 65_318_400
)
//// GormLogger is a custom logger for Gorm, making it use logrus.
//type GormLogger struct{ Logger logrus.FieldLogger }
//
@@ -151,17 +162,28 @@ func (sr *scrutinyRepository) Close() error {
func (sr *scrutinyRepository) EnsureBuckets(ctx context.Context, org *domain.Organization) error {
var mainBucketRetentionRule domain.RetentionRule
var weeklyBucketRetentionRule domain.RetentionRule
var monthlyBucketRetentionRule domain.RetentionRule
if sr.appConfig.GetBool("web.influxdb.retention_policy") {
// for testing purposes, we may not want to set a retention policy; this allows us to write data with old timestamps
// and then manually run the downsampling scripts
mainBucketRetentionRule = domain.RetentionRule{EverySeconds: RETENTION_PERIOD_15_DAYS_IN_SECONDS}
weeklyBucketRetentionRule = domain.RetentionRule{EverySeconds: RETENTION_PERIOD_9_WEEKS_IN_SECONDS}
monthlyBucketRetentionRule = domain.RetentionRule{EverySeconds: RETENTION_PERIOD_25_MONTHS_IN_SECONDS}
}
mainBucket := sr.appConfig.GetString("web.influxdb.bucket")
if foundMainBucket, foundErr := sr.influxClient.BucketsAPI().FindBucketByName(ctx, mainBucket); foundErr != nil {
// metrics bucket will have a retention period of (14+1) 15 days (since it will be down-sampled once a week)
// in seconds (60seconds * 60minutes * 24hours * 15 days) = 1_296_000
_, err := sr.influxClient.BucketsAPI().CreateBucketWithName(ctx, org, mainBucket, domain.RetentionRule{EverySeconds: 1_296_000})
// metrics bucket will have a retention period of 15 days (since it will be down-sampled once a week)
_, err := sr.influxClient.BucketsAPI().CreateBucketWithName(ctx, org, mainBucket, mainBucketRetentionRule)
if err != nil {
return err
}
} else {
//correctly set the retention period for the main bucket (can't do it during creation)
foundMainBucket.RetentionRules = domain.RetentionRules{domain.RetentionRule{EverySeconds: 1_296_000}}
} else if sr.appConfig.GetBool("web.influxdb.retention_policy") {
//correctly set the retention period for the main bucket (can't do it during setup/creation)
foundMainBucket.RetentionRules = domain.RetentionRules{mainBucketRetentionRule}
sr.influxClient.BucketsAPI().UpdateBucket(ctx, foundMainBucket)
}
@@ -169,8 +191,7 @@ func (sr *scrutinyRepository) EnsureBuckets(ctx context.Context, org *domain.Org
weeklyBucket := fmt.Sprintf("%s_weekly", sr.appConfig.GetString("web.influxdb.bucket"))
if _, foundErr := sr.influxClient.BucketsAPI().FindBucketByName(ctx, weeklyBucket); foundErr != nil {
// metrics_weekly bucket will have a retention period of 8+1 weeks (since it will be down-sampled once a month)
// in seconds (60seconds * 60minutes * 24hours * 7 days * 9 weeks) = 5_443_200
_, err := sr.influxClient.BucketsAPI().CreateBucketWithName(ctx, org, weeklyBucket, domain.RetentionRule{EverySeconds: 5_443_200})
_, err := sr.influxClient.BucketsAPI().CreateBucketWithName(ctx, org, weeklyBucket, weeklyBucketRetentionRule)
if err != nil {
return err
}
@@ -179,8 +200,7 @@ func (sr *scrutinyRepository) EnsureBuckets(ctx context.Context, org *domain.Org
monthlyBucket := fmt.Sprintf("%s_monthly", sr.appConfig.GetString("web.influxdb.bucket"))
if _, foundErr := sr.influxClient.BucketsAPI().FindBucketByName(ctx, monthlyBucket); foundErr != nil {
// metrics_monthly bucket will have a retention period of 24+1 months (since it will be down-sampled once a year)
// in seconds (60seconds * 60minutes * 24hours * 7 days * (52 + 52 + 4)weeks) = 65_318_400
_, err := sr.influxClient.BucketsAPI().CreateBucketWithName(ctx, org, monthlyBucket, domain.RetentionRule{EverySeconds: 65_318_400})
_, err := sr.influxClient.BucketsAPI().CreateBucketWithName(ctx, org, monthlyBucket, monthlyBucketRetentionRule)
if err != nil {
return err
}
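
As a quick sanity check on the new constants and on what happens when the flag is disabled, here is a minimal standalone sketch (not part of this commit; the zero-value behaviour described in the comment is an assumption based on InfluxDB 2.x treating an everySeconds of 0 as infinite retention):

package main

import "fmt"

const (
	day  = 60 * 60 * 24 // seconds per day
	week = day * 7      // seconds per week
)

func main() {
	// Verify the arithmetic behind the three retention constants.
	fmt.Println(day*15 == 1_296_000)          // metrics bucket: 15 days
	fmt.Println(week*9 == 5_443_200)          // metrics_weekly bucket: 9 weeks
	fmt.Println(week*(52+52+4) == 65_318_400) // metrics_monthly bucket: ~25 months, counted as 108 weeks

	// When web.influxdb.retention_policy is false, the RetentionRule variables in
	// EnsureBuckets keep their zero value (EverySeconds: 0), which InfluxDB treats as
	// "keep data forever". That is what lets us write old-timestamped test data and then
	// run the downsampling scripts by hand.
}

Each Println prints true, matching the values hard-coded in the constants above.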

@@ -0,0 +1,97 @@
package main
import (
"bytes"
"encoding/json"
"fmt"
"github.com/analogj/scrutiny/webapp/backend/pkg/models/collector"
"io"
"io/ioutil"
"log"
"net/http"
"os"
"time"
)
func main() {
//webapp/backend/pkg/web/testdata/register-devices-req.json
devices := "webapp/backend/pkg/web/testdata/register-devices-req.json"
smartData := map[string][]string{
"0x5000cca264eb01d7": {"webapp/backend/pkg/models/testdata/smart-ata.json", "webapp/backend/pkg/models/testdata/smart-ata-date.json", "webapp/backend/pkg/models/testdata/smart-ata-date2.json"},
"0x5000cca264ec3183": {"webapp/backend/pkg/models/testdata/smart-fail2.json"},
"0x5002538e40a22954": {"webapp/backend/pkg/models/testdata/smart-nvme.json"},
"0x5000cca252c859cc": {"webapp/backend/pkg/models/testdata/smart-scsi.json"},
"0x5000cca264ebc248": {"webapp/backend/pkg/models/testdata/smart-scsi2.json"},
}
// send a post request to register devices
file, err := os.Open(devices)
if err != nil {
log.Fatalf("ERROR %v", err)
}
defer file.Close()
_, err = SendPostRequest("http://localhost:8080/api/devices/register", file)
if err != nil {
log.Fatalf("ERROR %v", err)
}
//
for diskId, smartDataFileNames := range smartData {
for _, smartDataFileName := range smartDataFileNames {
for daysToSubtract := 0; daysToSubtract <= 30; daysToSubtract++ { // backfill roughly 4 weeks (30 days) of data
smartDataReader, err := readSmartDataFileFixTimestamp(daysToSubtract, smartDataFileName)
if err != nil {
log.Fatalf("ERROR %v", err)
}
_, err = SendPostRequest(fmt.Sprintf("http://localhost:8080/api/device/%s/smart", diskId), smartDataReader)
if err != nil {
log.Fatalf("ERROR %v", err)
}
}
}
}
}
func SendPostRequest(url string, file io.Reader) ([]byte, error) {
response, err := http.Post(url, "application/json", file)
if err != nil {
return nil, err
}
defer response.Body.Close()
log.Printf("%v\n", response.Status)
return ioutil.ReadAll(response.Body)
}
// InfluxDB will throw an error for (or silently ignore) any submitted data with a timestamp older than the
// retention period. Let's work around this by opening the test files, modifying the timestamp and returning an io.Reader.
func readSmartDataFileFixTimestamp(daysToSubtract int, smartDataFilepath string) (io.Reader, error) {
metricsfile, err := os.Open(smartDataFilepath)
if err != nil {
return nil, err
}
// close the file handle once we've read its contents
defer metricsfile.Close()
metricsFileData, err := ioutil.ReadAll(metricsfile)
if err != nil {
return nil, err
}
//unmarshal because we need to change the timestamp
var smartData collector.SmartInfo
err = json.Unmarshal(metricsFileData, &smartData)
if err != nil {
return nil, err
}
// shift the sample's collection time back by the requested number of days
daysToSubtractInHours := time.Duration(-1 * 24 * daysToSubtract)
smartData.LocalTime.TimeT = time.Now().Add(daysToSubtractInHours * time.Hour).Unix()
updatedSmartDataBytes, err := json.Marshal(smartData)
if err != nil {
return nil, err
}
return bytes.NewReader(updatedSmartDataBytes), nil
}

@@ -69,7 +69,7 @@
}
},
"local_time": {
"time_t": 1635107644,
"time_t": 1637039918,
"asctime": "Sun Jun 30 00:03:30 2021 UTC"
},
"smart_status": {

@@ -69,7 +69,7 @@
}
},
"local_time": {
"time_t": 1635127644,
"time_t": 1637039918,
"asctime": "Tue Feb 23 00:03:30 2021 UTC"
},
"smart_status": {

@@ -70,7 +70,7 @@
}
},
"local_time": {
"time_t": 1635117644,
"time_t": 1637039918,
"asctime": "Sun Sep 13 16:29:23 2020 UTC"
},
"read_lookahead": {

@@ -69,7 +69,7 @@
}
},
"local_time": {
"time_t": 1635117644,
"time_t": 1637039918,
"asctime": "Sun Jun 21 00:03:30 2020 UTC"
},
"smart_status": {

@@ -66,7 +66,7 @@
}
},
"local_time": {
"time_t": 1635117644,
"time_t": 1637039918,
"asctime": "Thu Aug 01 15:05:13 2019 WEDT"
},
"smart_status": {

@@ -70,7 +70,7 @@
}
},
"local_time": {
"time_t": 1635117644,
"time_t": 1637039918,
"asctime": "Wed Jul 8 15:48:23 2020 CEST"
},
"smart_status": {

@@ -79,7 +79,7 @@
}
},
"local_time": {
"time_t": 1635117644,
"time_t": 1637039918,
"asctime": "Mon Aug 24 21:38:38 2020 CEST"
},
"smart_status": {

@@ -79,7 +79,7 @@
}
},
"local_time": {
"time_t": 1635117644,
"time_t": 1637039918,
"asctime": "Mon Aug 24 21:38:42 2020 CEST"
},
"smart_status": {

@@ -59,7 +59,7 @@
},
"logical_block_size": 512,
"local_time": {
"time_t": 1635117644,
"time_t": 1637039918,
"asctime": "Wed Jun 10 14:01:02 2020 CEST"
},
"smart_status": {

@@ -67,7 +67,7 @@
},
"logical_block_size": 512,
"local_time": {
"time_t": 1635117644,
"time_t": 1637039918,
"asctime": "Sun Sep 20 16:24:50 2020 Europe"
},
"smart_status": {

@@ -43,7 +43,7 @@
"name": "disk"
},
"local_time": {
"time_t": 1635117644,
"time_t": 1637039918,
"asctime": "Wed Oct 09 10:31:07 2019 RDT"
},
"temperature": {

@@ -26,7 +26,7 @@
"name": "disk"
},
"local_time": {
"time_t": 1635117644,
"time_t": 1637039918,
"asctime": "Fri Aug 21 22:27:02 2020 UTC"
},
"smart_status": {

@@ -44,7 +44,7 @@
"name": "disk"
},
"local_time": {
"time_t": 1635117644,
"time_t": 1637039918,
"asctime": "Sun Dec 16 17:09:15 2018 CST"
},
"smart_status": {

File diff suppressed because it is too large