more 'active' removal; overview api sends spark metrics; spark ui (#74, #80)

Michael Quigley 2022-10-19 15:21:15 -04:00
parent e2d3208165
commit e5e683d694
12 changed files with 141 additions and 80 deletions

View File

@@ -2,6 +2,7 @@ package controller
 import (
 	"github.com/go-openapi/loads"
+	influxdb2 "github.com/influxdata/influxdb-client-go/v2"
 	"github.com/openziti-test-kitchen/zrok/controller/store"
 	"github.com/openziti-test-kitchen/zrok/rest_server_zrok"
 	"github.com/openziti-test-kitchen/zrok/rest_server_zrok/operations"
@@ -13,6 +14,7 @@ import (
 var cfg *Config
 var str *store.Store
 var mtr *metricsAgent
+var idb influxdb2.Client
 
 const version = "v0.2.0"
@@ -47,6 +49,10 @@ func Run(inCfg *Config) error {
 		return errors.Wrap(err, "error opening store")
 	}
 
+	if cfg.Influx != nil {
+		idb = influxdb2.NewClient(cfg.Influx.Url, cfg.Influx.Token)
+	}
+
 	if cfg.Metrics != nil {
 		mtr = newMetricsAgent()
 		go mtr.run()
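
Note: four fields are read off cfg.Influx in this commit (Url and Token here, Org and Bucket in the metrics agent below). A minimal sketch of the config shape that implies — the InfluxConfig type name is an assumption; only the field names are confirmed by the calls in the diff:

type Config struct {
	Influx *InfluxConfig // nil disables the Influx client (see the guard above)
	// ... other fields not shown in this diff
}

type InfluxConfig struct { // type name assumed; fields taken from cfg.Influx usage
	Url    string
	Token  string
	Org    string
	Bucket string
}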

View File

@@ -21,7 +21,6 @@ import (
 )
 
 type metricsAgent struct {
-	influx       influxdb2.Client
 	writeApi     api.WriteAPIBlocking
 	metricsQueue chan *model.Metrics
 	envCache     map[string]*envCacheEntry
@@ -43,9 +42,8 @@ func newMetricsAgent() *metricsAgent {
 		shutdown:     make(chan struct{}),
 		joined:       make(chan struct{}),
 	}
-	if cfg.Influx != nil {
-		ma.influx = influxdb2.NewClient(cfg.Influx.Url, cfg.Influx.Token)
-		ma.writeApi = ma.influx.WriteAPIBlocking(cfg.Influx.Org, cfg.Influx.Bucket)
+	if idb != nil {
+		ma.writeApi = idb.WriteAPIBlocking(cfg.Influx.Org, cfg.Influx.Bucket)
 	}
 	return ma
 }
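
Note: the agent's actual write path is not part of this diff, but the Flux queries added to the overview handler (next file) pin down the schema it must write: measurement "xfer", tags "namespace" and "service", fields "bytesRead" and "bytesWritten". A minimal sketch of a write through the blocking API under those assumptions:

import (
	"context"
	"time"

	influxdb2 "github.com/influxdata/influxdb-client-go/v2"
	"github.com/influxdata/influxdb-client-go/v2/api"
)

// writeXferSample is hypothetical; the measurement, tag, and field names
// mirror what sparkFluxQuery filters on in the overview handler.
func writeXferSample(writeApi api.WriteAPIBlocking, svcName string, read, written int64) error {
	pt := influxdb2.NewPoint(
		"xfer",
		map[string]string{"namespace": "frontend", "service": svcName},
		map[string]interface{}{"bytesRead": read, "bytesWritten": written},
		time.Now(),
	)
	return writeApi.WritePoint(context.Background(), pt)
}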

View File

@@ -1,7 +1,10 @@
 package controller
 
 import (
+	"context"
+	"fmt"
 	"github.com/go-openapi/runtime/middleware"
+	"github.com/openziti-test-kitchen/zrok/controller/store"
 	"github.com/openziti-test-kitchen/zrok/rest_model_zrok"
 	"github.com/openziti-test-kitchen/zrok/rest_server_zrok/operations/metadata"
 	"github.com/sirupsen/logrus"
@@ -26,31 +29,90 @@ func overviewHandler(_ metadata.OverviewParams, principal *rest_model_zrok.Princ
 			logrus.Errorf("error finding services for environment '%v': %v", env.ZId, err)
 			return metadata.NewOverviewInternalServerError()
 		}
-		if env.Active {
-			es := &rest_model_zrok.EnvironmentServices{
-				Environment: &rest_model_zrok.Environment{
-					Address:     env.Address,
-					CreatedAt:   env.CreatedAt.String(),
-					Description: env.Description,
-					Host:        env.Host,
-					UpdatedAt:   env.UpdatedAt.String(),
-					ZID:         env.ZId,
-				},
-			}
-			for _, svc := range svcs {
-				if svc.Active {
-					es.Services = append(es.Services, &rest_model_zrok.Service{
-						CreatedAt: svc.CreatedAt.String(),
-						Frontend:  svc.Frontend,
-						Backend:   svc.Backend,
-						UpdatedAt: svc.UpdatedAt.String(),
-						ZID:       svc.ZId,
-						Name:      svc.Name,
-					})
-				}
-			}
-			out = append(out, es)
-		}
+		es := &rest_model_zrok.EnvironmentServices{
+			Environment: &rest_model_zrok.Environment{
+				Address:     env.Address,
+				CreatedAt:   env.CreatedAt.String(),
+				Description: env.Description,
+				Host:        env.Host,
+				UpdatedAt:   env.UpdatedAt.String(),
+				ZID:         env.ZId,
+			},
+		}
+		sparkData, err := sparkDataForServices(svcs)
+		if err != nil {
+			logrus.Errorf("error querying spark data for services: %v", err)
+			return metadata.NewOverviewInternalServerError()
+		}
+		for _, svc := range svcs {
+			es.Services = append(es.Services, &rest_model_zrok.Service{
+				CreatedAt: svc.CreatedAt.String(),
+				Frontend:  svc.Frontend,
+				Backend:   svc.Backend,
+				UpdatedAt: svc.UpdatedAt.String(),
+				ZID:       svc.ZId,
+				Name:      svc.Name,
+				Metrics:   sparkData[svc.Name],
+			})
+		}
+		out = append(out, es)
 	}
 	return metadata.NewOverviewOK().WithPayload(out)
 }
+
+func sparkDataForServices(svcs []*store.Service) (map[string][]int64, error) {
+	out := make(map[string][]int64)
+	if len(svcs) > 0 {
+		qapi := idb.QueryAPI(cfg.Influx.Org)
+		result, err := qapi.Query(context.Background(), sparkFluxQuery(svcs))
+		if err != nil {
+			return nil, err
+		}
+		for result.Next() {
+			combinedRate := int64(0)
+			readRate := result.Record().ValueByKey("_value_t1")
+			if readRate != nil {
+				combinedRate += int64(readRate.(float64))
+			}
+			writeRate := result.Record().ValueByKey("_value_t2")
+			if writeRate != nil {
+				combinedRate += int64(writeRate.(float64))
+			}
+			svcName := result.Record().ValueByKey("service_t1").(string)
+			svcMetrics := out[svcName]
+			svcMetrics = append(svcMetrics, combinedRate)
+			out[svcName] = svcMetrics
+		}
+	}
+	return out, nil
+}
+
+func sparkFluxQuery(svcs []*store.Service) string {
+	svcFilter := "|> filter(fn: (r) =>"
+	for i, svc := range svcs {
+		if i > 0 {
+			svcFilter += " or"
+		}
+		svcFilter += fmt.Sprintf(" r[\"service\"] == \"%v\"", svc.Name)
+	}
+	svcFilter += ")"
+	query := "read = from(bucket: \"zrok\")" +
+		"|> range(start: -5m)" +
+		"|> filter(fn: (r) => r[\"_measurement\"] == \"xfer\")" +
+		"|> filter(fn: (r) => r[\"_field\"] == \"bytesRead\")" +
+		"|> filter(fn: (r) => r[\"namespace\"] == \"frontend\")" +
		svcFilter +
+		"|> aggregateWindow(every: 5s, fn: mean, createEmpty: true)\n\n" +
+		"written = from(bucket: \"zrok\")" +
+		"|> range(start: -5m)" +
+		"|> filter(fn: (r) => r[\"_measurement\"] == \"xfer\")" +
+		"|> filter(fn: (r) => r[\"_field\"] == \"bytesWritten\")" +
+		"|> filter(fn: (r) => r[\"namespace\"] == \"frontend\")" +
+		svcFilter +
+		"|> aggregateWindow(every: 5s, fn: mean, createEmpty: true)\n\n" +
+		"join(tables: {t1: read, t2: written}, on: [\"_time\"])"
+	return query
+}
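
Note: sparkFluxQuery concatenates each table's pipeline into a single unbroken line. Reformatted for readability, the query generated for one hypothetical service name "x824ob0w" is:

read = from(bucket: "zrok")
	|> range(start: -5m)
	|> filter(fn: (r) => r["_measurement"] == "xfer")
	|> filter(fn: (r) => r["_field"] == "bytesRead")
	|> filter(fn: (r) => r["namespace"] == "frontend")
	|> filter(fn: (r) => r["service"] == "x824ob0w")
	|> aggregateWindow(every: 5s, fn: mean, createEmpty: true)

written = from(bucket: "zrok")
	|> range(start: -5m)
	|> filter(fn: (r) => r["_measurement"] == "xfer")
	|> filter(fn: (r) => r["_field"] == "bytesWritten")
	|> filter(fn: (r) => r["namespace"] == "frontend")
	|> filter(fn: (r) => r["service"] == "x824ob0w")
	|> aggregateWindow(every: 5s, fn: mean, createEmpty: true)

join(tables: {t1: read, t2: written}, on: ["_time"])

The join suffixes each input table's columns with _t1 and _t2, which is why sparkDataForServices reads _value_t1, _value_t2, and service_t1. A -5m range aggregated in 5s windows yields 60 samples per service, matching the limit={60} that the Services sparkline sets in the UI changes below.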

View File

@@ -12,11 +12,10 @@ type Environment struct {
 	Host        string
 	Address     string
 	ZId         string
-	Active      bool
 }
 
 func (self *Store) CreateEnvironment(accountId int, i *Environment, tx *sqlx.Tx) (int, error) {
-	stmt, err := tx.Prepare("insert into environments (account_id, description, host, address, z_id, active) values (?, ?, ?, ?, ?, true)")
+	stmt, err := tx.Prepare("insert into environments (account_id, description, host, address, z_id) values (?, ?, ?, ?, ?)")
 	if err != nil {
 		return 0, errors.Wrap(err, "error preparing environments insert statement")
 	}

View File

@@ -12,11 +12,10 @@ type Service struct {
 	Name     string
 	Frontend string
 	Backend  string
-	Active   bool
 }
 
 func (self *Store) CreateService(envId int, svc *Service, tx *sqlx.Tx) (int, error) {
-	stmt, err := tx.Prepare("insert into services (environment_id, z_id, name, frontend, backend, active) values (?, ?, ?, ?, ?, true)")
+	stmt, err := tx.Prepare("insert into services (environment_id, z_id, name, frontend, backend) values (?, ?, ?, ?, ?)")
 	if err != nil {
 		return 0, errors.Wrap(err, "error preparing services insert statement")
 	}
@@ -72,12 +71,12 @@ func (self *Store) FindServicesForEnvironment(envId int, tx *sqlx.Tx) ([]*Servic
 }
 
 func (self *Store) UpdateService(svc *Service, tx *sqlx.Tx) error {
-	sql := "update services set z_id = ?, name = ?, frontend = ?, backend = ?, active = ?, updated_at = strftime('%Y-%m-%d %H:%M:%f', 'now') where id = ?"
+	sql := "update services set z_id = ?, name = ?, frontend = ?, backend = ?, updated_at = strftime('%Y-%m-%d %H:%M:%f', 'now') where id = ?"
 	stmt, err := tx.Prepare(sql)
 	if err != nil {
 		return errors.Wrap(err, "error preparing services update statement")
 	}
-	_, err = stmt.Exec(svc.ZId, svc.Name, svc.Frontend, svc.Backend, svc.Active, svc.Id)
+	_, err = stmt.Exec(svc.ZId, svc.Name, svc.Frontend, svc.Backend, svc.Id)
 	if err != nil {
 		return errors.Wrap(err, "error executing services update statement")
 	}

View File

@@ -38,7 +38,6 @@ create table environments (
     host            string,
     address         string,
     z_id            string not null unique,
-    active          boolean not null,
     created_at      datetime not null default(strftime('%Y-%m-%d %H:%M:%f', 'now')),
     updated_at      datetime not null default(strftime('%Y-%m-%d %H:%M:%f', 'now')),
@@ -55,7 +54,6 @@ create table services (
     name            string not null unique,
     frontend        string,
     backend         string,
-    active          boolean not null,
     created_at      datetime not null default(strftime('%Y-%m-%d %H:%M:%f', 'now')),
     updated_at      datetime not null default(strftime('%Y-%m-%d %H:%M:%f', 'now')),

View File

@@ -101,8 +101,7 @@ func (self *untunnelHandler) Handle(params tunnel.UntunnelParams, principal *res
 	logrus.Infof("deallocated service '%v'", svcName)
 
-	ssvc.Active = false
-	if err := str.UpdateService(ssvc, tx); err != nil {
+	if err := str.DeleteService(ssvc.Id, tx); err != nil {
 		logrus.Errorf("error deactivating service '%v': %v", svcZId, err)
 		return tunnel.NewUntunnelInternalServerError()
 	}

ui/package-lock.json (generated, 21 additions)
View File

@@ -21,6 +21,7 @@
         "react-flow-renderer": "^10.3.12",
         "react-router-dom": "^6.4.0",
         "react-scripts": "5.0.1",
+        "react-sparklines": "^1.7.0",
         "styled-components": "^5.3.5"
       }
     },
@@ -14401,6 +14402,18 @@
       }
     },
+    "node_modules/react-sparklines": {
+      "version": "1.7.0",
+      "resolved": "https://registry.npmjs.org/react-sparklines/-/react-sparklines-1.7.0.tgz",
+      "integrity": "sha512-bJFt9K4c5Z0k44G8KtxIhbG+iyxrKjBZhdW6afP+R7EnIq+iKjbWbEFISrf3WKNFsda+C46XAfnX0StS5fbDcg==",
+      "dependencies": {
+        "prop-types": "^15.5.10"
+      },
+      "peerDependencies": {
+        "react": "*",
+        "react-dom": "*"
+      }
+    },
     "node_modules/react-transition-group": {
       "version": "4.4.5",
       "resolved": "https://registry.npmjs.org/react-transition-group/-/react-transition-group-4.4.5.tgz",
@@ -27208,6 +27221,14 @@
         "workbox-webpack-plugin": "^6.4.1"
       }
     },
+    "react-sparklines": {
+      "version": "1.7.0",
+      "resolved": "https://registry.npmjs.org/react-sparklines/-/react-sparklines-1.7.0.tgz",
+      "integrity": "sha512-bJFt9K4c5Z0k44G8KtxIhbG+iyxrKjBZhdW6afP+R7EnIq+iKjbWbEFISrf3WKNFsda+C46XAfnX0StS5fbDcg==",
+      "requires": {
+        "prop-types": "^15.5.10"
+      }
+    },
     "react-transition-group": {
       "version": "4.4.5",
       "resolved": "https://registry.npmjs.org/react-transition-group/-/react-transition-group-4.4.5.tgz",

View File

@@ -16,6 +16,7 @@
     "react-flow-renderer": "^10.3.12",
     "react-router-dom": "^6.4.0",
     "react-scripts": "5.0.1",
+    "react-sparklines": "^1.7.0",
     "styled-components": "^5.3.5"
   },
   "scripts": {

View File

@@ -30,17 +30,8 @@ const Environments = (props) => {
         },
     ]
 
-    const conditionalRowStyles = [
-        {
-            when: row => !row.environment.active,
-            style: {
-                display: 'none'
-            }
-        }
-    ]
-
     const servicesComponent = ({ data }) => <Services services={data.services} />
-    const servicesExpanded = row => row.services != null && row.services.length > 0 && row.services.some((row) => row.active)
+    const servicesExpanded = row => row.services != null && row.services.length > 0
 
     return (
         <div>
@@ -54,7 +45,6 @@ const Environments = (props) => {
                 expandableRows
                 expandableRowsComponent={servicesComponent}
                 expandableRowExpanded={servicesExpanded}
-                conditionalRowStyles={conditionalRowStyles}
             />
         </div>
     )}

View File

@@ -24,7 +24,7 @@ const Network = () => {
                 reactFlow.fitView({maxZoom: 1})
             }
         });
-    })
+    }, [])
 
     useEffect(() => {
         let mounted = true
@@ -81,23 +81,21 @@ function buildGraph(overview) {
             id++
             if(item.services != null) {
                 item.services.forEach((item) => {
-                    if(item.active) {
-                        out.nodes.push({
-                            id: '' + id,
-                            data: {label: <div><Icon path={mdiAccessPointNetwork} size={0.75} className={"flowNode"}/> { item.frontend }</div>},
-                            position: {x: (id * 25), y: 0},
-                            style: { width: 'fit-content', backgroundColor: '#9367ef', color: 'white' },
-                            type: 'output',
-                            draggable: true
-                        })
-                        out.edges.push({
-                            id: 'e' + envId + '-' + id,
-                            source: '' + envId,
-                            target: '' + id,
-                            animated: true
-                        })
-                        id++
-                    }
+                    out.nodes.push({
+                        id: '' + id,
+                        data: {label: <div><Icon path={mdiAccessPointNetwork} size={0.75} className={"flowNode"}/> { item.frontend }</div>},
+                        position: {x: (id * 25), y: 0},
+                        style: { width: 'fit-content', backgroundColor: '#9367ef', color: 'white' },
+                        type: 'output',
+                        draggable: true
+                    })
+                    out.edges.push({
+                        id: 'e' + envId + '-' + id,
+                        source: '' + envId,
+                        target: '' + id,
+                        animated: true
+                    })
+                    id++
                 });
             }
         });

View File

@@ -1,11 +1,7 @@
 import DataTable from 'react-data-table-component';
-import {useEffect} from "react";
+import {Sparklines, SparklinesLine} from 'react-sparklines';
 
 const Services = (props) => {
-    useEffect((props) => {
-        console.log(props)
-    }, [])
-
     const columns = [
         {
             name: 'Frontend',
@@ -17,14 +13,9 @@ const Services = (props) => {
             selector: row => row.backend,
             sortable: true,
         },
-    ]
-
-    const conditionalRowStyles = [
         {
-            when: row => !row.active,
-            style: {
-                display: 'none'
-            }
+            name: 'Activity',
+            cell: row => <Sparklines data={row.metrics} height={20} limit={60}><SparklinesLine color={"#3b2693"}/></Sparklines>
         }
     ]
@@ -35,7 +26,6 @@ const Services = (props) => {
                 columns={columns}
                 data={props.services}
                 defaultSortFieldId={1}
-                conditionalRowStyles={conditionalRowStyles}
             />
         )}
     </div>