new sparkline implementation (#325)

This commit is contained in:
Michael Quigley 2023-05-11 15:21:10 -04:00
parent 4c4f0c30f0
commit bb2b7c3da7
No known key found for this signature in database
GPG Key ID: 9B60314A9DD20A62
13 changed files with 358 additions and 76 deletions

View File

@ -40,9 +40,10 @@ func (h *environmentDetailHandler) Handle(params metadata.GetEnvironmentDetailPa
logrus.Errorf("error finding shares for environment '%v' for user '%v': %v", senv.ZId, principal.Email, err)
return metadata.NewGetEnvironmentDetailInternalServerError()
}
var sparkData map[string][]int64
sparkRx := make(map[string][]int64)
sparkTx := make(map[string][]int64)
if cfg.Metrics != nil && cfg.Metrics.Influx != nil {
sparkData, err = sparkDataForShares(shrs)
sparkRx, sparkTx, err = sparkDataForShares(shrs)
if err != nil {
logrus.Errorf("error querying spark data for shares for user '%v': %v", principal.Email, err)
}
@ -62,6 +63,10 @@ func (h *environmentDetailHandler) Handle(params metadata.GetEnvironmentDetailPa
if shr.BackendProxyEndpoint != nil {
beProxyEndpoint = *shr.BackendProxyEndpoint
}
var sparkData []*rest_model_zrok.SparkDataSample
for i := 0; i < len(sparkRx[shr.Token]) && i < len(sparkTx[shr.Token]); i++ {
sparkData = append(sparkData, &rest_model_zrok.SparkDataSample{Rx: float64(sparkRx[shr.Token][i]), Tx: float64(sparkTx[shr.Token][i])})
}
es.Shares = append(es.Shares, &rest_model_zrok.Share{
Token: shr.Token,
ZID: shr.ZId,
@ -71,7 +76,7 @@ func (h *environmentDetailHandler) Handle(params metadata.GetEnvironmentDetailPa
FrontendEndpoint: feEndpoint,
BackendProxyEndpoint: beProxyEndpoint,
Reserved: shr.Reserved,
Metrics: sparkData[shr.Token],
SparkData: sparkData,
CreatedAt: shr.CreatedAt.UnixMilli(),
UpdatedAt: shr.UpdatedAt.UnixMilli(),
})

View File

@ -42,9 +42,10 @@ func (h *shareDetailHandler) Handle(params metadata.GetShareDetailParams, princi
logrus.Errorf("environment not matched for share '%v' for account '%v'", params.ShrToken, principal.Email)
return metadata.NewGetShareDetailNotFound()
}
var sparkData map[string][]int64
sparkRx := make(map[string][]int64)
sparkTx := make(map[string][]int64)
if cfg.Metrics != nil && cfg.Metrics.Influx != nil {
sparkData, err = sparkDataForShares([]*store.Share{shr})
sparkRx, sparkTx, err = sparkDataForShares([]*store.Share{shr})
if err != nil {
logrus.Errorf("error querying spark data for share: %v", err)
}
@ -63,6 +64,10 @@ func (h *shareDetailHandler) Handle(params metadata.GetShareDetailParams, princi
if shr.BackendProxyEndpoint != nil {
beProxyEndpoint = *shr.BackendProxyEndpoint
}
var sparkData []*rest_model_zrok.SparkDataSample
for i := 0; i < len(sparkRx[shr.Token]) && i < len(sparkTx[shr.Token]); i++ {
sparkData = append(sparkData, &rest_model_zrok.SparkDataSample{Rx: float64(sparkRx[shr.Token][i]), Tx: float64(sparkTx[shr.Token][i])})
}
return metadata.NewGetShareDetailOK().WithPayload(&rest_model_zrok.Share{
Token: shr.Token,
ZID: shr.ZId,
@ -72,7 +77,7 @@ func (h *shareDetailHandler) Handle(params metadata.GetShareDetailParams, princi
FrontendEndpoint: feEndpoint,
BackendProxyEndpoint: beProxyEndpoint,
Reserved: shr.Reserved,
Metrics: sparkData[shr.Token],
SparkData: sparkData,
CreatedAt: shr.CreatedAt.UnixMilli(),
UpdatedAt: shr.UpdatedAt.UnixMilli(),
})

View File

@ -6,37 +6,43 @@ import (
"github.com/openziti/zrok/controller/store"
)
func sparkDataForShares(shrs []*store.Share) (map[string][]int64, error) {
out := make(map[string][]int64)
func sparkDataForShares(shrs []*store.Share) (rx, tx map[string][]int64, err error) {
rx = make(map[string][]int64)
tx = make(map[string][]int64)
if len(shrs) > 0 {
qapi := idb.QueryAPI(cfg.Metrics.Influx.Org)
result, err := qapi.Query(context.Background(), sparkFluxQuery(shrs))
query := sparkFluxQuery(shrs, cfg.Metrics.Influx.Bucket)
result, err := qapi.Query(context.Background(), query)
if err != nil {
return nil, err
return nil, nil, err
}
for result.Next() {
combinedRate := int64(0)
readRate := result.Record().ValueByKey("tx")
if readRate != nil {
combinedRate += readRate.(int64)
}
writeRate := result.Record().ValueByKey("tx")
if writeRate != nil {
combinedRate += writeRate.(int64)
}
shrToken := result.Record().ValueByKey("share").(string)
shrMetrics := out[shrToken]
shrMetrics = append(shrMetrics, combinedRate)
out[shrToken] = shrMetrics
switch result.Record().Field() {
case "rx":
rxV := int64(0)
if v, ok := result.Record().Value().(int64); ok {
rxV = v
}
rxData := append(rx[shrToken], rxV)
rx[shrToken] = rxData
case "tx":
txV := int64(0)
if v, ok := result.Record().Value().(int64); ok {
txV = v
}
txData := append(tx[shrToken], txV)
tx[shrToken] = txData
}
}
}
return out, nil
return rx, tx, nil
}
func sparkFluxQuery(shrs []*store.Share) string {
func sparkFluxQuery(shrs []*store.Share, bucket string) string {
shrFilter := "|> filter(fn: (r) =>"
for i, shr := range shrs {
if i > 0 {
@ -45,14 +51,12 @@ func sparkFluxQuery(shrs []*store.Share) string {
shrFilter += fmt.Sprintf(" r[\"share\"] == \"%v\"", shr.Token)
}
shrFilter += ")"
query := "read = from(bucket: \"zrok\")" +
"|> range(start: -5m)" +
"|> filter(fn: (r) => r[\"_measurement\"] == \"xfer\")" +
"|> filter(fn: (r) => r[\"_field\"] == \"rx\" or r[\"_field\"] == \"tx\")" +
query := fmt.Sprintf("from(bucket: \"%v\")\n", bucket) +
"|> range(start: -5m)\n" +
"|> filter(fn: (r) => r[\"_measurement\"] == \"xfer\")\n" +
"|> filter(fn: (r) => r[\"_field\"] == \"rx\" or r[\"_field\"] == \"tx\")\n" +
"|> filter(fn: (r) => r[\"namespace\"] == \"backend\")" +
shrFilter +
"|> aggregateWindow(every: 5s, fn: sum, createEmpty: true)\n" +
"|> pivot(rowKey:[\"_time\"], columnKey: [\"_field\"], valueColumn: \"_value\")" +
"|> yield(name: \"last\")"
"|> aggregateWindow(every: 10s, fn: sum, createEmpty: true)\n"
return query
}

View File

@ -33,15 +33,15 @@ type Share struct {
// frontend selection
FrontendSelection string `json:"frontendSelection,omitempty"`
// metrics
Metrics ShareMetrics `json:"metrics,omitempty"`
// reserved
Reserved bool `json:"reserved,omitempty"`
// share mode
ShareMode string `json:"shareMode,omitempty"`
// spark data
SparkData SparkData `json:"sparkData,omitempty"`
// token
Token string `json:"token,omitempty"`
@ -56,7 +56,7 @@ type Share struct {
func (m *Share) Validate(formats strfmt.Registry) error {
var res []error
if err := m.validateMetrics(formats); err != nil {
if err := m.validateSparkData(formats); err != nil {
res = append(res, err)
}
@ -66,16 +66,16 @@ func (m *Share) Validate(formats strfmt.Registry) error {
return nil
}
func (m *Share) validateMetrics(formats strfmt.Registry) error {
if swag.IsZero(m.Metrics) { // not required
func (m *Share) validateSparkData(formats strfmt.Registry) error {
if swag.IsZero(m.SparkData) { // not required
return nil
}
if err := m.Metrics.Validate(formats); err != nil {
if err := m.SparkData.Validate(formats); err != nil {
if ve, ok := err.(*errors.Validation); ok {
return ve.ValidateName("metrics")
return ve.ValidateName("sparkData")
} else if ce, ok := err.(*errors.CompositeError); ok {
return ce.ValidateName("metrics")
return ce.ValidateName("sparkData")
}
return err
}
@ -87,7 +87,7 @@ func (m *Share) validateMetrics(formats strfmt.Registry) error {
func (m *Share) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
var res []error
if err := m.contextValidateMetrics(ctx, formats); err != nil {
if err := m.contextValidateSparkData(ctx, formats); err != nil {
res = append(res, err)
}
@ -97,13 +97,13 @@ func (m *Share) ContextValidate(ctx context.Context, formats strfmt.Registry) er
return nil
}
func (m *Share) contextValidateMetrics(ctx context.Context, formats strfmt.Registry) error {
func (m *Share) contextValidateSparkData(ctx context.Context, formats strfmt.Registry) error {
if err := m.Metrics.ContextValidate(ctx, formats); err != nil {
if err := m.SparkData.ContextValidate(ctx, formats); err != nil {
if ve, ok := err.(*errors.Validation); ok {
return ve.ValidateName("metrics")
return ve.ValidateName("sparkData")
} else if ce, ok := err.(*errors.CompositeError); ok {
return ce.ValidateName("metrics")
return ce.ValidateName("sparkData")
}
return err
}

View File

@ -7,21 +7,67 @@ package rest_model_zrok
import (
"context"
"strconv"
"github.com/go-openapi/errors"
"github.com/go-openapi/strfmt"
"github.com/go-openapi/swag"
)
// ShareMetrics share metrics
//
// swagger:model shareMetrics
type ShareMetrics []int64
type ShareMetrics []*ShareMetricsSample
// Validate validates this share metrics
func (m ShareMetrics) Validate(formats strfmt.Registry) error {
var res []error
for i := 0; i < len(m); i++ {
if swag.IsZero(m[i]) { // not required
continue
}
if m[i] != nil {
if err := m[i].Validate(formats); err != nil {
if ve, ok := err.(*errors.Validation); ok {
return ve.ValidateName(strconv.Itoa(i))
} else if ce, ok := err.(*errors.CompositeError); ok {
return ce.ValidateName(strconv.Itoa(i))
}
return err
}
}
}
if len(res) > 0 {
return errors.CompositeValidationError(res...)
}
return nil
}
// ContextValidate validates this share metrics based on context it is used
// ContextValidate validate this share metrics based on the context it is used
func (m ShareMetrics) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
var res []error
for i := 0; i < len(m); i++ {
if m[i] != nil {
if err := m[i].ContextValidate(ctx, formats); err != nil {
if ve, ok := err.(*errors.Validation); ok {
return ve.ValidateName(strconv.Itoa(i))
} else if ce, ok := err.(*errors.CompositeError); ok {
return ce.ValidateName(strconv.Itoa(i))
}
return err
}
}
}
if len(res) > 0 {
return errors.CompositeValidationError(res...)
}
return nil
}

View File

@ -0,0 +1,56 @@
// Code generated by go-swagger; DO NOT EDIT.
package rest_model_zrok
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"context"
"github.com/go-openapi/strfmt"
"github.com/go-openapi/swag"
)
// ShareMetricsSample is a single point in a share's metrics time series:
// received (rx) and transmitted (tx) values plus the sample timestamp.
// NOTE(review): units are not visible in this file — presumably bytes per
// aggregation window; confirm against the metrics producer.
//
// swagger:model shareMetricsSample
type ShareMetricsSample struct {

	// rx — received traffic for this sample
	Rx float64 `json:"rx,omitempty"`

	// timestamp — when this sample was taken; encoding/unit set by the producer
	Timestamp float64 `json:"timestamp,omitempty"`

	// tx — transmitted traffic for this sample
	Tx float64 `json:"tx,omitempty"`
}
// Validate validates this share metrics sample. All fields are
// unconstrained scalar floats, so there is nothing to check and the
// method always succeeds.
func (m *ShareMetricsSample) Validate(formats strfmt.Registry) error {
	return nil
}
// ContextValidate validates this share metrics sample based on the context
// it is used in. No context-dependent rules apply, so it always succeeds.
func (m *ShareMetricsSample) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
	return nil
}
// MarshalBinary implements encoding.BinaryMarshaler by emitting the
// sample as its JSON representation. A nil receiver marshals to nil
// with no error.
func (m *ShareMetricsSample) MarshalBinary() ([]byte, error) {
	if m != nil {
		return swag.WriteJSON(m)
	}
	return nil, nil
}
// UnmarshalBinary implements encoding.BinaryUnmarshaler by decoding a
// JSON payload into the receiver. The receiver is only overwritten when
// decoding succeeds.
func (m *ShareMetricsSample) UnmarshalBinary(b []byte) error {
	var decoded ShareMetricsSample
	if err := swag.ReadJSON(b, &decoded); err != nil {
		return err
	}
	*m = decoded
	return nil
}

View File

@ -0,0 +1,73 @@
// Code generated by go-swagger; DO NOT EDIT.
package rest_model_zrok
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"context"
"strconv"
"github.com/go-openapi/errors"
"github.com/go-openapi/strfmt"
"github.com/go-openapi/swag"
)
// SparkData is an ordered series of per-interval rx/tx samples for a
// share — the payload behind the console's activity sparkline (each
// element is one aggregation window; nil entries are tolerated by the
// validators below).
//
// swagger:model sparkData
type SparkData []*SparkDataSample
// Validate validates this spark data by validating each non-zero,
// non-nil sample in turn. The first failing sample's error is reported
// under its index in the series.
func (m SparkData) Validate(formats strfmt.Registry) error {
	var res []error

	for idx, sample := range m {
		if swag.IsZero(sample) { // not required
			continue
		}
		if sample == nil {
			continue
		}
		if err := sample.Validate(formats); err != nil {
			if ve, ok := err.(*errors.Validation); ok {
				return ve.ValidateName(strconv.Itoa(idx))
			} else if ce, ok := err.(*errors.CompositeError); ok {
				return ce.ValidateName(strconv.Itoa(idx))
			}
			return err
		}
	}

	if len(res) > 0 {
		return errors.CompositeValidationError(res...)
	}
	return nil
}
// ContextValidate validates this spark data based on the context it is
// used in, delegating to each non-nil sample and reporting the first
// failure under its index in the series.
func (m SparkData) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
	var res []error

	for idx, sample := range m {
		if sample == nil {
			continue
		}
		if err := sample.ContextValidate(ctx, formats); err != nil {
			if ve, ok := err.(*errors.Validation); ok {
				return ve.ValidateName(strconv.Itoa(idx))
			} else if ce, ok := err.(*errors.CompositeError); ok {
				return ce.ValidateName(strconv.Itoa(idx))
			}
			return err
		}
	}

	if len(res) > 0 {
		return errors.CompositeValidationError(res...)
	}
	return nil
}

View File

@ -0,0 +1,53 @@
// Code generated by go-swagger; DO NOT EDIT.
package rest_model_zrok
// This file was generated by the swagger tool.
// Editing this file might prove futile when you re-run the swagger generate command
import (
"context"
"github.com/go-openapi/strfmt"
"github.com/go-openapi/swag"
)
// SparkDataSample is one interval of a share's sparkline: the received
// (rx) and transmitted (tx) values for that window. NOTE(review): unlike
// ShareMetricsSample this type carries no timestamp — ordering comes from
// the sample's position in the SparkData slice.
//
// swagger:model sparkDataSample
type SparkDataSample struct {

	// rx — received traffic for this interval
	Rx float64 `json:"rx,omitempty"`

	// tx — transmitted traffic for this interval
	Tx float64 `json:"tx,omitempty"`
}
// Validate validates this spark data sample. Both fields are
// unconstrained scalar floats, so there is nothing to check and the
// method always succeeds.
func (m *SparkDataSample) Validate(formats strfmt.Registry) error {
	return nil
}
// ContextValidate validates this spark data sample based on the context
// it is used in. No context-dependent rules apply, so it always succeeds.
func (m *SparkDataSample) ContextValidate(ctx context.Context, formats strfmt.Registry) error {
	return nil
}
// MarshalBinary implements encoding.BinaryMarshaler by emitting the
// sample as its JSON representation. A nil receiver marshals to nil
// with no error.
func (m *SparkDataSample) MarshalBinary() ([]byte, error) {
	if m != nil {
		return swag.WriteJSON(m)
	}
	return nil, nil
}
// UnmarshalBinary implements encoding.BinaryUnmarshaler by decoding a
// JSON payload into the receiver. The receiver is only overwritten when
// decoding succeeds.
func (m *SparkDataSample) UnmarshalBinary(b []byte) error {
	var decoded SparkDataSample
	if err := swag.ReadJSON(b, &decoded); err != nil {
		return err
	}
	*m = decoded
	return nil
}

View File

@ -1281,15 +1281,15 @@ func init() {
"frontendSelection": {
"type": "string"
},
"metrics": {
"$ref": "#/definitions/shareMetrics"
},
"reserved": {
"type": "boolean"
},
"shareMode": {
"type": "string"
},
"sparkData": {
"$ref": "#/definitions/sparkData"
},
"token": {
"type": "string"
},
@ -1301,12 +1301,6 @@ func init() {
}
}
},
"shareMetrics": {
"type": "array",
"items": {
"type": "integer"
}
},
"shareRequest": {
"type": "object",
"properties": {
@ -1372,6 +1366,23 @@ func init() {
"$ref": "#/definitions/share"
}
},
"sparkData": {
"type": "array",
"items": {
"$ref": "#/definitions/sparkDataSample"
}
},
"sparkDataSample": {
"type": "object",
"properties": {
"rx": {
"type": "number"
},
"tx": {
"type": "number"
}
}
},
"unaccessRequest": {
"type": "object",
"properties": {
@ -2717,15 +2728,15 @@ func init() {
"frontendSelection": {
"type": "string"
},
"metrics": {
"$ref": "#/definitions/shareMetrics"
},
"reserved": {
"type": "boolean"
},
"shareMode": {
"type": "string"
},
"sparkData": {
"$ref": "#/definitions/sparkData"
},
"token": {
"type": "string"
},
@ -2737,12 +2748,6 @@ func init() {
}
}
},
"shareMetrics": {
"type": "array",
"items": {
"type": "integer"
}
},
"shareRequest": {
"type": "object",
"properties": {
@ -2808,6 +2813,23 @@ func init() {
"$ref": "#/definitions/share"
}
},
"sparkData": {
"type": "array",
"items": {
"$ref": "#/definitions/sparkDataSample"
}
},
"sparkDataSample": {
"type": "object",
"properties": {
"rx": {
"type": "number"
},
"tx": {
"type": "number"
}
}
},
"unaccessRequest": {
"type": "object",
"properties": {

View File

@ -844,8 +844,8 @@ definitions:
type: string
reserved:
type: boolean
metrics:
$ref: "#/definitions/shareMetrics"
sparkData:
$ref: "#/definitions/sparkData"
createdAt:
type: integer
updatedAt:
@ -856,10 +856,18 @@ definitions:
items:
$ref: "#/definitions/share"
shareMetrics:
sparkData:
type: array
items:
type: integer
$ref: "#/definitions/sparkDataSample"
sparkDataSample:
type: object
properties:
rx:
type: number
tx:
type: number
shareRequest:
type: object

View File

@ -200,11 +200,19 @@
* @property {string} frontendEndpoint
* @property {string} backendProxyEndpoint
* @property {boolean} reserved
* @property {module:types.shareMetrics} metrics
* @property {module:types.sparkData} sparkData
* @property {number} createdAt
* @property {number} updatedAt
*/
/**
* @typedef sparkDataSample
* @memberof module:types
*
* @property {number} rx
* @property {number} tx
*/
/**
* @typedef shareRequest
* @memberof module:types

View File

@ -45,8 +45,9 @@ const SharesTab = (props) => {
name: "Activity",
cell: row => {
return <ResponsiveContainer width={"100%"} height={"100%"}>
<AreaChart data={row.metrics}>
<Area type="basis" dataKey={(v) => v} stroke={"#777"} fillOpacity={0.5} fill={"#04adef"} isAnimationActive={false} dot={false} />
<AreaChart data={row.sparkData}>
<Area type={"linear"} dataKey={(v) => v.rx ? v.rx : 0} stroke={"#231069"} fill={"#04adef"} isAnimationActive={false} dot={false} />
<Area type={"linear"} dataKey={(v) => v.tx ? v.tx * -1 : 0} stroke={"#231069"} fill={"#9BF316"} isAnimationActive={false} dot={false} />
</AreaChart>
</ResponsiveContainer>
}

View File

@ -6,7 +6,7 @@ import PropertyTable from "../../PropertyTable";
import {Tab, Tabs} from "react-bootstrap";
import ActionsTab from "./ActionsTab";
import SecretToggle from "../../SecretToggle";
import {Area, AreaChart, Line, LineChart, ResponsiveContainer, XAxis} from "recharts";
import {Area, AreaChart, ResponsiveContainer} from "recharts";
import MetricsTab from "./MetricsTab";
const ShareDetail = (props) => {
@ -40,10 +40,11 @@ const ShareDetail = (props) => {
}, [props.selection]);
const customProperties = {
metrics: row => (
sparkData: row => (
<ResponsiveContainer width={"100%"} height={"100%"}>
<AreaChart data={row.value}>
<Area type="basis" dataKey={(v) => v} stroke={"#777"} fillOpacity={0.5} fill={"#04adef"} isAnimationActive={false} dot={false} />
<Area type={"basis"} dataKey={(v) => v.rx ? v.rx : 0} stroke={"#231069"} fill={"#04adef"} isAnimationActive={false} dot={false} />
<Area type={"basis"} dataKey={(v) => v.tx ? v.tx * -1 : 0} stroke={"#231069"} fill={"#9BF316"} isAnimationActive={false} dot={false} />
</AreaChart>
</ResponsiveContainer>
),