prevent sparklines handler from crashing when no metrics subsystem is configured (#905)

Michael Quigley 2025-03-03 18:20:20 -05:00
parent 9c7cb65213
commit 6361a3ced3
No known key found for this signature in database
GPG Key ID: 9B60314A9DD20A62
2 changed files with 80 additions and 76 deletions
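The change passes the controller's *config.Config into the sparklines handler and wraps the handler's store/metrics work in a cfg.Metrics != nil && cfg.Metrics.Influx != nil guard, so a controller running without a metrics subsystem no longer crashes when sparklines are requested; it presumably just returns an empty sparklines payload. The standalone sketch below illustrates that guard pattern using simplified, hypothetical types, not the actual zrok structs:

package main

import "fmt"

// Hypothetical, simplified stand-ins for the zrok configuration types.
type InfluxConfig struct{ Url string }
type MetricsConfig struct{ Influx *InfluxConfig }
type Config struct{ Metrics *MetricsConfig }

type sparklinesHandler struct{ cfg *Config }

func newSparklinesHandler(cfg *Config) *sparklinesHandler {
	return &sparklinesHandler{cfg: cfg}
}

// handle mimics the fixed behavior: metrics-backed work only runs when a
// metrics subsystem is configured; otherwise an empty result is returned.
func (h *sparklinesHandler) handle() []string {
	var sparklines []string
	if h.cfg.Metrics != nil && h.cfg.Metrics.Influx != nil {
		// In zrok this is where the store transaction and spark data
		// queries happen; here we just record that metrics were queried.
		sparklines = append(sparklines, "sparkline data from metrics store")
	}
	return sparklines
}

func main() {
	h := newSparklinesHandler(&Config{}) // no metrics subsystem configured
	fmt.Printf("sparklines: %v\n", h.handle())
}

Running the sketch with an empty Config prints an empty slice, mirroring what the handler does when no metrics subsystem is present.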


@@ -70,7 +70,7 @@ func Run(inCfg *config.Config) error {
     api.MetadataConfigurationHandler = newConfigurationHandler(cfg)
     api.MetadataClientVersionCheckHandler = metadata.ClientVersionCheckHandlerFunc(clientVersionCheckHandler)
     api.MetadataGetAccountDetailHandler = newAccountDetailHandler()
-    api.MetadataGetSparklinesHandler = newSparklinesHandler()
+    api.MetadataGetSparklinesHandler = newSparklinesHandler(cfg)
     if cfg.Metrics != nil && cfg.Metrics.Influx != nil {
         api.MetadataGetAccountMetricsHandler = newGetAccountMetricsHandler(cfg.Metrics.Influx)
         api.MetadataGetEnvironmentMetricsHandler = newGetEnvironmentMetricsHandler(cfg.Metrics.Influx)
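This wiring change hands the controller configuration to the sparklines handler, mirroring the cfg.Metrics/cfg.Metrics.Influx check already used when registering the account and environment metrics handlers; the handler itself (below) now repeats that check per request.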


@@ -2,6 +2,7 @@ package controller
 import (
     "github.com/go-openapi/runtime/middleware"
+    "github.com/openziti/zrok/controller/config"
     "github.com/openziti/zrok/controller/store"
     "github.com/openziti/zrok/rest_model_zrok"
     "github.com/openziti/zrok/rest_server_zrok/operations/metadata"
@@ -10,96 +11,99 @@ import (
 )

 type sparklinesHandler struct {
+    cfg *config.Config
 }

-func newSparklinesHandler() *sparklinesHandler {
-    return &sparklinesHandler{}
+func newSparklinesHandler(cfg *config.Config) *sparklinesHandler {
+    return &sparklinesHandler{cfg: cfg}
 }

 func (h *sparklinesHandler) Handle(params metadata.GetSparklinesParams, principal *rest_model_zrok.Principal) middleware.Responder {
-    trx, err := str.Begin()
-    if err != nil {
-        logrus.Errorf("error beginning transaction: %v", err)
-        return metadata.NewGetSparklinesInternalServerError()
-    }
-    defer func() { _ = trx.Rollback() }()
     out := &metadata.GetSparklinesOKBody{}
-    if len(params.Body.Environments) > 0 {
-        if envs, err := str.FindEnvironmentsForAccount(int(principal.ID), trx); err == nil {
-            var selectedEnvs []*store.Environment
-            selectedEnvsIdIdx := make(map[int]*store.Environment)
-            for _, envZId := range params.Body.Environments {
-                if idx := slices.IndexFunc(envs, func(env *store.Environment) bool { return env.ZId == envZId }); idx > -1 {
-                    selectedEnvs = append(selectedEnvs, envs[idx])
-                    selectedEnvsIdIdx[envs[idx].Id] = envs[idx]
-                } else {
-                    logrus.Warnf("requested sparkdata for environment '%v' not owned by '%v'", envZId, principal.Email)
-                }
-            }
-            envsRxSparkdata, envsTxSparkdata, err := sparkDataForEnvironments(selectedEnvs)
-            if err != nil {
-                logrus.Errorf("error getting sparkdata for selected environments for '%v': %v", principal.Email, err)
-                return metadata.NewGetSparklinesInternalServerError()
-            }
-            for envId, rx := range envsRxSparkdata {
-                tx := envsTxSparkdata[envId]
-                forEnv := selectedEnvsIdIdx[envId]
-                var samples []*rest_model_zrok.MetricsSample
-                for i := 0; i < len(rx) && i < len(tx); i++ {
-                    samples = append(samples, &rest_model_zrok.MetricsSample{
-                        Rx: float64(rx[i]),
-                        Tx: float64(tx[i]),
-                    })
-                }
-                out.Sparklines = append(out.Sparklines, &rest_model_zrok.Metrics{
-                    Scope: "environment",
-                    ID: forEnv.ZId,
-                    Samples: samples,
-                })
-            }
-        } else {
-            logrus.Errorf("error finding environments for '%v': %v", principal.Email, err)
-            return metadata.NewGetSparklinesInternalServerError()
-        }
-    }
-    if len(params.Body.Shares) > 0 {
-        if shrs, err := str.FindAllSharesForAccount(int(principal.ID), trx); err == nil {
-            var selectedShares []*store.Share
-            for _, selectedShareToken := range params.Body.Shares {
-                if idx := slices.IndexFunc(shrs, func(shr *store.Share) bool { return shr.Token == selectedShareToken }); idx > -1 {
-                    selectedShares = append(selectedShares, shrs[idx])
-                } else {
-                    logrus.Warnf("requested sparkdata for share '%v' not owned by '%v'", selectedShareToken, principal.Email)
-                }
-            }
-            shrsRxSparkdata, shrsTxSparkdata, err := sparkDataForShares(selectedShares)
-            if err != nil {
-                logrus.Errorf("error getting sparkdata for selected shares for '%v': %v", principal.Email, err)
-                return metadata.NewGetSparklinesInternalServerError()
-            }
-            for shrToken, rx := range shrsRxSparkdata {
-                tx := shrsTxSparkdata[shrToken]
-                var samples []*rest_model_zrok.MetricsSample
-                for i := 0; i < len(rx) && i < len(tx); i++ {
-                    samples = append(samples, &rest_model_zrok.MetricsSample{
-                        Rx: float64(rx[i]),
-                        Tx: float64(tx[i]),
-                    })
-                }
-                out.Sparklines = append(out.Sparklines, &rest_model_zrok.Metrics{
-                    Scope: "share",
-                    ID: shrToken,
-                    Samples: samples,
-                })
-            }
-        } else {
-            logrus.Errorf("error finding shares for '%v': %v", principal.Email, err)
-            return metadata.NewGetSparklinesInternalServerError()
-        }
-    }
+    if h.cfg.Metrics != nil && h.cfg.Metrics.Influx != nil {
+        trx, err := str.Begin()
+        if err != nil {
+            logrus.Errorf("error beginning transaction: %v", err)
+            return metadata.NewGetSparklinesInternalServerError()
+        }
+        defer func() { _ = trx.Rollback() }()
+        if len(params.Body.Environments) > 0 {
+            if envs, err := str.FindEnvironmentsForAccount(int(principal.ID), trx); err == nil {
+                var selectedEnvs []*store.Environment
+                selectedEnvsIdIdx := make(map[int]*store.Environment)
+                for _, envZId := range params.Body.Environments {
+                    if idx := slices.IndexFunc(envs, func(env *store.Environment) bool { return env.ZId == envZId }); idx > -1 {
+                        selectedEnvs = append(selectedEnvs, envs[idx])
+                        selectedEnvsIdIdx[envs[idx].Id] = envs[idx]
+                    } else {
+                        logrus.Warnf("requested sparkdata for environment '%v' not owned by '%v'", envZId, principal.Email)
+                    }
+                }
+                envsRxSparkdata, envsTxSparkdata, err := sparkDataForEnvironments(selectedEnvs)
+                if err != nil {
+                    logrus.Errorf("error getting sparkdata for selected environments for '%v': %v", principal.Email, err)
+                    return metadata.NewGetSparklinesInternalServerError()
+                }
+                for envId, rx := range envsRxSparkdata {
+                    tx := envsTxSparkdata[envId]
+                    forEnv := selectedEnvsIdIdx[envId]
+                    var samples []*rest_model_zrok.MetricsSample
+                    for i := 0; i < len(rx) && i < len(tx); i++ {
+                        samples = append(samples, &rest_model_zrok.MetricsSample{
+                            Rx: float64(rx[i]),
+                            Tx: float64(tx[i]),
+                        })
+                    }
+                    out.Sparklines = append(out.Sparklines, &rest_model_zrok.Metrics{
+                        Scope: "environment",
+                        ID: forEnv.ZId,
+                        Samples: samples,
+                    })
+                }
+            } else {
+                logrus.Errorf("error finding environments for '%v': %v", principal.Email, err)
+                return metadata.NewGetSparklinesInternalServerError()
+            }
+        }
+        if len(params.Body.Shares) > 0 {
+            if shrs, err := str.FindAllSharesForAccount(int(principal.ID), trx); err == nil {
+                var selectedShares []*store.Share
+                for _, selectedShareToken := range params.Body.Shares {
+                    if idx := slices.IndexFunc(shrs, func(shr *store.Share) bool { return shr.Token == selectedShareToken }); idx > -1 {
+                        selectedShares = append(selectedShares, shrs[idx])
+                    } else {
+                        logrus.Warnf("requested sparkdata for share '%v' not owned by '%v'", selectedShareToken, principal.Email)
+                    }
+                }
+                shrsRxSparkdata, shrsTxSparkdata, err := sparkDataForShares(selectedShares)
+                if err != nil {
+                    logrus.Errorf("error getting sparkdata for selected shares for '%v': %v", principal.Email, err)
+                    return metadata.NewGetSparklinesInternalServerError()
+                }
+                for shrToken, rx := range shrsRxSparkdata {
+                    tx := shrsTxSparkdata[shrToken]
+                    var samples []*rest_model_zrok.MetricsSample
+                    for i := 0; i < len(rx) && i < len(tx); i++ {
+                        samples = append(samples, &rest_model_zrok.MetricsSample{
+                            Rx: float64(rx[i]),
+                            Tx: float64(tx[i]),
+                        })
+                    }
+                    out.Sparklines = append(out.Sparklines, &rest_model_zrok.Metrics{
+                        Scope: "share",
+                        ID: shrToken,
+                        Samples: samples,
+                    })
+                }
+            } else {
+                logrus.Errorf("error finding shares for '%v': %v", principal.Email, err)
+                return metadata.NewGetSparklinesInternalServerError()
+            }
+        }
+    }
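With the handler body wrapped in the h.cfg.Metrics != nil && h.cfg.Metrics.Influx != nil guard, a controller whose configuration has no metrics/InfluxDB section should now answer sparklines requests with an empty sparklines payload instead of crashing, while the store transaction and spark data queries continue to run unchanged when metrics are configured.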