Move alerting provider mocked tests to the watchdog level

TwiN 2021-12-02 22:15:51 -05:00
parent f6336eac4e
commit 6954e9dde7
23 changed files with 279 additions and 176 deletions

View File

@ -160,7 +160,7 @@ If you want to test it locally, see [Docker](#docker).
| `endpoints[].dns` | Configuration for an endpoint of type DNS. <br />See [Monitoring an endpoint using DNS queries](#monitoring-an-endpoint-using-dns-queries). | `""` |
| `endpoints[].dns.query-type` | Query type (e.g. MX) | `""` |
| `endpoints[].dns.query-name` | Query name (e.g. example.com) | `""` |
| `endpoints[].alerts[].type` | Type of alert. <br />Valid types: `slack`, `discord`, `pagerduty`, `twilio`, `mattermost`, `messagebird`, `teams`, `custom`. | Required `""` |
| `endpoints[].alerts[].type` | Type of alert. <br />Valid types: `slack`, `discord`, `email`, `pagerduty`, `twilio`, `mattermost`, `messagebird`, `teams`, `custom`. | Required `""` |
| `endpoints[].alerts[].enabled` | Whether to enable the alert. | `false` |
| `endpoints[].alerts[].failure-threshold` | Number of failures in a row needed before triggering the alert. | `3` |
| `endpoints[].alerts[].success-threshold` | Number of successes in a row before an ongoing incident is marked as resolved. | `2` |
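
As a point of reference for the rows above, here is a hypothetical Go snippet (not part of this commit) showing how a configured alert type such as the newly documented `email` resolves to its provider. It uses only identifiers that appear later in this diff; the credential values are placeholders.

```go
package main

import (
	"fmt"

	"github.com/TwiN/gatus/v3/alerting"
	"github.com/TwiN/gatus/v3/alerting/alert"
	"github.com/TwiN/gatus/v3/alerting/provider/email"
)

func main() {
	// Alerting configuration mirroring the YAML keys documented above (all values are placeholders).
	alertingConfig := &alerting.Config{
		Email: &email.AlertProvider{
			From:     "from@example.com",
			Password: "hunter2",
			Host:     "mail.example.com",
			Port:     587,
			To:       "to@example.com",
		},
	}
	// endpoints[].alerts[].type "email" corresponds to alert.TypeEmail.
	provider := alertingConfig.GetAlertingProviderByAlertType(alert.TypeEmail)
	// Reports whether a matching, validly configured provider was found.
	fmt.Println(provider != nil && provider.IsValid())
}
```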

View File

@ -2,11 +2,9 @@ package custom
import (
"bytes"
"errors"
"fmt"
"io/ioutil"
"net/http"
"os"
"strings"
"github.com/TwiN/gatus/v3/alerting/alert"
@ -101,22 +99,13 @@ func (provider *AlertProvider) buildHTTPRequest(endpointName, alertDescription s
}
func (provider *AlertProvider) Send(endpoint *core.Endpoint, alert *alert.Alert, result *core.Result, resolved bool) error {
if os.Getenv("MOCK_ALERT_PROVIDER") == "true" {
if os.Getenv("MOCK_ALERT_PROVIDER_ERROR") == "true" {
return errors.New("error")
}
return nil
}
request := provider.buildHTTPRequest(endpoint.Name, alert.GetDescription(), resolved)
response, err := client.GetHTTPClient(provider.ClientConfig).Do(request)
if err != nil {
return err
}
if response.StatusCode > 399 {
body, err := ioutil.ReadAll(response.Body)
if err != nil {
return fmt.Errorf("call to provider alert returned status code %d", response.StatusCode)
}
body, _ := ioutil.ReadAll(response.Body)
return fmt.Errorf("call to provider alert returned status code %d: %s", response.StatusCode, string(body))
}
return err
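
The same simplification, dropping the environment-variable mock and tolerating a failed body read when building the error message, is applied to every HTTP-based provider below. Condensed into a standalone sketch (illustrative only, not the diffed provider code):

```go
package example

import (
	"fmt"
	"io/ioutil"
	"net/http"
)

// checkAlertResponse mirrors the error handling above: on a status code above 399,
// the body read error is deliberately ignored and whatever body could be read
// (possibly empty) is appended to the returned error for easier debugging.
func checkAlertResponse(response *http.Response) error {
	if response.StatusCode > 399 {
		body, _ := ioutil.ReadAll(response.Body)
		return fmt.Errorf("call to provider alert returned status code %d: %s", response.StatusCode, string(body))
	}
	return nil
}
```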

View File

@ -3,6 +3,8 @@ package custom
import (
"io/ioutil"
"testing"
"github.com/TwiN/gatus/v3/alerting/alert"
)
func TestAlertProvider_IsValid(t *testing.T) {
@ -96,6 +98,15 @@ func TestAlertProvider_GetAlertStatePlaceholderValueDefaults(t *testing.T) {
}
}
func TestAlertProvider_GetDefaultAlert(t *testing.T) {
if (AlertProvider{DefaultAlert: &alert.Alert{}}).GetDefaultAlert() == nil {
t.Error("expected default alert to be not nil")
}
if (AlertProvider{DefaultAlert: nil}).GetDefaultAlert() != nil {
t.Error("expected default alert to be nil")
}
}
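
The `GetDefaultAlert` implementation itself is not part of this hunk; based on the assertions above, it is presumably a plain accessor along these lines (a sketch with a stand-in struct, not the diffed code). The same test is added to every other provider package in this commit, so it is only illustrated once here.

```go
package example

import "github.com/TwiN/gatus/v3/alerting/alert"

// AlertProvider is a stand-in for the provider struct in each alerting package;
// only the field needed for this sketch is shown.
type AlertProvider struct {
	DefaultAlert *alert.Alert
}

// GetDefaultAlert returns the provider's default alert, or nil if none was configured,
// which is exactly what the test above asserts.
func (provider *AlertProvider) GetDefaultAlert() *alert.Alert {
	return provider.DefaultAlert
}
```
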
// TestAlertProvider_isBackwardCompatibleWithServiceRename checks if the custom alerting provider still supports
// service placeholders after the migration from "service" to "endpoint"
//

View File

@ -2,11 +2,9 @@ package discord
import (
"bytes"
"errors"
"fmt"
"io/ioutil"
"net/http"
"os"
"github.com/TwiN/gatus/v3/alerting/alert"
"github.com/TwiN/gatus/v3/client"
@ -28,12 +26,6 @@ func (provider *AlertProvider) IsValid() bool {
// Send an alert using the provider
func (provider *AlertProvider) Send(endpoint *core.Endpoint, alert *alert.Alert, result *core.Result, resolved bool) error {
if os.Getenv("MOCK_ALERT_PROVIDER") == "true" {
if os.Getenv("MOCK_ALERT_PROVIDER_ERROR") == "true" {
return errors.New("error")
}
return nil
}
buffer := bytes.NewBuffer([]byte(provider.buildRequestBody(endpoint, alert, result, resolved)))
request, err := http.NewRequest(http.MethodPost, provider.WebhookURL, buffer)
if err != nil {
@ -45,10 +37,7 @@ func (provider *AlertProvider) Send(endpoint *core.Endpoint, alert *alert.Alert,
return err
}
if response.StatusCode > 399 {
body, err := ioutil.ReadAll(response.Body)
if err != nil {
return fmt.Errorf("call to provider alert returned status code %d", response.StatusCode)
}
body, _ := ioutil.ReadAll(response.Body)
return fmt.Errorf("call to provider alert returned status code %d: %s", response.StatusCode, string(body))
}
return err

View File

@ -67,3 +67,12 @@ func TestAlertProvider_buildRequestBody(t *testing.T) {
})
}
}
func TestAlertProvider_GetDefaultAlert(t *testing.T) {
if (AlertProvider{DefaultAlert: &alert.Alert{}}).GetDefaultAlert() == nil {
t.Error("expected default alert to be not nil")
}
if (AlertProvider{DefaultAlert: nil}).GetDefaultAlert() != nil {
t.Error("expected default alert to be nil")
}
}

View File

@ -1,10 +1,8 @@
package email
import (
"errors"
"fmt"
"math"
"os"
"strings"
"github.com/TwiN/gatus/v3/alerting/alert"
@ -31,12 +29,6 @@ func (provider *AlertProvider) IsValid() bool {
// Send an alert using the provider
func (provider *AlertProvider) Send(endpoint *core.Endpoint, alert *alert.Alert, result *core.Result, resolved bool) error {
if os.Getenv("MOCK_ALERT_PROVIDER") == "true" {
if os.Getenv("MOCK_ALERT_PROVIDER_ERROR") == "true" {
return errors.New("error")
}
return nil
}
subject, body := provider.buildMessageSubjectAndBody(endpoint, alert, result, resolved)
m := gomail.NewMessage()
m.SetHeader("From", provider.From)

View File

@ -68,3 +68,12 @@ func TestAlertProvider_buildRequestBody(t *testing.T) {
})
}
}
func TestAlertProvider_GetDefaultAlert(t *testing.T) {
if (AlertProvider{DefaultAlert: &alert.Alert{}}).GetDefaultAlert() == nil {
t.Error("expected default alert to be not nil")
}
if (AlertProvider{DefaultAlert: nil}).GetDefaultAlert() != nil {
t.Error("expected default alert to be nil")
}
}

View File

@ -2,11 +2,9 @@ package mattermost
import (
"bytes"
"errors"
"fmt"
"io/ioutil"
"net/http"
"os"
"github.com/TwiN/gatus/v3/alerting/alert"
"github.com/TwiN/gatus/v3/client"
@ -34,12 +32,6 @@ func (provider *AlertProvider) IsValid() bool {
// Send an alert using the provider
func (provider *AlertProvider) Send(endpoint *core.Endpoint, alert *alert.Alert, result *core.Result, resolved bool) error {
if os.Getenv("MOCK_ALERT_PROVIDER") == "true" {
if os.Getenv("MOCK_ALERT_PROVIDER_ERROR") == "true" {
return errors.New("error")
}
return nil
}
buffer := bytes.NewBuffer([]byte(provider.buildRequestBody(endpoint, alert, result, resolved)))
request, err := http.NewRequest(http.MethodPost, provider.WebhookURL, buffer)
if err != nil {
@ -51,10 +43,7 @@ func (provider *AlertProvider) Send(endpoint *core.Endpoint, alert *alert.Alert,
return err
}
if response.StatusCode > 399 {
body, err := ioutil.ReadAll(response.Body)
if err != nil {
return fmt.Errorf("call to provider alert returned status code %d", response.StatusCode)
}
body, _ := ioutil.ReadAll(response.Body)
return fmt.Errorf("call to provider alert returned status code %d: %s", response.StatusCode, string(body))
}
return err

View File

@ -67,3 +67,12 @@ func TestAlertProvider_buildRequestBody(t *testing.T) {
})
}
}
func TestAlertProvider_GetDefaultAlert(t *testing.T) {
if (AlertProvider{DefaultAlert: &alert.Alert{}}).GetDefaultAlert() == nil {
t.Error("expected default alert to be not nil")
}
if (AlertProvider{DefaultAlert: nil}).GetDefaultAlert() != nil {
t.Error("expected default alert to be nil")
}
}

View File

@ -2,11 +2,9 @@ package messagebird
import (
"bytes"
"errors"
"fmt"
"io/ioutil"
"net/http"
"os"
"github.com/TwiN/gatus/v3/alerting/alert"
"github.com/TwiN/gatus/v3/client"
@ -35,12 +33,6 @@ func (provider *AlertProvider) IsValid() bool {
// Send an alert using the provider
// Reference doc for messagebird: https://developers.messagebird.com/api/sms-messaging/#send-outbound-sms
func (provider *AlertProvider) Send(endpoint *core.Endpoint, alert *alert.Alert, result *core.Result, resolved bool) error {
if os.Getenv("MOCK_ALERT_PROVIDER") == "true" {
if os.Getenv("MOCK_ALERT_PROVIDER_ERROR") == "true" {
return errors.New("error")
}
return nil
}
buffer := bytes.NewBuffer([]byte(provider.buildRequestBody(endpoint, alert, result, resolved)))
request, err := http.NewRequest(http.MethodPost, restAPIURL, buffer)
if err != nil {
@ -53,10 +45,7 @@ func (provider *AlertProvider) Send(endpoint *core.Endpoint, alert *alert.Alert,
return err
}
if response.StatusCode > 399 {
body, err := ioutil.ReadAll(response.Body)
if err != nil {
return fmt.Errorf("call to provider alert returned status code %d", response.StatusCode)
}
body, _ := ioutil.ReadAll(response.Body)
return fmt.Errorf("call to provider alert returned status code %d: %s", response.StatusCode, string(body))
}
return err

View File

@ -71,3 +71,12 @@ func TestAlertProvider_buildRequestBody(t *testing.T) {
})
}
}
func TestAlertProvider_GetDefaultAlert(t *testing.T) {
if (AlertProvider{DefaultAlert: &alert.Alert{}}).GetDefaultAlert() == nil {
t.Error("expected default alert to be not nil")
}
if (AlertProvider{DefaultAlert: nil}).GetDefaultAlert() != nil {
t.Error("expected default alert to be nil")
}
}

View File

@ -3,12 +3,10 @@ package pagerduty
import (
"bytes"
"encoding/json"
"errors"
"fmt"
"io/ioutil"
"log"
"net/http"
"os"
"github.com/TwiN/gatus/v3/alerting/alert"
"github.com/TwiN/gatus/v3/client"
@ -55,12 +53,6 @@ func (provider *AlertProvider) IsValid() bool {
//
// Relevant: https://developer.pagerduty.com/docs/events-api-v2/trigger-events/
func (provider *AlertProvider) Send(endpoint *core.Endpoint, alert *alert.Alert, result *core.Result, resolved bool) error {
if os.Getenv("MOCK_ALERT_PROVIDER") == "true" {
if os.Getenv("MOCK_ALERT_PROVIDER_ERROR") == "true" {
return errors.New("error")
}
return nil
}
buffer := bytes.NewBuffer([]byte(provider.buildRequestBody(endpoint, alert, result, resolved)))
request, err := http.NewRequest(http.MethodPost, restAPIURL, buffer)
if err != nil {
@ -72,10 +64,7 @@ func (provider *AlertProvider) Send(endpoint *core.Endpoint, alert *alert.Alert,
return err
}
if response.StatusCode > 399 {
body, err := ioutil.ReadAll(response.Body)
if err != nil {
return fmt.Errorf("call to provider alert returned status code %d", response.StatusCode)
}
body, _ := ioutil.ReadAll(response.Body)
return fmt.Errorf("call to provider alert returned status code %d: %s", response.StatusCode, string(body))
}
if alert.IsSendingOnResolved() {
@ -87,8 +76,7 @@ func (provider *AlertProvider) Send(endpoint *core.Endpoint, alert *alert.Alert,
body, err := ioutil.ReadAll(response.Body)
var payload pagerDutyResponsePayload
if err = json.Unmarshal(body, &payload); err != nil {
// Silently fail. We don't want to create tons of alerts just because we failed to parse
// the body.
// Silently fail. We don't want to create tons of alerts just because we failed to parse the body.
log.Printf("[pagerduty][Send] Ran into error unmarshaling pagerduty response: %s", err.Error())
} else {
alert.ResolveKey = payload.DedupKey
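
The `pagerDutyResponsePayload` type is referenced but not shown in this diff; only `DedupKey` is actually used. A minimal sketch, assuming the standard PagerDuty Events API v2 response shape for the remaining fields:

```go
package example

// pagerDutyResponsePayload sketches the response structure implied by the code above.
// Only dedup_key is used by the provider; Status and Message are assumptions based on
// the PagerDuty Events API v2 response, not part of this diff.
type pagerDutyResponsePayload struct {
	Status   string `json:"status"`
	Message  string `json:"message"`
	DedupKey string `json:"dedup_key"`
}
```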

View File

@ -155,3 +155,12 @@ func TestAlertProvider_getIntegrationKeyForGroup(t *testing.T) {
})
}
}
func TestAlertProvider_GetDefaultAlert(t *testing.T) {
if (AlertProvider{DefaultAlert: &alert.Alert{}}).GetDefaultAlert() == nil {
t.Error("expected default alert to be not nil")
}
if (AlertProvider{DefaultAlert: nil}).GetDefaultAlert() != nil {
t.Error("expected default alert to be nil")
}
}

View File

@ -2,11 +2,9 @@ package slack
import (
"bytes"
"errors"
"fmt"
"io/ioutil"
"net/http"
"os"
"github.com/TwiN/gatus/v3/alerting/alert"
"github.com/TwiN/gatus/v3/client"
@ -28,12 +26,6 @@ func (provider *AlertProvider) IsValid() bool {
// Send an alert using the provider
func (provider *AlertProvider) Send(endpoint *core.Endpoint, alert *alert.Alert, result *core.Result, resolved bool) error {
if os.Getenv("MOCK_ALERT_PROVIDER") == "true" {
if os.Getenv("MOCK_ALERT_PROVIDER_ERROR") == "true" {
return errors.New("error")
}
return nil
}
buffer := bytes.NewBuffer([]byte(provider.buildRequestBody(endpoint, alert, result, resolved)))
request, err := http.NewRequest(http.MethodPost, provider.WebhookURL, buffer)
if err != nil {
@ -45,10 +37,7 @@ func (provider *AlertProvider) Send(endpoint *core.Endpoint, alert *alert.Alert,
return err
}
if response.StatusCode > 399 {
body, err := ioutil.ReadAll(response.Body)
if err != nil {
return fmt.Errorf("call to provider alert returned status code %d", response.StatusCode)
}
body, _ := ioutil.ReadAll(response.Body)
return fmt.Errorf("call to provider alert returned status code %d: %s", response.StatusCode, string(body))
}
return err

View File

@ -67,3 +67,12 @@ func TestAlertProvider_buildRequestBody(t *testing.T) {
})
}
}
func TestAlertProvider_GetDefaultAlert(t *testing.T) {
if (AlertProvider{DefaultAlert: &alert.Alert{}}).GetDefaultAlert() == nil {
t.Error("expected default alert to be not nil")
}
if (AlertProvider{DefaultAlert: nil}).GetDefaultAlert() != nil {
t.Error("expected default alert to be nil")
}
}

View File

@ -2,11 +2,9 @@ package teams
import (
"bytes"
"errors"
"fmt"
"io/ioutil"
"net/http"
"os"
"github.com/TwiN/gatus/v3/alerting/alert"
"github.com/TwiN/gatus/v3/client"
@ -28,12 +26,6 @@ func (provider *AlertProvider) IsValid() bool {
// Send an alert using the provider
func (provider *AlertProvider) Send(endpoint *core.Endpoint, alert *alert.Alert, result *core.Result, resolved bool) error {
if os.Getenv("MOCK_ALERT_PROVIDER") == "true" {
if os.Getenv("MOCK_ALERT_PROVIDER_ERROR") == "true" {
return errors.New("error")
}
return nil
}
buffer := bytes.NewBuffer([]byte(provider.buildRequestBody(endpoint, alert, result, resolved)))
request, err := http.NewRequest(http.MethodPost, provider.WebhookURL, buffer)
if err != nil {
@ -45,10 +37,7 @@ func (provider *AlertProvider) Send(endpoint *core.Endpoint, alert *alert.Alert,
return err
}
if response.StatusCode > 399 {
body, err := ioutil.ReadAll(response.Body)
if err != nil {
return fmt.Errorf("call to provider alert returned status code %d", response.StatusCode)
}
body, _ := ioutil.ReadAll(response.Body)
return fmt.Errorf("call to provider alert returned status code %d: %s", response.StatusCode, string(body))
}
return err

View File

@ -67,3 +67,12 @@ func TestAlertProvider_buildRequestBody(t *testing.T) {
})
}
}
func TestAlertProvider_GetDefaultAlert(t *testing.T) {
if (AlertProvider{DefaultAlert: &alert.Alert{}}).GetDefaultAlert() == nil {
t.Error("expected default alert to be not nil")
}
if (AlertProvider{DefaultAlert: nil}).GetDefaultAlert() != nil {
t.Error("expected default alert to be nil")
}
}

View File

@ -2,11 +2,9 @@ package telegram
import (
"bytes"
"errors"
"fmt"
"io/ioutil"
"net/http"
"os"
"github.com/TwiN/gatus/v3/alerting/alert"
"github.com/TwiN/gatus/v3/client"
@ -29,12 +27,6 @@ func (provider *AlertProvider) IsValid() bool {
// Send an alert using the provider
func (provider *AlertProvider) Send(endpoint *core.Endpoint, alert *alert.Alert, result *core.Result, resolved bool) error {
if os.Getenv("MOCK_ALERT_PROVIDER") == "true" {
if os.Getenv("MOCK_ALERT_PROVIDER_ERROR") == "true" {
return errors.New("error")
}
return nil
}
buffer := bytes.NewBuffer([]byte(provider.buildRequestBody(endpoint, alert, result, resolved)))
request, err := http.NewRequest(http.MethodPost, fmt.Sprintf("https://api.telegram.org/bot%s/sendMessage", provider.Token), buffer)
if err != nil {
@ -46,10 +38,7 @@ func (provider *AlertProvider) Send(endpoint *core.Endpoint, alert *alert.Alert,
return err
}
if response.StatusCode > 399 {
body, err := ioutil.ReadAll(response.Body)
if err != nil {
return fmt.Errorf("call to provider alert returned status code %d", response.StatusCode)
}
body, _ := ioutil.ReadAll(response.Body)
return fmt.Errorf("call to provider alert returned status code %d: %s", response.StatusCode, string(body))
}
return err

View File

@ -67,3 +67,12 @@ func TestAlertProvider_buildRequestBody(t *testing.T) {
})
}
}
func TestAlertProvider_GetDefaultAlert(t *testing.T) {
if (AlertProvider{DefaultAlert: &alert.Alert{}}).GetDefaultAlert() == nil {
t.Error("expected default alert to be not nil")
}
if (AlertProvider{DefaultAlert: nil}).GetDefaultAlert() != nil {
t.Error("expected default alert to be nil")
}
}

View File

@ -3,12 +3,10 @@ package twilio
import (
"bytes"
"encoding/base64"
"errors"
"fmt"
"io/ioutil"
"net/http"
"net/url"
"os"
"github.com/TwiN/gatus/v3/alerting/alert"
"github.com/TwiN/gatus/v3/client"
@ -33,12 +31,6 @@ func (provider *AlertProvider) IsValid() bool {
// Send an alert using the provider
func (provider *AlertProvider) Send(endpoint *core.Endpoint, alert *alert.Alert, result *core.Result, resolved bool) error {
if os.Getenv("MOCK_ALERT_PROVIDER") == "true" {
if os.Getenv("MOCK_ALERT_PROVIDER_ERROR") == "true" {
return errors.New("error")
}
return nil
}
buffer := bytes.NewBuffer([]byte(provider.buildRequestBody(endpoint, alert, result, resolved)))
request, err := http.NewRequest(http.MethodPost, fmt.Sprintf("https://api.twilio.com/2010-04-01/Accounts/%s/Messages.json", provider.SID), buffer)
if err != nil {
@ -51,10 +43,7 @@ func (provider *AlertProvider) Send(endpoint *core.Endpoint, alert *alert.Alert,
return err
}
if response.StatusCode > 399 {
body, err := ioutil.ReadAll(response.Body)
if err != nil {
return fmt.Errorf("call to provider alert returned status code %d", response.StatusCode)
}
body, _ := ioutil.ReadAll(response.Body)
return fmt.Errorf("call to provider alert returned status code %d: %s", response.StatusCode, string(body))
}
return err

View File

@ -67,3 +67,12 @@ func TestAlertProvider_buildRequestBody(t *testing.T) {
})
}
}
func TestAlertProvider_GetDefaultAlert(t *testing.T) {
if (AlertProvider{DefaultAlert: &alert.Alert{}}).GetDefaultAlert() == nil {
t.Error("expected default alert to be not nil")
}
if (AlertProvider{DefaultAlert: nil}).GetDefaultAlert() != nil {
t.Error("expected default alert to be nil")
}
}

View File

@ -1,7 +1,9 @@
package watchdog
import (
"errors"
"log"
"os"
"github.com/TwiN/gatus/v3/alerting"
"github.com/TwiN/gatus/v3/core"
@ -36,7 +38,14 @@ func handleAlertsToTrigger(endpoint *core.Endpoint, result *core.Result, alertin
alertProvider := alertingConfig.GetAlertingProviderByAlertType(endpointAlert.Type)
if alertProvider != nil && alertProvider.IsValid() {
log.Printf("[watchdog][handleAlertsToTrigger] Sending %s alert because alert for endpoint=%s with description='%s' has been TRIGGERED", endpointAlert.Type, endpoint.Name, endpointAlert.GetDescription())
err := alertProvider.Send(endpoint, endpointAlert, result, false)
var err error
if os.Getenv("MOCK_ALERT_PROVIDER") == "true" {
if os.Getenv("MOCK_ALERT_PROVIDER_ERROR") == "true" {
err = errors.New("error")
}
} else {
err = alertProvider.Send(endpoint, endpointAlert, result, false)
}
if err != nil {
log.Printf("[watchdog][handleAlertsToTrigger] Failed to send an alert for endpoint=%s: %s", endpoint.Name, err.Error())
} else {
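
What moves here is the environment-variable gate that each provider previously carried in its `Send` method. Purely for illustration, the same logic expressed as a standalone helper (assuming the `AlertProvider` interface from `github.com/TwiN/gatus/v3/alerting/provider`, which this diff does not show):

```go
package example

import (
	"errors"
	"os"

	"github.com/TwiN/gatus/v3/alerting/alert"
	"github.com/TwiN/gatus/v3/alerting/provider"
	"github.com/TwiN/gatus/v3/core"
)

// sendOrMock reproduces the inlined gate above: when MOCK_ALERT_PROVIDER is "true",
// no real provider is called and MOCK_ALERT_PROVIDER_ERROR decides whether a fake
// error is returned; otherwise the alert is sent through the actual provider.
func sendOrMock(alertProvider provider.AlertProvider, endpoint *core.Endpoint, endpointAlert *alert.Alert, result *core.Result, resolved bool) error {
	if os.Getenv("MOCK_ALERT_PROVIDER") == "true" {
		if os.Getenv("MOCK_ALERT_PROVIDER_ERROR") == "true" {
			return errors.New("error")
		}
		return nil
	}
	return alertProvider.Send(endpoint, endpointAlert, result, resolved)
}
```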

View File

@ -7,7 +7,15 @@ import (
"github.com/TwiN/gatus/v3/alerting"
"github.com/TwiN/gatus/v3/alerting/alert"
"github.com/TwiN/gatus/v3/alerting/provider/custom"
"github.com/TwiN/gatus/v3/alerting/provider/discord"
"github.com/TwiN/gatus/v3/alerting/provider/email"
"github.com/TwiN/gatus/v3/alerting/provider/mattermost"
"github.com/TwiN/gatus/v3/alerting/provider/messagebird"
"github.com/TwiN/gatus/v3/alerting/provider/pagerduty"
"github.com/TwiN/gatus/v3/alerting/provider/slack"
"github.com/TwiN/gatus/v3/alerting/provider/teams"
"github.com/TwiN/gatus/v3/alerting/provider/telegram"
"github.com/TwiN/gatus/v3/alerting/provider/twilio"
"github.com/TwiN/gatus/v3/config"
"github.com/TwiN/gatus/v3/core"
)
@ -106,7 +114,7 @@ func TestHandleAlertingWhenTriggeredAlertIsAlmostResolvedButendpointStartFailing
}
enabled := true
endpoint := &core.Endpoint{
URL: "http://example.com",
URL: "https://example.com",
Alerts: []*alert.Alert{
{
Type: alert.TypeCustom,
@ -141,7 +149,7 @@ func TestHandleAlertingWhenTriggeredAlertIsResolvedButSendOnResolvedIsFalse(t *t
enabled := true
disabled := false
endpoint := &core.Endpoint{
URL: "http://example.com",
URL: "https://example.com",
Alerts: []*alert.Alert{
{
Type: alert.TypeCustom,
@ -173,7 +181,7 @@ func TestHandleAlertingWhenTriggeredAlertIsResolvedPagerDuty(t *testing.T) {
}
enabled := true
endpoint := &core.Endpoint{
URL: "http://example.com",
URL: "https://example.com",
Alerts: []*alert.Alert{
{
Type: alert.TypePagerDuty,
@ -197,22 +205,122 @@ func TestHandleAlertingWhenTriggeredAlertIsResolvedPagerDuty(t *testing.T) {
func TestHandleAlertingWithProviderThatReturnsAnError(t *testing.T) {
_ = os.Setenv("MOCK_ALERT_PROVIDER", "true")
defer os.Clearenv()
cfg := &config.Config{
Debug: true,
Alerting: &alerting.Config{
enabled := true
scenarios := []struct {
Name string
AlertingConfig *alerting.Config
AlertType alert.Type
}{
{
Name: "custom",
AlertType: alert.TypeCustom,
AlertingConfig: &alerting.Config{
Custom: &custom.AlertProvider{
URL: "https://twin.sh/health",
Method: "GET",
},
},
},
{
Name: "discord",
AlertType: alert.TypeDiscord,
AlertingConfig: &alerting.Config{
Discord: &discord.AlertProvider{
WebhookURL: "https://example.com",
},
},
},
{
Name: "email",
AlertType: alert.TypeEmail,
AlertingConfig: &alerting.Config{
Email: &email.AlertProvider{
From: "from@example.com",
Password: "hunter2",
Host: "mail.example.com",
Port: 587,
To: "to@example.com",
},
},
},
{
Name: "mattermost",
AlertType: alert.TypeMattermost,
AlertingConfig: &alerting.Config{
Mattermost: &mattermost.AlertProvider{
WebhookURL: "https://example.com",
},
},
},
{
Name: "messagebird",
AlertType: alert.TypeMessagebird,
AlertingConfig: &alerting.Config{
Messagebird: &messagebird.AlertProvider{
AccessKey: "1",
Originator: "2",
Recipients: "3",
},
},
},
{
Name: "pagerduty",
AlertType: alert.TypePagerDuty,
AlertingConfig: &alerting.Config{
PagerDuty: &pagerduty.AlertProvider{
IntegrationKey: "00000000000000000000000000000000",
},
},
},
{
Name: "slack",
AlertType: alert.TypeSlack,
AlertingConfig: &alerting.Config{
Slack: &slack.AlertProvider{
WebhookURL: "https://example.com",
},
},
},
{
Name: "teams",
AlertType: alert.TypeTeams,
AlertingConfig: &alerting.Config{
Teams: &teams.AlertProvider{
WebhookURL: "https://example.com",
},
},
},
{
Name: "telegram",
AlertType: alert.TypeTelegram,
AlertingConfig: &alerting.Config{
Telegram: &telegram.AlertProvider{
Token: "1",
ID: "2",
},
},
},
{
Name: "twilio",
AlertType: alert.TypeTwilio,
AlertingConfig: &alerting.Config{
Twilio: &twilio.AlertProvider{
SID: "1",
Token: "2",
From: "3",
To: "4",
},
},
},
}
enabled := true
for _, scenario := range scenarios {
t.Run(scenario.Name, func(t *testing.T) {
endpoint := &core.Endpoint{
URL: "http://example.com",
URL: "https://example.com",
Alerts: []*alert.Alert{
{
Type: alert.TypeCustom,
Type: scenario.AlertType,
Enabled: &enabled,
FailureThreshold: 2,
SuccessThreshold: 2,
@ -221,35 +329,37 @@ func TestHandleAlertingWithProviderThatReturnsAnError(t *testing.T) {
},
},
}
_ = os.Setenv("MOCK_ALERT_PROVIDER_ERROR", "true")
HandleAlerting(endpoint, &core.Result{Success: false}, cfg.Alerting, cfg.Debug)
HandleAlerting(endpoint, &core.Result{Success: false}, scenario.AlertingConfig, true)
verify(t, endpoint, 1, 0, false, "")
HandleAlerting(endpoint, &core.Result{Success: false}, cfg.Alerting, cfg.Debug)
HandleAlerting(endpoint, &core.Result{Success: false}, scenario.AlertingConfig, true)
verify(t, endpoint, 2, 0, false, "The alert should have failed to trigger, because the alert provider is returning an error")
HandleAlerting(endpoint, &core.Result{Success: false}, cfg.Alerting, cfg.Debug)
HandleAlerting(endpoint, &core.Result{Success: false}, scenario.AlertingConfig, true)
verify(t, endpoint, 3, 0, false, "The alert should still not be triggered, because the alert provider is still returning an error")
HandleAlerting(endpoint, &core.Result{Success: false}, cfg.Alerting, cfg.Debug)
HandleAlerting(endpoint, &core.Result{Success: false}, scenario.AlertingConfig, true)
verify(t, endpoint, 4, 0, false, "The alert should still not be triggered, because the alert provider is still returning an error")
_ = os.Setenv("MOCK_ALERT_PROVIDER_ERROR", "false")
HandleAlerting(endpoint, &core.Result{Success: false}, cfg.Alerting, cfg.Debug)
HandleAlerting(endpoint, &core.Result{Success: false}, scenario.AlertingConfig, true)
verify(t, endpoint, 5, 0, true, "The alert should've been triggered because the alert provider is no longer returning an error")
HandleAlerting(endpoint, &core.Result{Success: true}, cfg.Alerting, cfg.Debug)
HandleAlerting(endpoint, &core.Result{Success: true}, scenario.AlertingConfig, true)
verify(t, endpoint, 0, 1, true, "The alert should've still been triggered")
_ = os.Setenv("MOCK_ALERT_PROVIDER_ERROR", "true")
HandleAlerting(endpoint, &core.Result{Success: true}, cfg.Alerting, cfg.Debug)
HandleAlerting(endpoint, &core.Result{Success: true}, scenario.AlertingConfig, true)
verify(t, endpoint, 0, 2, false, "The alert should've been resolved DESPITE THE ALERT PROVIDER RETURNING AN ERROR. See Alert.Triggered for further explanation.")
_ = os.Setenv("MOCK_ALERT_PROVIDER_ERROR", "false")
// Make sure that everything's working as expected after a rough patch
HandleAlerting(endpoint, &core.Result{Success: false}, cfg.Alerting, cfg.Debug)
HandleAlerting(endpoint, &core.Result{Success: false}, scenario.AlertingConfig, true)
verify(t, endpoint, 1, 0, false, "")
HandleAlerting(endpoint, &core.Result{Success: false}, cfg.Alerting, cfg.Debug)
HandleAlerting(endpoint, &core.Result{Success: false}, scenario.AlertingConfig, true)
verify(t, endpoint, 2, 0, true, "The alert should have triggered")
HandleAlerting(endpoint, &core.Result{Success: true}, cfg.Alerting, cfg.Debug)
HandleAlerting(endpoint, &core.Result{Success: true}, scenario.AlertingConfig, true)
verify(t, endpoint, 0, 1, true, "The alert should still be triggered")
HandleAlerting(endpoint, &core.Result{Success: true}, cfg.Alerting, cfg.Debug)
HandleAlerting(endpoint, &core.Result{Success: true}, scenario.AlertingConfig, true)
verify(t, endpoint, 0, 2, false, "The alert should have been resolved")
})
}
}
func TestHandleAlertingWithProviderThatOnlyReturnsErrorOnResolve(t *testing.T) {
@ -267,7 +377,7 @@ func TestHandleAlertingWithProviderThatOnlyReturnsErrorOnResolve(t *testing.T) {
}
enabled := true
endpoint := &core.Endpoint{
URL: "http://example.com",
URL: "https://example.com",
Alerts: []*alert.Alert{
{
Type: alert.TypeCustom,
@ -306,19 +416,19 @@ func TestHandleAlertingWithProviderThatOnlyReturnsErrorOnResolve(t *testing.T) {
func verify(t *testing.T, endpoint *core.Endpoint, expectedNumberOfFailuresInARow, expectedNumberOfSuccessInARow int, expectedTriggered bool, expectedTriggeredReason string) {
if endpoint.NumberOfFailuresInARow != expectedNumberOfFailuresInARow {
t.Fatalf("endpoint.NumberOfFailuresInARow should've been %d, got %d", expectedNumberOfFailuresInARow, endpoint.NumberOfFailuresInARow)
t.Errorf("endpoint.NumberOfFailuresInARow should've been %d, got %d", expectedNumberOfFailuresInARow, endpoint.NumberOfFailuresInARow)
}
if endpoint.NumberOfSuccessesInARow != expectedNumberOfSuccessInARow {
t.Fatalf("endpoint.NumberOfSuccessesInARow should've been %d, got %d", expectedNumberOfSuccessInARow, endpoint.NumberOfSuccessesInARow)
t.Errorf("endpoint.NumberOfSuccessesInARow should've been %d, got %d", expectedNumberOfSuccessInARow, endpoint.NumberOfSuccessesInARow)
}
if endpoint.Alerts[0].Triggered != expectedTriggered {
if len(expectedTriggeredReason) != 0 {
t.Fatal(expectedTriggeredReason)
t.Error(expectedTriggeredReason)
} else {
if expectedTriggered {
t.Fatal("The alert should've been triggered")
t.Error("The alert should've been triggered")
} else {
t.Fatal("The alert shouldn't have been triggered")
t.Error("The alert shouldn't have been triggered")
}
}
}