package server

import (
	"encoding/json"
	"errors"
	"fmt"
	"path/filepath"
	"runtime"
	"strings"
	"sync"
	"time"

	log "github.com/sirupsen/logrus"
	"gorm.io/driver/postgres"
	"gorm.io/driver/sqlite"
	"gorm.io/gorm"
	"gorm.io/gorm/clause"
	"gorm.io/gorm/logger"

	nbdns "github.com/netbirdio/netbird/dns"
	"github.com/netbirdio/netbird/management/server/account"
	nbgroup "github.com/netbirdio/netbird/management/server/group"
	nbpeer "github.com/netbirdio/netbird/management/server/peer"
	"github.com/netbirdio/netbird/management/server/posture"
	"github.com/netbirdio/netbird/management/server/status"
	"github.com/netbirdio/netbird/management/server/telemetry"
	"github.com/netbirdio/netbird/route"
)

// SqlStore represents an account storage backed by a SQL DB persisted to disk
type SqlStore struct {
	db                *gorm.DB
	accountLocks      sync.Map
	globalAccountLock sync.Mutex
	metrics           telemetry.AppMetrics
	installationPK    int
	storeEngine       StoreEngine
}

type installation struct {
	ID                  uint `gorm:"primaryKey"`
	InstallationIDValue string
}

type migrationFunc func(*gorm.DB) error

// NewSqlStore creates a new SqlStore instance.
func NewSqlStore(db *gorm.DB, storeEngine StoreEngine, metrics telemetry.AppMetrics) (*SqlStore, error) {
	sql, err := db.DB()
	if err != nil {
		return nil, err
	}
	conns := runtime.NumCPU()
	sql.SetMaxOpenConns(conns) // TODO: make it configurable

	if err := migrate(db); err != nil {
		return nil, fmt.Errorf("migrate: %w", err)
	}
	err = db.AutoMigrate(
		&SetupKey{}, &nbpeer.Peer{}, &User{}, &PersonalAccessToken{}, &nbgroup.Group{},
		&Account{}, &Policy{}, &PolicyRule{}, &route.Route{}, &nbdns.NameServerGroup{},
		&installation{}, &account.ExtraSettings{}, &posture.Checks{}, &nbpeer.NetworkAddress{},
	)
	if err != nil {
		return nil, fmt.Errorf("auto migrate: %w", err)
	}

	return &SqlStore{db: db, storeEngine: storeEngine, metrics: metrics, installationPK: 1}, nil
}

// AcquireGlobalLock acquires a global lock across all accounts and returns a function that releases the lock
func (s *SqlStore) AcquireGlobalLock() (unlock func()) {
	log.Tracef("acquiring global lock")
	start := time.Now()
	s.globalAccountLock.Lock()

	unlock = func() {
		s.globalAccountLock.Unlock()
		log.Tracef("released global lock in %v", time.Since(start))
	}

	took := time.Since(start)
	log.Tracef("took %v to acquire global lock", took)
	if s.metrics != nil {
		s.metrics.StoreMetrics().CountGlobalLockAcquisitionDuration(took)
	}

	return unlock
}
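
// AcquireAccountWriteLock acquires a write lock for the given account and returns a function that releases the lock.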
func (s *SqlStore) AcquireAccountWriteLock(accountID string) (unlock func()) {
	log.Tracef("acquiring write lock for account %s", accountID)

	start := time.Now()
	value, _ := s.accountLocks.LoadOrStore(accountID, &sync.RWMutex{})
	mtx := value.(*sync.RWMutex)
	mtx.Lock()

	unlock = func() {
		mtx.Unlock()
		log.Tracef("released write lock for account %s in %v", accountID, time.Since(start))
	}

	return unlock
}
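
// AcquireAccountReadLock acquires a read lock for the given account and returns a function that releases the lock.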
func (s *SqlStore) AcquireAccountReadLock(accountID string) (unlock func()) {
	log.Tracef("acquiring read lock for account %s", accountID)

	start := time.Now()
	value, _ := s.accountLocks.LoadOrStore(accountID, &sync.RWMutex{})
	mtx := value.(*sync.RWMutex)
	mtx.RLock()

	unlock = func() {
		mtx.RUnlock()
		log.Tracef("released read lock for account %s in %v", accountID, time.Since(start))
	}

	return unlock
}
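
// SaveAccount persists the account and all of its associated resources (setup keys, peers, users,
// PATs, groups, routes, and name server groups) in a single transaction, replacing any previously
// stored state for that account.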
func (s *SqlStore) SaveAccount(account *Account) error {
	start := time.Now()

	for _, key := range account.SetupKeys {
		account.SetupKeysG = append(account.SetupKeysG, *key)
	}

	for id, peer := range account.Peers {
		peer.ID = id
		account.PeersG = append(account.PeersG, *peer)
	}

	for id, user := range account.Users {
		user.Id = id
		for id, pat := range user.PATs {
			pat.ID = id
			user.PATsG = append(user.PATsG, *pat)
		}
		account.UsersG = append(account.UsersG, *user)
	}

	for id, group := range account.Groups {
		group.ID = id
		account.GroupsG = append(account.GroupsG, *group)
	}

	for id, route := range account.Routes {
		route.ID = id
		account.RoutesG = append(account.RoutesG, *route)
	}

	for id, ns := range account.NameServerGroups {
		ns.ID = id
		account.NameServerGroupsG = append(account.NameServerGroupsG, *ns)
	}

	err := s.db.Transaction(func(tx *gorm.DB) error {
		result := tx.Select(clause.Associations).Delete(account.Policies, "account_id = ?", account.Id)
		if result.Error != nil {
			return result.Error
		}

		result = tx.Select(clause.Associations).Delete(account.UsersG, "account_id = ?", account.Id)
		if result.Error != nil {
			return result.Error
		}

		result = tx.Select(clause.Associations).Delete(account)
		if result.Error != nil {
			return result.Error
		}

		result = tx.
			Session(&gorm.Session{FullSaveAssociations: true}).
			Clauses(clause.OnConflict{UpdateAll: true}).
			Create(account)
		if result.Error != nil {
			return result.Error
		}
		return nil
	})

	took := time.Since(start)
	if s.metrics != nil {
		s.metrics.StoreMetrics().CountPersistenceDuration(took)
	}
	log.Debugf("took %d ms to persist an account to the store", took.Milliseconds())

	return err
}
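
// DeleteAccount removes the account and its associated resources from the store in a single transaction.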
func (s *SqlStore) DeleteAccount(account *Account) error {
	start := time.Now()

	err := s.db.Transaction(func(tx *gorm.DB) error {
		result := tx.Select(clause.Associations).Delete(account.Policies, "account_id = ?", account.Id)
		if result.Error != nil {
			return result.Error
		}

		result = tx.Select(clause.Associations).Delete(account.UsersG, "account_id = ?", account.Id)
		if result.Error != nil {
			return result.Error
		}

		result = tx.Select(clause.Associations).Delete(account)
		if result.Error != nil {
			return result.Error
		}

		return nil
	})

	took := time.Since(start)
	if s.metrics != nil {
		s.metrics.StoreMetrics().CountPersistenceDuration(took)
	}
	log.Debugf("took %d ms to delete an account from the store", took.Milliseconds())

	return err
}
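
// SaveInstallationID stores the installation ID, overwriting any previously stored value.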
func (s *SqlStore) SaveInstallationID(ID string) error {
	installation := installation{InstallationIDValue: ID}
	installation.ID = uint(s.installationPK)

	return s.db.Clauses(clause.OnConflict{UpdateAll: true}).Create(&installation).Error
}
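
// GetInstallationID returns the stored installation ID, or an empty string if none is found.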
func (s *SqlStore) GetInstallationID() string {
	var installation installation

	if result := s.db.First(&installation, "id = ?", s.installationPK); result.Error != nil {
		return ""
	}

	return installation.InstallationIDValue
}
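
// SavePeerStatus updates the status of the given peer, returning a not-found error if the peer
// does not exist in the account.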
func (s *SqlStore) SavePeerStatus(accountID, peerID string, peerStatus nbpeer.PeerStatus) error {
	var peerCopy nbpeer.Peer
	peerCopy.Status = &peerStatus
	result := s.db.Model(&nbpeer.Peer{}).
		Where("account_id = ? AND id = ?", accountID, peerID).
		Updates(peerCopy)

	if result.Error != nil {
		return result.Error
	}

	if result.RowsAffected == 0 {
		return status.Errorf(status.NotFound, "peer %s not found", peerID)
	}

	return nil
}
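
// SavePeerLocation updates the location of the given peer, returning a not-found error if the peer
// does not exist in the account.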
func (s *SqlStore) SavePeerLocation(accountID string, peerWithLocation *nbpeer.Peer) error {
	// To maintain data integrity, we create a copy of the peer's location to prevent unintended updates to other fields.
	var peerCopy nbpeer.Peer
	// Since the location field has been migrated to JSON serialization,
	// updating the struct ensures the correct data format is inserted into the database.
	peerCopy.Location = peerWithLocation.Location

	result := s.db.Model(&nbpeer.Peer{}).
		Where("account_id = ? and id = ?", accountID, peerWithLocation.ID).
		Updates(peerCopy)

	if result.Error != nil {
		return result.Error
	}

	if result.RowsAffected == 0 {
		return status.Errorf(status.NotFound, "peer %s not found", peerWithLocation.ID)
	}

	return nil
}

// DeleteHashedPAT2TokenIDIndex is a noop in SqlStore
func (s *SqlStore) DeleteHashedPAT2TokenIDIndex(hashedToken string) error {
	return nil
}

// DeleteTokenID2UserIDIndex is a noop in SqlStore
func (s *SqlStore) DeleteTokenID2UserIDIndex(tokenID string) error {
	return nil
}
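
// GetAccountByPrivateDomain returns the account that owns the given private primary domain.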
func (s *SqlStore) GetAccountByPrivateDomain(domain string) (*Account, error) {
	var account Account

	result := s.db.First(&account, "domain = ? and is_domain_primary_account = ? and domain_category = ?",
		strings.ToLower(domain), true, PrivateCategory)
	if result.Error != nil {
		if errors.Is(result.Error, gorm.ErrRecordNotFound) {
			return nil, status.Errorf(status.NotFound, "account not found: provided domain is not registered or is not private")
		}
		log.Errorf("error when getting account from the store: %s", result.Error)
		return nil, status.Errorf(status.Internal, "issue getting account from store")
	}

	// TODO: rework to not call GetAccount
	return s.GetAccount(account.Id)
}
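
// GetAccountBySetupKey returns the account that the given setup key belongs to.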
func (s *SqlStore) GetAccountBySetupKey(setupKey string) (*Account, error) {
	var key SetupKey
	result := s.db.Select("account_id").First(&key, "key = ?", strings.ToUpper(setupKey))
	if result.Error != nil {
		if errors.Is(result.Error, gorm.ErrRecordNotFound) {
			return nil, status.Errorf(status.NotFound, "account not found: index lookup failed")
		}
		log.Errorf("error when getting setup key from the store: %s", result.Error)
		return nil, status.Errorf(status.Internal, "issue getting setup key from store")
	}

	if key.AccountID == "" {
		return nil, status.Errorf(status.NotFound, "account not found: index lookup failed")
	}

	return s.GetAccount(key.AccountID)
}
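
// GetTokenIDByHashedToken looks up the ID of the personal access token with the given hashed token value.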
func (s *SqlStore) GetTokenIDByHashedToken(hashedToken string) (string, error) {
	var token PersonalAccessToken
	result := s.db.First(&token, "hashed_token = ?", hashedToken)
	if result.Error != nil {
		if errors.Is(result.Error, gorm.ErrRecordNotFound) {
			return "", status.Errorf(status.NotFound, "account not found: index lookup failed")
		}
		log.Errorf("error when getting token from the store: %s", result.Error)
		return "", status.Errorf(status.Internal, "issue getting account from store")
	}

	return token.ID, nil
}
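
// GetUserByTokenID returns the user that owns the personal access token with the given ID,
// with the user's PATs preloaded.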
func (s *SqlStore) GetUserByTokenID(tokenID string) (*User, error) {
	var token PersonalAccessToken
	result := s.db.First(&token, "id = ?", tokenID)
	if result.Error != nil {
		if errors.Is(result.Error, gorm.ErrRecordNotFound) {
			return nil, status.Errorf(status.NotFound, "account not found: index lookup failed")
		}
		log.Errorf("error when getting token from the store: %s", result.Error)
		return nil, status.Errorf(status.Internal, "issue getting account from store")
	}

	if token.UserID == "" {
		return nil, status.Errorf(status.NotFound, "account not found: index lookup failed")
	}

	var user User
	result = s.db.Preload("PATsG").First(&user, "id = ?", token.UserID)
	if result.Error != nil {
		return nil, status.Errorf(status.NotFound, "account not found: index lookup failed")
	}

	user.PATs = make(map[string]*PersonalAccessToken, len(user.PATsG))
	for _, pat := range user.PATsG {
		user.PATs[pat.ID] = pat.Copy()
	}

	return &user, nil
}
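
// GetAllAccounts returns all accounts stored in the DB; accounts that fail to load are skipped.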
func (s *SqlStore) GetAllAccounts() (all []*Account) {
	var accounts []Account
	result := s.db.Find(&accounts)
	if result.Error != nil {
		return all
	}

	for _, account := range accounts {
		if acc, err := s.GetAccount(account.Id); err == nil {
			all = append(all, acc)
		}
	}

	return all
}
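
// GetAccount loads an account with all of its associations and converts the gorm helper slices
// back into the maps used by the rest of the code.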
func (s *SqlStore) GetAccount(accountID string) (*Account, error) {
	var account Account

	result := s.db.Model(&account).
		Preload("UsersG.PATsG"). // has to be specified as this is a nested reference
		Preload(clause.Associations).
		First(&account, "id = ?", accountID)
	if result.Error != nil {
		log.Errorf("error when getting account %s from the store: %s", accountID, result.Error)
		if errors.Is(result.Error, gorm.ErrRecordNotFound) {
			return nil, status.Errorf(status.NotFound, "account not found")
		}
		return nil, status.Errorf(status.Internal, "issue getting account from store")
	}

	// we have to manually preload policy rules as it seems that gorm preloading doesn't do it for us
	for i, policy := range account.Policies {
		var rules []*PolicyRule
		err := s.db.Model(&PolicyRule{}).Find(&rules, "policy_id = ?", policy.ID).Error
		if err != nil {
			return nil, status.Errorf(status.NotFound, "rule not found")
		}
		account.Policies[i].Rules = rules
	}

	account.SetupKeys = make(map[string]*SetupKey, len(account.SetupKeysG))
	for _, key := range account.SetupKeysG {
		account.SetupKeys[key.Key] = key.Copy()
	}
	account.SetupKeysG = nil

	account.Peers = make(map[string]*nbpeer.Peer, len(account.PeersG))
	for _, peer := range account.PeersG {
		account.Peers[peer.ID] = peer.Copy()
	}
	account.PeersG = nil

	account.Users = make(map[string]*User, len(account.UsersG))
	for _, user := range account.UsersG {
		user.PATs = make(map[string]*PersonalAccessToken, len(user.PATs))
		for _, pat := range user.PATsG {
			user.PATs[pat.ID] = pat.Copy()
		}
		account.Users[user.Id] = user.Copy()
	}
	account.UsersG = nil

	account.Groups = make(map[string]*nbgroup.Group, len(account.GroupsG))
	for _, group := range account.GroupsG {
		account.Groups[group.ID] = group.Copy()
	}
	account.GroupsG = nil

	account.Routes = make(map[route.ID]*route.Route, len(account.RoutesG))
	for _, route := range account.RoutesG {
		account.Routes[route.ID] = route.Copy()
	}
	account.RoutesG = nil

	account.NameServerGroups = make(map[string]*nbdns.NameServerGroup, len(account.NameServerGroupsG))
	for _, ns := range account.NameServerGroupsG {
		account.NameServerGroups[ns.ID] = ns.Copy()
	}
	account.NameServerGroupsG = nil

	return &account, nil
}
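
// GetAccountByUser returns the account that the given user belongs to.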
func (s *SqlStore) GetAccountByUser(userID string) (*Account, error) {
	var user User
	result := s.db.Select("account_id").First(&user, "id = ?", userID)
	if result.Error != nil {
		if errors.Is(result.Error, gorm.ErrRecordNotFound) {
			return nil, status.Errorf(status.NotFound, "account not found: index lookup failed")
		}
		log.Errorf("error when getting user from the store: %s", result.Error)
		return nil, status.Errorf(status.Internal, "issue getting account from store")
	}

	if user.AccountID == "" {
		return nil, status.Errorf(status.NotFound, "account not found: index lookup failed")
	}

	return s.GetAccount(user.AccountID)
}
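
// GetAccountByPeerID returns the account that the peer with the given ID belongs to.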
func (s *SqlStore) GetAccountByPeerID(peerID string) (*Account, error) {
	var peer nbpeer.Peer
	result := s.db.Select("account_id").First(&peer, "id = ?", peerID)
	if result.Error != nil {
		if errors.Is(result.Error, gorm.ErrRecordNotFound) {
			return nil, status.Errorf(status.NotFound, "account not found: index lookup failed")
		}
		log.Errorf("error when getting peer from the store: %s", result.Error)
		return nil, status.Errorf(status.Internal, "issue getting account from store")
	}

	if peer.AccountID == "" {
		return nil, status.Errorf(status.NotFound, "account not found: index lookup failed")
	}

	return s.GetAccount(peer.AccountID)
}
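
// GetAccountByPeerPubKey returns the account that the peer with the given public key belongs to.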
func (s *SqlStore) GetAccountByPeerPubKey(peerKey string) (*Account, error) {
	var peer nbpeer.Peer

	result := s.db.Select("account_id").First(&peer, "key = ?", peerKey)
	if result.Error != nil {
		if errors.Is(result.Error, gorm.ErrRecordNotFound) {
			return nil, status.Errorf(status.NotFound, "account not found: index lookup failed")
		}
		log.Errorf("error when getting peer from the store: %s", result.Error)
		return nil, status.Errorf(status.Internal, "issue getting account from store")
	}

	if peer.AccountID == "" {
		return nil, status.Errorf(status.NotFound, "account not found: index lookup failed")
	}

	return s.GetAccount(peer.AccountID)
}
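
// GetAccountIDByPeerPubKey returns the ID of the account that the peer with the given public key belongs to.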
func (s *SqlStore) GetAccountIDByPeerPubKey(peerKey string) (string, error) {
	var peer nbpeer.Peer
	var accountID string
	result := s.db.Model(&peer).Select("account_id").Where("key = ?", peerKey).First(&accountID)
	if result.Error != nil {
		if errors.Is(result.Error, gorm.ErrRecordNotFound) {
			return "", status.Errorf(status.NotFound, "account not found: index lookup failed")
		}
		log.Errorf("error when getting peer from the store: %s", result.Error)
		return "", status.Errorf(status.Internal, "issue getting account from store")
	}

	return accountID, nil
}

// SaveUserLastLogin stores the last login time for a user in DB.
func (s *SqlStore) SaveUserLastLogin(accountID, userID string, lastLogin time.Time) error {
	var user User

	result := s.db.First(&user, "account_id = ? and id = ?", accountID, userID)
	if result.Error != nil {
		if errors.Is(result.Error, gorm.ErrRecordNotFound) {
			return status.Errorf(status.NotFound, "user %s not found", userID)
		}
		log.Errorf("error when getting user from the store: %s", result.Error)
		return status.Errorf(status.Internal, "issue getting user from store")
	}

	user.LastLogin = lastLogin

	return s.db.Save(user).Error
}
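
// GetPostureCheckByChecksDefinition looks up a posture check of the account whose serialized
// checks definition matches the given one.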
func (s *SqlStore) GetPostureCheckByChecksDefinition(accountID string, checks *posture.ChecksDefinition) (*posture.Checks, error) {
	definitionJSON, err := json.Marshal(checks)
	if err != nil {
		return nil, err
	}

	var postureCheck posture.Checks
	err = s.db.Where("account_id = ? AND checks = ?", accountID, string(definitionJSON)).First(&postureCheck).Error
	if err != nil {
		return nil, err
	}

	return &postureCheck, nil
}

// Close closes the underlying DB connection
func (s *SqlStore) Close() error {
	sql, err := s.db.DB()
	if err != nil {
		return fmt.Errorf("get db: %w", err)
	}
	return sql.Close()
}

// GetStoreEngine returns the underlying store engine
func (s *SqlStore) GetStoreEngine() StoreEngine {
	return s.storeEngine
}

// NewSqliteStore creates a new SQLite store.
func NewSqliteStore(dataDir string, metrics telemetry.AppMetrics) (*SqlStore, error) {
	storeStr := "store.db?cache=shared"
	if runtime.GOOS == "windows" {
		// To avoid `The process cannot access the file because it is being used by another process` on Windows
		storeStr = "store.db"
	}

	file := filepath.Join(dataDir, storeStr)
	db, err := gorm.Open(sqlite.Open(file), &gorm.Config{
		Logger:          logger.Default.LogMode(logger.Silent),
		CreateBatchSize: 400,
		PrepareStmt:     true,
	})
	if err != nil {
		return nil, err
	}

	return NewSqlStore(db, SqliteStoreEngine, metrics)
}

// NewPostgresqlStore creates a new Postgres store.
func NewPostgresqlStore(dsn string, metrics telemetry.AppMetrics) (*SqlStore, error) {
	db, err := gorm.Open(postgres.Open(dsn), &gorm.Config{
		Logger:      logger.Default.LogMode(logger.Silent),
		PrepareStmt: true,
	})
	if err != nil {
		return nil, err
	}

	return NewSqlStore(db, PostgresStoreEngine, metrics)
}

// NewSqliteStoreFromFileStore restores a store from FileStore and stores the SQLite DB in a file located in dataDir.
func NewSqliteStoreFromFileStore(fileStore *FileStore, dataDir string, metrics telemetry.AppMetrics) (*SqlStore, error) {
	store, err := NewSqliteStore(dataDir, metrics)
	if err != nil {
		return nil, err
	}

	err = store.SaveInstallationID(fileStore.InstallationID)
	if err != nil {
		return nil, err
	}

	for _, account := range fileStore.GetAllAccounts() {
		err := store.SaveAccount(account)
		if err != nil {
			return nil, err
		}
	}

	return store, nil
}

// NewPostgresqlStoreFromFileStore restores a store from FileStore and stores the data in a Postgres DB.
func NewPostgresqlStoreFromFileStore(fileStore *FileStore, dsn string, metrics telemetry.AppMetrics) (*SqlStore, error) {
	store, err := NewPostgresqlStore(dsn, metrics)
	if err != nil {
		return nil, err
	}

	err = store.SaveInstallationID(fileStore.InstallationID)
	if err != nil {
		return nil, err
	}

	for _, account := range fileStore.GetAllAccounts() {
		err := store.SaveAccount(account)
		if err != nil {
			return nil, err
		}
	}

	return store, nil
}