Mirror of https://github.com/netbirdio/netbird.git (synced 2025-08-18 02:50:43 +02:00)
Add initial support for device posture checks (#1540)
This PR implements the following posture checks:

* Agent minimum version allowed
* OS minimum version allowed
* Geo-location based on connection IP

For the geo-based location we rely on the GeoLite2 databases, which are free IP geolocation databases. MaxMind was tested, and we provide a script that makes it easy to download all the necessary files; see infrastructure_files/download-geolite2.sh. The OpenAPI spec should extensively cover the life cycle of the current version of posture checks.
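To illustrate how a geo-location posture check could consume the new package, here is a minimal sketch. Only Geolocation, NewGeolocation, Lookup, Stop and Record come from this PR; the helper name, the data directory path and the allow-list handling are hypothetical:

package main

import (
	"fmt"
	"net"

	"github.com/netbirdio/netbird/management/server/geolocation"
)

// isPeerLocationAllowed is a hypothetical helper: it resolves the peer's
// connection IP to a country via the new geolocation package and compares
// the ISO code against an allow-list.
func isPeerLocationAllowed(geo *geolocation.Geolocation, peerIP net.IP, allowedISOCodes []string) (bool, error) {
	record, err := geo.Lookup(peerIP)
	if err != nil {
		return false, fmt.Errorf("geo lookup failed: %w", err)
	}
	for _, code := range allowedISOCodes {
		if record.Country.ISOCode == code {
			return true, nil
		}
	}
	return false, nil
}

func main() {
	// The data directory is assumed to contain GeoLite2-City.mmdb and geonames.db,
	// e.g. downloaded with infrastructure_files/download-geolite2.sh.
	// "/var/lib/netbird" is only an example path.
	geo, err := geolocation.NewGeolocation("/var/lib/netbird")
	if err != nil {
		panic(err)
	}
	defer geo.Stop()

	allowed, err := isPeerLocationAllowed(geo, net.ParseIP("89.160.20.128"), []string{"SE", "DE"})
	fmt.Println(allowed, err)
}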
management/server/geolocation/geolocation.go (new file, 255 lines)
@@ -0,0 +1,255 @@
package geolocation

import (
	"bytes"
	"crypto/sha256"
	"fmt"
	"io"
	"net"
	"os"
	"path"
	"sync"
	"time"

	"github.com/oschwald/maxminddb-golang"
	log "github.com/sirupsen/logrus"
)

const MMDBFileName = "GeoLite2-City.mmdb"

type Geolocation struct {
	mmdbPath            string
	mux                 sync.RWMutex
	sha256sum           []byte
	db                  *maxminddb.Reader
	locationDB          *SqliteStore
	stopCh              chan struct{}
	reloadCheckInterval time.Duration
}

type Record struct {
	City struct {
		GeonameID uint `maxminddb:"geoname_id"`
		Names     struct {
			En string `maxminddb:"en"`
		} `maxminddb:"names"`
	} `maxminddb:"city"`
	Continent struct {
		GeonameID uint   `maxminddb:"geoname_id"`
		Code      string `maxminddb:"code"`
	} `maxminddb:"continent"`
	Country struct {
		GeonameID uint   `maxminddb:"geoname_id"`
		ISOCode   string `maxminddb:"iso_code"`
	} `maxminddb:"country"`
}

type City struct {
	GeoNameID int `gorm:"column:geoname_id"`
	CityName  string
}

type Country struct {
	CountryISOCode string `gorm:"column:country_iso_code"`
	CountryName    string
}

func NewGeolocation(datadir string) (*Geolocation, error) {
	mmdbPath := path.Join(datadir, MMDBFileName)

	db, err := openDB(mmdbPath)
	if err != nil {
		return nil, err
	}

	sha256sum, err := getSha256sum(mmdbPath)
	if err != nil {
		return nil, err
	}

	locationDB, err := NewSqliteStore(datadir)
	if err != nil {
		return nil, err
	}

	geo := &Geolocation{
		mmdbPath:            mmdbPath,
		mux:                 sync.RWMutex{},
		sha256sum:           sha256sum,
		db:                  db,
		locationDB:          locationDB,
		reloadCheckInterval: 60 * time.Second, // TODO: make configurable
		stopCh:              make(chan struct{}),
	}

	go geo.reloader()

	return geo, nil
}

func openDB(mmdbPath string) (*maxminddb.Reader, error) {
	_, err := os.Stat(mmdbPath)

	if os.IsNotExist(err) {
		return nil, fmt.Errorf("%v does not exist", mmdbPath)
	} else if err != nil {
		return nil, err
	}

	db, err := maxminddb.Open(mmdbPath)
	if err != nil {
		return nil, fmt.Errorf("%v could not be opened: %w", mmdbPath, err)
	}

	return db, nil
}

func getSha256sum(mmdbPath string) ([]byte, error) {
	f, err := os.Open(mmdbPath)
	if err != nil {
		return nil, err
	}
	defer f.Close()

	h := sha256.New()
	if _, err := io.Copy(h, f); err != nil {
		return nil, err
	}

	return h.Sum(nil), nil
}

func (gl *Geolocation) Lookup(ip net.IP) (*Record, error) {
	gl.mux.RLock()
	defer gl.mux.RUnlock()

	var record Record
	err := gl.db.Lookup(ip, &record)
	if err != nil {
		return nil, err
	}

	return &record, nil
}

// GetAllCountries retrieves a list of all countries.
func (gl *Geolocation) GetAllCountries() ([]Country, error) {
	allCountries, err := gl.locationDB.GetAllCountries()
	if err != nil {
		return nil, err
	}

	countries := make([]Country, 0)
	for _, country := range allCountries {
		if country.CountryName != "" {
			countries = append(countries, country)
		}
	}
	return countries, nil
}

// GetCitiesByCountry retrieves a list of cities in a specific country based on the country's ISO code.
func (gl *Geolocation) GetCitiesByCountry(countryISOCode string) ([]City, error) {
	allCities, err := gl.locationDB.GetCitiesByCountry(countryISOCode)
	if err != nil {
		return nil, err
	}

	cities := make([]City, 0)
	for _, city := range allCities {
		if city.CityName != "" {
			cities = append(cities, city)
		}
	}
	return cities, nil
}

func (gl *Geolocation) Stop() error {
	close(gl.stopCh)
	if gl.db != nil {
		if err := gl.db.Close(); err != nil {
			return err
		}
	}
	if gl.locationDB != nil {
		if err := gl.locationDB.close(); err != nil {
			return err
		}
	}
	return nil
}

func (gl *Geolocation) reloader() {
	for {
		select {
		case <-gl.stopCh:
			return
		case <-time.After(gl.reloadCheckInterval):
			if err := gl.locationDB.reload(); err != nil {
				log.Errorf("geonames db reload failed: %s", err)
			}

			newSha256sum1, err := getSha256sum(gl.mmdbPath)
			if err != nil {
				log.Errorf("failed to calculate sha256 sum for '%s': %s", gl.mmdbPath, err)
				continue
			}
			if !bytes.Equal(gl.sha256sum, newSha256sum1) {
				// we check the sum twice to avoid reloading while the file is still being updated;
				// considering the frequency of file updates (a few times a week), checking twice should be enough
				time.Sleep(50 * time.Millisecond)
				newSha256sum2, err := getSha256sum(gl.mmdbPath)
				if err != nil {
					log.Errorf("failed to calculate sha256 sum for '%s': %s", gl.mmdbPath, err)
					continue
				}
				if !bytes.Equal(newSha256sum1, newSha256sum2) {
					log.Errorf("sha256 sum changed during reloading of '%s'", gl.mmdbPath)
					continue
				}
				err = gl.reload(newSha256sum2)
				if err != nil {
					log.Errorf("mmdb reload failed: %s", err)
				}
			} else {
				log.Debugf("No changes in '%s', no need to reload. Next check is in %.0f seconds.",
					gl.mmdbPath, gl.reloadCheckInterval.Seconds())
			}
		}
	}
}

func (gl *Geolocation) reload(newSha256sum []byte) error {
	gl.mux.Lock()
	defer gl.mux.Unlock()

	log.Infof("Reloading '%s'", gl.mmdbPath)

	err := gl.db.Close()
	if err != nil {
		return err
	}

	db, err := openDB(gl.mmdbPath)
	if err != nil {
		return err
	}

	gl.db = db
	gl.sha256sum = newSha256sum

	log.Infof("Successfully reloaded '%s'", gl.mmdbPath)

	return nil
}

func fileExists(filePath string) (bool, error) {
	_, err := os.Stat(filePath)
	if err == nil {
		return true, nil
	}
	if os.IsNotExist(err) {
		return false, fmt.Errorf("%v does not exist", filePath)
	}
	return false, err
}
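The country and city accessors above are the building blocks for populating geo-location selections (for example, in the posture-check API). Below is a minimal sketch of such a caller, written as if it lived in the geolocation package so that fmt is assumed to be imported; the helper name and the printing are hypothetical, while GetAllCountries and GetCitiesByCountry come from this file:

// listLocations is a hypothetical helper that prints every country known to
// the store together with the number of selectable cities it contains.
func listLocations(geo *Geolocation) error {
	countries, err := geo.GetAllCountries()
	if err != nil {
		return err
	}
	for _, country := range countries {
		cities, err := geo.GetCitiesByCountry(country.CountryISOCode)
		if err != nil {
			return err
		}
		fmt.Printf("%s (%s): %d cities\n", country.CountryName, country.CountryISOCode, len(cities))
	}
	return nil
}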
management/server/geolocation/geolocation_test.go (new file, 55 lines)
@@ -0,0 +1,55 @@
package geolocation

import (
	"net"
	"os"
	"path"
	"sync"
	"testing"

	"github.com/stretchr/testify/assert"

	"github.com/netbirdio/netbird/util"
)

// from https://github.com/maxmind/MaxMind-DB/blob/main/test-data/GeoLite2-City-Test.mmdb
var mmdbPath = "../testdata/GeoLite2-City-Test.mmdb"

func TestGeoLite_Lookup(t *testing.T) {
	tempDir := t.TempDir()
	filename := path.Join(tempDir, MMDBFileName)
	err := util.CopyFileContents(mmdbPath, filename)
	assert.NoError(t, err)
	defer func() {
		err := os.Remove(filename)
		if err != nil {
			t.Errorf("os.Remove: %s", err)
		}
	}()

	db, err := openDB(mmdbPath)
	assert.NoError(t, err)

	geo := &Geolocation{
		mux:    sync.RWMutex{},
		db:     db,
		stopCh: make(chan struct{}),
	}
	assert.NotNil(t, geo)
	defer func() {
		err = geo.Stop()
		if err != nil {
			t.Errorf("geo.Stop: %s", err)
		}
	}()

	record, err := geo.Lookup(net.ParseIP("89.160.20.128"))
	assert.NoError(t, err)
	assert.NotNil(t, record)
	assert.Equal(t, "SE", record.Country.ISOCode)
	assert.Equal(t, uint(2661886), record.Country.GeonameID)
	assert.Equal(t, "Linköping", record.City.Names.En)
	assert.Equal(t, uint(2694762), record.City.GeonameID)
	assert.Equal(t, "EU", record.Continent.Code)
	assert.Equal(t, uint(6255148), record.Continent.GeonameID)
}
management/server/geolocation/store.go (new file, 222 lines)
@@ -0,0 +1,222 @@
package geolocation

import (
	"bytes"
	"fmt"
	"path/filepath"
	"runtime"
	"sync"
	"time"

	log "github.com/sirupsen/logrus"
	"gorm.io/driver/sqlite"
	"gorm.io/gorm"
	"gorm.io/gorm/logger"

	"github.com/netbirdio/netbird/management/server/status"
)

const (
	GeoSqliteDBFile = "geonames.db"
)

// SqliteStore represents a location storage backed by a Sqlite DB.
type SqliteStore struct {
	db        *gorm.DB
	filePath  string
	mux       sync.RWMutex
	closed    bool
	sha256sum []byte
}

func NewSqliteStore(dataDir string) (*SqliteStore, error) {
	file := filepath.Join(dataDir, GeoSqliteDBFile)

	db, err := connectDB(file)
	if err != nil {
		return nil, err
	}

	sha256sum, err := getSha256sum(file)
	if err != nil {
		return nil, err
	}

	return &SqliteStore{
		db:        db,
		filePath:  file,
		mux:       sync.RWMutex{},
		sha256sum: sha256sum,
	}, nil
}

// GetAllCountries returns a list of all countries in the store.
func (s *SqliteStore) GetAllCountries() ([]Country, error) {
	s.mux.RLock()
	defer s.mux.RUnlock()

	if s.closed {
		return nil, status.Errorf(status.PreconditionFailed, "geo location database is not initialized")
	}

	var countries []Country
	result := s.db.Table("geonames").
		Select("country_iso_code", "country_name").
		Group("country_name").
		Scan(&countries)
	if result.Error != nil {
		return nil, result.Error
	}

	return countries, nil
}

// GetCitiesByCountry retrieves a list of cities from the store based on the given country ISO code.
func (s *SqliteStore) GetCitiesByCountry(countryISOCode string) ([]City, error) {
	s.mux.RLock()
	defer s.mux.RUnlock()

	if s.closed {
		return nil, status.Errorf(status.PreconditionFailed, "geo location database is not initialized")
	}

	var cities []City
	result := s.db.Table("geonames").
		Select("geoname_id", "city_name").
		Where("country_iso_code = ?", countryISOCode).
		Group("city_name").
		Scan(&cities)
	if result.Error != nil {
		return nil, result.Error
	}

	return cities, nil
}

// reload attempts to reload the SqliteStore's database if the database file has changed.
func (s *SqliteStore) reload() error {
	s.mux.Lock()
	defer s.mux.Unlock()

	newSha256sum1, err := getSha256sum(s.filePath)
	if err != nil {
		log.Errorf("failed to calculate sha256 sum for '%s': %s", s.filePath, err)
	}

	if !bytes.Equal(s.sha256sum, newSha256sum1) {
		// we check the sum twice to avoid reloading while the file is still being updated;
		// considering the frequency of file updates (a few times a week), checking twice should be enough
		time.Sleep(50 * time.Millisecond)
		newSha256sum2, err := getSha256sum(s.filePath)
		if err != nil {
			return fmt.Errorf("failed to calculate sha256 sum for '%s': %s", s.filePath, err)
		}
		if !bytes.Equal(newSha256sum1, newSha256sum2) {
			return fmt.Errorf("sha256 sum changed during reloading of '%s'", s.filePath)
		}

		log.Infof("Reloading '%s'", s.filePath)
		_ = s.close()
		s.closed = true

		newDb, err := connectDB(s.filePath)
		if err != nil {
			return err
		}

		s.closed = false
		s.db = newDb

		log.Infof("Successfully reloaded '%s'", s.filePath)
	} else {
		log.Debugf("No changes in '%s', no need to reload", s.filePath)
	}

	return nil
}

// close closes the database connection.
// It retrieves the underlying *sql.DB object from the *gorm.DB object
// and calls the Close() method on it.
func (s *SqliteStore) close() error {
	sqlDB, err := s.db.DB()
	if err != nil {
		return err
	}
	return sqlDB.Close()
}

// connectDB connects to an SQLite database and prepares it by setting up an in-memory database.
func connectDB(filePath string) (*gorm.DB, error) {
	start := time.Now()
	defer func() {
		log.Debugf("took %v to setup geoname db", time.Since(start))
	}()

	_, err := fileExists(filePath)
	if err != nil {
		return nil, err
	}

	storeStr := "file::memory:?cache=shared"
	if runtime.GOOS == "windows" {
		storeStr = "file::memory:"
	}

	db, err := gorm.Open(sqlite.Open(storeStr), &gorm.Config{
		Logger:      logger.Default.LogMode(logger.Silent),
		PrepareStmt: true,
	})
	if err != nil {
		return nil, err
	}

	if err := setupInMemoryDBFromFile(db, filePath); err != nil {
		return nil, err
	}

	sql, err := db.DB()
	if err != nil {
		return nil, err
	}
	conns := runtime.NumCPU()
	sql.SetMaxOpenConns(conns)

	return db, nil
}

// setupInMemoryDBFromFile prepares an in-memory DB by attaching a file database
// and copying the data from the attached database to the in-memory database.
func setupInMemoryDBFromFile(db *gorm.DB, source string) error {
	// Attach the on-disk database to the in-memory database
	attachStmt := fmt.Sprintf("ATTACH DATABASE '%s' AS source;", source)
	if err := db.Exec(attachStmt).Error; err != nil {
		return err
	}

	err := db.Exec(`
		CREATE TABLE geonames AS SELECT * FROM source.geonames;
	`).Error
	if err != nil {
		return err
	}

	// Detach the on-disk database from the in-memory database
	err = db.Exec("DETACH DATABASE source;").Error
	if err != nil {
		return err
	}

	// index the geoname_id and country_iso_code fields
	err = db.Exec("CREATE INDEX idx_geonames_country_iso_code ON geonames(country_iso_code);").Error
	if err != nil {
		log.Fatal(err)
	}

	err = db.Exec("CREATE INDEX idx_geonames_geoname_id ON geonames(geoname_id);").Error
	if err != nil {
		log.Fatal(err)
	}

	return nil
}