[management] Auto update geolite (#2297)

Introduces helper functions to fetch and verify GeoLite database versions, download new files when the local copies are outdated, and delete old ones. It also refactors filename handling for clarity and consistency, and adds a flag to disable auto-updating. The changes aim to simplify GeoLite database management for admins.
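At startup, the management service now resolves a versioned filename for each database (for example GeoLite2-City_<date>.mmdb), downloads and verifies any file that is missing, and removes older copies. A minimal sketch of that flow, paraphrasing the NewGeolocation changes in this diff (error handling and the mmdb reader/SQLite store setup are omitted; this is an illustration, not the verbatim code):

// resolve versioned filenames such as GeoLite2-City_<date>.mmdb and geonames_<date>.db
mmdbFile, err := getDatabaseFilename(ctx, geoLiteCityTarGZURL, filepath.Join(dataDir, mmdbPattern), autoUpdate)
geonamesDbFile, err := getDatabaseFilename(ctx, geoLiteCityZipURL, filepath.Join(dataDir, geonamesdbPattern), autoUpdate)

// download and checksum-verify whichever database is not already on disk
err = loadGeolocationDatabases(ctx, dataDir, mmdbFile, geonamesDbFile)

// delete previously downloaded versions that no longer match the current filenames
err = cleanupMaxMindDatabases(ctx, dataDir, mmdbFile, geonamesDbFile)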
This commit is contained in:
benniekiss 2024-09-09 12:27:42 -04:00 committed by GitHub
parent c720d54de6
commit 12c36312b5
No known key found for this signature in database
GPG Key ID: B5690EEEBB952194
14 changed files with 199 additions and 334 deletions


@@ -209,8 +209,8 @@ jobs:
working-directory: infrastructure_files/artifacts
run: |
sleep 30
docker compose exec management ls -l /var/lib/netbird/ | grep -i GeoLite2-City.mmdb
docker compose exec management ls -l /var/lib/netbird/ | grep -i geonames.db
docker compose exec management ls -l /var/lib/netbird/ | grep -i GeoLite2-City_[0-9]*.mmdb
docker compose exec management ls -l /var/lib/netbird/ | grep -i geonames_[0-9]*.db
test-getting-started-script:
runs-on: ubuntu-latest
@@ -237,7 +237,7 @@ jobs:
run: test -f management.json
- name: test turnserver.conf file gen postgres
run: |
run: |
set -x
test -f turnserver.conf
grep external-ip turnserver.conf
@@ -278,7 +278,7 @@ jobs:
run: test -f management.json
- name: test turnserver.conf file gen CockroachDB
run: |
run: |
set -x
test -f turnserver.conf
grep external-ip turnserver.conf
@@ -291,21 +291,3 @@ jobs:
- name: test relay.env file gen CockroachDB
run: test -f relay.env
test-download-geolite2-script:
runs-on: ubuntu-latest
steps:
- name: Install jq
run: sudo apt-get update && sudo apt-get install -y unzip sqlite3
- name: Checkout code
uses: actions/checkout@v3
- name: test script
run: bash -x infrastructure_files/download-geolite2.sh
- name: test mmdb file exists
run: test -f GeoLite2-City.mmdb
- name: test geonames file exists
run: test -f geonames.db

.gitignore vendored (1 line changed)

@@ -29,4 +29,3 @@ infrastructure_files/setup.env
infrastructure_files/setup-*.env
.vscode
.DS_Store
GeoLite2-City*

client/ui/bundled.go Normal file (12 lines changed)

File diff suppressed because one or more lines are too long


@@ -1,109 +0,0 @@
#!/bin/bash
# to install sha256sum on mac: brew install coreutils
if ! command -v sha256sum &> /dev/null
then
echo "sha256sum is not installed or not in PATH, please install with your package manager. e.g. sudo apt install sha256sum" > /dev/stderr
exit 1
fi
if ! command -v sqlite3 &> /dev/null
then
echo "sqlite3 is not installed or not in PATH, please install with your package manager. e.g. sudo apt install sqlite3" > /dev/stderr
exit 1
fi
if ! command -v unzip &> /dev/null
then
echo "unzip is not installed or not in PATH, please install with your package manager. e.g. sudo apt install unzip" > /dev/stderr
exit 1
fi
download_geolite_mmdb() {
DATABASE_URL="https://pkgs.netbird.io/geolocation-dbs/GeoLite2-City/download?suffix=tar.gz"
SIGNATURE_URL="https://pkgs.netbird.io/geolocation-dbs/GeoLite2-City/download?suffix=tar.gz.sha256"
# Download the database and signature files
echo "Downloading mmdb signature file..."
SIGNATURE_FILE=$(curl -s -L -O -J "$SIGNATURE_URL" -w "%{filename_effective}")
echo "Downloading mmdb database file..."
DATABASE_FILE=$(curl -s -L -O -J "$DATABASE_URL" -w "%{filename_effective}")
# Verify the signature
echo "Verifying signature..."
if sha256sum -c --status "$SIGNATURE_FILE"; then
echo "Signature is valid."
else
echo "Signature is invalid. Aborting."
exit 1
fi
# Unpack the database file
EXTRACTION_DIR=$(basename "$DATABASE_FILE" .tar.gz)
echo "Unpacking $DATABASE_FILE..."
mkdir -p "$EXTRACTION_DIR"
tar -xzvf "$DATABASE_FILE" > /dev/null 2>&1
MMDB_FILE="GeoLite2-City.mmdb"
cp "$EXTRACTION_DIR"/"$MMDB_FILE" $MMDB_FILE
# Remove downloaded files
rm -r "$EXTRACTION_DIR"
rm "$DATABASE_FILE" "$SIGNATURE_FILE"
# Done. Print next steps
echo ""
echo "Process completed successfully."
echo "Now you can place $MMDB_FILE to 'datadir' of management service."
echo -e "Example:\n\tdocker compose cp $MMDB_FILE management:/var/lib/netbird/"
}
download_geolite_csv_and_create_sqlite_db() {
DATABASE_URL="https://pkgs.netbird.io/geolocation-dbs/GeoLite2-City-CSV/download?suffix=zip"
SIGNATURE_URL="https://pkgs.netbird.io/geolocation-dbs/GeoLite2-City-CSV/download?suffix=zip.sha256"
# Download the database file
echo "Downloading csv signature file..."
SIGNATURE_FILE=$(curl -s -L -O -J "$SIGNATURE_URL" -w "%{filename_effective}")
echo "Downloading csv database file..."
DATABASE_FILE=$(curl -s -L -O -J "$DATABASE_URL" -w "%{filename_effective}")
# Verify the signature
echo "Verifying signature..."
if sha256sum -c --status "$SIGNATURE_FILE"; then
echo "Signature is valid."
else
echo "Signature is invalid. Aborting."
exit 1
fi
# Unpack the database file
EXTRACTION_DIR=$(basename "$DATABASE_FILE" .zip)
DB_NAME="geonames.db"
echo "Unpacking $DATABASE_FILE..."
unzip "$DATABASE_FILE" > /dev/null 2>&1
# Create SQLite database and import data from CSV
sqlite3 "$DB_NAME" <<EOF
.mode csv
.import "$EXTRACTION_DIR/GeoLite2-City-Locations-en.csv" geonames
EOF
# Remove downloaded and extracted files
rm -r -r "$EXTRACTION_DIR"
rm "$DATABASE_FILE" "$SIGNATURE_FILE"
echo ""
echo "SQLite database '$DB_NAME' created successfully."
echo "Now you can place $DB_NAME to 'datadir' of management service."
echo -e "Example:\n\tdocker compose cp $DB_NAME management:/var/lib/netbird/"
}
download_geolite_mmdb
echo -e "\n\n"
download_geolite_csv_and_create_sqlite_db
echo -e "\n\n"
echo "After copying the database files to the management service. You can restart the management service with:"
echo -e "Example:\n\tdocker compose restart management"


@@ -123,6 +123,8 @@ var (
return nil
},
RunE: func(cmd *cobra.Command, args []string) error {
flag.Parse()
ctx, cancel := context.WithCancel(cmd.Context())
defer cancel()
//nolint
@@ -178,11 +180,11 @@ var (
}
}
geo, err := geolocation.NewGeolocation(ctx, config.Datadir)
geo, err := geolocation.NewGeolocation(ctx, config.Datadir, !disableGeoliteUpdate)
if err != nil {
log.WithContext(ctx).Warnf("could not initialize geo location service: %v, we proceed without geo support", err)
log.WithContext(ctx).Warnf("could not initialize geolocation service. proceeding without geolocation support: %v", err)
} else {
log.WithContext(ctx).Infof("geo location service has been initialized from %s", config.Datadir)
log.WithContext(ctx).Infof("geolocation service has been initialized from %s", config.Datadir)
}
integratedPeerValidator, err := integrations.NewIntegratedValidator(ctx, eventStore)


@@ -24,6 +24,7 @@ var (
logFile string
disableMetrics bool
disableSingleAccMode bool
disableGeoliteUpdate bool
idpSignKeyRefreshEnabled bool
userDeleteFromIDPEnabled bool
@@ -65,6 +66,7 @@ func init() {
mgmtCmd.Flags().StringVar(&dnsDomain, "dns-domain", defaultSingleAccModeDomain, fmt.Sprintf("Domain used for peer resolution. This is appended to the peer's name, e.g. pi-server. %s. Max length is 192 characters to allow appending to a peer name with up to 63 characters.", defaultSingleAccModeDomain))
mgmtCmd.Flags().BoolVar(&idpSignKeyRefreshEnabled, idpSignKeyRefreshEnabledFlagName, false, "Enable cache headers evaluation to determine signing key rotation period. This will refresh the signing key upon expiry.")
mgmtCmd.Flags().BoolVar(&userDeleteFromIDPEnabled, "user-delete-from-idp", false, "Allows to delete user from IDP when user is deleted from account")
mgmtCmd.Flags().BoolVar(&disableGeoliteUpdate, "disable-geolite-update", true, "disables automatic updates to the Geolite2 geolocation databases")
rootCmd.MarkFlagRequired("config") //nolint
rootCmd.PersistentFlags().StringVar(&logLevel, "log-level", "info", "")


@@ -1,10 +1,9 @@
package geolocation
import (
"context"
"encoding/csv"
"fmt"
"io"
"net/url"
"os"
"path"
"strconv"
@@ -20,26 +19,27 @@ const (
geoLiteCityZipURL = "https://pkgs.netbird.io/geolocation-dbs/GeoLite2-City-CSV/download?suffix=zip"
geoLiteCitySha256TarURL = "https://pkgs.netbird.io/geolocation-dbs/GeoLite2-City/download?suffix=tar.gz.sha256"
geoLiteCitySha256ZipURL = "https://pkgs.netbird.io/geolocation-dbs/GeoLite2-City-CSV/download?suffix=zip.sha256"
geoLiteCityMMDB = "GeoLite2-City.mmdb"
geoLiteCityCSV = "GeoLite2-City-Locations-en.csv"
)
// loadGeolocationDatabases loads the MaxMind databases.
func loadGeolocationDatabases(dataDir string) error {
files := []string{MMDBFileName, GeoSqliteDBFile}
for _, file := range files {
func loadGeolocationDatabases(ctx context.Context, dataDir string, mmdbFile string, geonamesdbFile string) error {
for _, file := range []string{mmdbFile, geonamesdbFile} {
exists, _ := fileExists(path.Join(dataDir, file))
if exists {
continue
}
log.Infof("geo location file %s not found , file will be downloaded", file)
log.WithContext(ctx).Infof("Geolocation database file %s not found, file will be downloaded", file)
switch file {
case MMDBFileName:
case mmdbFile:
extractFunc := func(src string, dst string) error {
if err := decompressTarGzFile(src, dst); err != nil {
return err
}
return copyFile(path.Join(dst, MMDBFileName), path.Join(dataDir, MMDBFileName))
return copyFile(path.Join(dst, geoLiteCityMMDB), path.Join(dataDir, mmdbFile))
}
if err := loadDatabase(
geoLiteCitySha256TarURL,
@@ -49,13 +49,13 @@ func loadGeolocationDatabases(dataDir string) error {
return err
}
case GeoSqliteDBFile:
case geonamesdbFile:
extractFunc := func(src string, dst string) error {
if err := decompressZipFile(src, dst); err != nil {
return err
}
extractedCsvFile := path.Join(dst, "GeoLite2-City-Locations-en.csv")
return importCsvToSqlite(dataDir, extractedCsvFile)
extractedCsvFile := path.Join(dst, geoLiteCityCSV)
return importCsvToSqlite(dataDir, extractedCsvFile, geonamesdbFile)
}
if err := loadDatabase(
@@ -79,7 +79,12 @@ func loadDatabase(checksumURL string, fileURL string, extractFunc func(src strin
}
defer os.RemoveAll(temp)
checksumFile := path.Join(temp, getDatabaseFileName(checksumURL))
checksumFilename, err := getFilenameFromURL(checksumURL)
if err != nil {
return err
}
checksumFile := path.Join(temp, checksumFilename)
err = downloadFile(checksumURL, checksumFile)
if err != nil {
return err
@@ -90,7 +95,12 @@ func loadDatabase(checksumURL string, fileURL string, extractFunc func(src strin
return err
}
dbFile := path.Join(temp, getDatabaseFileName(fileURL))
dbFilename, err := getFilenameFromURL(fileURL)
if err != nil {
return err
}
dbFile := path.Join(temp, dbFilename)
err = downloadFile(fileURL, dbFile)
if err != nil {
return err
@@ -104,13 +114,13 @@ func loadDatabase(checksumURL string, fileURL string, extractFunc func(src strin
}
// importCsvToSqlite imports a CSV file into a SQLite database.
func importCsvToSqlite(dataDir string, csvFile string) error {
func importCsvToSqlite(dataDir string, csvFile string, geonamesdbFile string) error {
geonames, err := loadGeonamesCsv(csvFile)
if err != nil {
return err
}
db, err := gorm.Open(sqlite.Open(path.Join(dataDir, GeoSqliteDBFile)), &gorm.Config{
db, err := gorm.Open(sqlite.Open(path.Join(dataDir, geonamesdbFile)), &gorm.Config{
Logger: logger.Default.LogMode(logger.Silent),
CreateBatchSize: 1000,
PrepareStmt: true,
@@ -178,18 +188,6 @@ func loadGeonamesCsv(filepath string) ([]GeoNames, error) {
return geoNames, nil
}
// getDatabaseFileName extracts the file name from a given URL string.
func getDatabaseFileName(urlStr string) string {
u, err := url.Parse(urlStr)
if err != nil {
panic(err)
}
ext := u.Query().Get("suffix")
fileName := fmt.Sprintf("%s.%s", path.Base(u.Path), ext)
return fileName
}
// copyFile performs a file copy operation from the source file to the destination.
func copyFile(src string, dst string) error {
srcFile, err := os.Open(src)


@@ -1,29 +1,25 @@
package geolocation
import (
"bytes"
"context"
"fmt"
"net"
"os"
"path"
"path/filepath"
"strings"
"sync"
"time"
"github.com/oschwald/maxminddb-golang"
log "github.com/sirupsen/logrus"
)
const MMDBFileName = "GeoLite2-City.mmdb"
type Geolocation struct {
mmdbPath string
mux sync.RWMutex
sha256sum []byte
db *maxminddb.Reader
locationDB *SqliteStore
stopCh chan struct{}
reloadCheckInterval time.Duration
mmdbPath string
mux sync.RWMutex
db *maxminddb.Reader
locationDB *SqliteStore
stopCh chan struct{}
}
type Record struct {
@@ -53,45 +49,56 @@ type Country struct {
CountryName string
}
func NewGeolocation(ctx context.Context, dataDir string) (*Geolocation, error) {
if err := loadGeolocationDatabases(dataDir); err != nil {
const (
mmdbPattern = "GeoLite2-City_*.mmdb"
geonamesdbPattern = "geonames_*.db"
)
func NewGeolocation(ctx context.Context, dataDir string, autoUpdate bool) (*Geolocation, error) {
mmdbGlobPattern := filepath.Join(dataDir, mmdbPattern)
mmdbFile, err := getDatabaseFilename(ctx, geoLiteCityTarGZURL, mmdbGlobPattern, autoUpdate)
if err != nil {
return nil, fmt.Errorf("failed to get database filename: %v", err)
}
geonamesDbGlobPattern := filepath.Join(dataDir, geonamesdbPattern)
geonamesDbFile, err := getDatabaseFilename(ctx, geoLiteCityZipURL, geonamesDbGlobPattern, autoUpdate)
if err != nil {
return nil, fmt.Errorf("failed to get database filename: %v", err)
}
if err := loadGeolocationDatabases(ctx, dataDir, mmdbFile, geonamesDbFile); err != nil {
return nil, fmt.Errorf("failed to load MaxMind databases: %v", err)
}
mmdbPath := path.Join(dataDir, MMDBFileName)
if err := cleanupMaxMindDatabases(ctx, dataDir, mmdbFile, geonamesDbFile); err != nil {
return nil, fmt.Errorf("failed to remove old MaxMind databases: %v", err)
}
mmdbPath := path.Join(dataDir, mmdbFile)
db, err := openDB(mmdbPath)
if err != nil {
return nil, err
}
sha256sum, err := calculateFileSHA256(mmdbPath)
if err != nil {
return nil, err
}
locationDB, err := NewSqliteStore(ctx, dataDir)
locationDB, err := NewSqliteStore(ctx, dataDir, geonamesDbFile)
if err != nil {
return nil, err
}
geo := &Geolocation{
mmdbPath: mmdbPath,
mux: sync.RWMutex{},
sha256sum: sha256sum,
db: db,
locationDB: locationDB,
reloadCheckInterval: 300 * time.Second, // TODO: make configurable
stopCh: make(chan struct{}),
mmdbPath: mmdbPath,
mux: sync.RWMutex{},
db: db,
locationDB: locationDB,
stopCh: make(chan struct{}),
}
go geo.reloader(ctx)
return geo, nil
}
func openDB(mmdbPath string) (*maxminddb.Reader, error) {
_, err := os.Stat(mmdbPath)
if os.IsNotExist(err) {
return nil, fmt.Errorf("%v does not exist", mmdbPath)
} else if err != nil {
@@ -166,70 +173,6 @@ func (gl *Geolocation) Stop() error {
return nil
}
func (gl *Geolocation) reloader(ctx context.Context) {
for {
select {
case <-gl.stopCh:
return
case <-time.After(gl.reloadCheckInterval):
if err := gl.locationDB.reload(ctx); err != nil {
log.WithContext(ctx).Errorf("geonames db reload failed: %s", err)
}
newSha256sum1, err := calculateFileSHA256(gl.mmdbPath)
if err != nil {
log.WithContext(ctx).Errorf("failed to calculate sha256 sum for '%s': %s", gl.mmdbPath, err)
continue
}
if !bytes.Equal(gl.sha256sum, newSha256sum1) {
// we check sum twice just to avoid possible case when we reload during update of the file
// considering the frequency of file update (few times a week) checking sum twice should be enough
time.Sleep(50 * time.Millisecond)
newSha256sum2, err := calculateFileSHA256(gl.mmdbPath)
if err != nil {
log.WithContext(ctx).Errorf("failed to calculate sha256 sum for '%s': %s", gl.mmdbPath, err)
continue
}
if !bytes.Equal(newSha256sum1, newSha256sum2) {
log.WithContext(ctx).Errorf("sha256 sum changed during reloading of '%s'", gl.mmdbPath)
continue
}
err = gl.reload(ctx, newSha256sum2)
if err != nil {
log.WithContext(ctx).Errorf("mmdb reload failed: %s", err)
}
} else {
log.WithContext(ctx).Tracef("No changes in '%s', no need to reload. Next check is in %.0f seconds.",
gl.mmdbPath, gl.reloadCheckInterval.Seconds())
}
}
}
}
func (gl *Geolocation) reload(ctx context.Context, newSha256sum []byte) error {
gl.mux.Lock()
defer gl.mux.Unlock()
log.WithContext(ctx).Infof("Reloading '%s'", gl.mmdbPath)
err := gl.db.Close()
if err != nil {
return err
}
db, err := openDB(gl.mmdbPath)
if err != nil {
return err
}
gl.db = db
gl.sha256sum = newSha256sum
log.WithContext(ctx).Infof("Successfully reloaded '%s'", gl.mmdbPath)
return nil
}
func fileExists(filePath string) (bool, error) {
_, err := os.Stat(filePath)
if err == nil {
@@ -240,3 +183,79 @@ func fileExists(filePath string) (bool, error) {
}
return false, err
}
func getExistingDatabases(pattern string) []string {
files, _ := filepath.Glob(pattern)
return files
}
func getDatabaseFilename(ctx context.Context, databaseURL string, filenamePattern string, autoUpdate bool) (string, error) {
var (
filename string
err error
)
if autoUpdate {
filename, err = getFilenameFromURL(databaseURL)
if err != nil {
log.WithContext(ctx).Debugf("Failed to update database from url: %s", databaseURL)
return "", err
}
} else {
files := getExistingDatabases(filenamePattern)
if len(files) < 1 {
filename, err = getFilenameFromURL(databaseURL)
if err != nil {
log.WithContext(ctx).Debugf("Failed to get database from url: %s", databaseURL)
return "", err
}
} else {
filename = filepath.Base(files[len(files)-1])
log.WithContext(ctx).Debugf("Using existing database, %s", filename)
return filename, nil
}
}
// strip suffixes that may be nested, such as .tar.gz
basename := strings.SplitN(filename, ".", 2)[0]
// get date version from basename
date := strings.SplitN(basename, "_", 2)[1]
// format db as "GeoLite2-Cities-{maxmind|geonames}_{DATE}.{mmdb|db}"
databaseFilename := filepath.Base(strings.Replace(filenamePattern, "*", date, 1))
return databaseFilename, nil
}
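// Illustrative walk-through of the derivation above (not part of the original commit;
// the date is hypothetical). Assuming the server reports "GeoLite2-City_20240903.tar.gz"
// via Content-Disposition:
//
//	filename := "GeoLite2-City_20240903.tar.gz"
//	basename := strings.SplitN(filename, ".", 2)[0]                 // "GeoLite2-City_20240903"
//	date := strings.SplitN(basename, "_", 2)[1]                     // "20240903"
//	pattern := filepath.Join(dataDir, "GeoLite2-City_*.mmdb")       // dataDir joined with mmdbPattern
//	dbFile := filepath.Base(strings.Replace(pattern, "*", date, 1)) // "GeoLite2-City_20240903.mmdb"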
func cleanupOldDatabases(ctx context.Context, pattern string, currentFile string) error {
files := getExistingDatabases(pattern)
for _, db := range files {
if filepath.Base(db) == currentFile {
continue
}
log.WithContext(ctx).Debugf("Removing old database: %s", db)
err := os.Remove(db)
if err != nil {
return err
}
}
return nil
}
func cleanupMaxMindDatabases(ctx context.Context, dataDir string, mmdbFile string, geonamesdbFile string) error {
for _, file := range []string{mmdbFile, geonamesdbFile} {
switch file {
case mmdbFile:
pattern := filepath.Join(dataDir, mmdbPattern)
if err := cleanupOldDatabases(ctx, pattern, file); err != nil {
return err
}
case geonamesdbFile:
pattern := filepath.Join(dataDir, geonamesdbPattern)
if err := cleanupOldDatabases(ctx, pattern, file); err != nil {
return err
}
}
}
return nil
}


@@ -2,8 +2,8 @@ package geolocation
import (
"net"
"os"
"path"
"path/filepath"
"sync"
"testing"
@@ -13,21 +13,15 @@
)
// from https://github.com/maxmind/MaxMind-DB/blob/main/test-data/GeoLite2-City-Test.mmdb
var mmdbPath = "../testdata/GeoLite2-City-Test.mmdb"
var mmdbPath = "../testdata/GeoLite2-City_20240305.mmdb"
func TestGeoLite_Lookup(t *testing.T) {
tempDir := t.TempDir()
filename := path.Join(tempDir, MMDBFileName)
filename := path.Join(tempDir, filepath.Base(mmdbPath))
err := util.CopyFileContents(mmdbPath, filename)
assert.NoError(t, err)
defer func() {
err := os.Remove(filename)
if err != nil {
t.Errorf("os.Remove: %s", err)
}
}()
db, err := openDB(mmdbPath)
db, err := openDB(filename)
assert.NoError(t, err)
geo := &Geolocation{


@@ -1,7 +1,6 @@
package geolocation
import (
"bytes"
"context"
"fmt"
"path/filepath"
@@ -17,10 +16,6 @@ import (
"github.com/netbirdio/netbird/management/server/status"
)
const (
GeoSqliteDBFile = "geonames.db"
)
type GeoNames struct {
GeoNameID int `gorm:"column:geoname_id"`
LocaleCode string `gorm:"column:locale_code"`
@@ -44,31 +39,24 @@ func (*GeoNames) TableName() string {
// SqliteStore represents a location storage backed by a Sqlite DB.
type SqliteStore struct {
db *gorm.DB
filePath string
mux sync.RWMutex
closed bool
sha256sum []byte
db *gorm.DB
filePath string
mux sync.RWMutex
closed bool
}
func NewSqliteStore(ctx context.Context, dataDir string) (*SqliteStore, error) {
file := filepath.Join(dataDir, GeoSqliteDBFile)
func NewSqliteStore(ctx context.Context, dataDir string, dbPath string) (*SqliteStore, error) {
file := filepath.Join(dataDir, dbPath)
db, err := connectDB(ctx, file)
if err != nil {
return nil, err
}
sha256sum, err := calculateFileSHA256(file)
if err != nil {
return nil, err
}
return &SqliteStore{
db: db,
filePath: file,
mux: sync.RWMutex{},
sha256sum: sha256sum,
db: db,
filePath: file,
mux: sync.RWMutex{},
}, nil
}
@@ -115,48 +103,6 @@ func (s *SqliteStore) GetCitiesByCountry(countryISOCode string) ([]City, error)
return cities, nil
}
// reload attempts to reload the SqliteStore's database if the database file has changed.
func (s *SqliteStore) reload(ctx context.Context) error {
s.mux.Lock()
defer s.mux.Unlock()
newSha256sum1, err := calculateFileSHA256(s.filePath)
if err != nil {
log.WithContext(ctx).Errorf("failed to calculate sha256 sum for '%s': %s", s.filePath, err)
}
if !bytes.Equal(s.sha256sum, newSha256sum1) {
// we check sum twice just to avoid possible case when we reload during update of the file
// considering the frequency of file update (few times a week) checking sum twice should be enough
time.Sleep(50 * time.Millisecond)
newSha256sum2, err := calculateFileSHA256(s.filePath)
if err != nil {
return fmt.Errorf("failed to calculate sha256 sum for '%s': %s", s.filePath, err)
}
if !bytes.Equal(newSha256sum1, newSha256sum2) {
return fmt.Errorf("sha256 sum changed during reloading of '%s'", s.filePath)
}
log.WithContext(ctx).Infof("Reloading '%s'", s.filePath)
_ = s.close()
s.closed = true
newDb, err := connectDB(ctx, s.filePath)
if err != nil {
return err
}
s.closed = false
s.db = newDb
log.WithContext(ctx).Infof("Successfully reloaded '%s'", s.filePath)
} else {
log.WithContext(ctx).Tracef("No changes in '%s', no need to reload", s.filePath)
}
return nil
}
// close closes the database connection.
// It retrieves the underlying *sql.DB object from the *gorm.DB object
// and calls the Close() method on it.


@@ -10,6 +10,7 @@ import (
"errors"
"fmt"
"io"
"mime"
"net/http"
"os"
"path"
@@ -174,3 +175,21 @@ func downloadFile(url, filepath string) error {
_, err = io.Copy(out, bytes.NewBuffer(bodyBytes))
return err
}
func getFilenameFromURL(url string) (string, error) {
resp, err := http.Head(url)
if err != nil {
return "", err
}
defer resp.Body.Close()
_, params, err := mime.ParseMediaType(resp.Header["Content-Disposition"][0])
if err != nil {
return "", err
}
filename := params["filename"]
return filename, nil
}
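// Illustrative usage of the helper above (not part of the original commit); the returned
// name comes from the server's Content-Disposition header, e.g. "GeoLite2-City_20240903.tar.gz"
// (the date is hypothetical):
//
//	name, err := getFilenameFromURL("https://pkgs.netbird.io/geolocation-dbs/GeoLite2-City/download?suffix=tar.gz")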


@@ -7,6 +7,7 @@ import (
"net/http"
"net/http/httptest"
"path"
"path/filepath"
"testing"
"github.com/gorilla/mux"
@@ -24,19 +25,19 @@ func initGeolocationTestData(t *testing.T) *GeolocationsHandler {
t.Helper()
var (
mmdbPath = "../testdata/GeoLite2-City-Test.mmdb"
geonamesDBPath = "../testdata/geonames-test.db"
mmdbPath = "../testdata/GeoLite2-City_20240305.mmdb"
geonamesdbPath = "../testdata/geonames_20240305.db"
)
tempDir := t.TempDir()
err := util.CopyFileContents(mmdbPath, path.Join(tempDir, geolocation.MMDBFileName))
err := util.CopyFileContents(mmdbPath, path.Join(tempDir, filepath.Base(mmdbPath)))
assert.NoError(t, err)
err = util.CopyFileContents(geonamesDBPath, path.Join(tempDir, geolocation.GeoSqliteDBFile))
err = util.CopyFileContents(geonamesdbPath, path.Join(tempDir, filepath.Base(geonamesdbPath)))
assert.NoError(t, err)
geo, err := geolocation.NewGeolocation(context.Background(), tempDir)
geo, err := geolocation.NewGeolocation(context.Background(), tempDir, false)
assert.NoError(t, err)
t.Cleanup(func() { _ = geo.Stop() })

Binary file changed (21 KiB before and after).