From 972ebf56f267e5068b4137a6cb13e87b8ae0004a Mon Sep 17 00:00:00 2001 From: Ralf Becker Date: Wed, 30 Oct 2019 17:15:23 +0100 Subject: [PATCH] speed up restore of database by inserting up to 500 rows in one statement (was only 10) --- api/src/Db/Backup.php | 12 +++++++----- 1 file changed, 7 insertions(+), 5 deletions(-) diff --git a/api/src/Db/Backup.php b/api/src/Db/Backup.php index 18ff21fb11..e09132ddb5 100644 --- a/api/src/Db/Backup.php +++ b/api/src/Db/Backup.php @@ -331,11 +331,11 @@ class Backup * @param boolean $convert_to_system_charset =true obsolet, it's now allways done * @param string $filename ='' gives the file name which is used in case of a zip archive. * @param boolean $protect_system_config =true should above system_config values be protected (NOT overwritten) - * @param int $insert_n_rows =10 how many rows to insert in one sql statement + * @param int $insert_n_rows =500 how many rows to insert in one sql statement * * @returns An empty string or an error message in case of failure. 
*/ - function restore($f,$convert_to_system_charset=true,$filename='',$protect_system_config=true, $insert_n_rows=10) + function restore($f,$convert_to_system_charset=true,$filename='',$protect_system_config=true, $insert_n_rows=500) { @set_time_limit(0); ini_set('auto_detect_line_endings',true); @@ -496,10 +496,10 @@ class Backup * Restore data from a (compressed) csv file * * @param resource $f file opened with fopen for reading - * @param int|string $insert_n_rows =10 how many rows to insert in one sql statement, or string with column-name used as unique key for insert + * @param int|string $insert_n_rows =500 how many rows to insert in one sql statement, or string with column-name used as unique key for insert * @returns int number of rows read from csv file */ - function db_restore($f, $insert_n_rows=10) + function db_restore($f, $insert_n_rows=500) { $convert_to_system_charset = true; $table = null; @@ -604,7 +604,9 @@ class Backup if ($insert_n_rows > 1) { $rows[] = $data; - if (count($rows) == $insert_n_rows) + if (count($rows) == $insert_n_rows || + // check every 50 rows, if we might reach MySQLs max_allowed_packet=1MB + (!(count($rows) % 50) && strlen(json_encode($rows)) > 500000)) { $this->insert_multiple($table, $rows, $this->schemas[$table]); $rows = array();