From d0f097e1ec5d88e8d64f58d4920c63fe5a17e824 Mon Sep 17 00:00:00 2001
From: Ralf Becker
Date: Tue, 3 Jul 2012 07:05:55 +0000
Subject: [PATCH] chunk size for number of rows to query was far too low for
 huge tables like egw_history_log, which can be a couple of million rows;
 backup took far too long

---
 phpgwapi/inc/class.db_backup.inc.php | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/phpgwapi/inc/class.db_backup.inc.php b/phpgwapi/inc/class.db_backup.inc.php
index 90249f450b..e5f8609bf6 100644
--- a/phpgwapi/inc/class.db_backup.inc.php
+++ b/phpgwapi/inc/class.db_backup.inc.php
@@ -772,7 +772,7 @@ class db_backup
 	/**
 	 * Number of rows to select per chunk, to not run into memory limit on huge tables
 	 */
-	const ROW_CHUNK = 100;
+	const ROW_CHUNK = 10000;
 
 	/**
 	 * Backup all data in the form of a (compressed) csv file
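
The constant caps how many rows the data backup fetches per query, so even multi-million-row tables never have to be held in memory at once. Below is a minimal sketch of that chunked-select pattern, not the actual db_backup code: the PDO connection, table name, and dump_row() callback are hypothetical stand-ins for whatever the real backup loop uses.

<?php
// Sketch only: illustrates a ROW_CHUNK-style paging loop, not EGroupware's implementation.
const ROW_CHUNK = 10000;   // rows fetched per query; 100 was far too small for huge tables

function backup_table(PDO $db, string $table, callable $dump_row): void
{
	for ($offset = 0; ; $offset += ROW_CHUNK)
	{
		// Fetch the next chunk of rows; LIMIT keeps memory use bounded
		$stmt = $db->query("SELECT * FROM $table LIMIT ".ROW_CHUNK." OFFSET $offset");
		$rows = $stmt->fetchAll(PDO::FETCH_ASSOC);
		if (!$rows) break;                     // no more rows: table fully exported

		foreach ($rows as $row)
		{
			$dump_row($row);                   // e.g. write one CSV line to the backup file
		}
		if (count($rows) < ROW_CHUNK) break;   // last, partial chunk
	}
}

With a chunk of 100 rows a table like egw_history_log needs tens of thousands of queries, so the query overhead dominates; raising the chunk to 10000 cuts the number of round trips by two orders of magnitude while each chunk still fits comfortably in the PHP memory limit.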