# Daily cron job for Koha.
# - dump all enabled sites, except one called 'demo'
-dirname="/var/spool/koha"
+dirname=""
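+# Deliberately empty: when no --output is given, each site's own backupdir
+# from its koha-conf.xml is used instead (see the loop below).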
days="2"
show_help() {
filesystem. It keeps the configured number of days' worth of backups, discarding older ones.
Options:
- --output: the directory that the resulting files will be placed into.
- (default: /var/spool/koha)
- --days: the number of days to keep backups around for
- (default: 2)
+ --output /path The directory that the resulting files will be placed into.
+ (default: /var/spool/koha)
+ --days 2 The number of days to keep backups around for
+ (default: 2)
+ --exclude-indexes Exclude Zebra indexes from the backups (default: false)
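+
+Example:
+  koha-run-backups --days 7 --output /var/spool/koha --exclude-indexes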
Note: backups produced using this tool can be restored using \`koha-restore'.
EOH
}
-CMD_LINE=`getopt -o h --long days:,output:,help -n 'koha-run-backups' -- "$@"`
+exclude_indexes=""
+
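+# Note: 'exclude-indexes' has no trailing colon in the --long list because it
+# takes no argument, unlike 'days:' and 'output:'.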
+CMD_LINE=`getopt -o h --long days:,output:,help,exclude-indexes -n 'koha-run-backups' -- "$@"`
if [ $? != 0 ] ; then show_help ; exit 1 ; fi
--days)
days=$2; shift 2 ;;
--output)
dirname=$2; shift 2 ;;
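+ # Remember the flag verbatim so it can be passed straight to koha-dump.
+ # Single 'shift' only: this option takes no argument.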
+ --exclude-indexes)
+ exclude_indexes='--exclude-indexes'; shift ;;
--) shift ; break ;;
*) echo "Unknown error parsing the command line!" ; exit 1 ;;
esac
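+# Dump every enabled instance; 'grep -Fxv demo' skips only an instance whose
+# exact name is 'demo'.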
for name in $(koha-list --enabled | grep -Fxv demo)
do
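+ # ${exclude_indexes} is left unquoted on purpose: when empty it must expand
+ # to nothing rather than to an empty argument.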
- koha-dump "$name" > /dev/null
+ koha-dump ${exclude_indexes} "$name" > /dev/null
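+ # Pick the backup directory: the per-site backupdir from koha-conf.xml when
+ # no --output was given, otherwise <output>/<instance>.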
+ if [ -z "$dirname" ]; then
+ backupdir="$( xmlstarlet sel -t -v 'yazgfs/config/backupdir' "/etc/koha/sites/$name/koha-conf.xml" )"
+ else
+ backupdir="$dirname/$name"
+ fi
# Remove old dump files.
# FIXME: This could probably be replaced by one line of perl.
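+ # List the dump files, reduce each name to its date stamp, de-duplicate,
+ # sort newest first, then drop the $days most recent dates; any date that
+ # survives is old enough for its dumps to be removed below.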
- ls "$dirname/$name/" |
+ ls "$backupdir/" |
sed "s:^$name-\([0-9-]*\)\.\(sql\|tar\)\.gz$:\1:" |
sort -u |
tac |
sed "1,${days}d" |
while read date
do
- tardump="$dirname/$name/$name-$date.tar.gz"
- sqldump="$dirname/$name/$name-$date.sql.gz"
+ tardump="$backupdir/$name-$date.tar.gz"
+ sqldump="$backupdir/$name-$date.sql.gz"
if [ -e "$tardump" ] && [ -e "$sqldump" ]
then
rm "$tardump"