#!/usr/bin/perl
# Aruba site files backup v1.0 (20100623)
use strict;
use warnings;
use CGI::Pretty ":standard";
use CGI::Carp qw(fatalsToBrowser);   # only for debugging

# Restrict PATH before spawning external commands.
$ENV{PATH} = "/bin:/usr/bin:/usr/local/bin";

# Derive the bare domain name from the script's own URL.
my $site = url();
# regexps from http://www.willmaster.com/blog/perl/extracting-domain-name-from-url.php
$site =~ s!^https?://(?:www\.)?!!i;   # strip scheme and leading "www."
$site =~ s!/.*!!;                     # strip any path
$site =~ s/[\?\#\:].*//;              # strip query string, fragment, port

## CONFIG
my $basedir = "/web/htdocs/www.$site/";   # Aruba document root for this domain
my $webdir  = "home";                     # directory to archive, relative to $basedir
## END CONFIG

chdir $basedir or die "Cannot chdir to $basedir: $!";

# Send the archive straight to the browser as a download.
print header(
    -type       => "application/x-tar-bzip2",
    -attachment => "backup_$site.tar.bz2",
);

# "f -" writes the bzip2-compressed tar stream to stdout (the HTTP response).
exec "tar", "cfj", "-", "--exclude=*_Backup_*", $webdir;
die "cannot exec tar: $!";