#! /usr/bin/env perl

#
#   Copyright (C) Dr. Heinz-Josef Claes (2001-2005)
#                 hjclaes@web.de
#   
#   This program is free software; you can redistribute it and/or modify
#   it under the terms of the GNU General Public License as published by
#   the Free Software Foundation; either version 2 of the License, or
#   (at your option) any later version.
#   
#   This program is distributed in the hope that it will be useful,
#   but WITHOUT ANY WARRANTY; without even the implied warranty of
#   MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE.  See the
#   GNU General Public License for more details.
#   
#   You should have received a copy of the GNU General Public License
#   along with this program; if not, write to the Free Software
#   Foundation, Inc., 675 Mass Ave, Cambridge, MA 02139, USA.
#


require SDBM_File;
require Tie::Hash;

use Fcntl qw(O_RDWR O_CREAT);
use File::Copy;
use POSIX;

my $VERSION = '$Id: storeBackup.pl 336 2005-08-12 21:03:30Z hjc $ ';
push @VERSION, $VERSION;

use strict;

use DB_File;           # Berkeley DB

sub libPath
{
    my $file = shift;

    my $dir;

    # Falls Datei selbst ein symlink ist, solange folgen, bis aufgelst
    if (-f $file)
    {
	while (-l $file)
	{
	    my $link = readlink($file);

	    if (substr($link, 0, 1) ne "/")
	    {
		$file =~ s/[^\/]+$/$link/;
	    }
	    else
	    {
		$file = $link;
	    }
	}

	($dir, $file) = &splitFileDir($file);
	$file = "/$file";
    }
    else
    {
	print STDERR "<$file> does not exist!\n";
	exit 1;
    }

    $dir .= "/../lib";           # Pfad zu den Bibliotheken
    my $oldDir = `/bin/pwd`;
    chomp $oldDir;
    if (chdir $dir)
    {
	my $absDir = `/bin/pwd`;
	chop $absDir;
	chdir $oldDir;

	return (&splitFileDir("$absDir$file"));
    }
    else
    {
	print STDERR "<$dir> does not exist, exiting\n";
    }
}
sub splitFileDir
{
    my $name = shift;

    return ('.', $name) unless ($name =~/\//);    # nur einfacher Dateiname

    my ($dir, $file) = $name =~ /^(.*)\/(.*)$/s;
    $dir = '/' if ($dir eq '');                   # gilt, falls z.B. /filename
    return ($dir, $file);
}
my ($req, $prog) = &libPath($0);
(@INC) = ($req, @INC);

require 'checkParam.pl';
require 'checkObjPar.pl';
require 'prLog.pl';
require 'splitLine.pl';
require 'fileDir.pl';
require 'dateTools.pl';
require 'forkProc.pl';
require 'humanRead.pl';
require 'readKeyFromFile.pl';
require 'version.pl';
require 'evalTools.pl';
require 'storeBackupLib.pl';

#/usr/include/linux/limits.h:#define ARG_MAX       131072        /* #
#bytes of args + environ for exec() */
#
#
#Aus dem Source fr "xargs" geht hervor:
#
#  orig_arg_max = ARG_MAX - 2048; /* POSIX.2 requires subtracting 2048. */
#  arg_max = orig_arg_max;

$main::execParamLength = 4 * 1024;      # Default Wert, sehr niedrig angesetzt
$main::df = 'df';                       # df mit 1k Blcken
$main::minCompressSize = 512;           # alles was kleiner ist, wird kopiert
$main::minCopyWithFork = 100 * 1024;    # alles was <= ist, wird in perl
                                        # kopiert, was > ist, mit fork/cp
my (%execParamLength) = ('AIX' => 22 * 1024,
			 'Linux' => 124 * 1024);


my $exceptDirsSep = ',';          # default value
my $compress = 'bzip2';           # default value
my $uncompress = 'bzip2 -d';      # default value
my $postfix = '.bz2';             # default value
my $queueCompress = 1000;         # default value
my $noCompress = 4;               # default value
my $queueCopy = 1000;             # default value
my $noCopy = 1;                   # default value
my $chmodMD5File = '0600';        # default value
my $tmpdir = '/tmp';              # default value
my @exceptSuffix = ('.zip', '.bz2', '.gz', '.tgz', '.jpg', '.gif', '.tiff',
		    '.tif', '.mpeg', '.mpg', '.mp3', '.ogg', '.gpg', '.png');
my $logInBackupDirFileName = '.storeBackup.log';
my $checkSumFile = '.md5CheckSums';
$main::checkSumFileVersion = '1.3';
my $keepAll = '30d';
my $keepDuplicate = '7d';

$tmpdir = $ENV{'TMPDIR'} if defined $ENV{'TMPDIR'};
my @eSuffix = &splitLine(join(', ', sort @exceptSuffix), 55, '\s+');
my $i;
for ($i = 0 ; $i < @eSuffix ; $i++)
{
    $eSuffix[$i] = "\t\t    " . $eSuffix[$i];
    $eSuffix[$i] .= "\n" if ($i < @eSuffix - 1);
}


my $Help = <<EOH;
This program copies trees to another location. Every file copied is
potentially compressed (see --exceptSuffix). The compressions after
the first compression will compaire the files with an md5 checksum
with the last stored version. If they are equal, it will only make an
hard link to it. It will also check mtime, ctime and size to recognize
idential files in older backups very fast.
The program can handel normal files, directories and symbolic links.

usage:
	$prog -f configFile [-g | --print]
or
	$prog -s sourceDir -t targetDir [-T tmpdir] [-L lockFile]
		[--unlockBeforeDel] [--exceptDirs dir1,dir2,dir3]
		[--includeDirs dir1,dir2,dir3]
		[--exceptDirsSep sep]
		[--exceptPattern rule] [--includePattern rule]
		[--exceptTypes types]
		[--precommand job] [--postcommand job] [--followLinks depth]
		[-c compress] [-u uncompress] [-p postfix]
		[--noCompress number] [--queueCompress number]
		[--noCopy number] [--queueCopy number] [--copyBWLimit kbps]
		[--withUserGroupStat] [--userGroupStatFile filename]
 		[--exceptSuffix suffixes]
		[--addExceptSuffix suffixes] [--contExceptDirsErr]
		[--compressMD5File yes|no] [--chmodMD5File] [-v]
		[-d level][--progressReport number] [--printDepth]
		[--ignoreReadError] [--doNotDelete] [--resetAtime]
		[--keepAll timePeriod] [--keepWeekday entry]
		[--keepFirstOfYear] [--keepLastOfYear]
		[--keepFirstOfMonth] [--keepLastOfMonth]
		[--firstDayOfWeek day] [--keepFirstOfWeek] [--keepLastOfWeek]
		[--keepDuplicate] [--keepMinNumber] [--keepMaxNumber]
		[-l logFile
		 [--plusLogStdout] [--withTime yes|no] [-m maxFilelen]
		 [[[-n noOfOldFiles] | [--saveLogs yes|no]]
		 [--compressWith compressprog]]
		[--logInBackupDir yes|no [--compressLogInBackupDir yes|no]
		 [--logInBackupDirFileName logFile]]
		[otherBackupDirs ...]

--file		-f  configuration file (instead of parameters)
--generate	-g  generate template of configuration file
--print		    print configuration read from configuration file an stop

--sourceDir	-s  source directory (must exist)
--targetDir	-t  target directory (must exist)
--tmpdir	-T  directory for temporary file, default is <$tmpdir>
--lockFile      -L  lock file, if exists, new instances will finish if
		    an old is allready running
--unlockBeforeDel   remove the lock file before deleting old backups
		    default is to delete the lock file after deleting
--exceptDirs	-e  directories to except from backing up (relative
		    path), wildcards are possible and should be
		    quoted to avoid replacements by the shell,
		     the directories have to be separated with
		    --exceptDirsSep
--includeDirs	-i  directories to include in the backup (relative
		    path), wildcards are possible and have to be
		    quoted, the directories have to be separated by
		    --exceptDirsSep
--exceptDirsSep     Separator for --exceptDirs and --includeDirs,
		    default is <$exceptDirsSep>
--exceptPattern     Files to exclude from backing up. You can define a rule
		    with pattern. See the config or (better) the README file for
		    a detailed description.
--includePattern    Files to include in the backug up - like exceptPattern
--contExceptDirsErr continue if one or more of the exceptional directories
		    do not exist (default is to stop processing)
--exceptTypes	    do not save the specified types of files, allowed: Sbcfpl
		    S - file is a socket
		    b - file is a block special file
		    c - file is a character special file
		    f - file is a plain file
		    p - file is a named pipe
		    l - file is a symbolic link
		    Sbc is not yet implemented, but you can suppress a 
		    warning if you specify them with this option
--precommand	    exec job before starting the backup, checks lockFile (-L)
		    before starting (e.g. can be used for rsync)
		    stops execution if job returs exit status != 0
--postcommand	    exec job after finishing the backup,
		    but before erasing of old backups
		    reports if job returs exit status != 0
--followLinks	    follow symbolic links like directories up to depth
		    default = 0 -> do not follow links
--compress	-c  compress command (with options), default is <$compress>
--uncompress	-u  uncompress command (with options), default is <$uncompress>
--postfix	-p  postfix to add after compression, default is <$postfix>
--noCompress	    maximal number of parallel compress operations,
		    default = $noCompress
--queueCompress	    length of queue to store files before compression,
		    default = $queueCompress
--noCopy	    maximal number of parallel copy operations,
		    default = $noCopy
--queueCopy	    length of queue to store files before copying,
		    default = $queueCopy
--copyBWLimit	    maximum bandwidth, KBytes per second per copying process
		    $0 uses rsync for this option
		    default = 0 -> no limit, use cp for copying
--withUserGroupStat write statistics about used space in log file
--userGroupStatFile write statistics about used space in name file
		    will be overridden each time
--exceptSuffix	    do not compress or copy files with the following
		    suffix (uppercase included):
@eSuffix
--addExceptSuffix   like --exceptSuffix, but do not replace defaults, add
--compressMD5File   default is 'yes', if you do not want this, say 'no'
--chmodMD5File	    permissions of .md5CheckSumFile and corresponding
		    .storeBackupLinks directory, default is $chmodMD5File
--verbose	-v  verbose messages about --exceptPattern and --includePattern
--debug		-d  generate debug messages, levels are 0 (none, default),
		     1 (some), 2 (many) messages
--resetAtime	    reset access time in the source directory - but this will
		    change ctime (time of last modification of file status
		    information)
--doNotDelete	    test only, do not delete any backup
--keepAll	    keep backups which are not older than the specified amount
		    of time. This is like a default value for all days in
		    --keepWeekday. Begins deleting at the end of the script
		    the time range has to be specified in format 'dhms', e.g.
		       10d4h means 10 days and 4 hours
		    default = $keepAll;
--keepWeekday	    keep backups for the specified days for the specified
		    amount of time. Overwrites the default values choosen in
		    --keepAll. 'Mon,Wed:40d Sat:60d10m' means:
			keep backups of Mon and Wed 40days + 5mins
			keep backups of Sat 60days + 10mins
			keep backups of the rest of the days like spcified in
				--keepAll (default $keepAll)
		    if you also use the 'archive flag' it means to not
		    delete the affected directories via --keepMaxNumber:
		       a10d4h means 10 days and 4 hours and 'archive flag'
		    e.g. 'Mon,Wed:a40d Sat:60d10m' means:
			keep backups of Mon and Wed 40days + 5mins + 'archive'
			keep backups of Sat 60days + 10mins
			keep backups of the rest of the days like specified in
				--keepAll (default $keepAll)
--keepFirstOfYear   do not delete the first backup of a year
		    format is timePeriod with possible 'archive flag'
--keepLastOfYear    do not delete the last backup of a year
		    format is timePeriod with possible 'archive flag'
--keepFirstOfMonth  do not delete the first backup of a month
		    format is timePeriod with possible 'archive flag'
--keepLastOfMonth   do not delete the last backup of a month
		    format is timePeriod with possible 'archive flag'
--firstDayOfWeek    default: 'Sun'. This value is used for calculating
		    --keepFirstOfWeek and --keepLastOfWeek
--keepFirstOfWeek   do not delete the first backup of a week
		    format is timePeriod with possible 'archive flag'
--keepLastOfWeek    do not delete the last backup of a week
		    format is timePeriod with possible 'archive flag'
--keepDuplicate     keep multiple backups of one day up to timePeriod
		    format is timePeriod, 'archive flag' is not possible
		    default = $keepDuplicate;
--keepMinNumber	    Keep that miminum of backups. Multiple backups of one
		    day are counted as one backup.
--keepMaxNumber	    Try to keep only that maximum of backups. If you have more
		    backups, the following sequence of deleting will happen:
		    - delete all duplicates of a day, beginning with the old
		      once, except the oldest of every day
		    - if this is not enough, delete the rest of the backups
		      beginning with the oldest, but *never* a backup with
		      the 'archive flag' or the last backup
--progressReport     print progress report after each 'number' files
--printDepth	     print depth of actual readed directory during backup
--ignoreReadError    ignore read errors in source directory; not readable
		     directories does not cause $prog
		     to stop processing
--logFile	-l   log file (default is STDOUT)
--plusLogStdout	     if you specify a log file with --logFile you can
		     additionally print the output to STDOUT with this flag
--withTime	-w   output in logfile with time: 'yes' or 'no'
		     default = 'yes'
--maxFilelen	-m   maximal length of file, default = 1e6
--noOfOldFiles	-n   number of old log files, default = 5
--saveLogs	     save log files with date and time instead of deleting the
		     old (with [-noOldFiles]): 'yes' or 'no', default = 'no'
--compressWith	     compress saved log files (e.g. with 'gzip -9')
		     default is 'bzip2'
--logInBackupDir     write log file (also) in the backup directory:
		     'yes' or 'no', default is 'no'
		     Be aware that this log does not contain all error
		     messages of the one specified with --logFile!
--compressLogInBackupDir
		     compress the log file in the backup directory:
		     'yes' or 'no', default is 'yes'
--logInBackupDirFileName
		     filename to use for writing the above log file,
		     default is '$logInBackupDirFileName'

otherBackupDirs	     List of other backup directories to consider for
		     hard linking. Format (examples):
		     /backupDir/2002.08.29_08.25.28 -> consider this backupDir
		     or
		     0:/backupDir    -> last (youngest) backup in /backupDir
		     1:/backupDir    -> first before last backup in /backupDir
		     n:/backupDir    -> n'th before last backup in /backupDir
		     3-5:/backupDir  -> 3rd, 4th and 5th in /backupDir
		     all:/backupDir  -> all in /backupDir

Copyright (c) 2001-2005 by Heinz-Josef Claes
Published under the GNU General Public License
EOH
    ;
# '

my $exceptSuffix = join(',', @exceptSuffix);
my $templateConfigFile = <<EOC;
# configuration file for storeBackup.pl

# the file format is very simple:
# if you specify a path, you have to quote the following characters:
# backslash ->  '\\\\'
# lf        ->  '\\n'
# tab       ->  '\\t'
# space     ->  '\\ '
# this replacement is done for the following options:
# sourceDir, targetDir, logFile, lockFile, userGroupStatFile,
# logInBackupDirFileName, compressWith, exceptDirs
#
# If the options to one key a spread over multiple lines, continuation
# lines have to begin with a white space (blank or tab)! An option
# (like 'sourceDirs') has to begin in the first column.
#
# you can use single quotes to mask special characters

# source directory (*** must be specified ***)
sourceDir=

# target directory (*** must be specified ***)
targetDir=

# directory for temporary file, default is /tmp
tmpDir=

# List of other backup directories to consider for
# hard linking. Format (examples):
# /backupDir/2002.08.29_08.25.28 -> consider this backupDir
# or
# 0:/backupDir    -> last (youngest) backup in /backupDir
# 1:/backupDir    -> first before last backup in /backupDir
# n:/backupDir    -> n'th before last backup in /backupDir
# 3-5:/backupDir  -> 3rd, 4th and 5th in /backupDir
# all:/backupDir  -> all in /backupDir
# This option is respective useful, if you want to hard link
# to backup trees from different backups. If you make these
# backups in order (never parallel), you should use 0:backupDir to
# the other backup dirs. If it is possible, that they can run in
# parallel, you should use 1:backupDir to avoid useless copies in
# the backup. This option should be used for *all* backups, which
# should share hard links (vice versa). Naturally, all the backups
# with joined hard links have to be in the same file system!
otherBackupDirs=

# lock file, if exist, new instances will finish if
# an old is allready running
lockFile=

# remove the lock files before deleting old backups
# default ('no') is to delete the lock file after deleting
# possible values are 'yes' and 'no'
unlockBeforeDel=

# Directories to except from backing up (relative path).
# You can use shell type wildcards.
# These directories have to be separated by space or newline.
exceptDirs=

# Directories to include in the backup (relative path inside of the backup).
# You can use shell type wildcards.
# These directories have to be separated by space or newline.
includeDirs=

# This option gives you the possibility to exclude a combination of perl
# regular expressions. These patterns have to describe a file name with its
# relative path in the backup. You have to mask a '/' with '\\/'
# inside the pattern. If you are not familiar with perl pattern
# matching, you should tye `man perlretut` and read some documentation.
# The combination of patterns can be made with 'and', 'or', 'not,
# '(' or ')'. If you want to use one of the keywords as a pattern,
# it has to be written differently, eg. write 'and' as 'a[n]d'.
# !!! '(' and ')' _have_to_be_separated_ by white space!!!
# You can say:
# exceptPattern = ( \\/opt\\/ or \\/optional\\/ ) and not \\/(.+?)\\/myproc\\/
# This means: Exclude the directories '/opt/' and '/optional/'. But do
# not exclude all directories beginning with /*/myproc/.
# If here is defined a rule, and it matches, then the file will not
# be saved.
exceptPattern=

# For explanations, see 'exceptPattern'.
# All include / except paramters will be checked:
#
# General rule: A file which is
#
# not in 'exceptDirs' and
# in 'includeDirs' and
# does not match 'exceptPattern' (with full relative path) and
# matches 'includePattern' (with full relative path)
#
# will be saved! In all cases you have to define _relative_ paths from your
# sourceDir! if you additionally use 'followLinks', interpret the
# specified symbolic links as directories.
includePattern=

# continue if one or more of the exceptional directories
# do not exist (no is stopping processing)
contExceptDirsErr=no

# do not save the specified types of files, allowed: Sbcfpl
# S - file is a socket
# b - file is a block special file
# c - file is a character special file
# f - file is a plain file
# p - file is a named pipe
# l - file is a symbolic link
# Sbc is not yet implemented, but you can suppress a 
# warning if you specify them with this option
exceptTypes=

# exec job before starting the backup, checks lockFile (-L)
# before starting (e.g. can be used for rsync)
# stops execution if job returs exit status != 0
precommand=

# exec job after finishing the backup,
# but before erasing of old backups
# reports if job returs exit status != 0
postcommand=

# follow symbolic links like directories up to depth
# 0 -> do not follow links
followLinks=0

# compress command (with options), default is <$compress>
compress=$compress

# uncompress command (with options), default is <$uncompress>
uncompress=$uncompress

# postfix to add after compression, default is <$postfix>
postfix=$postfix

# maximal number of parallel compress operations,
# default = $noCompress
noCompress=$noCompress

# length of queue to store files before compression,
# default = $queueCompress
queueCompress=$queueCompress

# maximal number of parallel copy operations,
# default = $noCopy
noCopy=$noCopy

# length of queue to store files before copying,
# default = $queueCopy
queueCopy=$queueCopy

# maximum bandwidth, KBytes per second per copying process
# storeBackup uses rsync for this option, make shure it is installed
# default = 0 -> no limit, use cp for copying
copyBWLimit=0

# write statistics about used space in log file
# default is 'no'
withUserGroupStat=no

# write statistics about used space in name file
#		    will be overridden each time
# if no file name is given, nothing will be written
# format is:
# identifier uid userName value
# identifier gid groupName value
userGroupStatFile=

# do not compress or copy files with the following
# suffix (uppercase included):
# (if you set this to '.*', no files will be compressed)
exceptSuffix=@exceptSuffix


# like --exceptSuffix, but do not replace defaults, add
addExceptSuffix=

# default is 'yes', if you do not want this, say 'no'
compressMD5File=yes

# permissions of .md5checkSumFile, default is $chmodMD5File
chmodMD5File=$chmodMD5File

# verbose messages, about exceptPattern and includePattern
# default is 'no'
verbose=no

# generate debug messages, levels are 0 (none, default),
# 1 (some), 2 (many) messages
debug=0

# reset access time in the source directory - but this will
# change ctime (time of last modification of file status
# information
# default is 'no', if you want this, say 'yes'
resetAtime=

# do not delete any old backup (e.g. specified via --keepAll or
# --keepWeekday) but print a message. This is for testing.
# Values are 'on' and 'off'. Default is 'off' which means to delete.
doNotDelete=

# keep backups which are not older than the specified amount
# of time. This is like a default value for all days in
# --keepWeekday. Begins deleting at the end of the script
# the time range has to be specified in format 'dhms', e.g.
# 10d4h means 10 days and 4 hours
# default = $keepAll;
# An archive flag is not possible with this parameter (see below).
keepAll=

# keep backups for the specified days for the specified
# amount of time. Overwrites the default values choosen in
# --keepAll. 'Mon,Wed:40d Sat:60d10m' means:
# keep backups of Mon and Wed 40days + 5mins
# keep backups of Sat 60days + 10mins
# keep backups of the rest of the days like spcified in
# --keepAll (default $keepAll)
# you can also set the 'archive flag'.
# 'Mon,Wed:a40d Sat:60d10m' means:
# keep backups of Mon and Wed 40days + 5mins + 'archive'
# keep backups of Sat 60days + 10mins
# keep backups of the rest of the days like specified in
# --keepAll (default $keepAll)
# If you also use the 'archive flag' it means to not
# delete the affected directories via --keepMaxNumber:
# a10d4h means 10 days and 4 hours and 'archive flag'
keepWeekday=

# do not delete the first backup of a year
# format is timePeriod with possible 'archive flag'
keepFirstOfYear=

# do not delete the last backup of a year
# format is timePeriod with possible 'archive flag'
keepLastOfYear=

# do not delete the first backup of a month
# format is timePeriod with possible 'archive flag'
keepFirstOfMonth=

# do not delete the last backup of a month
# format is timePeriod with possible 'archive flag'
keepLastOfMonth=

# default: 'Sun'. This value is used for calculating
# --keepFirstOfWeek and --keepLastOfWeek
firstDayOfWeek=

# do not delete the first backup of a week
# format is timePeriod with possible 'archive flag'
keepFirstOfWeek=

# do not delete the last backup of a week
# format is timePeriod with possible 'archive flag'
keepLastOfWeek=

# keep multiple backups of one day up to timePeriod
# format is timePeriod, 'archive flag' is not possible
# default is $keepDuplicate
keepDuplicate=

# Keep that miminum of backups. Multiple backups of one
# day are counted as one backup.
keepMinNumber=

# Try to keep only that maximum of backups. If you have more
# backups, the following sequence of deleting will happen:
# - delete all duplicates of a day, beginning with the old
#   once, except the oldest of every day
# - if this is not enough, delete the rest of the backups
#   beginning with the oldest, but *never* a backup with
#   the 'archive flag' or the last backup
keepMaxNumber=

# print progress report after each 'number' files
# Default is 0, which means no reports.
progressReport=

# print depth of actual readed directory during backup
# default is 'no', values are 'yes' and 'no'
printDepth=

# ignore read errors in source directory; not readable
# directories does not cause storeBackup.pl to stop processing
# Values are 'on' and 'off'. Default is 'off' which means not
# to ignore them
ignoreReadError=

# name of the log file (default is STDOUT)
logFile=

# if you specify a log file with --logFile you can
# additionally print the output to STDOUT with this flag
# Values are 'on' and 'off'. Default is 'off'.
plusLogStdout=

# output in logfile with time: 'yes' or 'no'
# default = 'yes'
withTime=

# maximal length of file, default = 1e6
maxFilelen=

# number of old log files, default = 5
noOfOldFiles=

# save log files with date and time instead of deleting the
# old (with [-noOldFiles]): 'yes' or 'no', default = 'no'
saveLogs=

# compress saved log files (e.g. with 'gzip -9')
# default is 'bzip2'
compressWith=

# write log file (also) in the backup directory:
# 'yes' or 'no', default is 'no'
# Be aware that this log does not contain all error
# messages of the one specified with --logFile!
# Some errors are possible before the backup
# directory is created.
logInBackupDir=

# compress the log file in the backup directory:
# 'yes' or 'no', default is 'yes'
compressLogInBackupDir=

# filename to use for writing the above log file,
# default is '$logInBackupDirFileName'
logInBackupDirFileName=

EOC
    ;


&printVersions(\@ARGV, '-V');

my $onlyIf = '[-s] and [-t] and not [-f]';
my (@onlyIf) = ('-only_if' => $onlyIf);
my $CheckPar =
    CheckParam->new('-allowLists' => 'yes',
		    '-list' => [Option->new('-option' => '-f',
                                            '-alias' => '--file',
                                            '-param' => 'yes'),
                                Option->new('-option' => '-g',
                                            '-alias' => '--generate',
                                            '-only_if' => '[-f]'),
                                Option->new('-option' => '--print',
                                            '-only_if' => '[-f]'),
                                Option->new('-option' => '-s',
					    '-alias' => '--sourceDir',
					    '-param' => 'yes',
					    '-only_if' =>
                                            '[-t] and not ([-f])'),
				Option->new('-option' => '-t',
					    '-alias' => '--targetDir',
					    '-param' => 'yes',
					    '-only_if' =>
                                            '[-s] and not ([-f])'),
				Option->new('-option' => '-T',
					    '-alias' => '--tmpdir',
					    '-default' => $tmpdir,
                                            @onlyIf),
				Option->new('-option' => '-L',
					    '-alias' => '--lockFile',
					    '-param' => 'yes',
                                            @onlyIf),
				Option->new('-option' => '--unlockBeforeDel',
					    '-param' => 'yes',
                                            '-only_if' => '-L and ' . $onlyIf
                                            ),
				Option->new('-option' => '-e',
					    '-alias' => '--exceptDirs',
					    '-param' => 'yes',
                                            @onlyIf),
				Option->new('-option' => '-i',
					    '-alias' => '--includeDirs',
					    '-param' => 'yes',
                                            @onlyIf),
				Option->new('-option' => '--exceptDirsSep',
					    '-default' => $exceptDirsSep,
                                            @onlyIf),
				Option->new('-option' => '--exceptPattern',
					    '-param' => 'yes',
                                            @onlyIf),
				Option->new('-option' => '--includePattern',
					    '-param' => 'yes',
                                            @onlyIf),
			        Option->new('-option' => '--contExceptDirsErr',
                                            @onlyIf),
			        Option->new('-option' => '--exceptTypes',
                                            '-param' => 'yes',
                                            '-pattern' => '\A[Sbcfpl]+\Z',
                                            @onlyIf),
				Option->new('-option' => '--precommand',
					    '-param' => 'yes',
                                            @onlyIf),
				Option->new('-option' => '--postcommand',
					    '-param' => 'yes',
                                            @onlyIf),
				Option->new('-option' => '--followLinks',
					    '-default' => 0,
                                            @onlyIf),
				Option->new('-option' => '-c',
					    '-alias' => '--compress',
					    '-default' => $compress,
                                            @onlyIf),
				Option->new('-option' => '-u',
					    '-alias' => '--uncompress',
					    '-default' => $uncompress,
                                            @onlyIf),
				Option->new('-option' => '-p',
					    '-alias' => '--postfix',
					    '-default' => $postfix,
                                            @onlyIf),
				Option->new('-option' => '--noCompress',
					    '-default' => $noCompress,
					    '-pattern' => '\A\d+\Z',
                                            @onlyIf),
				Option->new('-option' => '--queueCompress',
					    '-default' => $queueCompress,
					    '-pattern' => '\A\d+\Z',
                                            @onlyIf),
				Option->new('-option' => '--noCopy',
					    '-default' => $noCopy,
					    '-pattern' => '\A\d+\Z',
                                            @onlyIf),
				Option->new('-option' => '--queueCopy',
					    '-default' => $queueCopy,
					    '-pattern' => '\A\d+\Z',
                                            @onlyIf),
				Option->new('-option' => '--copyBWLimit',
					    '-default' => 0,
					    '-pattern' => '\A\d+\Z',
                                            @onlyIf),
				Option->new('-option' => '--withUserGroupStat',
                                            @onlyIf),
				Option->new('-option' => '--userGroupStatFile',
					    '-param' => 'yes',
                                            @onlyIf),
				Option->new('-option' => '--exceptSuffix',
					    '-param' => 'yes',
					    '-only_if' =>
					    'not [--addExceptSuffix] and ' .
                                            $onlyIf),
				Option->new('-option' => '--addExceptSuffix',
					    '-param' => 'yes',
					    '-only_if' =>
					    'not [--exceptSuffix] and ' .
                                            $onlyIf),
				Option->new('-option' => '--compressMD5File',
					    '-default' => 'yes',
					    '-pattern' => '\Ayes\Z|\Ano\Z',
                                            @onlyIf),
                                Option->new('-option' => '--chmodMD5File',
                                            '-default' => $chmodMD5File,
                                            '-pattern' => '\A0[0-7]{3,4}\Z',
                                            @onlyIf),
				Option->new('-option' => '-v',
					    '-alias' => '--verbose',
                                            @onlyIf),
				Option->new('-option' => '-d',
					    '-alias' => '--debug',
					    '-default' => 0,
					    '-pattern' => '\A[012]\Z',
                                            @onlyIf),
				Option->new('-option' => '--resetAtime'),
				Option->new('-option' => '--doNotDelete'),
				Option->new('-option' => '--keepAll',
                                            '-default' => $keepAll),
				Option->new('-option' => '--keepWeekday',
					    '-param' => 'yes'),
				Option->new('-option' => '--keepFirstOfYear',
					    '-param' => 'yes'),
				Option->new('-option' => '--keepLastOfYear',
					    '-param' => 'yes'),
				Option->new('-option' => '--keepFirstOfMonth',
					    '-param' => 'yes'),
				Option->new('-option' => '--keepLastOfMonth',
					    '-param' => 'yes'),
                                Option->new('-option' => '--firstDayOfWeek',
                                            '-default' => 'Sun'),
				Option->new('-option' => '--keepFirstOfWeek',
					    '-param' => 'yes'),
				Option->new('-option' => '--keepLastOfWeek',
					    '-param' => 'yes'),
                                Option->new('-option' => '--keepDuplicate',
					    '-default' => $keepDuplicate),
                                Option->new('-option' => '--keepMinNumber',
					    '-default' => 0,
                                            '-pattern' => '\A\d+\Z'),
                                Option->new('-option' => '--keepMaxNumber',
					    '-default' => 0,
                                            '-pattern' => '\A\d+\Z'),
                                Option->new('-option' => '--ignoreReadError',
                                            @onlyIf),
				Option->new('-option' => '-l',
					    '-alias' => '--logFile',
					    '-default' => '',
					    '-only_if' =>
                                            '[-s] and [-t] and not ([-f])'),
				Option->new('-option' => '--plusLogStdout'),
				Option->new('-option' => '-w',
					    '-alias' => '--withTime',
					    '-default' => 'yes',
                                            '-only_if' => "$onlyIf and [-l]",
                                            '-pattern' => '\Ayes\Z|\Ano\Z'),
				Option->new('-option' => '-m',
					    '-alias' => '--maxFilelen',
					    '-default' => 1e6,
					    '-pattern' => '\A[e\d]+\Z',
                                            '-only_if' =>"$onlyIf and [-l]"),
				Option->new('-option' => '-n',
					    '-alias' => '--noOfOldFiles',
					    '-default' => '5',
					    '-pattern' => '\A\d+\Z',
                                            '-only_if' =>"$onlyIf and [-l]"),
                                Option->new('-option' => '--saveLogs',
                                            '-default' => 'no',
                                            '-only_if' => "$onlyIf and [-l]",
                                            '-pattern' => '\Ayes\Z|\Ano\Z'),
                                Option->new('-option' => '--compressWith',
                                            '-default' => 'bzip2',
                                            '-only_if' =>"$onlyIf and [-l]"),
				Option->new('-option' => '--logInBackupDir',
					    '-default' => 'no',
					    '-pattern' => '\Ayes\Z|\Ano\Z',
					    @onlyIf),
				Option->new('-option' =>
					    '--compressLogInBackupDir',
					    '-default' => 'yes',
					    '-pattern' => '\Ayes\Z|\Ano\Z',
					    '-only_if' => "$onlyIf and " .
                                            "[--logInBackupDir]"),
                                Option->new('-option' =>
                                            '--logInBackupDirFileName',
                                            '-default' =>
                                            $logInBackupDirFileName,
                                            '-only_if' => "$onlyIf and " .
                                            "[--logInBackupDir]"),
				Option->new('-option' => '--progressReport',
					    '-default' => 0,
					    '-pattern' => '\A\d+\Z',
                                            @onlyIf),
				Option->new('-option' => '--printDepth',
                                            @onlyIf)
				]
		    );

$CheckPar->check('-argv' => \@ARGV,
                 '-help' => $Help
                 );

# Auswertung der Parameter
my $configFile = $CheckPar->getOptWithPar('-f');
my $generateConfigFile = $CheckPar->getOptWithoutPar('-g');
my $print = $CheckPar->getOptWithoutPar('--print');

my $sourceDir = $CheckPar->getOptWithPar('-s');
my $targetDir = $CheckPar->getOptWithPar('-t');
$tmpdir = $CheckPar->getOptWithPar('-T');
my $lockFile = $CheckPar->getOptWithPar('-L');
my $unlockBeforeDel = $CheckPar->getOptWithPar('--unlockBeforeDel');
my $exceptDirs = $CheckPar->getOptWithPar('-e');
my $includeDirs = $CheckPar->getOptWithPar('-i');
$exceptDirsSep = $CheckPar->getOptWithPar('--exceptDirsSep');
my $exceptPattern = $CheckPar->getOptWithPar('--exceptPattern');
my $includePattern = $CheckPar->getOptWithPar('--includePattern');
my $contExceptDirsErr = $CheckPar->getOptWithoutPar('--contExceptDirsErr');
my $exceptTypes = $CheckPar->getOptWithPar('--exceptTypes');
my $precommand = $CheckPar->getOptWithPar('--precommand');
my $postcommand = $CheckPar->getOptWithPar('--postcommand');
my $followLinks = $CheckPar->getOptWithPar('--followLinks');
$followLinks = 0 unless $followLinks;
$compress = $CheckPar->getOptWithPar('-c');
$uncompress = $CheckPar->getOptWithPar('-u');
$postfix = $CheckPar->getOptWithPar('-p');
$noCompress = $CheckPar->getOptWithPar('--noCompress');
$queueCompress = $CheckPar->getOptWithPar('--queueCompress');
$noCopy = $CheckPar->getOptWithPar('--noCopy');
$queueCopy = $CheckPar->getOptWithPar('--queueCopy');
my $copyBWLimit = $CheckPar->getOptWithPar('--copyBWLimit');
my $withUserGroupStat = $CheckPar->getOptWithoutPar('--withUserGroupStat');
my $userGroupStatFile = $CheckPar->getOptWithPar('--userGroupStatFile');
my $exceptSuffix = $CheckPar->getOptWithPar('--exceptSuffix');
my $addExceptSuffix = $CheckPar->getOptWithPar('--addExceptSuffix');
my $compressMD5File = $CheckPar->getOptWithPar('--compressMD5File');
$chmodMD5File = $CheckPar->getOptWithPar('--chmodMD5File');
my $verbose = $CheckPar->getOptWithoutPar('-v');
my $debug = $CheckPar->getOptWithPar('-d');
my $resetAtime = $CheckPar->getOptWithoutPar('--resetAtime');
my $doNotDelete = $CheckPar->getOptWithoutPar('--doNotDelete');
$keepAll = $CheckPar->getOptWithPar('--keepAll');
my $keepWeekday = $CheckPar->getOptWithPar('--keepWeekday');
my $keepFirstOfYear = $CheckPar->getOptWithPar('--keepFirstOfYear');
my $keepLastOfYear = $CheckPar->getOptWithPar('--keepLastOfYear');
my $keepFirstOfMonth = $CheckPar->getOptWithPar('--keepFirstOfMonth');
my $keepLastOfMonth = $CheckPar->getOptWithPar('--keepLastOfMonth');
my $firstDayOfWeek = $CheckPar->getOptWithPar('--firstDayOfWeek');
my $keepFirstOfWeek = $CheckPar->getOptWithPar('--keepFirstOfWeek');
my $keepLastOfWeek = $CheckPar->getOptWithPar('--keepLastOfWeek');
my $keepDuplicate = $CheckPar->getOptWithPar('--keepDuplicate');
my $keepMinNumber = $CheckPar->getOptWithPar('--keepMinNumber');
my $keepMaxNumber = $CheckPar->getOptWithPar('--keepMaxNumber');
my $ignoreReadError = $CheckPar->getOptWithoutPar('--ignoreReadError');
$ignoreReadError = $ignoreReadError ? 'yes' : 'no';
my $logFile = $CheckPar->getOptWithPar('-l');
my $plusLogStdout = $CheckPar->getOptWithoutPar('--plusLogStdout');
my $withTime = $CheckPar->getOptWithPar('-w');
my $maxFilelen = $CheckPar->getOptWithPar('-m');
my $noOfOldFiles = $CheckPar->getOptWithPar('-n');
my $saveLogs = $CheckPar->getOptWithPar('--saveLogs');
my $compressWith = $CheckPar->getOptWithPar('--compressWith');
my $logInBackupDir = $CheckPar->getOptWithPar('--logInBackupDir');
my $compressLogInBackupDir =
 $CheckPar->getOptWithPar('--compressLogInBackupDir');
my $logInBackupDirFileName =
 $CheckPar->getOptWithPar('--logInBackupDirFileName');
my $progressReport = $CheckPar->getOptWithPar('--progressReport');
my $printDepth = $CheckPar->getOptWithoutPar('--printDepth');
$printDepth = $printDepth ? 'yes' : 'no';
my (@otherBackupDirs) = $CheckPar->getListPar();


my (@exceptDirs, @includeDirs, @exceptPattern, @includePattern,
    @precommand, @postcommand, $prLog);

if ($sourceDir)     # Paramter in der Kommandozeile gewhlt
{
    $chmodMD5File = oct $chmodMD5File;

    (@exceptDirs) = split(/$exceptDirsSep+/, $exceptDirs);
    (@includeDirs) = split(/$exceptDirsSep+/, $includeDirs);

    if ($exceptSuffix)
    {
	@exceptSuffix = split(/,/, $exceptSuffix);
    }
    else
    {
	$exceptSuffix = join(',', @exceptSuffix);
    }
    if ($addExceptSuffix)
    {
	$exceptSuffix = "$addExceptSuffix,$exceptSuffix";
	@exceptSuffix = split(/,/, $exceptSuffix);
    }

    my $err;
    ($err, @exceptPattern) =
	&readKeyFromFile::_splitQuotedLine($exceptPattern)
	if $exceptPattern;
    $prLog->print('-kind' => 'E',
		  '-str' =>
		  ['unbalanced quotes for param <exceptPattern>:',
		   '<$exceptPattern>'],
		  '-exit' => 1)
	if $err;

    ($err, @includePattern) =
	&readKeyFromFile::_splitQuotedLine($includePattern)
	if $includePattern;
    $prLog->print('-kind' => 'E',
		  '-str' =>
		  ['unbalanced quotes for param <includePattern>:',
		   '<$includePattern>'],
		  '-exit' => 1)
	if $err;

    ($err, @precommand) =
	&readKeyFromFile::_splitQuotedLine($precommand)
	if $precommand;
    $prLog->print('-kind' => 'E',
		  '-str' =>
		  ['unbalanced quotes for param <precommand>:',
		   '<$precommand>'],
		  '-exit' => 1)
	if $err;

    ($err, @postcommand) =
	&readKeyFromFile::_splitQuotedLine($postcommand)
	if $postcommand;
    $prLog->print('-kind' => 'E',
		  '-str' =>
		  ['unbalanced quotes for param <postcommand>:',
		   '<$postcommand>'],
		  '-exit' => 1)
	if $err;
}
elsif ($configFile)   # Konfigurationsdatei gewhlt
{
    if ($generateConfigFile)   # Konfigurationsdatei schreiben
    {
	if (-e $configFile)
	{
	    print "<$configFile> already exists, overwrite (y/n) ";
	    my $answer = <STDIN>;
	    if ($answer ne "y\n")
	    {
		print "finishing\n";
		exit 0;
	    }
	}
	open(FILE, "> $configFile") or
	    die "cannot open <$configFile>\n";
	print FILE $templateConfigFile;
	close(FILE);
	exit 0;
    }
    die "cannot open <$configFile>" unless -f $configFile;

    my $prLog = printLog->new();
    my $rcf = readConfigFile->new('-configFile' => $configFile,
				  '-print' => $print,
				  '-prLog' => $prLog,
				  '-tmpdir' => $tmpdir,
				  '-compress' => $compress,
				  '-uncompress' => $uncompress,
				  '-postfix' => $postfix,
				  '-noCompress' => $noCompress,
				  '-queueCompress' => $queueCompress,
				  '-noCopy' => $noCopy,
				  '-queueCopy' => $queueCopy,
				  '-exceptSuffix' => \@exceptSuffix,
				  '-chmodMD5File' => $chmodMD5File,
				  '-keepAll' => $keepAll,
				  '-keepDuplicate' => $keepDuplicate,
				  '-verbose' => $verbose,
				  '-logInBackupDirFileName' =>
				  $logInBackupDirFileName);
    $rcf->print() if $print;

    $sourceDir = $rcf->get('sourceDir');
    die "no source directory specified!\n\n$Help" unless $sourceDir;
    $targetDir = $rcf->get('targetDir');
    die "no target directory specified!\n\n$Help" unless $targetDir;
    $tmpdir = $rcf->get('tmpDir');
    die "no temporary directory specified!\n\n$Help" unless $tmpdir;
    (@otherBackupDirs) = @{$rcf->get('otherBackupDirs')};
    $lockFile = $rcf->get('lockFile');
    $unlockBeforeDel = $rcf->get('unlockBeforeDel');
    $unlockBeforeDel = undef unless $unlockBeforeDel eq 'yes';
    (@exceptDirs) = @{$rcf->get('exceptDirs')};
    (@includeDirs) = @{$rcf->get('includeDirs')};
    (@exceptPattern) = @{$rcf->get('exceptPattern')};
    $exceptPattern = join(' ', @exceptPattern);
    (@includePattern) = @{$rcf->get('includePattern')};
    $includePattern = join(' ', @includePattern);
    $contExceptDirsErr = $rcf->get('contExceptDirsErr');
    $contExceptDirsErr = ($contExceptDirsErr eq 'yes') ? 1 : undef;
    $exceptTypes = $rcf->get('exceptTypes');
    (@precommand) = @{$rcf->get('precommand')};
    $precommand = join(' ', @precommand);
    (@postcommand) = @{$rcf->get('postcommand')};
    $postcommand = join(' ', @postcommand);
    $followLinks = $rcf->get('followLinks');
    $compress = join(' ', @{$rcf->get('compress')});
    $uncompress = join(' ', @{$rcf->get('uncompress')});
    $postfix = $rcf->get('postfix');
    $noCompress = $rcf->get('noCompress');
    $queueCompress = $rcf->get('queueCompress');
    $noCopy = $rcf->get('noCopy');
    $queueCopy = $rcf->get('queueCopy');
    $copyBWLimit = $rcf->get('copyBWLimit');
    $withUserGroupStat = $rcf->get('withUserGroupStat');
    $withUserGroupStat = undef if $withUserGroupStat ne 'yes';
    $userGroupStatFile = $rcf->get('userGroupStatFile');
    my (@addExceptSuffix) = @{$rcf->get('addExceptSuffix')};
    @exceptSuffix = (@addExceptSuffix, @{$rcf->get('exceptSuffix')});
    $compressMD5File = $rcf->get('compressMD5File');
    $chmodMD5File = $rcf->get('chmodMD5File');
    $verbose = $rcf->get('verbose');
    $verbose = ($verbose eq 'yes') ? 1 : undef;
    $debug = $rcf->get('debug');
    $resetAtime = ($resetAtime eq 'on') ? 1 : undef;
    $doNotDelete = ($doNotDelete eq 'on') ? 1 : undef;
    $keepAll = $rcf->get('keepAll');
    $keepWeekday = join(' ', @{$rcf->get('keepWeekday')});
    $keepFirstOfYear = $rcf->get('keepFirstOfYear');
    $keepLastOfYear = $rcf->get('keepLastOfYear');
    $keepFirstOfMonth = $rcf->get('keepFirstOfMonth');
    $keepLastOfMonth = $rcf->get('keepLastOfMonth');
    $firstDayOfWeek = $rcf->get('firstDayOfWeek');
    $keepFirstOfWeek = $rcf->get('keepFirstOfWeek');
    $keepLastOfWeek = $rcf->get('keepLastOfWeek');
    $keepDuplicate = $rcf->get('keepDuplicate');
    $keepMinNumber = $rcf->get('keepMinNumber');
    $keepMaxNumber = $rcf->get('keepMaxNumber');
    $ignoreReadError = $rcf->get('ignoreReadError');
    $ignoreReadError = $ignoreReadError ? 'yes' : 'no';
    $logFile = $rcf->get('logFile');
    $plusLogStdout = $rcf->get('plusLogStdout') eq 'on' ? 1 : 0;
    $withTime = $rcf->get('withTime');
    $maxFilelen = $rcf->get('maxFilelen');
    $noOfOldFiles = $rcf->get('noOfOldFiles');
    $saveLogs = $rcf->get('saveLogs');
    $compressWith = join(' ', @{$rcf->get('compressWith')});
    $logInBackupDir = $rcf->get('logInBackupDir');
    $compressLogInBackupDir = $rcf->get('compressLogInBackupDir');
    $logInBackupDirFileName = $rcf->get('logInBackupDirFileName');
    $progressReport = $rcf->get('progressReport');
    $printDepth = $rcf->get('printDepth');

    # Delimiter berechnen
    $exceptDirsSep = join('', (@exceptDirs, @includeDirs));
    my $sep;
    foreach $sep (',', ':', ';', '+', '!', '^', '~'. "\001", "\002", "\003")
    {
	my $a = index($exceptDirsSep, $sep);
	if (index($exceptDirsSep, $sep) < 0)   # kommt nicht vor
	{
	    $exceptDirsSep = $sep;
	    last;
	}
    }

}
else    # keine von beiden Mglichkeiten gewhlt
{
    print "You have to chose option (-f) or (-s and -t)!\n$Help";
    exit 1;
}

my (@par);
if ($logFile eq '')
{
    push @par, ('-filedescriptor', *STDOUT);
}
else
{
    push @par, ('-file' => $logFile);
}
my ($prLogKind) = ['A:BEGIN',
		   'Z:END',
		   'I:INFO',
		   'W:WARNING',
		   'E:ERROR',
		   'P:PROGRESS',
		   'S:STATISTIC',
		   'D:DEBUG'];
my $prLog1 = printLog->new('-kind' => $prLogKind,
			   @par,
			   '-withTime' => $withTime,
			   '-maxFilelen' => $maxFilelen,
			   '-noOfOldFiles' => $noOfOldFiles,
			   '-saveLogs' => $saveLogs,
			   '-compressWith' => $compressWith);

$prLog = printLogMultiple->new('-prLogs' => [$prLog1]);

$prLog->print('-kind' => 'A',
	      '-str' => ["backing up directory <$sourceDir> to <$targetDir>"]);
if ($plusLogStdout)
{
    my $p = printLog->new('-kind' => $prLogKind,
			  '-filedescriptor', *STDOUT);
    $prLog->add('-prLogs' => [$p]);
}

$prLog->print('-kind' => 'E',
	      '-str' => ["cannot write to target directory <$targetDir>"],
	      '-exit' => 1)
    unless (-w $targetDir);

# OS-Typ feststellen, um ARG_MAX zu setzen
# Default wird vorsichtshalber auf 4 KB gesetzt!
{
    my $uname = forkProc->new('-exec' => 'uname',
			      '-outRandom' => "$tmpdir/uname-",
			      '-prLog' => $prLog);
    $uname->wait();
    my $out = $uname->getSTDOUT();
    my $os = '';
    if (exists $execParamLength{$$out[0]})
    {
	$main::execParamLength = $execParamLength{$$out[0]};
	$os = ' (' . $$out[0] . ')';
    }
    $prLog->print('-kind' => 'I',
		  '-str' => ['setting ARG_MAX to ' . $main::execParamLength .
			     $os]);
    $out = $uname->getSTDERR();
    $prLog->print('-kind' => 'E',
		  '-str' => ["STDERR of <uname>:", @$out])
	if (@$out > 0);
}


$prLog->print('-kind' => 'E',
	      '-str' => ["source directory <$sourceDir> does not exist"],
	      '-exit' => 1)
    unless (-d $sourceDir);
$prLog->print('-kind' => 'E',
	      '-str' => ["target directory <$targetDir> does not exist"],
	      '-exit' => 1)
    unless (-d $targetDir);

#
# sourceDir und targetDir normalisieren
#
$targetDir = &::absolutePath($targetDir);
$sourceDir = &::absolutePath($sourceDir);

#
# initialise include and exclude pattern
#
my $exclPatt = inclExclPattern->new('-pattern' => $exceptPattern,
				    '-pattLine' => \@exceptPattern,
				    '-keyName' => 'exceptPattern',
				    '-debug' => $debug,
				    '-verbose' => $verbose,
				    '-prLog' => $prLog,
				    '-tmpfile' =>
				    &::uniqFileName("$tmpdir/req-"));
my $inclPatt = inclExclPattern->new('-pattern' => $includePattern,
				    '-pattLine' => \@includePattern,
				    '-keyName' => 'includePattern',
				    '-debug' => $debug,
				    '-verbose' => $verbose,
				    '-prLog' => $prLog,
				    '-tmpfile' =>
				    &::uniqFileName("$tmpdir/req-"));


my $startDate = dateTools->new();

#
# otherBackupDirs ermitteln
#
if (@otherBackupDirs > 0)
{
    my (@obd, $d);

    (@obd) = (@otherBackupDirs);
    (@otherBackupDirs) = ();
    foreach $d (@obd)
    {
	if ($d =~ /\Aall:(.*)/)              # alle einer Sicherungsreihe
	{
	    my $dir = $1;
	    my $asbd =
		allStoreBackupDirs->new('-rootDir' => $dir,
					'-checkSumFile' => $checkSumFile,
					'-prLog' => $prLog,
					'-absPath' => 0);
	    my $x;
	    foreach $x (sort {$b cmp $a} ($asbd->getAllFinishedDirs()))
	    {
		push @otherBackupDirs, "$dir/$x";
	    }
	}
	elsif ($d =~ /\A(\d+)-(\d+):(.*)/)   # von - bis Bereich
	{
	    my ($from, $to, $dir) = ($1, $2, $3);
	    $prLog->print('-kind' => 'E',
			  '-str' => ["invalid range in param <$d>, exiting"],
			  '-exit' => 1)
		if ($from > $to);
	    my $i;
	    foreach $i ($from .. $to)
	    {
		push @otherBackupDirs, "$i:$dir";
	    }
	}
	else
	{
	    push @otherBackupDirs, $d;
	}
    }

    my $dir;
    (@obd) = ();
    foreach $d (@otherBackupDirs)
    {
	if ($d =~ /\A(\d+):(.*)/)     # Isolieren / Feststellen der Dirs
	{
	    my ($n, $b1) = ($1, $2);
	    my $asbd =
		allStoreBackupDirs->new('-rootDir' => $b1,
					'-checkSumFile' => $checkSumFile,
					'-prLog' => $prLog,
					'-absPath' => 0);
	    my (@d) =
		sort {$b cmp $a} ($asbd->getAllFinishedDirs());
	    if (exists $d[$n])
	    {
		$dir = $b1 . '/' . $d[$n];
	    }
	    else
	    {
		$prLog->print('-kind' => 'W',
			      '-str' =>
			      ["cannot opendir backup directory <$d>"]);
		next;
	    }
	}
	else
	{
	    if (-f "$d/$checkSumFile.notFinished")
	    {
		$prLog->print('-kind' => 'W',
			      '-str' =>
			      ["$d/$checkSumFile not finished, skipping"]);
		next;
	    }
	    else
	    {
		$dir = $d;
	    }
	}

	if (-d $dir)             # korrekte aufsammeln
	{
	    push @obd, $dir;
	}
	else
	{
	    $prLog->print('-kind' => 'W',
			  '-str' =>
			  ["backup directory <$dir> does not exist"]);
	}
    }
    (@otherBackupDirs) = (@obd);     # umkopieren
}
#print "otherBackupDirs = <@otherBackupDirs>\n";
if ($verbose and @otherBackupDirs)
{
    my (@obd) = ();
    my $o;
    foreach $o (@otherBackupDirs)
    {
	push @obd, "   $o";
    }
    $prLog->print('-kind' => 'I',
		  '-str' => ["otherBackupDirs =", @obd]);
}

my $aktDate = dateTools->new();

$main::stat = Statistic->new('-startDate' =>
			     $precommand ? $startDate : undef,
			     '-aktDate' => $aktDate,
			     '-userGroupStatFile' => $userGroupStatFile,
			     '-exceptSuffix' => $exceptSuffix,
			     '-prLog' => $prLog,
			     '-progressReport' => $progressReport,
			     '-withUserGroupStat' => $withUserGroupStat,
			     '-userGroupStatFile' => $userGroupStatFile,
			     '-compress' => $compress);


#
# check if all excludeDirs and includeDirs are relative Paths
#
{
    my $error = 0;
    my $d;
    foreach $d (@exceptDirs)
    {
	if ($d =~ /\A\//o)
	{
	    $prLog->print('-kind' => 'E',
			  '-str' =>
			  ["exceptDir <$d> is not a relative path!"]);
	    $error = 1;
	}
    }
    foreach $d (@includeDirs)
    {
	if ($d =~ /\A\//o)
	{
	    $prLog->print('-kind' => 'E',
			  '-str' =>
			  ["includeDir <$d> is not a relative path!"]);
	    $error = 1;
	}
    }
    $prLog->print('-kind' => 'E',
		  '-str' => ["exiting"],
		  '-exit' => 1)
	if $error;
}

#
# exception- und include-Liste berprfen und evaluieren
#
(@exceptDirs) = &evalExceptionList(\@exceptDirs, $sourceDir,
				   'exceptDir', 'excluding', $prLog);
(@includeDirs) = &evalExceptionList(\@includeDirs, $sourceDir,
				    'includeDir', 'including', $prLog);
$prLog->print('-kind' => 'I',
	      '-str' => ["exceptPattern = " . $exceptPattern])
    if $exceptPattern;
$prLog->print('-kind' => 'I',
	      '-str' => ["includePattern = " . $includePattern])
    if $includePattern;

#
# berprfen, ob Backup Target im Backup Source Tree liegt
#
my $targetInSource = 0;
if (index($targetDir . '/', $sourceDir . '/') == 0)  # liegt drin!
{
    $targetInSource = 1;                 # Annahme: es gibt keine Ausnahme
    if (@exceptDirs > 0)                 # testen, ob vielleicht im vom
    {                                    # vom Backup ausgenommenen Tree
	my $e;
	foreach $e (@exceptDirs)
	{
	    if (&::isSubDir($e, $targetDir))
	    {
		$targetInSource = 0;     # doch Ausnahme gefunden
		$prLog->print('-kind' => 'I',
			      '-str' =>
			      ["target directory <$targetDir> is in " .
			       "exception <$e> of source directory " .
			       "<$sourceDir>, ok"]);
		last;
	    }
	}
    }
    if ($targetInSource == 1 and
	@includeDirs > 0)            # check, if not in include paths
    {
	my $i;
	my $targetInSource = 0;      # assumption: target is not in source
	foreach $i (@includeDirs)
	{
	    if (&::isSubDir($i, $targetDir))
	    {
		$targetInSource = 1;
		last;
	    }
	}
    }
}
$prLog->print('-kind' => 'E',
	      '-str' =>
	      ["target directory <$targetDir> cannot be part of the " .
	       "source directory <$sourceDir>",
	       "define an exception with --exceptDirs or choose another " . 
	       "target directory"],
	      '-exit' => 1)
    if ($targetInSource);

#
# check if all exceptDirs are subdirectories of includeDirs or
# generate a warning
#
if (@exceptDirs and @includeDirs)
{
    my $e;
    foreach $e (@exceptDirs)
    {
	my $i;
	my $isIn = 0;
	foreach $i (@includeDirs)
	{
	    if (&::isSubDir($i, $e))
	    {
		$isIn = 1;
		last;
	    }
	}
	$prLog->print('-kind' => 'W',
		      '-str' => ["except dir <$e> is not part of the backup"])
	    unless $isIn;
    }
}

#
# lock file berprfen
#
if ($lockFile)
{
    if (-f $lockFile)
    {
	open(FILE, "< $lockFile") or
	    $prLog->print('-kind' => 'E',
			  '-str' => ["cannot read lock file <$lockFile>"],
			  '-exit' => 1);
	my $pid = <FILE>;
	chop $pid;
	close(FILE);
	$prLog->print('-kind' => 'E',
		      '-str' => ["strange format in lock file <$lockFile>, " .
				 "line is <$pid>\n"],
		      '-exit' => 1)
	    unless ($pid =~ /\A\d+\Z/o);
	if (kill(0, $pid) == 1)   # alte Instanz luft noch
	{
	    $prLog->print('-kind' => 'E',
			  '-str' => ["cannot start, old instance with pid " .
				     "<$pid> is allready running"],
			  '-exit' => 1);
	}
	else
	{
	    $prLog->print('-kind' => 'I',
			  '-str' => ["removing old lock file of process <$pid>"]
			  );
	}
    }

    $prLog->print('-kind' => 'I',
		  '-str' => ["creating lock file <$lockFile>"]);

    &::checkDelSymLink($lockFile, $prLog, 0x01);
    open(FILE, "> $lockFile") or
	$prLog->print('-kind' => 'E',
		      '-str' => ["cannot create lock file <$lockFile>"],
		      '-exit' => 1);
    print FILE "$$\n";
    close(FILE);
}

# prepare exceptTypes 
my (%exTypes, $et);
foreach $et (split(//, $exceptTypes))
{
    $exTypes{$et} = 0;         # this is a flag and and also a counter
}


#
# precommand ausfhren
#
if ($precommand)
{
    $prLog->print('-kind' => 'I',
		  '-str' => ["starting pre command <$precommand> ..."]);
    my ($preComm, @preParam) = (@precommand);
    my $preco = forkProc->new('-exec' => $preComm,
			      '-param' => \@preParam,
			      '-workingDir' => '.',
			      '-outRandom' => "$tmpdir/precomm-",
			      '-prLog' => $prLog);
    $preco->wait();
    my $out = $preco->getSTDOUT();
    $prLog->print('-kind' => 'W',
		  '-str' => ["STDOUT of <$precommand>:", @$out])
	if (@$out > 0);
    $out = $preco->getSTDERR();
    $prLog->print('-kind' => 'E',
		  '-str' => ["STDERR of <$precommand>:", @$out])
	if (@$out > 0);

    my $status = $preco->get('-what' => 'status');
    if ($status == 0)
    {
	$prLog->print('-kind' => 'I',
		      '-str' =>
		      ["pre command <$precommand> finished with status 0"]);
    }
    else
    {
	$prLog->print('-kind' => 'E',
		      '-str' => ["pre command <$precommand> finished with " .
				 "status $status, exiting"]);
	unlink $lockFile if $lockFile;
	exit 1;
    }
}


#
# Erzeugen der bentigten Objekte
#
my $adminDirs = adminDirectories->new('-targetDir' => $targetDir,
				      '-checkSumFile' => $checkSumFile,
				      '-tmpdir' => $tmpdir,
				      '-chmodMD5File' => $chmodMD5File,
				      '-prLog' => $prLog,
				      '-aktDate' => $aktDate,
				      '-debugMode' => $debug);

my $setResetDirTimes =
    setResetDirTimes->new('-tmpDir' => $tmpdir,
			  '-sourceDir' => $sourceDir,
			  '-targetDir' => $adminDirs->getAktDir(),
			  '-prLog' => $prLog);

my $prLog2 = undef;
if ($logInBackupDir eq 'yes')    # auch in BackupDirs herinloggen
{
    $logInBackupDirFileName =
	$adminDirs->getAktDir() . "/$logInBackupDirFileName",

    $prLog2 = printLog->new('-kind' => $prLogKind,
			    '-file' => $logInBackupDirFileName,
			    '-withTime' => 'yes',
			    '-maxFilelen' => 1e9,
			    '-noOfOldFiles' => 1);
    $prLog->add('-prLogs' => [$prLog2]);
}

my $delOld =
    deleteOldBackupDirs->new('-targetDir' => $targetDir,
			     '-doNotDelete' => $doNotDelete,
			     '-checkSumFile' => $checkSumFile,
			     '-prLog' => $prLog,
			     '-today' => $aktDate,
			     '-keepFirstOfYear' => $keepFirstOfYear,
			     '-keepLastOfYear' => $keepLastOfYear,
			     '-keepFirstOfMonth' => $keepFirstOfMonth,
			     '-keepLastOfMonth' => $keepLastOfMonth,
			     '-firstDayOfWeek' => $firstDayOfWeek,
			     '-keepFirstOfWeek' => $keepFirstOfWeek,
			     '-keepLastOfWeek' => $keepLastOfWeek,
			     '-keepAll' => $keepAll,
			     '-keepWeekday' => $keepWeekday,
			     '-keepDuplicate' => $keepDuplicate,
			     '-keepMinNumber' => $keepMinNumber,
			     '-keepMaxNumber' => $keepMaxNumber,
			     '-statDelOldBackupDirs' => $main::stat,
			     '-alsoCheckLastBackup' => 'yes'
			     );
$delOld->checkBackups();

my $onlyMD5Check = undef;        # Annahme
if ($adminDirs->getPrevDir())
{
    $onlyMD5Check = 1 if (@otherBackupDirs > 0);     # Einschalten

    $prLog->print('-kind' => 'I',
		  '-str' => ["previous backup is in <" .
			     $adminDirs->getPrevDir() . ">",
			     "follow links depth is $followLinks"]);
}
else
{
    $prLog->print('-kind' => 'I',
		  '-str' => ["first backup directory",
			     "follow links depth is $followLinks"]);
}
#print "onlyMD5Check = <$onlyMD5Check>\n";

#print "aktDir = ", $adminDirs->getAktDir(), "\n";
#print "aktInfoFile", $adminDirs->getAktInfoFile(), "\n";
#print "prevDir = ", $adminDirs->getPrevDir(), "\n";
#print "oldInfoFile = ", $adminDirs->getOldInfoFile(), "\n";

my $indexDir = indexDir->new();

my $aktFilename =
    aktFilename->new('-infoFile' => $adminDirs->getAktInfoFile(),
		     '-compressMD5File' => $compressMD5File,
		     '-sourceDir' => $sourceDir,
		     '-followLinks' => $followLinks,
		     '-compress' => $compress,
		     '-uncompress' => $uncompress,
		     '-postfix' => $postfix,
		     '-exceptSuffix' => \@exceptSuffix,
		     '-exceptPattern' => $exclPatt,
		     '-includePattern' => $inclPatt,
		     '-exceptTypes' => $exceptTypes,
		     '-exceptDirsSep' => $exceptDirsSep,
		     '-exceptDirs' => \@exceptDirs,
		     '-includeDirs' => \@includeDirs,
		     '-aktDate' => $aktDate,
		     '-chmodMD5File' => $chmodMD5File,
		     '-indexDir' => $indexDir,
		     '-prLog' => $prLog);

my $oldFilename =
    oldFilename->new('-prevDir' => $adminDirs->getPrevDir(),
		     '-dbmBaseName' => "$tmpdir/dbm",
		     '-indexDir' => $indexDir,
		     '-onlyMD5Check' => $onlyMD5Check,
		     '-progressReport' => $progressReport,
		     '-aktDir' => $adminDirs->getAktDir(),
		     '-otherBackupDirs' => \@otherBackupDirs,
		     '-onlyMD5Check' => $onlyMD5Check,
		     '-prLog' => $prLog,
		     '-checkSumFile' => $checkSumFile);

$aktFilename->setDBMmd5($oldFilename->getDBMmd5());

my $readDirAndCheck =
    readDirCheckSizeTime->new('-adminDirs' => $adminDirs,
			      '-oldFilename' => $oldFilename,
			      '-aktFilename' => $aktFilename,
			      '-dir' => $sourceDir,
			      '-followLinks' => $followLinks,
			      '-exceptDirs' => [@exceptDirs],
			      '-includeDirs' => [@includeDirs],
			      '-aktDir' => $adminDirs->getAktDir(),
			      '-postfix' => $postfix,
			      '-onlyMD5Check' => $onlyMD5Check,
			      '-exceptPattern' => $exclPatt,
			      '-includePattern' => $inclPatt,
			      '-exTypes' => \%exTypes,
			      '-resetAtime' => $resetAtime,
			      '-debugMode' => $debug,
			      '-verbose' => $verbose,
			      '-tmpdir' => $tmpdir,
			      '-prLog' => $prLog,
			      '-ignoreReadError' => $ignoreReadError,
			      '-printDepth' => $printDepth);

my $parForkCopy = parallelForkProc->new('-maxParallel' => $noCopy,
					'-prLog' => $prLog);
my $parForkCompr = parallelForkProc->new('-maxParallel' => $noCompress,
					 '-prLog' => $prLog);

# signal handling
(@main::cleanup) =      # Objekte verfgbar machen
    ($prLog, $oldFilename, $aktFilename, $parForkCopy, $parForkCompr, $tmpdir);
$SIG{INT} = \&cleanup;
$SIG{TERM} = \&cleanup;


my $fifoCopy = fifoQueue->new('-maxLength' => $queueCopy,
			      '-prLog' => $prLog);
my $fifoCompr = fifoQueue->new('-maxLength' => $queueCompress,
			       '-prLog' => $prLog);

my $scheduler = 
    Scheduler->new('-aktFilename' => $aktFilename,
		   '-oldFilename' => $oldFilename,
		   '-readDirAndCheck' => $readDirAndCheck,
		   '-setResetDirTimes' => $setResetDirTimes,
		   '-parForkCopy' => $parForkCopy,
		   '-fifoCopy' => $fifoCopy,
		   '-copyBWLimit' => $copyBWLimit,
		   '-parForkCompr' => $parForkCompr,
		   '-compress' => $compress,
		   '-postfix' => $postfix,
		   '-fifoCompr' => $fifoCompr,
		   '-exceptSuffix' => \@exceptSuffix,
		   '-targetDir' => $adminDirs->getAktDir(),
		   '-aktInfoFile' => $checkSumFile,
		   '-resetAtime' => $resetAtime,
		   '-tmpdir' => $tmpdir,
		   '-prLog' => $prLog,
		   '-debugMode' => $debug);

$main::tinyWaitScheduler = tinyWaitScheduler->new('-firstFast' => 1,
						  '-noOfWaitSteps' => 20);
$scheduler->normalOperation();   # die eigentliche Verarbeitung

$setResetDirTimes->writeTimes(); # set atime, mtime for directories

$aktFilename->closeInfoFile();
$oldFilename->readDBMFilesSize();
$oldFilename->delDBMFiles();     # dbm files lschen


#
# postcommand ausfhren
#

if ($postcommand)
{
    $prLog->print('-kind' => 'I',
		  '-str' => ["starting post command <$postcommand> ..."]);
    my ($postComm, @postParam) = (@postcommand);
    my $postco = forkProc->new('-exec' => $postComm,
			      '-param' => \@postParam,
			      '-workingDir' => '.',
			      '-outRandom' => "$tmpdir/postcomm-",
			      '-prLog' => $prLog);
    $postco->wait();
    my $out = $postco->getSTDOUT();
    $prLog->print('-kind' => 'W',
		  '-str' => ["STDOUT of <$postcommand>:", @$out])
	if (@$out > 0);
    $out = $postco->getSTDERR();
    $prLog->print('-kind' => 'E',
		  '-str' => ["STDERR of <$postcommand>:", @$out])
	if (@$out > 0);

    my $status = $postco->get('-what' => 'status');
    if ($status == 0)
    {
	$prLog->print('-kind' => 'I',
		      '-str' =>
		      ["post command <$postcommand> finished with status 0"]);
    }
    else
    {
	$prLog->print('-kind' => 'E',
		      '-str' => ["post command <$postcommand> finished " .
				 "with status $status"]);
	unlink $lockFile if $lockFile;
	exit 1;
    }
}

# lock file lschen
if ($lockFile and $unlockBeforeDel)
{
    $prLog->print('-kind' => 'I',
		  '-str' => ["removing lock file <$lockFile>"]);
    unlink $lockFile;
    $lockFile = undef;
}

#
# jetzt noch alte Backups lschen
#
$delOld->deleteBackups();


# Gre von .md5CheckSum-Datei noch fr Statistik bercksichtigen
$main::stat->setSizeMD5CheckSum($adminDirs->getAktInfoFile(),
				$compressMD5File);
$main::stat->setUsedSizeQueues($fifoCopy->getMaxUsedLength(),
			       $fifoCompr->getMaxUsedLength());
$main::stat->print('-exTypes' => \%exTypes);

# lock file lschen
if ($lockFile)
{
    $prLog->print('-kind' => 'I',
		  '-str' => ["removing lock file <$lockFile>"]);
    unlink $lockFile;
}

if ($compressLogInBackupDir eq 'yes')    # log file im BackupDir noch
{                                        # komprimieren
    $prLog->sub('-prLogs' => [$prLog2]);
    my $compressLog = forkProc->new('-exec' => 'bzip2',
				    '-param' => [$logInBackupDirFileName],
				    '-outRandom' => "$tmpdir/comprLog-",
				    '-prLog' => $prLog);
    $compressLog->wait();
}

$prLog->print('-kind' => 'Z',
	      '-str' => ["backing up directory <$sourceDir> to <" .
			 $adminDirs->getAktDir() . ">"]);

wait;
POSIX:_exit 0;


##################################################
sub cleanup
{
    my $signame = shift;

    my ($prLog, $oldFilename, $aktFilename, $parForkCopy, $parForkCompr,
	$tmpdir) = (@main::cleanup);

    $prLog->print('-kind' => 'E',
		  '-str' => ["caught signal $signame, terminating"]);

    # Dateien schlieen, aufrumen
    $aktFilename->delInfoFile();
    $oldFilename->delDBMFiles();     # dbm files lschen
    unlink "$tmpdir/storeBackup-dirs.$$";

    # laufende Prozesse abschieen
    $parForkCopy->signal('-value' => 2);
    $parForkCompr->signal('-value' => 2);

    $prLog->print('-kind' => 'Z',
		  '-str' => ["backing up directory <$sourceDir>"]);

    exit 1;
}


##################################################
#
# exception-Liste berprfen und evaluieren
#
sub evalExceptionList
{
    my $exceptDirs = shift;   # Pointer auf Liste mit Ausnahme-Directories
    my $sourceDir = shift;
    my $exceptDir = shift;
    my $excluding = shift;
    my $prLog = shift;

    my $e;
    my $flag = 0;
    my (@allExceptDirs);
    foreach $e (@$exceptDirs)
    {
	my (@a) = ("$sourceDir/$e");
	my (@e) = <@a>;        # wildcards auflsen, rechts muss Array stehen
	unless (@e)            # this happens if path does not exist
	{
	    $prLog->print('-kind' => 'E',
			  '-str' =>
			  ["<$sourceDir/$e>: path or pattern of $exceptDir " .
			   "does not exist"]);
	    $flag = 1;
	}
	(@a) = ();             # wird jetzt zum Aufsammeln verwendet
	my $e1;
	foreach $e1 (@e)
	{
	    next if -l $e1 or not -d $e1;

	    my $a = &::absolutePath($e1);
	    if ($a)
	    {
		push @a, $a;
	    }
	    else
	    {
		$flag = 1;
		$prLog->print('-kind' => 'E',
			      '-str' => ["$exceptDir <$e1> does not exist"])
		    if $prLog;
	    }
	}
	unless (@a)
	{
	    $prLog->print('-kind' => 'E',
			  '-str' => ["no directory resulting from " .
				     "pattern <$e>"]);
	    $flag = 1;
	}
	push @allExceptDirs, @a;
	if (@e == 1 and $a[0] eq $e)
	{
	    $prLog->print('-kind' => 'I',
			  '-str' => ["$excluding <$a[0]>"])
		if $prLog;
	}
	elsif (@a != 0)
	{
	    my (@p, $p);
	    foreach $p (@a)
	    {
		push @p, "    $excluding <$p>";
	    }
	    $prLog->print('-kind' => 'I',
			  '-str' => ["$excluding <$e>:", @p])
		if $prLog;
	}
    }
    if ($flag and not $contExceptDirsErr)
    {
	$prLog->print('-kind' => 'E',
		      '-str' => ["exiting"])
	    if $prLog;
	exit 1;
    }
    return (@allExceptDirs);
}


##################################################
# Erzeugt und verwaltet DBM Dateien mit Informationen
# ber bestehende Backup Verzeichnisse
package oldFilename;

sub new
{
    my $class = shift;
    my $self = {};

    my (%params) = ('-prevDir'         => undef,
		    '-dbmBaseName'     => undef,
		    '-indexDir'        => undef,
		    '-onlyMD5Check'    => undef,
		    '-progressReport'  => undef,
		    '-aktDir'          => undef,
		    '-otherBackupDirs' => [],
		    '-onlyMD5Check'    => undef,
		    '-prLog'           => undef,
		    '-checkSumFile'    => undef,
		    '-debugMode'       => 'no'
		    );

    &::checkObjectParams(\%params, \@_, 'oldFilename::new',
			 ['-prevDir', '-dbmBaseName', '-indexDir',
			  '-aktDir', '-otherBackupDirs', '-onlyMD5Check',
			  '-prLog', '-checkSumFile']);
    &::setParamsDirect($self, \%params);

    my $prevDir = $self->{'prevDir'};
    my $otherBackupDirs = $self->{'otherBackupDirs'};
    my $onlyMD5Check = $self->{'onlyMD5Check'};
    my $prLog = $self->{'prLog'};

    my ($DBMfilename, $DBMmd5);
    $self->{'DBMfilenameString'} = $DBMfilename = $self->{'dbmBaseName'} .
	".file.$$";
    $self->{'DBMmd5String'} = $DBMmd5 = $self->{'dbmBaseName'} . ".md5.$$";

    # Einlesen der alten Datei und Erzeugen der beiden dbm-Files
    my (%DBMfilename, %DBMmd5);
    $self->{'DBMfilename'} = \%DBMfilename;
    $self->{'DBMmd5'} = \%DBMmd5;
    &::checkDelSymLink($DBMfilename, $prLog, 0x01);
    if (-e $DBMfilename)
    {
	$prLog->print('-kind' => 'W',
		      '-str' => ["deleting <$DBMfilename>"]);
	unlink $DBMfilename or
	    $prLog->print('-kind' => 'E',
			  '-str' =>
			  ["cannot delete <$DBMfilename>, exiting"],
			  '-exit' => 1);
    }
    dbmopen(%DBMfilename, $DBMfilename, 0600);
    &::checkDelSymLink($DBMmd5, $prLog, 0x01);
    if (-e $DBMmd5)
    {
	$prLog->print('-kind' => 'W',
		      '-str' => ["deleting <$DBMfilename>"]);
	unlink $DBMmd5 or
	    $prLog->print('-kind' => 'E',
			  '-str' =>
			  ["cannot delete <$DBMfilename>, exiting"],
			  '-exit' => 1);
    }
    dbmopen(%DBMmd5, $DBMmd5, 0600);

    # Liste mit allen Directories erstellen
    my (@backupDirs) = ($self->{'aktDir'});
    push @backupDirs, $prevDir if $prevDir;
    push @backupDirs, @$otherBackupDirs;
    $self->{'backupDirs'} = \@backupDirs;

    my (@bd, $dir, %inode, $devDir);
    my $dev = undef;
    foreach $dir (@backupDirs)
    {
	my ($_dev, $_inode) = (stat($dir))[0,1];
	if ($dev)                 # berprfen, ob alle im selben device
	{
	    if ($dev ne $_dev)
	    {
		rmdir $self->{'aktDir'};
		$prLog->print('-kind' => 'E',
			      '-str' => ["<$devDir> and <$dir> are " .
					 "not on the same device"],
			      '-exit' => 1);
	    }
	}
	else
	{
	    $dev = $_dev;        # merken
	    $devDir = $dir;
	}
 
	if (exists $inode{$_inode})
	{
	    $prLog->print('-kind' => 'I',
			  '-str' => ["<$dir> is the same directory as <" .
				     $inode{$_inode} . ">, ignoring"]);
	    next;
	}
	else
	{
	    $inode{$_inode} = $dir;
	}

	push @bd, $dir;
    }
    @backupDirs = @bd;

#print "backupDirs = @backupDirs\n";
    my $i;
    my $noEntriesInDBM = 0;
    for ($i = 1 ; $i < @backupDirs ; $i++)
    {
	my $d = $backupDirs[$i];

	if (-f "$d/$checkSumFile.bz2" or -f "$d/$checkSumFile")
	{
#print "$i -> $d\n";
	    $noEntriesInDBM +=
		&::buildDBMs('-dbmKeyIsFilename' => \%DBMfilename,
			     '-dbmKeyIsMD5Sum' => \%DBMmd5,
			     '-indexDir' => $self->{'indexDir'},
			     '-previousDir' => $prevDir,
			     '-backupRoot' => $d,
			     '-backupDirIndex' => $i,
			     '-noBackupDir' => scalar @backupDirs,
			     '-checkSumFile' => $checkSumFile,
			     '-checkSumFileVersion'
			     => $main::checkSumFileVersion,
			     '-progressReport' => $progressReport,
			     '-prLog' => $prLog);
	}
    }
    $prLog->print('-kind' => 'I',
		  '-str' => ["$noEntriesInDBM entries in dbm files"]);

    bless $self, $class;
}


########################################
sub getIndexDir
{
    my $self = shift;

    return $self->{'indexDir'};
}


########################################
sub getDBMmd5
{
    my $self = shift;

    return $self->{'DBMmd5'};
}


########################################
sub getPrevDir
{
    my $self = shift;

    return $self->{'prevDir'};
}


########################################
sub getInodebackupComprCtimeMtimeSizeMD5
{
    my $self = shift;

    my $filename = shift;

    my $DBMfilename = $self->{'DBMfilename'};
    my $filename = $self->{'indexDir'}->setIndex($filename);

    if (exists $$DBMfilename{$filename})
    {
	return unpack('aIIIH32', $$DBMfilename{$filename});
    }
    else
    {
	return ();
    }
}


########################################
# returns ($inodeBackup $compr $backupDirIndex $backupDir $filename)
sub getFilename
{
    my $self = shift;

    my $md5sum = shift;

    my $DBMmd5 = $self->{'DBMmd5'};

#print "-2-$md5sum ($DBMmd5)\n";
    my $md5pack = pack('H32', $md5sum);
    if (exists $$DBMmd5{$md5pack})
    {
#print "\tgefunden\n";
	my (@r) = unpack('IaSa*', $$DBMmd5{$md5pack});
	my $backupDirs = $self->{'backupDirs'};
	my $f = $self->{'indexDir'}->replaceIndex($r[3]);
	return (@r[0..2], $$backupDirs[$r[2]], $f);
    }
    return ();
}


########################################
sub deleteEntry
{
    my $self = shift;

    my $md5sum = shift;
    my $f = shift;

    my $DBMmd5 = $self->{'DBMmd5'};
    my $md5pack = pack('H32', $md5sum);
    delete $$DBMmd5{$md5pack};

    my $DBMfilename = $self->{'DBMfilename'};
    $f = $self->{'indexDir'}->setIndex($f);
    delete $$DBMfilename{$f};
}


########################################
sub readDBMFilesSize
{
    my $self = shift;

    my $size = 0;
    my $f;
    foreach $f ($self->{'DBMfilenameString'}, $self->{'DBMmd5String'})
    {
	$main::stat->addSumDBMFiles( (stat($f))[7] );
    }
}


########################################
sub delDBMFiles
{
    my $self = shift;

    dbmclose(%{$self->{'md5'}});
    dbmclose(%{$self->{'filename'}});

    my $f1 = $self->{'DBMfilenameString'};
    my $f2 = $self->{'DBMmd5String'};

    if ($onlyMD5Check)
    {
	$self->{'prLog'}->print('-kind' => 'I',
				'-str' => ["unlink $f1, $f2"]);
    }
    else
    {
	$self->{'prLog'}->print('-kind' => 'I',
				'-str' => ["unlink $f2"]);
    }

    unlink <$f1*>;
    unlink <$f2*>;
}


##################################################
# schreibt neue Meta-Informationen in dbms + .md5CheckSum
package aktFilename;

sub new
{
    my $class = shift;
    my $self = {};

    my (%params) = ('-infoFile'        => undef,
		    '-compressMD5File' => undef,
		    '-sourceDir'       => undef,
		    '-followLinks'     => undef,
		    '-compress'        => undef,
		    '-uncompress'      => undef,
		    '-postfix'         => undef,
		    '-exceptSuffix'    => [],
		    '-exceptDirsSep'   => undef,
		    '-exceptDirs'      => [],
		    '-includeDirs'     => [],
		    '-exceptPattern'   => undef,
		    '-includePattern'  => undef,
		    '-exceptTypes'     => undef,
		    '-aktDate'         => undef,
		    '-prLog'           => undef,
		    '-chmodMD5File'    => undef,
		    '-indexDir'        => undef,
		    '-debugMode'       => 'no');

    &::checkObjectParams(\%params, \@_, 'aktFilename::new',
			 ['-infoFile',
			  '-compressMD5File', '-sourceDir', '-followLinks',
			  '-compress', '-uncompress', '-postfix',
			  '-exceptSuffix', '-exceptDirsSep', '-exceptDirs',
			  '-includeDirs', '-exceptPattern', '-exceptTypes',
			  '-includePattern', '-aktDate',
			  '-prLog', '-chmodMD5File', '-indexDir']);
    &::setParamsDirect($self, \%params);

    my $prLog = $self->{'prLog'};

    my (@infoLines) = ("version=" . $main::checkSumFileVersion,
		       "date=" .
		       $self->{'aktDate'}->getDateTime('-format' =>
						       '%Y.%M.%D %h.%m.%s'),
		       "sourceDir=" . $self->{'sourceDir'},
		       "followLinks=" . $self->{'followLinks'},
		       "compress=" . $self->{'compress'},
		       "uncompress=" . $self->{'uncompress'},
		       "postfix=" . $self->{'postfix'},
		       "exceptSuffix=" .
		       join(',', sort @{$self->{'exceptSuffix'}}),
		       "exceptDirsSep=" . $self->{'exceptDirsSep'},
		       "exceptDirs=" . join($params{'-exceptDirsSep'},
					    @{$self->{'exceptDirs'}}),
		       "includeDirs=" . join($params{'-exceptDirsSep'},
					    @{$self->{'includeDirs'}}),
		       "exceptPattern=" .
		       $self->{'exceptPattern'}->getPattern(),
		       "includePattern=" .
		       $self->{'includePattern'}->getPattern(),
		       "exceptTypes=" . $self->{'exceptTypes'}
		       );
    my $infoFile = $self->{'infoFile'};

    my $wcsf = writeCheckSumFile->new('-checkSumFile' => $infoFile,
				      '-infoLines' => \@infoLines,
				      '-prLog' => $prLog,
				      '-chmodMD5File' => $self->{'chmodMD5File'},
				      '-compressMD5File' =>
				      $self->{'compressMD5File'});
    $self->{'writeCheckSumFile'} = $wcsf;

    bless $self, $class;
}


########################################
sub setDBMmd5
{
    my $self = shift;

    $self->{'DBMmd5'} = shift;
}


########################################
# fr normale Dateien
sub store
{
    my $self = shift;

    my (%params) = ('-filename'    => undef,
		    '-md5sum'      => undef,
		    '-compr'       => undef,
		    '-dev'         => undef,
		    '-inode'       => undef,
		    '-inodeBackup' => undef,
		    '-ctime'       => undef,
		    '-mtime'       => undef,
		    '-atime'       => undef,
		    '-size'        => undef,
		    '-uid'         => undef,
		    '-gid'         => undef,
		    '-mode'        => undef,
		    '-storeInDBM'  => 1      # Default: speichern,
		                             #          0 = nicht speichern
		    );

    &::checkObjectParams(\%params, \@_, 'aktFilename::store',
			 ['-filename', '-md5sum', '-compr', '-dev', '-inode',
			  '-inodeBackup', '-ctime', '-mtime', '-atime',
			  '-size', '-uid', '-gid', '-mode']);
    my $filename = $params{'-filename'};
    my $md5sum = $params{'-md5sum'};
    my $compr = $params{'-compr'};
    my $dev = $params{'-dev'};
    my $inode = $params{'-inode'};
    my $inodeBackup = $params{'-inodeBackup'};
    my $ctime = $params{'-ctime'};
    my $mtime = $params{'-mtime'};
    my $atime = $params{'-atime'};
    my $size = $params{'-size'};
    my $uid = $params{'-uid'};
    my $gid = $params{'-gid'};
    my $mode = $params{'-mode'};

    if ($params{'-storeInDBM'})
    {
	my $DBMmd5 = $self->{'DBMmd5'};

	my $md5pack = pack('H32', $md5sum);
	my $f = $self->{'indexDir'}->setIndex($filename);
	$$DBMmd5{$md5pack} = pack('IaSa*', $inodeBackup, $compr,
				  0, $f)
	    unless exists $$DBMmd5{$md5pack};
    }	                            # $backupDirIndex ist immer 0

    $self->{'writeCheckSumFile'}->write('-filename' => $filename,
					'-md5sum' => $md5sum,
					'-compr' => $compr,
					'-dev' => $dev,
					'-inode' => $inode,
					'-inodeBackup' => $inodeBackup,
					'-ctime' => $ctime,
					'-mtime' => $mtime,
					'-atime' => $atime,
					'-size' => $size,
					'-uid' => $uid,
					'-gid' => $gid,
					'-mode' => $mode
					);
}


########################################
sub storeDir
{
    my $self = shift;

    my (%params) = ('-dir'   => undef,
		    '-dev'   => undef,
		    '-inode' => undef,
		    '-ctime' => undef,
		    '-mtime' => undef,
		    '-atime' => undef,
		    '-uid'   => undef,
		    '-gid'   => undef,
		    '-mode'  => undef
		    );

    &::checkObjectParams(\%params, \@_, 'aktFilename::storeDir',
			 ['-dir', '-dev', '-inode', '-ctime', '-mtime',
			  '-atime', '-uid', '-gid', '-mode']);

    my $dir = $params{'-dir'};
    my $dev = $params{'-dev'};
    my $inode = $params{'-inode'};
    my $ctime = $params{'-ctime'};
    my $mtime = $params{'-mtime'};
    my $atime = $params{'-atime'};
    my $uid = $params{'-uid'};
    my $gid = $params{'-gid'};
    my $mode = $params{'-mode'};

    my $inodeBackup = 0;    # irrelevant

    $self->{'writeCheckSumFile'}->write('-filename' => $dir,
					'-md5sum' => 'dir',
					'-compr' => 0,
					'-dev' => $dev,
					'-inode' => $inode,
					'-inodeBackup' => $inodeBackup,
					'-ctime' => $ctime,
					'-mtime' => $mtime,
					'-atime' => $atime,
					'-size' => 0,
					'-uid' => $uid,
					'-gid' => $gid,
					'-mode' => $mode
					);
}


########################################
sub storeSymlink
{
    my $self = shift;

    my (%params) = ('-symlink' => undef,
		    '-dev'   => undef,
		    '-inode' => undef,
		    '-ctime'   => undef,
		    '-mtime'   => undef,
		    '-atime'   => undef,
		    '-uid'     => undef,
		    '-gid'     => undef,
		    );

    &::checkObjectParams(\%params, \@_, 'aktFilename::storeSymlink',
			 ['-symlink', '-dev', '-inode', '-ctime', '-mtime',
			  '-atime', '-uid', '-gid']);

    my $symlink = $params{'-symlink'};
    my $dev = $params{'-dev'};
    my $inode = $params{'-inode'};
    my $ctime = $params{'-ctime'};
    my $mtime = $params{'-mtime'};
    my $atime = $params{'-atime'};
    my $uid = $params{'-uid'};
    my $gid = $params{'-gid'};

    my $inodeBackup = 0;   # irrelevant

    $self->{'writeCheckSumFile'}->write('-filename' => $symlink,
					'-md5sum' => 'symlink',
					'-compr' => 0,
					'-dev' => $dev,
					'-inode' => $inode,
					'-inodeBackup' => $inodeBackup,
					'-ctime' => $ctime,
					'-mtime' => $mtime,
					'-atime' => $atime,
					'-size' => 0,
					'-uid' => $uid,
					'-gid' => $gid,
					'-mode' => 0
					);
}


########################################
sub storeNamedPipe
{
    my $self = shift;

    my (%params) = ('-pipe'  => undef,
		    '-dev'   => undef,
		    '-inode' => undef,
		    '-ctime' => undef,
		    '-mtime' => undef,
		    '-atime' => undef,
		    '-uid'   => undef,
		    '-gid'   => undef,
		    '-mode'  => undef
		    );

    &::checkObjectParams(\%params, \@_, 'aktFilename::storeNamedPipe',
			 ['-pipe', '-ctime', '-mtime', '-atime',
			  '-uid', '-gid', '-mode']);

    my $pipe = $params{'-pipe'};
    my $dev = $params{'-dev'};
    my $inode = $params{'-inode'};
    my $ctime = $params{'-ctime'};
    my $mtime = $params{'-mtime'};
    my $atime = $params{'-atime'};
    my $uid = $params{'-uid'};
    my $gid = $params{'-gid'};
    my $mode = $params{'-mode'};

    my $inodeBackup = 0;   # irrelevant

    $self->{'writeCheckSumFile'}->write('-filename' => $pipe,
					'-md5sum' => 'pipe',
					'-compr' => 0,
					'-dev' => $dev,
					'-inode' => $inode,
					'-inodeBackup' => $inodeBackup,
					'-ctime' => $ctime,
					'-mtime' => $mtime,
					'-atime' => $atime,
					'-size' => 0,
					'-uid' => $uid,
					'-gid' => $gid,
					'-mode' => $mode
					);
}


########################################
# for signal handling
sub delInfoFile
{
    my $self = shift;

    unlink $self->{'infoFile'};
}


########################################
sub closeInfoFile
{
    my $self = shift;

    $self->{'writeCheckSumFile'}->DESTROY();
}


##################################################
package fifoQueue;

sub new
{
    my $class = shift;
    my $self = {};

    my (%params) = ('-maxLength'   => undef,
		    '-prLog'       => undef,
		    '-prLogDebug'  => 'D',
		    '-debugMode'   => 'no'
		    );

    &::checkObjectParams(\%params, \@_, 'fifoQueue::new',
			 ['-maxLength', '-prLog']);
    &::setParamsDirect($self, \%params);

    @{$self->{'queue'}} = ();

    $self->{'maxUsedLength'} = 0;   # for statistics

    bless $self, $class;
}


########################################
sub setDebugMode
{
    my $self = shift;

    my (%params) = ('-debugMode'   => undef
		    );

    &::checkObjectParams(\%params, \@_, 'fifoQueue::setDebugMode',
			 ['-debugMode']);

    $self->{'debugMode'} = $params{'-debugMode'};
}


########################################
sub getMaxLength
{
    my $self = shift;

    return $self->{'maxLength'};
}


########################################
sub getMaxUsedLength
{
    my $self = shift;

    return $self->{'maxUsedLength'};
}


########################################
sub getNoUsedEntries
{
    my $self = shift;

    return scalar @{$self->{'queue'}};
}


########################################
sub getNoFreeEntries
{
    my $self = shift;

    return $self->{'maxLength'} - @{$self->{'queue'}};
}


########################################
sub add
{
    my $self = shift;

    my (%params) = ('-value'   => undef
		    );

    &::checkObjectParams(\%params, \@_, 'fifoQueue::add',
			 ['-value']);

    push @{$self->{'queue'}}, $params{'-value'};

    $self->{'maxUsedLength'} = @{$self->{'queue'}}
        if ($self->{'maxUsedLength'} < @{$self->{'queue'}});
}


########################################
sub get
{
    my $self = shift;

    return @{$self->{'queue'}} > 0 ? shift @{$self->{'queue'}} : undef;
}


##################################################
package readDirCheckSizeTime;
our @ISA = qw( recursiveReadDir );

########################################
sub new
{
    my $class = shift;

    my (%params) = ('-dir'            => undef, # zu durchsuchendes directory
		    '-adminDirs'      => undef, # Objekt mit Infos von
		                                # Verzeichnissen
		    '-oldFilename'    => undef, # Objekt mit alten DBMs etc.
		    '-aktFilename'    => undef, # Objekt fr neue Meta Infos
		    '-aktDir'         => undef, # zu sicherndes Directory
		    '-followLinks'    => 0,     # Tiefe, bis zu der symlinks
		                                # gefolgt werden soll
		    '-exceptDirs'     => [],    # Ausnahmeverzeichnisse
		    '-includeDirs'    => [],    # only include these dirs
		    '-postfix'        => undef, # Postfix, der nach Kompr.
		                                # angehngt werden soll
		    '-onlyMD5Check'   => undef, # 1, wenn @otherBackupDirs >0
		    '-includePattern' => undef,
		    '-exceptPattern'  => undef,
		    '-exTypes'        => undef,
		    '-resetAtime'     => undef,
		    '-debugMode'      => undef,
		    '-verbose'        => undef,
		    '-tmpdir'         => undef,
		    '-prLog'          => undef,
		    '-prLogError'     => 'E',
		    '-prLogWarn'      => 'W',
		    '-exitIfError'    => 1,      # Errorcode bei Fehler
		    '-ignoreReadError' => 'no',
		    '-printDepth'     => undef
		    );

    &::checkObjectParams(\%params, \@_, 'readDirCheckSizeTime::new',
			 ['-dir', '-oldFilename', '-aktDir', '-exTypes',
			  '-postfix', '-onlyMD5Check', '-adminDirs', '-prLog',
			  '-printDepth']);

    chop $params{'-dir'} if $params{'dir'} =~ /\/\Z/o;   # cut trailing '/'

    my $self = recursiveReadDir->new('-dirs' => [$params{'-dir'}],
				     '-followLinks' => $params{'-followLinks'},
				     '-exceptDirs' => $params{'-exceptDirs'},
				     '-includeDirs' => $params{'-includeDirs'},
				     '-prLog' => $params{'-prLog'},
				     '-prLogError' => $params{'-prLogError'},
				     '-prLogWarn' => $params{'-prLogWarn'},
				     '-verbose' => $params{'-verbose'},
				     '-exitIfError' => $params{'-exitIfError'},
				     '-printDepth' => $params{'-printDepth'},
				     '-printDepthPrlogKind' => 'P'
				     );
    &::setParamsDirect($self, \%params);
    $self->{'oldInfoFile'} = $params{'-adminDirs'}->getOldInfoFile();
    $self->{'aktInfoFile'} = $params{'-adminDirs'}->getAktInfoFile();

    $self->{'md5Fork'} = undef;      # es luft kein paralleles md5sum

    bless $self, $class;
}


########################################
# liefert Basisverzeichnis, dazu relativen Dateinamen und Filetyp
sub next
{
    my $self = shift;

    my ($f, $types);
    my $n = ($f, $types) = $self->recursiveReadDir::next();

    if ($self->{'md5Fork'} and $n == 0)
    {
	$self->readDir();
	return () if (@{$self->{'files'}} == 0);
        $f = shift @{$self->{'files'}};
	$types = shift @{$self->{'types'}};
    }
    elsif ($n == 0)
    {
	return ();
    }

    my $md5 = shift @{$self->{'md5'}};
    # $f zerlegen in vorgegebenen Teil und relativen
    my $dir = $self->{'dir'};
    my $file = &::substractPath($f, $dir);

    return ($dir, $file, $md5, $types);
}


########################################
# wird von %inProgress in Scheduler::normalOperation bentigt
sub push
{
    my $self = shift;
    my $list = shift;     # Liste mit Listen von ($dir, $file, $md5, $types)

    my $l;
    foreach $l (@$list)
    {
	my ($dir, $file, $md5, $type) = (@$l);
	push @{$self->{'files'}}, "$dir/$file";
	push @{$self->{'md5'}}, $md5;
	push @{$self->{'types'}}, $type;
    }
}


########################################
sub readDir
{
    my $self = shift;

    my $prLog = $self->{'prLog'};
    my $postfix = $self->{'postfix'};
    my $aktFilename = $self->{'aktFilename'};
    my $debugMode = $self->{'debugMode'};
    my $verbose = $self->{'verbose'};
    my $onlyMD5Check = $self->{'onlyMD5Check'};

    my $exceptPattern = $self->{'exceptPattern'};
    my $includePattern = $self->{'includePattern'};
    my $exinclPattFlag =
	$exceptPattern->hasPattern() + $includePattern->hasPattern();
    my $exTypes = $self->{'exTypes'};
    my (@rest) = ();          # fr die plain files, die nicht im alten
                              # Backup waren oder sich gendert haben
    my (@md5CalcRest) = ();   # falls m5sum berechnet werden soll,
    my (@md5CalcMd5) = ();    # aber die md5sum schon ber dmb(filename),
    my (@md5CalcTypes) = ();  # zur Verfgung stehen, wird hier zwischengespeichert

    my $oldFilename = $self->{'oldFilename'};
    my $tmpdir = $self->{'tmpdir'};

    my (@f1, @t1);               # fr alles, was nicht plain file ist
    unless ($self->{'md5Fork'})  # es luft kein paralleles md5sum
    {
	# Directory einlesen
	while (42)
	{
	    $self->recursiveReadDir::readDir();
	    if (@{$self->{'files'}} == 0)             # leeres directory
	    {
		return if (@{$self->{'dirs'}} == 0);   # nix mehr da
	    }
	    last;
	}

	# Eingelesene Dateien in $self->{'files'} filtern
	my ($f, $t, $i);
	my $dir = $self->{'dir'};     # zu durchsuchendes Directory

	my $prevDir = $oldFilename->getPrevDir(); # Verzeichnis des
                                 # letzten gelaufenen Backups oder leer
	my $aktDir = $self->{'aktDir'};   # aktuelles Backupverzeichnis
	for ($i = 0 ; $i < @{$self->{'files'}} ; $i++)
	{
	    $f = $self->{'files'}[$i];
	    $t = $self->{'types'}[$i];

	    my $relFileName;
	    if ($dir eq '/')
	    {
		$relFileName = substr($f, 1);
	    }
	    else
	    {
		$relFileName = substr($f, length($dir) + 1);
	    }

	    if (exists $$exTypes{$t})
	    {
		++$$exTypes{$t};
		$prLog->print('-kind' => 'D',
			      '-str' => ["exceptType $t <$relFileName>"])
		    if $debug > 0;
		next;
	    }

	    # write an error message for unsupported file types
	    if ($t eq 'S')
	    {
		$prLog->print('-kind' => 'E',
			      '-str' => ["unsupported file type 'socket'" .
					 " <$relFileName>"]);
		next;
	    }
	    if ($t eq 'b')
	    {
		$prLog->print('-kind' => 'E',
			      '-str' => ["unsupported file type 'block " .
					 "special file'  <$relFileName>"]);
		next;
	    }
	    if ($t eq 'c')
	    {
		$prLog->print('-kind' => 'E',
			      '-str' => ["unsupported file type 'character '" .
					 "special file'  <$relFileName>"]);
		next;
	    }

	    # check exceptPattern and includePattern
	    if ($t ne 'd' and $exinclPattFlag)
	    {
		if ($exceptPattern->hasPattern() == 1 and
		    $exceptPattern->checkRule($relFileName) == 1)
		{
		    $main::stat->incr_noExcludePattern();
		    next;
		}

		if ($includePattern->hasPattern() == 1 and
		    $includePattern->checkRule($relFileName) == 0)
		{
		    $main::stat->incr_noIncludePattern();
		    next;
		}
	    }

	    if ($t ne 'f')       # alles, was nicht plain file
	    {                    # ist, merken
		push @f1, $f;
		push @t1, $t;
		next;
	    }

	    #
	    # ab hier ist alles nur noch plain file (in for Schleife)
	    #
	    my ($dev, $inode, $mode, $uid, $gid, $actCtime, $actMtime,
		$actAtime, $actSize) =
		    (stat($f))[0, 1, 2, 4, 5, 10, 9, 8, 7];
	    $mode &= 07777;
	    my ($oldCompr, $oldCtime, $oldMtime, $oldSize, $md5sum);
	    my $n = ($oldCompr, $oldCtime, $oldMtime, $oldSize, $md5sum) =
		$oldFilename->getInodebackupComprCtimeMtimeSizeMD5($relFileName);
	    if ($n == 0)    # nicht im Hash gefunden (aus Datei .md5CheckSums
	    {               # -> nher untersuchen!
		push @rest, $f;
		next;
	    }

	    if ($actCtime != $oldCtime or $actMtime != $oldMtime
		or $actSize != $oldSize)    # hat sicher gegenber letzem Mal
	    {                               # verndert
		push @rest, $f;             # -> nher untersuchen!
		next;
	    }

	    #
	    # brig: plain files, die sich gegenber dem letzten Backup
	    # nicht verndert haben
	    #

	    if ($onlyMD5Check or not $prevDir)
                           # mit md5 Summe ber dbm(md5) gehen, dadurch
	    {              # Doppelte vermeiden
		push @md5CalcRest, $f;
		push @md5CalcMd5, $md5sum;
		push @md5CalcTypes, 'f';
	    }
	    else
	    {
		# Scheint gleich zu sein, Link auf alte Datei setzen
		my $linkOld = "$prevDir/$relFileName";
		my $linkNew = "$aktDir/$relFileName";
		# postfix bercksichtigen
		if ($actSize != 0 and $oldCompr eq 'c')
		{
		    if (-e "$f$postfix")    # version mit .bz2 gibt's schon!
		    {
			$oldCompr = 'u';
			unless (-f $linkOld)  # gibt's auch nicht
			{
			    push @rest, $f;
			    next;
			}
		    }
		    else
		    {
			$linkOld .= $postfix;
			$linkNew .= $postfix;
		    }
		}
		if (link $linkOld, $linkNew)
		{
		    chown $uid, $gid, $linkNew;
		    chmod $mode, $linkNew;
		    utime $actAtime, $actMtime, $linkNew;
		    my $inodeBackup = (stat($linkNew))[1];

		    $prLog->print('-kind' => 'D',
				  '-str' =>
				  ["link $linkOld $linkNew"])
			if ($debugMode == 2);
		    if ($oldCompr eq 'u')     # nicht komprimiert
		    {
			$main::stat->addSumUnchangedCopy($oldSize);
		    }
		    else
		    {
			my $comprSize = (stat($linkOld))[7];
			$main::stat->addSumUnchangedCompr($comprSize);
		    }
		    $main::stat->addSumOrigFiles($oldSize, $uid, $gid);
		    $aktFilename->store('-filename' => $relFileName,
					'-md5sum' => $md5sum,
					'-compr' => $oldCompr,
					'-dev' => $dev,
					'-inode' => $inode,
					'-inodeBackup' => $inodeBackup,
					'-ctime' => $oldCtime,
					'-mtime' => $oldMtime,
					'-atime' => $actAtime,
					'-size' => $oldSize,
					'-uid' => $uid,
					'-gid' => $gid,
					'-mode' => $mode,
					'-storeInDBM' => 0  # in dbm sinnlos
					);
		}
		else
		{
		    $prLog->print('-kind' => 'W',
				  '-str' =>
				  ["(with old) cannot link (direct) " .
				   "<$linkOld> <$linkNew>"]);
		    push @rest, $f;  # normal behandeln, nicht direkt linken
		    $oldFilename->deleteEntry($md5sum,  # in Zukunft nicht mehr
					      $relFileName); # mit dieser Datei
						             # linken
		}
	    }
	}
    }

           # $self->{'md5Fork'} == undef -> Es luft kein paralleles md5sum
    my ($l, @md5, @types);
    my $md5Fork = $self->{'md5Fork'};
    if (@rest == 0 and @md5CalcRest == 0 and not $self->{'md5Fork'})
    {
	push @rest, undef;
	push @md5, undef;
	push @types, 'repeat';
    }
    else
    {
	if (@rest > 0 or $self->{'md5Fork'})
	{
	    # MD5 Summen fr @rest berechnen
	    if (@rest > 0 and not $self->{'md5Fork'})
	    {
		$self->{'md5Fork'} = forkMD5->new('-param' => [@rest],
						  '-prLog' => $prLog,
						  '-tmpdir' => $tmpdir,
						  '-resetAtime' =>
						  $self->{'resetAtime'});
		@rest = ();
	    }

	    $main::tinyWaitScheduler->wait();
	    if ($self->{'md5Fork'}->processRuns())
	    {
		if (@rest == 0)
		{
		    push @rest, undef;
		    push @md5, undef;
		    push @types, 'repeat';
		}
	    }
	    else     # md5sum ist fertig, das kommt neu dazu
	    {        # -> nher untersuchen
		$main::tinyWaitScheduler->reset();
		my $stderr = $self->{'md5Fork'}->getSTDERR();
		$prLog->print('-kind' => 'E',
			      '-str' =>
			      ["fork of md5sum generated the following errors:",
			       @$stderr])
		    if (@$stderr > 0);
		my $stdout = $self->{'md5Fork'}->getSTDOUT();
		foreach $l (@$stdout)
		{
		    if ($l =~ /\A\\/)  # "\\" am Zeilenanfang -> es wird gequotet
		    {
			$l =~ s/\\n/\n/g;   # "\n" im Namen wird von md5sum zu
			# "\\n" gemacht, zurckkonvertieren!
			$l =~ s/\A\\//;     # "\\" am Zeilenende entfernen
		    }
		    my ($md5, $f) = $l =~ /\A(\w+)\s+(.*)/s;
		    push @rest, $f;
		    push @md5, $md5;
		    push @types, 'f';
		}
		$self->{'md5Fork'} = undef;  # job ist fertig
	    }
	}

	# jetzt fr 'onlyMD5Check' die schon bekannten md5 Summen hinzufgen
	if (@md5CalcRest > 0)
	{
	    push @rest, (@md5CalcRest);
	    push @md5, (@md5CalcMd5);
	    push @types, (@md5CalcTypes);
	}
    }

    # jetzt noch die "nicht-plain-files" hinzufgen
    foreach $l (@f1)
    {
	push @rest, $l;
	push @md5, undef;
	push @types, shift @t1;
    }
#print "dir = ", $self->{'dir'}, "\n";
#print "rest (", scalar(@rest), ") = @rest\n";
#print "md5 = (", scalar(@md5), ") @md5\n";
#print "types = (", scalar(@types), ") @types\n";
    $self->{'files'} = \@rest;
    $self->{'md5'} = \@md5;
    $self->{'types'} = \@types;
}


##################################################
# stellt fest, welches das neue Directory ist, lscht alte
package adminDirectories;

sub new
{
    my $class = shift;
    my $self = {};

    my (%params) = ('-targetDir'    => undef,
		    '-checkSumFile' => undef,
		    '-tmpdir'       => undef,
		    '-chmodMD5File' => undef,
		    '-prLog'        => undef,
		    '-aktDate'      => undef,
		    '-debugMode'    => 0
		    );

    &::checkObjectParams(\%params, \@_, 'adminDirectories::new',
			 ['-targetDir', '-checkSumFile', '-chmodMD5File',
			  '-tmpdir', '-prLog']);
    &::setParamsDirect($self, \%params);

# weitere Variablen:
# 'aktDate', 'baseDir', 'aktDir', 'prevDir', 'oldDirs'

    my $targetDir = $self->{'targetDir'};
    my $chmodMD5File = $self->{'chmodMD5File'};
    my $prLog = $self->{'prLog'};

    my $aktDate = $self->{'aktDate'};
    $self->{'baseDir'} = $targetDir;
    my $aktDir = $self->{'aktDir'} = $targetDir . '/' .
	$aktDate->getDateTime('-format' => '%Y.%M.%D_%h.%m.%s');

    my $asbd = allStoreBackupDirs->new('-rootDir' => $targetDir,
				       '-checkSumFile' => $checkSumFile,
				       '-prLog' => $prLog);
    $self->{'prevDir'} = $asbd->getFinishedPrev();

# Neues Verzeichnis anlegen
    $prLog->print('-kind' => 'E',
		  ["cannot create <$aktDir>, exiting"],
		  '-exit' => 1)
	unless (mkdir $aktDir);
    chmod 0755, $aktDir;
    my $chmodDir = $chmodMD5File;
    $chmodDir |= 0100 if $chmodDir & 0400;
    $chmodDir |= 0010 if $chmodDir & 0040;
    $chmodDir |= 0001 if $chmodDir & 0004;
    mkdir "$aktDir/.storeBackupLinks", $chmodDir;

    my $debugMode = $self->{'debugMode'};
    if ($debugMode > 0)
    {
   	$prLog->print('-kind' => 'I',
		      '-str' => ["new directory is <$aktDir>",
				 $self->{'prevDir'} ?
				 "previous directory is <" .
				 $self->{'prevDir'} . ">" :
				 'no previous directory, first use']);
    }

    my (@oldDirs) = $asbd->getAllDirs();
    $self->{'oldDirs'} = \@oldDirs;

    bless $self, $class;
}


########################################
# sind sortiert: ltestes zuerst
sub getOldDirs
{
    my $self = shift;

    return $self->{'oldDirs'};
}


########################################
sub getAktDir
{
    my $self = shift;
    return $self->{'aktDir'};       # String
}


########################################
sub getAktInfoFile
{
    my $self = shift;

    my $aktDir = $self->{'aktDir'};
    if ($aktDir)
    {
	return $aktDir . '/' . $self->{'checkSumFile'};
    }
    else
    {
	return undef;
    }
}


########################################
sub getPrevDir
{
    my $self = shift;
    return $self->{'prevDir'};       # String
}


########################################
sub getOldInfoFile
{
    my $self = shift;

    my $prevDir = $self->{'prevDir'};
    if ($prevDir)
    {
	return $prevDir . '/' . $self->{'checkSumFile'};
    }
    else
    {
	return undef;
    }
}


##################################################
# Splittet die Parameterliste (falls zu lang) auf
# Stellt nach auen ein Interface analog forkProc zur Verfgung
# (luft im Hintergrund als fork/exec)
# Arbeitet *alle* ab, erst dann wird Ergebnis geliefert
package forkMD5;

sub new
{
    my $class = shift;
    my $self = {};

    my (%params) = ('-param'      => [],
		    '-prLog'      => undef,
		    '-tmpdir'     => undef,
		    '-resetAtime' => undef
		    );

    &::checkObjectParams(\%params, \@_, 'forkMD5::new',
			 ['-prLog', '-tmpdir']);
    &::setParamsDirect($self, \%params);


    (@{$self->{'resultSTDERR'}}) = ();
    (@{$self->{'resultSTDOUT'}}) = ();

    bless $self, $class;

    # cache atime and mtime in object
    my (@atime, @mtime, $p);
    foreach $p (@{$self->{'param'}})
    {
	my ($atime, $mtime) = (stat($p))[8, 9];
	push @atime, $atime;
	push @mtime, $mtime;
    }

    # store information to restore atime (and mtime)
    @{$self->{'allParam'}} = @{$self->{'param'}};
    $self->{'atime'} = \@atime;
    $self->{'mtime'} = \@mtime;

    $self->_startJob();

    return $self;
}


########################################
sub _startJob
{
    my $self = shift;

    $self->{'fork'} = undef;

    my $prLog = $self->{'prLog'};

    do
    {
	my $l = 0;      # akkumulierte Lnge der Paramter in Byte
	my $i;
	my $param = $self->{'param'};    # Pointer auf Parameter Vektor

	for ($i = 0 ; $i < @$param ; $i++)
	{
	    my $l1 = 1 + length $$param[$i];    # 1 Byte fr '\0' in C
	    if ($l + $l1 > $main::execParamLength)
	    {
		last;
	    }
	    $l += $l1;
	}

	if ($i == 0)      # der erste pat berhaupt nicht rein
	{                 # (ist alleine schon zu lang)
	    my $aktPar = shift @{$self->{'param'}};   # erten "wegwerfen"
	    $prLog->print('-kind' => 'E',
			  '-str' => ["parameter to long: cannot exec " .
				     "md5sum $aktPar"]);
	    return if @{$self->{'param'}} == 0;
	}
	else         # ok, die mglichen aus dem groen Vektor rausholen
	{
	    my (@aktPar) = splice(@{$self->{'param'}}, 0, $i);
	    $main::stat->incr_noForksMD5();
	    $main::stat->add_noMD5edFiles(scalar @aktPar);
	    $main::tinyWaitScheduler->reset();
	    $self->{'fork'} = forkProc->new('-exec' => 'md5sum',
					    '-param' => [@aktPar],
					    '-prLog' => $prLog,
					    '-workingDir' => '.',
					    '-outRandom' =>
					    $self->{'tmpdir'} . '/fork-md5');
	    # Anzahl Bytes berechnen
	    my $sum = 0;
	    my $p;
	    foreach $p (@aktPar)
	    {
		$sum += (stat($p))[7];
	    }
	    $main::stat->addSumMD5Sum($sum);
	    return;
	}

    } while ($self->{'fork'} == undef);
}



########################################
# returns 1 if process still running
# returns 0 if process is not running
sub processRuns
{
    my $self = shift;

    if ($self->{'fork'})    # Prozess noch nicht ausgewertet
    {
	if ($self->{'fork'}->processRuns())  # Job luft noch
	{
	    return 1;
	}
	else                    # Job ist fertig
	{
	    push @{$self->{'resultSTDERR'}}, @{$self->{'fork'}->getSTDERR()};
	    push @{$self->{'resultSTDOUT'}}, @{$self->{'fork'}->getSTDOUT()};

	    if (@{$self->{'param'}} > 0)     # noch was brig
	    {
		$self->_startJob();
		return 1;
	    }
	    else
	    {
		return 0;                   # fertig!
	    }
	}
    }
    else
    {
	return 0;
    }
}


########################################
sub getSTDERR
{
    my $self = shift;

    return $self->{'resultSTDERR'};
}


########################################
sub getSTDOUT
{
    my $self = shift;

    return $self->{'resultSTDOUT'};
}


########################################
sub DESTROY
{
    my $self = shift;

    my $atime = $self->{'atime'};
    my $mtime = $self->{'mtime'};
    my $param = $self->{'allParam'};
    my $i;
    for ($i = 0 ; $i < @$param ; $i++)
    {
	utime $$atime[$i], $$mtime[$i], $$param[$i]
	    if $self->{'resetAtime'};
    }
}

##################################################
package Scheduler;

sub new
{
    my $class = shift;

    my $self = {};

    my (%params) = ('-aktFilename'      => undef,
		    '-oldFilename'      => undef,
		    '-readDirAndCheck'  => undef,
		    '-setResetDirTimes' => undef,
		    '-parForkCopy'      => undef,
		    '-fifoCopy'         => undef,
		    '-copyBWLimit'      => undef,
		    '-parForkCompr'     => undef,
		    '-fifoCompr'        => undef,
		    '-compress'         => undef,
		    '-postfix'          => undef,
		    '-exceptSuffix'     => undef,
		    '-targetDir'        => undef,
		    '-aktInfoFile'      => undef,
		    '-resetAtime'       => undef,
		    '-tmpdir'           => undef,
		    '-prLog'            => undef,
		    '-debugMode'        => 0
		    );

    &::checkObjectParams(\%params, \@_, 'Scheduler::new',
			 ['-aktFilename', '-oldFilename', '-readDirAndCheck',
			  '-setResetDirTimes', '-parForkCopy', '-fifoCopy',
			  '-copyBWLimit', '-parForkCompr', '-fifoCompr',
			  '-compress', '-postfix', '-exceptSuffix',
			  '-targetDir', '-aktInfoFile', '-resetAtime',
			  '-prLog']);
    &::setParamsDirect($self, \%params);

    my ($compressCommand, @options) = split(/\s+/, $params{'-compress'});
    $self->{'compressCommand'} = $compressCommand;
    $self->{'compressOptions'} = \@options;

    bless $self, $class;
}


########################################
# Idee:
#    berwachung der forks in parForkCopy und parForkCompr
#    Wenn diese mit neuen Daten gefttert wurden, Auffllen
#    von fifoCopy und fifoCompr ber readDirAndCheck
sub normalOperation
{
    my $self = shift;

    my $aktFilename = $self->{'aktFilename'};
    my $oldFilename = $self->{'oldFilename'};
    my $readDirAndCheck = $self->{'readDirAndCheck'};
    my $setResetDirTimes = $self->{'setResetDirTimes'};
    my $parForkCopy = $self->{'parForkCopy'};
    my $fifoCopy = $self->{'fifoCopy'};
    my $copyBWLimit = $self->{'copyBWLimit'};
    my $parForkCompr = $self->{'parForkCompr'};
    my $fifoCompr = $self->{'fifoCompr'};
    my $compress = $self->{'compress'};
    my $compressCommand = $self->{'compressCommand'};
    my $compressOptions = $self->{'compressOptions'};
    my $postfix = $self->{'postfix'};
    my $exceptSuffixPattern =
	join('\Z|', @{$self->{'exceptSuffix'}}) . '\Z';
    my $targetDir = $self->{'targetDir'};
    my $aktInfoFile = $self->{'aktInfoFile'};
    my $resetAtime = $self->{'resetAtime'};
    my $tmpdir = $self->{'tmpdir'};
    my $prLog = $self->{'prLog'};
    my $debugMode = $self->{'debugMode'};

    my $readFilesWithoutForkCheck = 
	$parForkCopy->getMaxParallel() + $parForkCompr->getMaxParallel();
    my $countReadFilesWithoutForkCheck;   # aktueller Zhler, immer < als oben

    my $continueReadFiles = 1;
    my (%inProgress) = (); # $inProgress{$md5} = [[$dir, $file, $md5, $types],
                           #                      [$dir, $file, $md5, $types],
                           #                      [$dir, $file, $md5, $types]]
                           # Puffer fr Dateien, die gerade komprimiert oder
                           # kopiert werden. $inProgress{$md5} = [] bedeutet,
                           # da eine Datei mit der md5-Summe in Bearbeitung
                           # ist, aber keine gleichartigen in der Schlange sind
                           # -> Variable ist Merker + Puffer zugleich

    my ($copy, @copyPar) =
	($copyBWLimit == 0) ? ('cp') : ('rsync', "--bwlimit=$copyBWLimit");

    while (42)    # Schleife ber alles
    {
	$countReadFilesWithoutForkCheck = 0;

	# Warteschlangen fllen (in jeder ist Platz)
	while ($continueReadFiles == 1 and
	       $fifoCopy->getNoFreeEntries() > 0 and
	       $fifoCompr->getNoFreeEntries() > 0)
	{
	    last if                    # erst mal wieder nach den forks sehen!
		(++$countReadFilesWithoutForkCheck >
		 $readFilesWithoutForkCheck);
	    my ($dir, $file, $md5, $type);
	    my $n = ($dir, $file, $md5, $type) =
		$readDirAndCheck->next();

	    if ($n == 0)         # nix mehr zu holen!
	    {
		$continueReadFiles = 0;
		last;
	    }

	    last if ($type eq 'repeat'); #nachsehen, ob schon forks fertig sind

	    $file =~ s/\\\\/\\/go;     # \\ durch \ ersetzen, sonst
	    $dir =~ s/\\\\/\\/go;      # klappt's nicht

	    # Rechte etc. der Originaldatei lesen
	    my ($dev, $inode, $mode, $uid, $gid, $origSize) =
		(stat("$dir/$file"))[0, 1, 2,4,5,7];
	    $mode &= 07777;

	    if ($file eq $aktInfoFile or $file eq "$aktInfoFile.bz2")
	    {
		$prLog->print('-kind' => 'E',
			      '-str' => ["cannot handle <$file>, " .
					 "collision with info file"]);
		next;
	    }

	    if ($type eq 'd')            # directory anlegen
	    {
		$prLog->print('-kind' => 'E',
			      '-str' =>
			      ["cannot create directory <$targetDir/$file>"],
			      '-exit' => 1)
		    unless mkdir "$targetDir/$file", $mode;

		my ($ctime, $mtime, $atime) =
		    (stat("$dir/$file"))[10, 9, 8];

		chown $uid, $gid, "$targetDir/$file";
		chmod $mode, "$targetDir/$file";
		$main::stat->incr_noDirs($uid, $gid);
		$prLog->print('-kind' => 'D',
			      '-str' =>
			      ["created directory <$targetDir/$file"])
		    if ($debugMode > 0);

		$setResetDirTimes->addDir($file, $atime, $mtime);
		$aktFilename->storeDir('-dir' => $file,
				       '-dev' => $dev,
				       '-inode' => $inode,
				       '-ctime' => $ctime,
				       '-mtime' => $mtime,
				       '-atime' => $atime,
				       '-uid' => $uid,
				       '-gid' => $gid,
				       '-mode' => $mode);
		next;
	    }

	    if ($type eq 'l')            # symbolic link
	    {
		my ($uid, $gid, $atime, $mtime, $ctime) =
		    (lstat("$dir/$file"))[4, 5, 8, 9, 10];

		my $l = readlink "$dir/$file";
		symlink $l, "$targetDir/$file";

		# bei einigen Betriebssystem (z.B. Linux) wird bei Aufruf
		# des Systemcalls chmod bei symlinks nicht der Symlink selbst
		# geaendert, sondern die Datei, auf die er verweist.
		# (dann muss lchown genommen werden -> Inkompatibilitaeten!?)
		my $chown = forkProc->new('-exec' => 'chown',
					  '-param' => ['-h', "$uid:$gid",
						       "$targetDir/$file"],
					  '-outRandom' => "$tmpdir/chown-",
					  '-prLog' => $prLog);
		$chown->wait();

		utime $atime, $mtime, "$dir/$file" if $resetAtime;
		utime $atime, $mtime, "$targetDir/$file";
		$main::stat->incr_noSymLinks($uid, $gid);
		$prLog->print('-kind' => 'D',
			      '-str' =>
			      ["created symbolic link <$targetDir/$file"])
		    if ($debugMode == 2);
		$aktFilename->storeSymlink('-symlink' => $file,
					   '-dev' => $dev,
					   '-inode' => $inode,
					   '-ctime' => $ctime,
					   '-mtime' => $mtime,
					   '-atime' => $atime,
					   '-uid' => $uid,
					   '-gid' => $gid);
		next;
	    }

	    if ($type eq 'p')
	    {
		my ($ctime, $mtime, $atime) =
		    (stat("$dir/$file"))[10, 9, 8];

		my $mknod = forkProc->new('-exec' => 'mknod',
					  '-param' => ["$targetDir/$file", 'p'],
					  '-outRandom' => "$tmpdir/mknod-",
					  '-prLog' => $prLog);
		$mknod->wait();
		my $out = $mknod->getSTDOUT();
		$prLog->print('-kind' => 'E',
			      '-str' =>
			      ["STDOUT of <mknod $targetDir/$file p>:", @$out])
		    if (@$out > 0);
		$out = $mknod->getSTDERR();
		$prLog->print('-kind' => 'E',
			      '-str' =>
			      ["STDERR of <mknod $targetDir/$dir p>:", @$out])
		    if (@$out > 0);
		chown $uid, $gid, "$targetDir/$file";
		chmod $mode, "$targetDir/$file";
		utime $atime, $mtime, "Dir/$file" if $resetAtime;;
		utime $atime, $mtime, "$targetDir/$file";

		$main::stat->incr_noNamedPipes($uid, $gid);
		$prLog->print('-kind' => 'D',
			      '-str' =>
			      ["created named pipe <$targetDir/$file"])
		    if ($debugMode == 2);
		$aktFilename->storeNamedPipe('-pipe' => $file,
					     '-dev' => $dev,
					     '-inode' => $inode,
					     '-ctime' => $ctime,
					     '-mtime' => $mtime,
					     '-atime' => $atime,
					     '-uid' => $uid,
					     '-gid' => $gid,
					     '-mode' => $mode);
		next;
	    }

	    # es ist eine normale Datei
	    $main::stat->addSumOrigFiles($origSize, $uid, $gid);

	    # jetzt in DBM-Files nachsehen und linken
	    my ($comprOld, $linkFile, $newFile, $oldFile);
	    my ($inodeBackup, $backupDirIndex, $backupDir);
	    my $internalOld;

	    if ((($inodeBackup, $comprOld, $backupDirIndex,
		  $backupDir, $linkFile) =
		 $oldFilename->getFilename($md5)) == 5)
	    {
		$newFile = "$targetDir/$file";
		$oldFile = "$backupDir/$linkFile";
		$internalOld = ($backupDirIndex == 0) ?
		    'internal' : 'old';
	    }
	    else             # Datei ist noch nicht bekannt
	    {
		$linkFile = undef;
	    }

	    if ($linkFile)         # existiert schon, mu verlinkt werden
	    {
		# postfix bercksichtigen
		my ($ctime, $mtime, $atime, $size) =
		    (stat("$dir/$file"))[10, 9, 8, 7];

		if ($size != 0 and $comprOld eq 'c')
		{
		    if (-e "$dir/$file$postfix") # version mit .bz2 gibt's
		    {                            # schon
			$comprOld = 'u';
			unless (-f $oldFile)     # gibt's auch nicht
			{
			    my ($mode, $uid, $gid, $size) =
				(stat("$dir/$file"))[2,4,5,7];
			    $mode &= 07777;
			    $fifoCopy->add('-value' => [$dir, $file, $uid,
							$gid, $mode, $md5]);
			    next;
			}
		    }
		    else
		    {
			$newFile .= $postfix;
			$oldFile .= $postfix;
		    }
		}

		if (link $oldFile, $newFile)
		{
		    chown $uid, $gid, $newFile;
		    chmod $mode, $newFile;
		    my $inodeBackup = (stat($newFile))[1];

		    $prLog->print('-kind' => 'D',
				  '-str' =>
				  ["link $oldFile $newFile"])
			if ($debugMode == 2);

		    if ($comprOld eq 'u')      # nicht komprimiert
		    {
			if ($internalOld eq 'internal')
			{
			    $main::stat->addSumLinkedInternalCopy($size);
			}
			else
			{
			    $main::stat->addSumLinkedOldCopy($size);
			}
		    }
		    else                # komprimiert
		    {
			if ($internalOld eq 'internal')
			{
			    $main::stat->addSumLinkedInternalCompr($size);
			}
			else
			{
			    $main::stat->addSumLinkedOldCompr($size);
			}
		    }

		    # Schreiben der Informationen
		    $aktFilename->store('-filename' => $file,
					'-md5sum' => $md5,
					'-compr' => $comprOld,
					'-dev' => $dev,
					'-inode' => $inode,
					'-inodeBackup' => $inodeBackup,
					'-ctime' => $ctime,
					'-mtime' => $mtime,
					'-atime' => $atime,
					'-size' => $size,
					'-uid' => $uid,
					'-gid' => $gid,
					'-mode' => $mode,
					'-storeInDBM' => 0  # in dbm sinnlos
					);
		}
		else
		{
		    $prLog->print('-kind' => 'W',
				  '-str' =>
				  ["(with old) cannot link (via md5) " .
				   "<$oldFile> <$newFile>"]);
		    $linkFile = undef;         # => kopieren oder komprimieren
		    $oldFilename->deleteEntry($md5,    # in Zukunft nicht mehr
					      $file);  # mit dieser Datei linken
		}
	    }

	    unless ($linkFile)     # existiert noch nicht, copy oder compress
	    {
		if (exists $inProgress{$md5}) # Auf Kompression/Kopie warten
		{
		    push @{$inProgress{$md5}}, [$dir, $file, $md5, 'f'];
		    next;
		}

		my ($mode, $uid, $gid, $size) =
		    (stat("$dir/$file"))[2,4,5,7];
		$mode &= 07777;

		if ($size < $main::minCompressSize or
		    $file =~ /$exceptSuffixPattern/i    # copy
		    or -e "$dir/$file$postfix") # Datei hat nicht .bz2, es
		{                               # existiert aber Datei mit .bz2
		    $fifoCopy->add('-value' =>
				   [$dir, $file, $uid, $gid, $mode, $md5]);
                }
		else                              # compress
		{
		    $fifoCompr->add('-value' =>
				   [$dir, $file, $uid, $gid, $mode, $md5]);
		}

                $inProgress{$md5} = [];   # merken, wird kopiert/komprimiert
	    }
	} # Ende Schleife Warteschlagen fllen

	while (42)
	{
	    my $i;

	    # nachsehen, ob ein Kopier-Job gestartet werden kann
	    my (@finishForks) = $parForkCopy->checkAll();
	    my $noFreeEntriesCopy = $parForkCopy->getNoFreeEntries();

	    # neue Kopier-Jobs einhngen
	    for ($i = 0 ; $i < $noFreeEntriesCopy ; $i++)
	    {
		last if ($fifoCopy->getNoUsedEntries() == 0);
		my ($dir, $file, $uid, $gid, $mode, $md5) =
		    @{$fifoCopy->get()};
		my ($dev, $inode, $ctime, $mtime, $atime, $size) =
		    (stat("$dir/$file"))[0, 1, 10, 9, 8, 7];
		$mode &= 07777;
		if ($size <= $main::minCopyWithFork) # direkt kopieren (ohne fork)
		{
		    $prLog->print('-kind' => 'D',
				  '-str' => ["copy $dir/$file $targetDir/$file"])
			if ($debugMode == 2);

		    my ($atime, $mtime) = (stat("$dir/$file"))[8, 9];
		    unless (::copy("$dir/$file", "$targetDir/$file"))
		    {
			$prLog->print('-kind' => 'E',
				      '-str' => ["could not copy $dir/$file " .
						 "$targetDir/$file"]);
			next;
		    }
		    chown $uid, $gid, "$targetDir/$file";
		    chmod $mode, "$targetDir/$file";
		    utime $atime, $mtime, "$dir/$file" if $resetAtime;
		    utime $atime, $mtime, "$targetDir/$file";

		    my $inodeBackup = (stat("$targetDir/$file"))[1];
		    $aktFilename->store('-filename' => $file,  # speichert in dbm
					'-md5sum' => $md5,     # .md5sum-Datei
					'-compr' => 'u',
					'-dev' => $dev,
					'-inode' => $inode,
					'-inodeBackup' => $inodeBackup,
					'-ctime' => $ctime,
					'-mtime' => $mtime,
					'-atime' => $atime,
					'-size' => $size,
					'-uid' => $uid,
					'-gid' => $gid,
					'-mode' => $mode);

		    $main::stat->addSumNewCopy($size);
                    $main::tinyWaitScheduler->reset();
		}
		else                         # mit fork/cp,rsync kopieren
		{
		    $prLog->print('-kind' => 'D',
				  '-str' =>
				  ["$copy $dir/$file $targetDir/$file"])
			if ($debugMode == 2);

		    my ($proc) =
			$parForkCopy->add('-exec' => $copy,
					  '-param' =>
					  [@copyPar, "$dir/$file",
					   "$targetDir/$file"],
					  '-workingDir' => '.',
					  '-outRandom' => "$tmpdir/stderr",
					  '-info' =>
					  [$dev, $inode, $dir, $file, $uid,
					   $gid, $mode, $md5, $ctime, $mtime,
					   $atime, $size]);

		    push @finishForks, $proc if $proc;
		}
	    }

	    foreach $i (@finishForks)    # md5-Summen speichern
	    {
		next if $i->get('-what' => 'status');
		my $stderr = $i->getSTDERR();
		my ($dev, $inode, $dir, $file, $uid, $gid, $mode, $md5,
		    $ctime, $mtime, $atime, $size) =
			@{$i->get('-what' => 'info')};
		chown $uid, $gid, "$targetDir/$file";
		chmod $mode, "$targetDir/$file";
		utime $atime, $mtime, "$dir/$file" if $resetAtime;
		utime $atime, $mtime, "$targetDir/$file";

		my $inodeBackup = (stat("$targetDir/$file"))[1];

		if (@$stderr > 0)
		{
		    $prLog->print('-kind' => 'E',
				  '-str' =>
				  ["copying <$dir/$file> -> <$targetDir/$file>" .
				   " generated the following error messages:",
				   @$stderr]);
		    next;
		}

		$prLog->print('-kind' => 'D',
			      '-str' =>
			      ["finished $copy <$dir/$file> " .
			       "<$targetDir/$file>"])
		    if ($debugMode == 2);

		$main::stat->incr_noForksCP();
		$main::stat->addSumNewCopy($size);

		$aktFilename->store('-filename' => $file,  # speichert in dbm
				    '-md5sum' => $md5,     # .md5sum-Datei
				    '-compr' => 'u',
				    '-dev' => $dev,
				    '-inode' => $inode,
				    '-inodeBackup' => $inodeBackup,
				    '-ctime' => $ctime,
				    '-mtime' => $mtime,
				    '-atime' => $atime,
				    '-size' => $size,
				    '-uid' => $uid,
				    '-gid' => $gid,
				    '-mode' => $mode);


		if (defined $inProgress{$md5} and
		    @{$inProgress{$md5}} > 0)  # gepufferte Files mit
		{                              # gleicher md5 Summe bearbeiten
		    $continueReadFiles = 1;
		    $readDirAndCheck->push($inProgress{$md5});
		}
		delete $inProgress{$md5};

		$main::tinyWaitScheduler->reset();
	    }

	    # nachsehen, ob ein Komprimier-Job gestartet werden kann
	    (@finishForks) = $parForkCompr->checkAll();
	    my $noFreeEntriesCompr = $parForkCompr->getNoFreeEntries();

	    # neue Komprimier-Jobs einhngen
	    for ($i = 0 ; $i < $noFreeEntriesCompr ; $i++)
	    {
		last if ($fifoCompr->getNoUsedEntries() == 0);
		my ($dir, $file, $uid, $gid, $mode, $md5) =
		    @{$fifoCompr->get()};
		my ($dev, $inode, $ctime, $mtime, $atime, $size) =
		    (stat("$dir/$file"))[0, 1, 10, 9, 8, 7];
		$mode &= 07777;

		$prLog->print('-kind' => 'D',
			      '-str' => ["$compress < $dir/$file > " .
					 "$targetDir/$file$postfix"])
		    if ($debugMode == 2);

		my ($proc) =
		    $parForkCompr->add('-exec' => $compressCommand,
				       '-param' => [@$compressOptions],
				       '-stdin' => "$dir/$file",
				       '-stdout' => "$targetDir/$file$postfix",
				       '-delStdout' => 'no',
				       '-workingDir' => '.',
				       '-outRandom' => "$tmpdir/stderr",
				       '-info' =>
				       [$dev, $inode, $dir, $file, $uid, $gid,
					$mode, $md5, $ctime, $mtime,
					$atime, $size]);
		push @finishForks, $proc if $proc;
	    }

	    foreach $i (@finishForks)    # md5-Summen speichern
	    {
		next if $i->get('-what' => 'status');
		my $stderr = $i->getSTDERR();
		my ($dev, $inode, $dir, $file, $uid, $gid, $mode, $md5,
		    $ctime, $mtime, $atime, $size) =
			@{$i->get('-what' => 'info')};
		chown $uid, $gid, "$targetDir/$file$postfix";
		chmod $mode, "$targetDir/$file$postfix";
		utime $atime, $mtime, "$dir/$file" if $resetAtime;
		utime $atime, $mtime, "$targetDir/$file$postfix";
		my $inodeBackup = (stat("$targetDir/$file$postfix"))[1];

		if (@$stderr > 0)
		{
		    $prLog->print('-kind' => 'E',
				  '-str' =>
				  ["compressing <$dir/$file> -> " .
				   "<$targetDir/$file$postfix>" .
				   " generated the following error messages:",
				   @$stderr]);
		    next;
		}

		$prLog->print('-kind' => 'D',
			      '-str' =>
			      ["finished $compress <$dir/$file> " .
			       "<$targetDir/$file$postfix>"])
		    if ($debugMode == 2);

		$main::stat->incr_noForksCompress();
		my $comprSize = (stat("$targetDir/$file$postfix"))[7];
		$main::stat->addSumNewCompr($comprSize, $size);

		$aktFilename->store('-filename' => $file,  # speichert in dbm
				    '-md5sum' => $md5,     # .md5sum-Datei
				    '-compr' => 'c',
				    '-dev' => $dev,
				    '-inode' => $inode,
				    '-inodeBackup' => $inodeBackup,
				    '-ctime' => $ctime,
				    '-mtime' => $mtime,
				    '-mtime' => $mtime,
				    '-atime' => $atime,
				    '-size' => $size,
				    '-uid' => $uid,
				    '-gid' => $gid,
				    '-mode' => $mode);

		if (defined $inProgress{$md5} and
		    @{$inProgress{$md5}} > 0)  # gepufferte Files mit
		{                              # gleicher md5 Summe bearbeiten
		    $continueReadFiles = 1;
		    $readDirAndCheck->push($inProgress{$md5});
		}
		delete $inProgress{$md5};

		$main::tinyWaitScheduler->reset();
	    }

	    my $noUsedFifoCopy = $fifoCopy->getNoUsedEntries();
	    my $noUsedFifoCompr = $fifoCompr->getNoUsedEntries();
#print "noUsedFifoCopy = $noUsedFifoCopy\n";
#print "noUsedFifoCompr = $noUsedFifoCompr\n";
#print "continueReadFiles = $continueReadFiles\n";
	    if ($noUsedFifoCopy == 0 and $noUsedFifoCompr == 0)  # beide
	    {                                    # Warteschlangen leer
		if ($continueReadFiles == 0)     # keine Dateien mehr da
		{
		    # alles fertig?
		    unless ($parForkCopy->getNoUsedEntries() == 0 and
			    $parForkCompr->getNoUsedEntries() == 0)
		    {
			$main::tinyWaitScheduler->wait();
			next;               # auf forks warten
		    }

		    # berprfen, ob in %inProgress noch was briggeblieben ist
		    my ($md5, $flag);
		    $flag = 0;
		    foreach $md5 (keys %inProgress)
		    {
			$flag = 1;
			$continueReadFiles = 1;
			$readDirAndCheck->push($inProgress{$md5});
		    }
		    if ($flag)
		    {
			%inProgress = ();
			$main::tinyWaitScheduler->reset();
			next;
		    }

		    $main::stat->printProgressReport();
		    return;    # jetzt ist alles fertig
		}

		last;                   # Warteschlange wieder auffllen
	    }
	    else                        # mindestens eine Warteschlange ist
	    {                           # nicht leer
		if ($continueReadFiles == 0 or            # keine Dateien mehr da
		    $fifoCopy->getNoFreeEntries() == 0 or # oder
		    $fifoCompr->getNoFreeEntries() == 0)  # eine von beiden
		{                                         # ist voll
		    $main::tinyWaitScheduler->wait();
		}
		else
		{
		    last;               # Warteschlange wieder auffllen
		}
	    }
	}    # Ende Schleife ber Verwaltung der forks
    }   # Ende Schleife ber alles
}


##################################################
package Statistic;
our @ISA = qw( statisticDeleteOldBackupDirs );

########################################
sub new
{
    my $class = shift;

    my (%params) = ('-startDate'         => undef,
		    '-aktDate'           => undef,
		    '-userGroupStatFile' => undef,     # Flag
		    '-exceptSuffix'      => undef,     # Filename (if set)
		    '-prLog'             => undef,
		    '-progressReport'    => undef,
		    '-withUserGroupStat' => undef,
		    '-userGroupStatFile' => undef,
		    '-compress'          => undef
		    );

    &::checkObjectParams(\%params, \@_, 'Statistic::new',
			 ['-prLog', '-progressReport',
			  '-withUserGroupStat', '-userGroupStatFile',
			  '-compress']);
    my $self =
	statisticDeleteOldBackupDirs->new('-prLog' => $params{'-prLog'},
					  '-kind' => 'S');

    &::setParamsDirect($self, \%params);

    $self->{'userGroupFlag'} = ($self->{'withUserGroupStat'} or
				$self->{'userGroupStatFile'}) ? 1 : undef;

    if ($self->{'userGroupFlag'})
    {
	my (%uidStatInodes) = ();
	my (%uidStatSize) = ();
	my (%gidStatInodes) = ();
	my (%gidStatSize) = ();
	$self->{'uidStatInodes'} = \%uidStatInodes;
	$self->{'uidStatSize'} = \%uidStatSize;
	$self->{'gidStatInodes'} = \%gidStatInodes;
	$self->{'gidStatSize'} = \%gidStatSize;
    }

    my (%uidSource) = ();       # Hash mit key = uid, value = size
    my (%gidSource) = ();       # Hash mit key = gid, value = size
    my (%uidBackup) = ();       # Hash mit key = uid, value = size
    my (%gidBackup) = ();       # Hash mit key = gid, value = size
    $self->{'uidSource'} = \%uidSource;
    $self->{'gidSource'} = \%gidSource;
    $self->{'uidBackup'} = \%uidBackup;
    $self->{'gidBackup'} = \%gidBackup;

    $self->{'noDirs'} = 0;
    $self->{'noFiles'} = 0;
    $self->{'noSymLinks'} = 0;
    $self->{'noNamedPipes'} = 0;
    $self->{'noSockets'} = 0;
    $self->{'noCharDev'} = 0;
    $self->{'noBlockDev'} = 0;
    $self->{'noMD5edFiles'} = 0;
    $self->{'noInternalLinkedFiles'} = 0;
    $self->{'noOldLinkedFiles'} = 0;
    $self->{'unchangedFiles'} = 0;
    $self->{'noCopiedFiles'} = 0;
    $self->{'noCompressedFiles'} = 0;
    $self->{'noForksMD5'} = 0;
    $self->{'noForksCP'} = 0;
    $self->{'noForksCompress'} = 0;
    $self->{'noExcludePattern'} = 0;
    $self->{'noIncludePattern'} = 0;

    # Plattenplatzbedarf
    $self->{'sumOrigFiles'} = 0;
    $self->{'sumMD5Sum'} = 0;
    $self->{'sumLinkedInternalCopy'} = 0;
    $self->{'sumLinkedInternalCompr'} = 0;
    $self->{'sumLinkedOldCopy'} = 0;
    $self->{'sumLinkedOldCompr'} = 0;
    $self->{'sumUnchangedCopy'} = 0;
    $self->{'sumUnchangedCompr'} = 0;
    $self->{'sumNewCopy'} = 0;
    $self->{'sumNewCompr'} = 0;
    $self->{'sumNewComprOrigSize'} = 0;
    $self->{'md5CheckSum'} = 0;
    $self->{'sumDBMFiles'} = 0;

    bless $self, $class;
}


########################################
sub incr_noDeletedOldDirs
{
    my $self = shift;

    $self->statisticDeleteOldBackupDirs::incr_noDeletedOldDirs();
}


########################################
sub incr_noDirs
{
    my $self = shift;
    my ($uid, $gid) = @_;

    ++$self->{'noDirs'};

    if ($self->{'userGroupFlag'})
    {
	++$self->{'uidStatInodes'}->{$uid};
	++$self->{'gidStatInodes'}->{$gid};
    }
}


########################################
sub add_noMD5edFiles
{
    my $self = shift;

    $self->{'noMD5edFiles'} += shift;
}


########################################
sub incr_noSymLinks
{
    my $self = shift;
    my ($uid, $gid) = @_;

    ++$self->{'noSymLinks'};
    $self->addSumOrigFiles(0, $uid, $gid);
}


########################################
sub incr_noNamedPipes
{
    my $self = shift;
    my ($uid, $gid) = @_;

    ++$self->{'noNamedPipes'};
    $self->addSumOrigFiles(0, $uid, $gid);
}


########################################
sub incr_noSockets
{
    my $self = shift;
    my ($uid, $gid) = @_;

    ++$self->{'noSockets'};
    $self->addSumOrigFiles(0, $uid, $gid);
}


########################################
sub incr_noCharDev
{
    my $self = shift;
    my ($uid, $gid) = @_;

    ++$self->{'noCharDev'};
    $self->addSumOrigFiles(0, $uid, $gid);
}


########################################
sub incr_noBlockDev
{
    my $self = shift;
    my ($uid, $gid) = @_;

    ++$self->{'noBlockDev'};
    $self->addSumOrigFiles(0, $uid, $gid);
}


########################################
sub incr_noForksMD5
{
    my $self = shift;
    ++$self->{'noForksMD5'};
}


########################################
sub incr_noForksCP
{
    my $self = shift;
    ++$self->{'noForksCP'};
}


########################################
sub incr_noForksCompress
{
    my $self = shift;
    ++$self->{'noForksCompress'};
}


########################################
sub incr_noExcludePattern
{
    my $self = shift;
    ++$self->{'noExcludePattern'};
}


########################################
sub incr_noIncludePattern
{
    my $self = shift;
    ++$self->{'noIncludePattern'};
}


########################################
sub addFreedSpace
{
    my $self = shift;

    $self->statisticDeleteOldBackupDirs::addFreedSpace(@_);
}


########################################
sub addSumOrigFiles       # in byte
{
    my $self = shift;
    my ($size, $uid, $gid) = @_;

    $self->{'sumOrigFiles'} += $size;

    ++$self->{'noFiles'};
    $self->printProgressReport()
	if ($self->{'progressReport'} and
	    $self->{'noFiles'} % $self->{'progressReport'} == 0);

    if ($self->{'userGroupFlag'})
    {

	++$self->{'uidStatInodes'}->{$uid};
	++$self->{'gidStatInodes'}->{$gid};
	$self->{'uidStatSize'}->{$uid} += $size;
	$self->{'gidStatSize'}->{$gid} += $size;
    }
}


########################################
sub printProgressReport
{
    my $self = shift;

    if ($self->{'progressReport'} > 0)
    {
	my $s = $self->{'sumNewComprOrigSize'} + $self->{'sumNewCopy'};
	$self->{'prLog'}->print('-kind' => 'P',
				'-str' =>
				[$self->{'noFiles'} . ' files processed (' .
				 (&::humanReadable($self->{'sumOrigFiles'}))[0] .
				 ', ' .
				 (&::humanReadable($s))[0] . ') (' .
				 $self->{'sumOrigFiles'} . ', ' . $s . ')']);
    }
}


########################################
sub addSumMD5Sum       # in byte
{
    my $self = shift;

    $self->{'sumMD5Sum'} += shift;
}


########################################
sub addSumLinkedInternalCopy   # byte
{
    my $self = shift;

    $self->{'sumLinkedInternalCopy'} += shift;
    ++$self->{'noInternalLinkedFiles'};
}


########################################
sub addSumLinkedInternalCompr   # byte
{
    my $self = shift;

    $self->{'sumLinkedInternalCompr'} += shift;
    ++$self->{'noInternalLinkedFiles'};
}



########################################
sub addSumLinkedOldCopy   # byte
{
    my $self = shift;

    $self->{'sumLinkedOldCopy'} += shift;
    ++$self->{'noOldLinkedFiles'};
}


########################################
sub addSumLinkedOldCompr   # byte
{
    my $self = shift;

    $self->{'sumLinkedOldCompr'} += shift;
    ++$self->{'noOldLinkedFiles'};
}


########################################
sub addSumUnchangedCopy   # byte
{
    my $self = shift;

    $self->{'sumUnchangedCopy'} += shift;
    ++$self->{'unchangedFiles'};
}


########################################
sub addSumUnchangedCompr   # byte
{
    my $self = shift;

    $self->{'sumUnchangedCompr'} += shift;
    ++$self->{'unchangedFiles'};
}


########################################
sub addSumNewCopy   # byte
{
    my $self = shift;

    $self->{'sumNewCopy'} += shift;
    ++$self->{'noCopiedFiles'};
}


########################################
sub addSumNewCompr   # byte
{
    my $self = shift;

    $self->{'sumNewCompr'} += shift;
    $self->{'sumNewComprOrigSize'} += shift;
    ++$self->{'noCompressedFiles'};
}


########################################
sub addSumDBMFiles    # byte
{
    my $self = shift;

    $self->{'sumDBMFiles'} += shift;
}


########################################
sub setSizeMD5CheckSum
{
    my $self = shift;
    my $md5CheckSum = shift;
    my $compressMD5File = shift;

    if ($compressMD5File eq 'yes')
    {
	$self->{'md5CheckSum'} = (stat("$md5CheckSum.bz2"))[7];
    }
    else
    {
	$self->{'md5CheckSum'} = (stat($md5CheckSum))[7];
    }
}


########################################
sub setUsedSizeQueues
{
    my $self = shift;

    $self->{'maxUsedCopyQueue'} = shift;
    $self->{'maxUsedComprQueue'} = shift;
}


########################################
sub print
{
    my $self = shift;

    my (%params) = ('-exTypes' => undef
		    );

    &::checkObjectParams(\%params, \@_, 'Statistic::print', ['-exTypes']);

    my $exTypes = $params{'-exTypes'};
    my (@exTypes, $et);
    my %exTypesLines = ('S' => 'socket',
			'b' => 'block special',
			'c' => 'char special',
			'f' => 'plain file',
			'p' => 'named pipe',
			'l' => 'symbolic link');
    foreach $et (keys %$exTypes)
    {
	push @exTypes, sprintf("%33s", "excluded " .
			       $exTypesLines{$et} .
			       "s ($et) = ") . $$exTypes{$et};
    }

    my (@l);
    my ($user,$system,$cuser,$csystem) = times;
    my ($trenn) = "-------+----------+----------";
    push @l, sprintf("%-7s|%10s|%10s", " [sec]", "user", "system");
    push @l, "$trenn";
    push @l, sprintf("%-7s|%10.2f|%10.2f", "process", $user, $system);
    push @l, sprintf("%-7s|%10.2f|%10.2f", "childs", $cuser, $csystem);
    push @l, "$trenn";
    my ($u, $s) = ($cuser + $user, $csystem + $system);
    my $us_str = &dateTools::valToStr('-sec' => int($u + $s + .5));
    push @l, sprintf("%-7s|%10.2f|%10.2f => %.2f ($us_str)", "sum",
		     $u, $s, $u + $s);

    my $startDate = $self->{'startDate'};
    my (@startDate) = ();
    if ($startDate)
    {
	push @startDate, '           precommand duration = ' .
	    $startDate->deltaInStr('-secondDate' => $self->{'aktDate'});
    }

    my $dEnd = dateTools->new();
    my $backupDuration =
	$self->{'aktDate'}->deltaInSecs('-secondDate' => $dEnd);
    $backupDuration = 1 if ($backupDuration == 0);   # Minimaler Wert

    my $sumTargetAll = $self->{'sumLinkedInternalCopy'} +
	$self->{'sumLinkedInternalCompr'} + $self->{'sumLinkedOldCopy'} +
	$self->{'sumLinkedOldCompr'} + $self->{'sumUnchangedCopy'} +
	$self->{'sumUnchangedCompr'} + $self->{'sumNewCopy'} +
	$self->{'sumNewCompr'};
    my $sumTargetNew = $self->{'sumNewCopy'} + $self->{'sumNewCompr'};

    my $newUsedSpace = $sumTargetNew + $self->{'md5CheckSum'} -
	$self->{'bytes'};
    my $newUsedSpaceHuman;
    if ($newUsedSpace >= 0)
    {
	($newUsedSpaceHuman) = &::humanReadable($newUsedSpace);
    }
    else
    {
	($newUsedSpaceHuman) = &::humanReadable(- $newUsedSpace);
	$newUsedSpaceHuman = "-$newUsedSpaceHuman";
    }

    my (@ug_log) = ();
    my (@ug_file) = ();
    if ($self->{'userGroupFlag'})
    {
	my $k;
	my $uidStatInodes = $self->{'uidStatInodes'};
	foreach $k (sort {$a <=> $b} keys %$uidStatInodes)
	{
	    my $name = getpwuid($k);
	    $name = '-' unless $name;
	    push @ug_log, sprintf("USER INODE  %6d - %9s = %lu",
				   $k, $name, $$uidStatInodes{$k});
	    push @ug_file, "USER_INODE $k $name " . $$uidStatInodes{$k};
	}
	my $uidStatSize = $self->{'uidStatSize'};
	foreach $k (sort {$a <=> $b} keys %$uidStatSize)
	{
	    my $name = getpwuid($k);
	    push @ug_log, sprintf("USER SIZE   %6d - %9s = %s (%lu)",
				  $k, $name,
				  (&::humanReadable($$uidStatSize{$k}))[0],
				  $$uidStatSize{$k});
	    push @ug_file, "USER_SIZE $k $name " . $$uidStatSize{$k};
	}

	my $gidStatInodes = $self->{'gidStatInodes'};
	foreach $k (sort {$a <=> $b} keys %$gidStatInodes)
	{
	    my $group = getgrgid($k);
	    push @ug_log, sprintf("GROUP INODE %6d - %9s = %lu",
				   $k, $group, $$gidStatInodes{$k});
	    push @ug_file, "GROUP_INODE $k $group " . $$gidStatInodes{$k};
	}
	my $gidStatSize = $self->{'gidStatSize'};
	foreach $k (sort {$a <=> $b} keys %$gidStatSize)
	{
	    my $group = getgrgid($k);
	    push @ug_log, sprintf("GROUP SIZE  %6d - %9s = %s (%lu)",
				  $k, $group,
				  (&::humanReadable($$gidStatSize{$k}))[0],
				  $$gidStatSize{$k});
	    push @ug_file, "GROUP_SIZE $k $group " . $$gidStatSize{$k};
	}

#	print "#################\n";
#	print join("\n", @ug_log), "\n";
#	print "#################\n";
#	print join("\n", @ug_file), "\n";

	my $file = $self->{'userGroupStatFile'};
	if ($file)
	{
	    local *FILE;
	    &::checkDelSymLink($file, $prLog, 0x01);
	    unless (open(FILE, "> $file"))
	    {
		$prLog->print('-kind' => 'E',
			      '-str' => ["cannot write statistic to <$file>"]);
		goto endUidGid;
	    }
	    print FILE join("\n", @ug_file), "\n";
	    close(FILE);
	    $prLog->print('-kind' => 'I',
			  '-str' => ["printed userGroupStatFile <$file>"]);
	}
      endUidGid:;
    }

    $self->{'prLog'}->
	print('-kind' => 'S',
	      '-str' =>
	      [@l,
	       @ug_log,
	       '                   directories = ' . $self->{'noDirs'},
	       '                         files = ' . $self->{'noFiles'},
	       '                symbolic links = ' . $self->{'noSymLinks'},
	       '                   named pipes = ' . $self->{'noNamedPipes'},
	       '     new internal linked files = ' .
	           $self->{'noInternalLinkedFiles'},
	       '              old linked files = ' . $self->{'noOldLinkedFiles'},
	       '               unchanged files = ' . $self->{'unchangedFiles'},
	       '                  copied files = ' . $self->{'noCopiedFiles'},
	       '              compressed files = ' . $self->{'noCompressedFiles'},
	       'excluded files because pattern = ' . $self->{'noExcludePattern'},
	       'included files because pattern = ' . $self->{'noIncludePattern'},
	       @exTypes,
	       '        max size of copy queue = ' . $self->{'maxUsedCopyQueue'},
	       ' max size of compression queue = ' . $self->{'maxUsedComprQueue'},

	       '               calced md5 sums = ' . $self->{'noMD5edFiles'},
	       '                   forks total = ' . ($self->{'noForksMD5'} +
						   $self->{'noForksCP'} +
						   $self->{'noForksCompress'} +
						   $self->{'noNamedPipes'}),
	       '                     forks md5 = ' . $self->{'noForksMD5'},
	       '                    forks copy = ' . $self->{'noForksCP'},
	       sprintf("%33s", "forks " . $self->{'compress'} . " = ") .
	           $self->{'noForksCompress'},

	       '                 sum of source = ' .
	           (&::humanReadable($self->{'sumOrigFiles'}))[0] .
	           ' (' . $self->{'sumOrigFiles'} . ')',
	       '             sum of target all = ' .
	           (&::humanReadable($sumTargetAll))[0] . " ($sumTargetAll)",
	       '             sum of target all = ' . sprintf("%.2f%%",
		   &percent($self->{'sumOrigFiles'}, $sumTargetAll)),
	       '             sum of target new = ' .
	           (&::humanReadable($sumTargetNew))[0] . " ($sumTargetNew)",
	       '             sum of target new = ' .  sprintf("%.2f%%",
		   &percent($self->{'sumOrigFiles'}, $sumTargetNew)),
	       '            sum of md5ed files = ' .
	           (&::humanReadable($self->{'sumMD5Sum'}))[0] .
	           ' (' . $self->{'sumMD5Sum'} . ')',
	       '            sum of md5ed files = ' . sprintf("%.2f%%",
		   &percent($self->{'sumOrigFiles'},
			    $self->{'sumMD5Sum'})),
	       '    sum internal linked (copy) = ' .
	           (&::humanReadable($self->{'sumLinkedInternalCopy'}))[0] .
	           ' (' . $self->{'sumLinkedInternalCopy'} . ')',
	       '   sum internal linked (compr) = ' .
	           (&::humanReadable($self->{'sumLinkedInternalCompr'}))[0] .
	           ' (' . $self->{'sumLinkedInternalCompr'} . ')',
	       '         sum old linked (copy) = ' .
	           (&::humanReadable($self->{'sumLinkedOldCopy'}))[0] .
	           ' (' . $self->{'sumLinkedOldCopy'} . ')',
	       '        sum old linked (compr) = ' .
	           (&::humanReadable($self->{'sumLinkedOldCompr'}))[0] .
	           ' (' . $self->{'sumLinkedOldCompr'} . ')',
	       '          sum unchanged (copy) = ' .
	           (&::humanReadable($self->{'sumUnchangedCopy'}))[0] .
	           ' (' . $self->{'sumUnchangedCopy'} . ')',
	       '         sum unchanged (compr) = ' .
	           (&::humanReadable($self->{'sumUnchangedCompr'}))[0] .
	           ' (' . $self->{'sumUnchangedCompr'} . ')',
	       '                sum new (copy) = ' .
	           (&::humanReadable($self->{'sumNewCopy'}))[0] .
	           ' (' . $self->{'sumNewCopy'} . ')',
	       '               sum new (compr) = ' .
	           (&::humanReadable($self->{'sumNewCompr'}))[0] .
	           ' (' . $self->{'sumNewCompr'} . ')',
	       '    sum new (compr), orig size = ' .
	           (&::humanReadable($self->{'sumNewComprOrigSize'}))[0] .
	           ' (' . $self->{'sumNewComprOrigSize'} . ')',
	       '                sum new / orig = ' . sprintf("%.2f%%",
	           &percent($self->{'sumNewComprOrigSize'}
			    + $self->{'sumNewCopy'},
			    $self->{'sumNewCompr'}
			    + $self->{'sumNewCopy'})),
	       '      size of md5CheckSum file = ' .
	           (&::humanReadable($self->{'md5CheckSum'}))[0] .
	           ' (' . $self->{'md5CheckSum'} . ')',
	       '    size of temporary db files = ' .
	           (&::humanReadable($self->{'sumDBMFiles'}))[0] .
	           ' (' . $self->{'sumDBMFiles'} . ')',
	       @startDate,
	       '           deleted old backups = ' . $self->{'noDeletedOldDirs'},
	       '           deleted directories = ' . $self->{'dirs'},
	       '                 deleted files = ' . $self->{'files'},
	       '          (only) removed links = ' . $self->{'links'},
	       'freed space in old directories = ' .
	       (&::humanReadable($self->{'bytes'}))[0] . ' (' .
	       $self->{'bytes'} . ')',
	       "      add. used space in files = $newUsedSpaceHuman ($newUsedSpace)",
	       '               backup duration = ' .
	       dateTools::valToStr('-sec' => $backupDuration),
	       'over all files/sec (real time) = ' .
	           sprintf("%.2f", $self->{'noFiles'} / $backupDuration),
	       ' over all files/sec (CPU time) = ' .
	           sprintf("%.2f", $self->{'noFiles'} / ($u + $s)),
	       '                     CPU usage = ' .
	           sprintf("%.2f\%", ($u + $s) / $backupDuration * 100)
	       ]);

}


########################################
sub percent
{
    my ($base, $rel) = @_;

    if ($base == 0)
    {
	return 0;
    }
    else
    {
	return 100 - ($base - $rel) * 100 / $base;
    }
}


######################################################################
package inclExclPattern;

sub new
{
    my $class = shift;
    my $self = {};

    my (%params) = ('-pattern'  => undef,
		    '-pattLine' => [],       # splitted pattern
		    '-keyName'  => undef,    # eg. 'includePattern'
		    '-debug'    => undef,    # debug == 0: no output
		                             # debug == 1: result per file
		                             # debug == 2: debugging output
		                             #             of checkLineDebug
		    '-verbose'  => undef,
		    '-prLog'    => undef,
		    '-tmpfile'  => undef
		    );

    &::checkObjectParams(\%params, \@_, 'inclExclPattern::new',
			 ['-pattern', '-pattLine',
			  '-keyName', '-debug', '-prLog', '-tmpfile']);

    &::setParamsDirect($self, \%params);

    my $prLog = $self->{'prLog'};
    my $pattLine = $self->{'pattLine'};

    $prLog->print('-kind' => 'I',
		  '-str' => [$self->{'keyName'} . " = " .
			     $self->{'pattern'}])
	if $self->{'verbose'};

    if ($params{'-pattern'})
    {
	my ($patternID, $evalString) =
	    &evalTools::makePatternIndexPatternLine($pattLine);

	$self->{'patternID'} = $patternID;
	$self->{'evalString'} = $evalString;

	#
	# to test for syntactical correctness, make one eval
	#
	&evalTools::checkLine($evalString, $self->{'keyName'},
			      'testing', $prLog);

	#
	# if debug == 0 or 1, write file with pattern line and read
	# it with require
	#
	if ($self->{'debug'} == 0 or $self->{'debug'} == 1)
	{
	    local *FILE;
	    &::checkDelSymLink($self->{'tmpfile'}, $prLog, 0x01);
	    open(FILE, "> " . $self->{'tmpfile'}) or
		$prLog->print('-kind' => 'E',
			      '-str' =>
			      ["cannot open <" . $self->{'tmpfile'} .
			       "> for generating <" . $self->{'keyName'} .
			       "> for \"perl require\""],
			      '-exit' => 1);

	    my $funcName = "::Eval" . $self->{'keyName'};
	    $self->{'funcName'} = $funcName;
	    print FILE "sub $funcName\n\{\n";
	    print FILE "\tmy \$line = shift;\n";
	    print FILE "\treturn ($evalString);\n\}\n1\n";
	    close(FILE);
	    if ($self->{'debug'} eq 1)
	    {
		open(FILE, "< " . $self->{'tmpfile'});
		my (@l) = <FILE>;
		close(FILE);
		chop @l;
		$prLog->print('-kind' => 'D',
			      '-str' => ["requiring:", @l]);
	    }
	    require $self->{'tmpfile'};
	    unlink $self->{'tmpfile'};
	    $self->{'funcPointer'} = \&$funcName;
	}
    }

    bless $self, $class;
}


########################################
sub getPattern
{
    my $self = shift;

    return $self->{'pattern'};
}


########################################
sub hasPattern
{
    my $self = shift;

    return $self->{'pattern'} ? 1 : 0;
}


########################################
sub checkWithEval
{
    my $self = shift;

    return 0 unless $self->{'pattern'};      # no pattern set!
}


########################################
sub checkRule
{
    my $self = shift;
    my $pathAndFilename = shift;

    my $debug = $self->{'debug'};
    if ($debug == 0 or $debug == 1)
    {
	my $funcPointer = $self->{'funcPointer'};
	my $ret = &$funcPointer($pathAndFilename);   # 1 = match
	$ret = 0 unless $ret;

	$prLog->print('-kind' => 'D',
		      '-str' => [$self->{'keyName'} .
				 ": <$ret> => <$pathAndFilename>"])
	    if $debug == 1;
	return $ret;
    }
    else        # with debugging output
    {
	return
	    &evalTools::checkLineDebug($self->{'pattLine'},
				       $self->{'patternID'}, $pathAndFilename,
				       $self->{'keyName'}, $self->{'prLog'});
    }
}


######################################################################
# stores Dates and Times of all directories in a file
# after backup this file is read and directory atime and mtime are set
package setResetDirTimes;

########################################
sub new
{
    my $class = shift;
    my $self = {};

    my (%params) = ('-tmpDir'    => undef,
		    '-sourceDir' => undef,
		    '-targetDir' => undef,
		    '-prLog'     => undef
		    );

    &::checkObjectParams(\%params, \@_, 'setResetDirTimes::new',
			 ['-tmpDir', '-sourceDir', '-targetDir', '-prLog']);

    &::setParamsDirect($self, \%params);

    my $tmpfile = &::uniqFileName("$tmpdir/storeBackup-dirs.");
    $self->{'tmpfile'} = $tmpfile;
    local *FILE;
    &::checkDelSymLink($tmpfile, $prLog, 0x01);
    open(FILE, "> $tmpfile") or
	$prLog->print('-kind' => 'E',
		      '-str' => ["cannot open <$tmpfile>, exiting"],
		      '-exit' => 1);
    $self->{'FILE'} = *FILE;

    bless $self, $class;
}


########################################
sub addDir
{
    my $self = shift;
    my ($relFile, $atime, $mtime) = @_;

    local *FILE = $self->{'FILE'};
    print FILE "$atime $mtime $relFile\n";
}


########################################
sub writeTimes
{
    my $self = shift;

    my $sourceDir = $self->{'sourceDir'};
    my $targetDir = $self->{'targetDir'};
    local *FILE = $self->{'FILE'};
    my $prLog = $self->{'prLog'};
    my $tmpfile = $self->{'tmpfile'};

    close(FILE) or
	$prLog->print('-kind' => 'E',
		      '-str' => ["cannot close <$tmpfile>"]);

    unless (open(FILE, "< $tmpfile"))
    {
	$prLog->print('-kind' => 'E',
		      '-str' => ["cannot read <$tmpfile>, cannot set atime " .
				 "and mtime for directories"]);
	return;
    }

    $prLog->print('-kind' => 'I',
		  '-str' => ["setting atime, mtime of directories ..."]);

    my $line;
    while ($line = <FILE>)
    {
	chop $line;
	my ($atime, $mtime, $relFile) = split(/\s/, $line, 3);
	utime $atime, $mtime, "$sourceDir/$relFile" if $resetAtime;
	utime $atime, $mtime, "$targetDir/$relFile";
    }

    close(FILE);
    unlink $tmpfile;
}


