dar-2.4.8/config.rpath

#! /bin/sh
# Output a system dependent set of variables, describing how to set the
# run time search path of shared libraries in an executable.
#
# Copyright 1996-2010 Free Software Foundation, Inc.
# Taken from GNU libtool, 2001
# Originally by Gordon Matzigkeit, 1996
#
# This file is free software; the Free Software Foundation gives
# unlimited permission to copy and/or distribute it, with or without
# modifications, as long as this notice is preserved.
#
# The first argument passed to this file is the canonical host specification,
#    CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM
# or
#    CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM
# The environment variables CC, GCC, LDFLAGS, LD, with_gnu_ld
# should be set by the caller.
#
# The set of defined variables is at the end of this script.

# Known limitations:
# - On IRIX 6.5 with CC="cc", the run time search path must not be longer
#   than 256 bytes, otherwise the compiler driver will dump core. The only
#   known workaround is to choose shorter directory names for the build
#   directory and/or the installation directory.

# All known linkers require a `.a' archive for static linking (except MSVC,
# which needs '.lib').
libext=a
shrext=.so

host="$1"
host_cpu=`echo "$host" | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\1/'`
host_vendor=`echo "$host" | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\2/'`
host_os=`echo "$host" | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\3/'`

# Code taken from libtool.m4's _LT_CC_BASENAME.

for cc_temp in $CC""; do
  case $cc_temp in
    compile | *[\\/]compile | ccache | *[\\/]ccache ) ;;
    distcc | *[\\/]distcc | purify | *[\\/]purify ) ;;
    \-*) ;;
    *) break;;
  esac
done
cc_basename=`echo "$cc_temp" | sed -e 's%^.*/%%'`

# Code taken from libtool.m4's _LT_COMPILER_PIC.

wl=
if test "$GCC" = yes; then
  wl='-Wl,'
else
  case "$host_os" in
    aix*)
      wl='-Wl,'
      ;;
    darwin*)
      case $cc_basename in
        xlc*)
          wl='-Wl,'
          ;;
      esac
      ;;
    mingw* | cygwin* | pw32* | os2* | cegcc*)
      ;;
    hpux9* | hpux10* | hpux11*)
      wl='-Wl,'
      ;;
    irix5* | irix6* | nonstopux*)
      wl='-Wl,'
      ;;
    newsos6)
      ;;
    linux* | k*bsd*-gnu)
      case $cc_basename in
        ecc*)
          wl='-Wl,'
          ;;
        icc* | ifort*)
          wl='-Wl,'
          ;;
        lf95*)
          wl='-Wl,'
          ;;
        pgcc | pgf77 | pgf90)
          wl='-Wl,'
          ;;
        ccc*)
          wl='-Wl,'
          ;;
        como)
          wl='-lopt='
          ;;
        *)
          case `$CC -V 2>&1 | sed 5q` in
            *Sun\ C*)
              wl='-Wl,'
              ;;
          esac
          ;;
      esac
      ;;
    osf3* | osf4* | osf5*)
      wl='-Wl,'
      ;;
    rdos*)
      ;;
    solaris*)
      wl='-Wl,'
      ;;
    sunos4*)
      wl='-Qoption ld '
      ;;
    sysv4 | sysv4.2uw2* | sysv4.3*)
      wl='-Wl,'
      ;;
    sysv4*MP*)
      ;;
    sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*)
      wl='-Wl,'
      ;;
    unicos*)
      wl='-Wl,'
      ;;
    uts4*)
      ;;
  esac
fi

# Code taken from libtool.m4's _LT_LINKER_SHLIBS.

hardcode_libdir_flag_spec=
hardcode_libdir_separator=
hardcode_direct=no
hardcode_minus_L=no

case "$host_os" in
  cygwin* | mingw* | pw32* | cegcc*)
    # FIXME: the MSVC++ port hasn't been tested in a loooong time
    # When not using gcc, we currently assume that we are using
    # Microsoft Visual C++.
    if test "$GCC" != yes; then
      with_gnu_ld=no
    fi
    ;;
  interix*)
    # we just hope/assume this is gcc and not c89 (= MSVC++)
    with_gnu_ld=yes
    ;;
  openbsd*)
    with_gnu_ld=no
    ;;
esac

ld_shlibs=yes
if test "$with_gnu_ld" = yes; then
  # Set some defaults for GNU ld with shared library support. These
  # are reset later if shared libraries are not supported. Putting them
  # here allows them to be overridden if necessary.
# Unlike libtool, we use -rpath here, not --rpath, since the documented # option of GNU ld is called -rpath, not --rpath. hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' case "$host_os" in aix[3-9]*) # On AIX/PPC, the GNU linker is very broken if test "$host_cpu" != ia64; then ld_shlibs=no fi ;; amigaos*) hardcode_libdir_flag_spec='-L$libdir' hardcode_minus_L=yes # Samuel A. Falvo II reports # that the semantics of dynamic libraries on AmigaOS, at least up # to version 4, is to share data among multiple programs linked # with the same dynamic library. Since this doesn't match the # behavior of shared libraries on other platforms, we cannot use # them. ld_shlibs=no ;; beos*) if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then : else ld_shlibs=no fi ;; cygwin* | mingw* | pw32* | cegcc*) # hardcode_libdir_flag_spec is actually meaningless, as there is # no search path for DLLs. hardcode_libdir_flag_spec='-L$libdir' if $LD --help 2>&1 | grep 'auto-import' > /dev/null; then : else ld_shlibs=no fi ;; interix[3-9]*) hardcode_direct=no hardcode_libdir_flag_spec='${wl}-rpath,$libdir' ;; gnu* | linux* | k*bsd*-gnu) if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then : else ld_shlibs=no fi ;; netbsd*) ;; solaris*) if $LD -v 2>&1 | grep 'BFD 2\.8' > /dev/null; then ld_shlibs=no elif $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then : else ld_shlibs=no fi ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*) case `$LD -v 2>&1` in *\ [01].* | *\ 2.[0-9].* | *\ 2.1[0-5].*) ld_shlibs=no ;; *) if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then hardcode_libdir_flag_spec='`test -z "$SCOABSPATH" && echo ${wl}-rpath,$libdir`' else ld_shlibs=no fi ;; esac ;; sunos4*) hardcode_direct=yes ;; *) if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then : else ld_shlibs=no fi ;; esac if test "$ld_shlibs" = no; then hardcode_libdir_flag_spec= fi else case "$host_os" in aix3*) # Note: this linker hardcodes the directories in LIBPATH if there # are no directories specified by -L. hardcode_minus_L=yes if test "$GCC" = yes; then # Neither direct hardcoding nor static linking is supported with a # broken collect2. hardcode_direct=unsupported fi ;; aix[4-9]*) if test "$host_cpu" = ia64; then # On IA64, the linker does run time linking by default, so we don't # have to do anything special. aix_use_runtimelinking=no else aix_use_runtimelinking=no # Test if we are trying to use run time linking or normal # AIX style linking. If -brtl is somewhere in LDFLAGS, we # need to do runtime linking. case $host_os in aix4.[23]|aix4.[23].*|aix[5-9]*) for ld_flag in $LDFLAGS; do if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then aix_use_runtimelinking=yes break fi done ;; esac fi hardcode_direct=yes hardcode_libdir_separator=':' if test "$GCC" = yes; then case $host_os in aix4.[012]|aix4.[012].*) collect2name=`${CC} -print-prog-name=collect2` if test -f "$collect2name" && \ strings "$collect2name" | grep resolve_lib_name >/dev/null then # We have reworked collect2 : else # We have old collect2 hardcode_direct=unsupported hardcode_minus_L=yes hardcode_libdir_flag_spec='-L$libdir' hardcode_libdir_separator= fi ;; esac fi # Begin _LT_AC_SYS_LIBPATH_AIX. 
echo 'int main () { return 0; }' > conftest.c ${CC} ${LDFLAGS} conftest.c -o conftest aix_libpath=`dump -H conftest 2>/dev/null | sed -n -e '/Import File Strings/,/^$/ { /^0/ { s/^0 *\(.*\)$/\1/; p; } }'` if test -z "$aix_libpath"; then aix_libpath=`dump -HX64 conftest 2>/dev/null | sed -n -e '/Import File Strings/,/^$/ { /^0/ { s/^0 *\(.*\)$/\1/; p; } }'` fi if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib" fi rm -f conftest.c conftest # End _LT_AC_SYS_LIBPATH_AIX. if test "$aix_use_runtimelinking" = yes; then hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" else if test "$host_cpu" = ia64; then hardcode_libdir_flag_spec='${wl}-R $libdir:/usr/lib:/lib' else hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" fi fi ;; amigaos*) hardcode_libdir_flag_spec='-L$libdir' hardcode_minus_L=yes # see comment about different semantics on the GNU ld section ld_shlibs=no ;; bsdi[45]*) ;; cygwin* | mingw* | pw32* | cegcc*) # When not using gcc, we currently assume that we are using # Microsoft Visual C++. # hardcode_libdir_flag_spec is actually meaningless, as there is # no search path for DLLs. hardcode_libdir_flag_spec=' ' libext=lib ;; darwin* | rhapsody*) hardcode_direct=no if test "$GCC" = yes ; then : else case $cc_basename in xlc*) ;; *) ld_shlibs=no ;; esac fi ;; dgux*) hardcode_libdir_flag_spec='-L$libdir' ;; freebsd1*) ld_shlibs=no ;; freebsd2.2*) hardcode_libdir_flag_spec='-R$libdir' hardcode_direct=yes ;; freebsd2*) hardcode_direct=yes hardcode_minus_L=yes ;; freebsd* | dragonfly*) hardcode_libdir_flag_spec='-R$libdir' hardcode_direct=yes ;; hpux9*) hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' hardcode_libdir_separator=: hardcode_direct=yes # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. hardcode_minus_L=yes ;; hpux10*) if test "$with_gnu_ld" = no; then hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' hardcode_libdir_separator=: hardcode_direct=yes # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. hardcode_minus_L=yes fi ;; hpux11*) if test "$with_gnu_ld" = no; then hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' hardcode_libdir_separator=: case $host_cpu in hppa*64*|ia64*) hardcode_direct=no ;; *) hardcode_direct=yes # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. 
hardcode_minus_L=yes ;; esac fi ;; irix5* | irix6* | nonstopux*) hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' hardcode_libdir_separator=: ;; netbsd*) hardcode_libdir_flag_spec='-R$libdir' hardcode_direct=yes ;; newsos6) hardcode_direct=yes hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' hardcode_libdir_separator=: ;; openbsd*) if test -f /usr/libexec/ld.so; then hardcode_direct=yes if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then hardcode_libdir_flag_spec='${wl}-rpath,$libdir' else case "$host_os" in openbsd[01].* | openbsd2.[0-7] | openbsd2.[0-7].*) hardcode_libdir_flag_spec='-R$libdir' ;; *) hardcode_libdir_flag_spec='${wl}-rpath,$libdir' ;; esac fi else ld_shlibs=no fi ;; os2*) hardcode_libdir_flag_spec='-L$libdir' hardcode_minus_L=yes ;; osf3*) hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' hardcode_libdir_separator=: ;; osf4* | osf5*) if test "$GCC" = yes; then hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' else # Both cc and cxx compiler support -rpath directly hardcode_libdir_flag_spec='-rpath $libdir' fi hardcode_libdir_separator=: ;; solaris*) hardcode_libdir_flag_spec='-R$libdir' ;; sunos4*) hardcode_libdir_flag_spec='-L$libdir' hardcode_direct=yes hardcode_minus_L=yes ;; sysv4) case $host_vendor in sni) hardcode_direct=yes # is this really true??? ;; siemens) hardcode_direct=no ;; motorola) hardcode_direct=no #Motorola manual says yes, but my tests say they lie ;; esac ;; sysv4.3*) ;; sysv4*MP*) if test -d /usr/nec; then ld_shlibs=yes fi ;; sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7* | sco3.2v5.0.[024]*) ;; sysv5* | sco3.2v5* | sco5v6*) hardcode_libdir_flag_spec='`test -z "$SCOABSPATH" && echo ${wl}-R,$libdir`' hardcode_libdir_separator=':' ;; uts4*) hardcode_libdir_flag_spec='-L$libdir' ;; *) ld_shlibs=no ;; esac fi # Check dynamic linker characteristics # Code taken from libtool.m4's _LT_SYS_DYNAMIC_LINKER. # Unlike libtool.m4, here we don't care about _all_ names of the library, but # only about the one the linker finds when passed -lNAME. This is the last # element of library_names_spec in libtool.m4, or possibly two of them if the # linker has special search rules. 
library_names_spec= # the last element of library_names_spec in libtool.m4 libname_spec='lib$name' case "$host_os" in aix3*) library_names_spec='$libname.a' ;; aix[4-9]*) library_names_spec='$libname$shrext' ;; amigaos*) library_names_spec='$libname.a' ;; beos*) library_names_spec='$libname$shrext' ;; bsdi[45]*) library_names_spec='$libname$shrext' ;; cygwin* | mingw* | pw32* | cegcc*) shrext=.dll library_names_spec='$libname.dll.a $libname.lib' ;; darwin* | rhapsody*) shrext=.dylib library_names_spec='$libname$shrext' ;; dgux*) library_names_spec='$libname$shrext' ;; freebsd1*) ;; freebsd* | dragonfly*) case "$host_os" in freebsd[123]*) library_names_spec='$libname$shrext$versuffix' ;; *) library_names_spec='$libname$shrext' ;; esac ;; gnu*) library_names_spec='$libname$shrext' ;; hpux9* | hpux10* | hpux11*) case $host_cpu in ia64*) shrext=.so ;; hppa*64*) shrext=.sl ;; *) shrext=.sl ;; esac library_names_spec='$libname$shrext' ;; interix[3-9]*) library_names_spec='$libname$shrext' ;; irix5* | irix6* | nonstopux*) library_names_spec='$libname$shrext' case "$host_os" in irix5* | nonstopux*) libsuff= shlibsuff= ;; *) case $LD in *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ") libsuff= shlibsuff= ;; *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ") libsuff=32 shlibsuff=N32 ;; *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ") libsuff=64 shlibsuff=64 ;; *) libsuff= shlibsuff= ;; esac ;; esac ;; linux*oldld* | linux*aout* | linux*coff*) ;; linux* | k*bsd*-gnu) library_names_spec='$libname$shrext' ;; knetbsd*-gnu) library_names_spec='$libname$shrext' ;; netbsd*) library_names_spec='$libname$shrext' ;; newsos6) library_names_spec='$libname$shrext' ;; nto-qnx*) library_names_spec='$libname$shrext' ;; openbsd*) library_names_spec='$libname$shrext$versuffix' ;; os2*) libname_spec='$name' shrext=.dll library_names_spec='$libname.a' ;; osf3* | osf4* | osf5*) library_names_spec='$libname$shrext' ;; rdos*) ;; solaris*) library_names_spec='$libname$shrext' ;; sunos4*) library_names_spec='$libname$shrext$versuffix' ;; sysv4 | sysv4.3*) library_names_spec='$libname$shrext' ;; sysv4*MP*) library_names_spec='$libname$shrext' ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) library_names_spec='$libname$shrext' ;; uts4*) library_names_spec='$libname$shrext' ;; esac sed_quote_subst='s/\(["`$\\]\)/\\\1/g' escaped_wl=`echo "X$wl" | sed -e 's/^X//' -e "$sed_quote_subst"` shlibext=`echo "$shrext" | sed -e 's,^\.,,'` escaped_libname_spec=`echo "X$libname_spec" | sed -e 's/^X//' -e "$sed_quote_subst"` escaped_library_names_spec=`echo "X$library_names_spec" | sed -e 's/^X//' -e "$sed_quote_subst"` escaped_hardcode_libdir_flag_spec=`echo "X$hardcode_libdir_flag_spec" | sed -e 's/^X//' -e "$sed_quote_subst"` LC_ALL=C sed -e 's/^\([a-zA-Z0-9_]*\)=/acl_cv_\1=/' <. Submit a context # diff and a properly formatted GNU ChangeLog entry. # # Configuration subroutine to validate and canonicalize a configuration type. # Supply the specified configuration type as an argument. # If it is invalid, we print an error message on stderr and exit with code 1. # Otherwise, we print the canonical config type on stdout and succeed. # You can get the latest version of this script from: # http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub;hb=HEAD # This file is supposed to be the same for all GNU packages # and recognize all the CPU types, system types and aliases # that are meaningful with *any* GNU software. # Each package is responsible for reporting which valid configurations # it does not support. 
The user should be able to distinguish # a failure to support a valid configuration from a meaningless # configuration. # The goal of this file is to map all the various variations of a given # machine specification into a single specification in the form: # CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM # or in some cases, the newer four-part form: # CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM # It is wrong to echo any other type of specification. me=`echo "$0" | sed -e 's,.*/,,'` usage="\ Usage: $0 [OPTION] CPU-MFR-OPSYS $0 [OPTION] ALIAS Canonicalize a configuration name. Operation modes: -h, --help print this help, then exit -t, --time-stamp print date of last modification, then exit -v, --version print version number, then exit Report bugs and patches to ." version="\ GNU config.sub ($timestamp) Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free Software Foundation, Inc. This is free software; see the source for copying conditions. There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." help=" Try \`$me --help' for more information." # Parse command line while test $# -gt 0 ; do case $1 in --time-stamp | --time* | -t ) echo "$timestamp" ; exit ;; --version | -v ) echo "$version" ; exit ;; --help | --h* | -h ) echo "$usage"; exit ;; -- ) # Stop option processing shift; break ;; - ) # Use stdin as input. break ;; -* ) echo "$me: invalid option $1$help" exit 1 ;; *local*) # First pass through any local machine types. echo $1 exit ;; * ) break ;; esac done case $# in 0) echo "$me: missing argument$help" >&2 exit 1;; 1) ;; *) echo "$me: too many arguments$help" >&2 exit 1;; esac # Separate what the user gave into CPU-COMPANY and OS or KERNEL-OS (if any). # Here we must recognize all the valid KERNEL-OS combinations. maybe_os=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\2/'` case $maybe_os in nto-qnx* | linux-gnu* | linux-dietlibc | linux-newlib* | linux-uclibc* | \ uclinux-uclibc* | uclinux-gnu* | kfreebsd*-gnu* | knetbsd*-gnu* | netbsd*-gnu* | \ kopensolaris*-gnu* | \ storm-chaos* | os2-emx* | rtmk-nova*) os=-$maybe_os basic_machine=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'` ;; *) basic_machine=`echo $1 | sed 's/-[^-]*$//'` if [ $basic_machine != $1 ] then os=`echo $1 | sed 's/.*-/-/'` else os=; fi ;; esac ### Let's recognize common machines as not being operating systems so ### that things like config.sub decstation-3100 work. We also ### recognize some manufacturers as not being operating systems, so we ### can provide default operating systems below. case $os in -sun*os*) # Prevent following clause from handling this invalid input. 
;; -dec* | -mips* | -sequent* | -encore* | -pc532* | -sgi* | -sony* | \ -att* | -7300* | -3300* | -delta* | -motorola* | -sun[234]* | \ -unicom* | -ibm* | -next | -hp | -isi* | -apollo | -altos* | \ -convergent* | -ncr* | -news | -32* | -3600* | -3100* | -hitachi* |\ -c[123]* | -convex* | -sun | -crds | -omron* | -dg | -ultra | -tti* | \ -harris | -dolphin | -highlevel | -gould | -cbm | -ns | -masscomp | \ -apple | -axis | -knuth | -cray | -microblaze) os= basic_machine=$1 ;; -bluegene*) os=-cnk ;; -sim | -cisco | -oki | -wec | -winbond) os= basic_machine=$1 ;; -scout) ;; -wrs) os=-vxworks basic_machine=$1 ;; -chorusos*) os=-chorusos basic_machine=$1 ;; -chorusrdb) os=-chorusrdb basic_machine=$1 ;; -hiux*) os=-hiuxwe2 ;; -sco6) os=-sco5v6 basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -sco5) os=-sco3.2v5 basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -sco4) os=-sco3.2v4 basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -sco3.2.[4-9]*) os=`echo $os | sed -e 's/sco3.2./sco3.2v/'` basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -sco3.2v[4-9]*) # Don't forget version if it is 3.2v4 or newer. basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -sco5v6*) # Don't forget version if it is 3.2v4 or newer. basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -sco*) os=-sco3.2v2 basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -udk*) basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -isc) os=-isc2.2 basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -clix*) basic_machine=clipper-intergraph ;; -isc*) basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -lynx*) os=-lynxos ;; -ptx*) basic_machine=`echo $1 | sed -e 's/86-.*/86-sequent/'` ;; -windowsnt*) os=`echo $os | sed -e 's/windowsnt/winnt/'` ;; -psos*) os=-psos ;; -mint | -mint[0-9]*) basic_machine=m68k-atari os=-mint ;; esac # Decode aliases for certain CPU-COMPANY combinations. case $basic_machine in # Recognize the basic CPU types without company name. # Some are omitted here because they have special meanings below. 
1750a | 580 \ | a29k \ | alpha | alphaev[4-8] | alphaev56 | alphaev6[78] | alphapca5[67] \ | alpha64 | alpha64ev[4-8] | alpha64ev56 | alpha64ev6[78] | alpha64pca5[67] \ | am33_2.0 \ | arc | arm | arm[bl]e | arme[lb] | armv[2345] | armv[345][lb] | avr | avr32 \ | bfin \ | c4x | clipper \ | d10v | d30v | dlx | dsp16xx \ | fido | fr30 | frv \ | h8300 | h8500 | hppa | hppa1.[01] | hppa2.0 | hppa2.0[nw] | hppa64 \ | i370 | i860 | i960 | ia64 \ | ip2k | iq2000 \ | lm32 \ | m32c | m32r | m32rle | m68000 | m68k | m88k \ | maxq | mb | microblaze | mcore | mep | metag \ | mips | mipsbe | mipseb | mipsel | mipsle \ | mips16 \ | mips64 | mips64el \ | mips64octeon | mips64octeonel \ | mips64orion | mips64orionel \ | mips64r5900 | mips64r5900el \ | mips64vr | mips64vrel \ | mips64vr4100 | mips64vr4100el \ | mips64vr4300 | mips64vr4300el \ | mips64vr5000 | mips64vr5000el \ | mips64vr5900 | mips64vr5900el \ | mipsisa32 | mipsisa32el \ | mipsisa32r2 | mipsisa32r2el \ | mipsisa64 | mipsisa64el \ | mipsisa64r2 | mipsisa64r2el \ | mipsisa64sb1 | mipsisa64sb1el \ | mipsisa64sr71k | mipsisa64sr71kel \ | mipstx39 | mipstx39el \ | mn10200 | mn10300 \ | moxie \ | mt \ | msp430 \ | nios | nios2 \ | ns16k | ns32k \ | or32 \ | pdp10 | pdp11 | pj | pjl \ | powerpc | powerpc64 | powerpc64le | powerpcle | ppcbe \ | pyramid \ | rx \ | score \ | sh | sh[1234] | sh[24]a | sh[24]aeb | sh[23]e | sh[34]eb | sheb | shbe | shle | sh[1234]le | sh3ele \ | sh64 | sh64le \ | sparc | sparc64 | sparc64b | sparc64v | sparc86x | sparclet | sparclite \ | sparcv8 | sparcv9 | sparcv9b | sparcv9v \ | spu | strongarm \ | tahoe | thumb | tic4x | tic80 | tron \ | ubicom32 \ | v850 | v850e \ | we32k \ | x86 | xc16x | xscale | xscalee[bl] | xstormy16 | xtensa \ | z8k | z80) basic_machine=$basic_machine-unknown ;; m6811 | m68hc11 | m6812 | m68hc12 | picochip) # Motorola 68HC11/12. basic_machine=$basic_machine-unknown os=-none ;; m88110 | m680[12346]0 | m683?2 | m68360 | m5200 | v70 | w65 | z8k) ;; ms1) basic_machine=mt-unknown ;; # We use `pc' rather than `unknown' # because (1) that's what they normally are, and # (2) the word "unknown" tends to confuse beginning users. i*86 | x86_64) basic_machine=$basic_machine-pc ;; # Object if more than one company name word. *-*-*) echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2 exit 1 ;; # Recognize the basic CPU types with company name. 
580-* \ | a29k-* \ | alpha-* | alphaev[4-8]-* | alphaev56-* | alphaev6[78]-* \ | alpha64-* | alpha64ev[4-8]-* | alpha64ev56-* | alpha64ev6[78]-* \ | alphapca5[67]-* | alpha64pca5[67]-* | arc-* \ | arm-* | armbe-* | armle-* | armeb-* | armv*-* \ | avr-* | avr32-* \ | bfin-* | bs2000-* \ | c[123]* | c30-* | [cjt]90-* | c4x-* | c54x-* | c55x-* | c6x-* \ | clipper-* | craynv-* | cydra-* \ | d10v-* | d30v-* | dlx-* \ | elxsi-* \ | f30[01]-* | f700-* | fido-* | fr30-* | frv-* | fx80-* \ | h8300-* | h8500-* \ | hppa-* | hppa1.[01]-* | hppa2.0-* | hppa2.0[nw]-* | hppa64-* \ | i*86-* | i860-* | i960-* | ia64-* \ | ip2k-* | iq2000-* \ | lm32-* \ | m32c-* | m32r-* | m32rle-* \ | m68000-* | m680[012346]0-* | m68360-* | m683?2-* | m68k-* \ | m88110-* | m88k-* | maxq-* | mcore-* | metag-* | microblaze-* \ | mips-* | mipsbe-* | mipseb-* | mipsel-* | mipsle-* \ | mips16-* \ | mips64-* | mips64el-* \ | mips64octeon-* | mips64octeonel-* \ | mips64orion-* | mips64orionel-* \ | mips64r5900-* | mips64r5900el-* \ | mips64vr-* | mips64vrel-* \ | mips64vr4100-* | mips64vr4100el-* \ | mips64vr4300-* | mips64vr4300el-* \ | mips64vr5000-* | mips64vr5000el-* \ | mips64vr5900-* | mips64vr5900el-* \ | mipsisa32-* | mipsisa32el-* \ | mipsisa32r2-* | mipsisa32r2el-* \ | mipsisa64-* | mipsisa64el-* \ | mipsisa64r2-* | mipsisa64r2el-* \ | mipsisa64sb1-* | mipsisa64sb1el-* \ | mipsisa64sr71k-* | mipsisa64sr71kel-* \ | mipstx39-* | mipstx39el-* \ | mmix-* \ | mt-* \ | msp430-* \ | nios-* | nios2-* \ | none-* | np1-* | ns16k-* | ns32k-* \ | orion-* \ | pdp10-* | pdp11-* | pj-* | pjl-* | pn-* | power-* \ | powerpc-* | powerpc64-* | powerpc64le-* | powerpcle-* | ppcbe-* \ | pyramid-* \ | romp-* | rs6000-* | rx-* \ | sh-* | sh[1234]-* | sh[24]a-* | sh[24]aeb-* | sh[23]e-* | sh[34]eb-* | sheb-* | shbe-* \ | shle-* | sh[1234]le-* | sh3ele-* | sh64-* | sh64le-* \ | sparc-* | sparc64-* | sparc64b-* | sparc64v-* | sparc86x-* | sparclet-* \ | sparclite-* \ | sparcv8-* | sparcv9-* | sparcv9b-* | sparcv9v-* | strongarm-* | sv1-* | sx?-* \ | tahoe-* | thumb-* \ | tic30-* | tic4x-* | tic54x-* | tic55x-* | tic6x-* | tic80-* \ | tile-* | tilegx-* \ | tron-* \ | ubicom32-* \ | v850-* | v850e-* | vax-* \ | we32k-* \ | x86-* | x86_64-* | xc16x-* | xps100-* | xscale-* | xscalee[bl]-* \ | xstormy16-* | xtensa*-* \ | ymp-* \ | z8k-* | z80-*) ;; # Recognize the basic CPU types without company name, with glob match. xtensa*) basic_machine=$basic_machine-unknown ;; # Recognize the various machine names and aliases which stand # for a CPU type and a company and sometimes even an OS. 
386bsd) basic_machine=i386-unknown os=-bsd ;; 3b1 | 7300 | 7300-att | att-7300 | pc7300 | safari | unixpc) basic_machine=m68000-att ;; 3b*) basic_machine=we32k-att ;; a29khif) basic_machine=a29k-amd os=-udi ;; abacus) basic_machine=abacus-unknown ;; adobe68k) basic_machine=m68010-adobe os=-scout ;; alliant | fx80) basic_machine=fx80-alliant ;; altos | altos3068) basic_machine=m68k-altos ;; am29k) basic_machine=a29k-none os=-bsd ;; amd64) basic_machine=x86_64-pc ;; amd64-*) basic_machine=x86_64-`echo $basic_machine | sed 's/^[^-]*-//'` ;; amdahl) basic_machine=580-amdahl os=-sysv ;; amiga | amiga-*) basic_machine=m68k-unknown ;; amigaos | amigados) basic_machine=m68k-unknown os=-amigaos ;; amigaunix | amix) basic_machine=m68k-unknown os=-sysv4 ;; apollo68) basic_machine=m68k-apollo os=-sysv ;; apollo68bsd) basic_machine=m68k-apollo os=-bsd ;; aros) basic_machine=i386-pc os=-aros ;; aux) basic_machine=m68k-apple os=-aux ;; balance) basic_machine=ns32k-sequent os=-dynix ;; blackfin) basic_machine=bfin-unknown os=-linux ;; blackfin-*) basic_machine=bfin-`echo $basic_machine | sed 's/^[^-]*-//'` os=-linux ;; bluegene*) basic_machine=powerpc-ibm os=-cnk ;; c90) basic_machine=c90-cray os=-unicos ;; cegcc) basic_machine=arm-unknown os=-cegcc ;; convex-c1) basic_machine=c1-convex os=-bsd ;; convex-c2) basic_machine=c2-convex os=-bsd ;; convex-c32) basic_machine=c32-convex os=-bsd ;; convex-c34) basic_machine=c34-convex os=-bsd ;; convex-c38) basic_machine=c38-convex os=-bsd ;; cray | j90) basic_machine=j90-cray os=-unicos ;; craynv) basic_machine=craynv-cray os=-unicosmp ;; cr16) basic_machine=cr16-unknown os=-elf ;; crds | unos) basic_machine=m68k-crds ;; crisv32 | crisv32-* | etraxfs*) basic_machine=crisv32-axis ;; cris | cris-* | etrax*) basic_machine=cris-axis ;; crx) basic_machine=crx-unknown os=-elf ;; da30 | da30-*) basic_machine=m68k-da30 ;; decstation | decstation-3100 | pmax | pmax-* | pmin | dec3100 | decstatn) basic_machine=mips-dec ;; decsystem10* | dec10*) basic_machine=pdp10-dec os=-tops10 ;; decsystem20* | dec20*) basic_machine=pdp10-dec os=-tops20 ;; delta | 3300 | motorola-3300 | motorola-delta \ | 3300-motorola | delta-motorola) basic_machine=m68k-motorola ;; delta88) basic_machine=m88k-motorola os=-sysv3 ;; dicos) basic_machine=i686-pc os=-dicos ;; djgpp) basic_machine=i586-pc os=-msdosdjgpp ;; dpx20 | dpx20-*) basic_machine=rs6000-bull os=-bosx ;; dpx2* | dpx2*-bull) basic_machine=m68k-bull os=-sysv3 ;; ebmon29k) basic_machine=a29k-amd os=-ebmon ;; elxsi) basic_machine=elxsi-elxsi os=-bsd ;; encore | umax | mmax) basic_machine=ns32k-encore ;; es1800 | OSE68k | ose68k | ose | OSE) basic_machine=m68k-ericsson os=-ose ;; fx2800) basic_machine=i860-alliant ;; genix) basic_machine=ns32k-ns ;; gmicro) basic_machine=tron-gmicro os=-sysv ;; go32) basic_machine=i386-pc os=-go32 ;; h3050r* | hiux*) basic_machine=hppa1.1-hitachi os=-hiuxwe2 ;; h8300hms) basic_machine=h8300-hitachi os=-hms ;; h8300xray) basic_machine=h8300-hitachi os=-xray ;; h8500hms) basic_machine=h8500-hitachi os=-hms ;; harris) basic_machine=m88k-harris os=-sysv3 ;; hp300-*) basic_machine=m68k-hp ;; hp300bsd) basic_machine=m68k-hp os=-bsd ;; hp300hpux) basic_machine=m68k-hp os=-hpux ;; hp3k9[0-9][0-9] | hp9[0-9][0-9]) basic_machine=hppa1.0-hp ;; hp9k2[0-9][0-9] | hp9k31[0-9]) basic_machine=m68000-hp ;; hp9k3[2-9][0-9]) basic_machine=m68k-hp ;; hp9k6[0-9][0-9] | hp6[0-9][0-9]) basic_machine=hppa1.0-hp ;; hp9k7[0-79][0-9] | hp7[0-79][0-9]) basic_machine=hppa1.1-hp ;; hp9k78[0-9] | hp78[0-9]) # FIXME: really hppa2.0-hp 
basic_machine=hppa1.1-hp ;; hp9k8[67]1 | hp8[67]1 | hp9k80[24] | hp80[24] | hp9k8[78]9 | hp8[78]9 | hp9k893 | hp893) # FIXME: really hppa2.0-hp basic_machine=hppa1.1-hp ;; hp9k8[0-9][13679] | hp8[0-9][13679]) basic_machine=hppa1.1-hp ;; hp9k8[0-9][0-9] | hp8[0-9][0-9]) basic_machine=hppa1.0-hp ;; hppa-next) os=-nextstep3 ;; hppaosf) basic_machine=hppa1.1-hp os=-osf ;; hppro) basic_machine=hppa1.1-hp os=-proelf ;; i370-ibm* | ibm*) basic_machine=i370-ibm ;; # I'm not sure what "Sysv32" means. Should this be sysv3.2? i*86v32) basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` os=-sysv32 ;; i*86v4*) basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` os=-sysv4 ;; i*86v) basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` os=-sysv ;; i*86sol2) basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` os=-solaris2 ;; i386mach) basic_machine=i386-mach os=-mach ;; i386-vsta | vsta) basic_machine=i386-unknown os=-vsta ;; iris | iris4d) basic_machine=mips-sgi case $os in -irix*) ;; *) os=-irix4 ;; esac ;; isi68 | isi) basic_machine=m68k-isi os=-sysv ;; m68knommu) basic_machine=m68k-unknown os=-linux ;; m68knommu-*) basic_machine=m68k-`echo $basic_machine | sed 's/^[^-]*-//'` os=-linux ;; m88k-omron*) basic_machine=m88k-omron ;; magnum | m3230) basic_machine=mips-mips os=-sysv ;; merlin) basic_machine=ns32k-utek os=-sysv ;; microblaze) basic_machine=microblaze-xilinx ;; mingw32) basic_machine=i386-pc os=-mingw32 ;; mingw32ce) basic_machine=arm-unknown os=-mingw32ce ;; miniframe) basic_machine=m68000-convergent ;; *mint | -mint[0-9]* | *MiNT | *MiNT[0-9]*) basic_machine=m68k-atari os=-mint ;; mips3*-*) basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'` ;; mips3*) basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'`-unknown ;; monitor) basic_machine=m68k-rom68k os=-coff ;; morphos) basic_machine=powerpc-unknown os=-morphos ;; msdos) basic_machine=i386-pc os=-msdos ;; ms1-*) basic_machine=`echo $basic_machine | sed -e 's/ms1-/mt-/'` ;; mvs) basic_machine=i370-ibm os=-mvs ;; ncr3000) basic_machine=i486-ncr os=-sysv4 ;; netbsd386) basic_machine=i386-unknown os=-netbsd ;; netwinder) basic_machine=armv4l-rebel os=-linux ;; news | news700 | news800 | news900) basic_machine=m68k-sony os=-newsos ;; news1000) basic_machine=m68030-sony os=-newsos ;; news-3600 | risc-news) basic_machine=mips-sony os=-newsos ;; necv70) basic_machine=v70-nec os=-sysv ;; next | m*-next ) basic_machine=m68k-next case $os in -nextstep* ) ;; -ns2*) os=-nextstep2 ;; *) os=-nextstep3 ;; esac ;; nh3000) basic_machine=m68k-harris os=-cxux ;; nh[45]000) basic_machine=m88k-harris os=-cxux ;; nindy960) basic_machine=i960-intel os=-nindy ;; mon960) basic_machine=i960-intel os=-mon960 ;; nonstopux) basic_machine=mips-compaq os=-nonstopux ;; np1) basic_machine=np1-gould ;; nsr-tandem) basic_machine=nsr-tandem ;; op50n-* | op60c-*) basic_machine=hppa1.1-oki os=-proelf ;; openrisc | openrisc-*) basic_machine=or32-unknown ;; os400) basic_machine=powerpc-ibm os=-os400 ;; OSE68000 | ose68000) basic_machine=m68000-ericsson os=-ose ;; os68k) basic_machine=m68k-none os=-os68k ;; pa-hitachi) basic_machine=hppa1.1-hitachi os=-hiuxwe2 ;; paragon) basic_machine=i860-intel os=-osf ;; parisc) basic_machine=hppa-unknown os=-linux ;; parisc-*) basic_machine=hppa-`echo $basic_machine | sed 's/^[^-]*-//'` os=-linux ;; pbd) basic_machine=sparc-tti ;; pbb) basic_machine=m68k-tti ;; pc532 | pc532-*) basic_machine=ns32k-pc532 ;; pc98) basic_machine=i386-pc ;; pc98-*) basic_machine=i386-`echo $basic_machine | sed 's/^[^-]*-//'` ;; pentium | p5 | k5 | k6 | nexgen 
| viac3) basic_machine=i586-pc ;; pentiumpro | p6 | 6x86 | athlon | athlon_*) basic_machine=i686-pc ;; pentiumii | pentium2 | pentiumiii | pentium3) basic_machine=i686-pc ;; pentium4) basic_machine=i786-pc ;; pentium-* | p5-* | k5-* | k6-* | nexgen-* | viac3-*) basic_machine=i586-`echo $basic_machine | sed 's/^[^-]*-//'` ;; pentiumpro-* | p6-* | 6x86-* | athlon-*) basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'` ;; pentiumii-* | pentium2-* | pentiumiii-* | pentium3-*) basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'` ;; pentium4-*) basic_machine=i786-`echo $basic_machine | sed 's/^[^-]*-//'` ;; pn) basic_machine=pn-gould ;; power) basic_machine=power-ibm ;; ppc) basic_machine=powerpc-unknown ;; ppc-*) basic_machine=powerpc-`echo $basic_machine | sed 's/^[^-]*-//'` ;; ppcle | powerpclittle | ppc-le | powerpc-little) basic_machine=powerpcle-unknown ;; ppcle-* | powerpclittle-*) basic_machine=powerpcle-`echo $basic_machine | sed 's/^[^-]*-//'` ;; ppc64) basic_machine=powerpc64-unknown ;; ppc64-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'` ;; ppc64le | powerpc64little | ppc64-le | powerpc64-little) basic_machine=powerpc64le-unknown ;; ppc64le-* | powerpc64little-*) basic_machine=powerpc64le-`echo $basic_machine | sed 's/^[^-]*-//'` ;; ps2) basic_machine=i386-ibm ;; pw32) basic_machine=i586-unknown os=-pw32 ;; rdos) basic_machine=i386-pc os=-rdos ;; rom68k) basic_machine=m68k-rom68k os=-coff ;; rm[46]00) basic_machine=mips-siemens ;; rtpc | rtpc-*) basic_machine=romp-ibm ;; s390 | s390-*) basic_machine=s390-ibm ;; s390x | s390x-*) basic_machine=s390x-ibm ;; sa29200) basic_machine=a29k-amd os=-udi ;; sb1) basic_machine=mipsisa64sb1-unknown ;; sb1el) basic_machine=mipsisa64sb1el-unknown ;; sde) basic_machine=mipsisa32-sde os=-elf ;; sei) basic_machine=mips-sei os=-seiux ;; sequent) basic_machine=i386-sequent ;; sh) basic_machine=sh-hitachi os=-hms ;; sh5el) basic_machine=sh5le-unknown ;; sh64) basic_machine=sh64-unknown ;; sparclite-wrs | simso-wrs) basic_machine=sparclite-wrs os=-vxworks ;; sps7) basic_machine=m68k-bull os=-sysv2 ;; spur) basic_machine=spur-unknown ;; st2000) basic_machine=m68k-tandem ;; stratus) basic_machine=i860-stratus os=-sysv4 ;; sun2) basic_machine=m68000-sun ;; sun2os3) basic_machine=m68000-sun os=-sunos3 ;; sun2os4) basic_machine=m68000-sun os=-sunos4 ;; sun3os3) basic_machine=m68k-sun os=-sunos3 ;; sun3os4) basic_machine=m68k-sun os=-sunos4 ;; sun4os3) basic_machine=sparc-sun os=-sunos3 ;; sun4os4) basic_machine=sparc-sun os=-sunos4 ;; sun4sol2) basic_machine=sparc-sun os=-solaris2 ;; sun3 | sun3-*) basic_machine=m68k-sun ;; sun4) basic_machine=sparc-sun ;; sun386 | sun386i | roadrunner) basic_machine=i386-sun ;; sv1) basic_machine=sv1-cray os=-unicos ;; symmetry) basic_machine=i386-sequent os=-dynix ;; t3e) basic_machine=alphaev5-cray os=-unicos ;; t90) basic_machine=t90-cray os=-unicos ;; tic54x | c54x*) basic_machine=tic54x-unknown os=-coff ;; tic55x | c55x*) basic_machine=tic55x-unknown os=-coff ;; tic6x | c6x*) basic_machine=tic6x-unknown os=-coff ;; # This must be matched before tile*. 
tilegx*) basic_machine=tilegx-unknown os=-linux-gnu ;; tile*) basic_machine=tile-unknown os=-linux-gnu ;; tx39) basic_machine=mipstx39-unknown ;; tx39el) basic_machine=mipstx39el-unknown ;; toad1) basic_machine=pdp10-xkl os=-tops20 ;; tower | tower-32) basic_machine=m68k-ncr ;; tpf) basic_machine=s390x-ibm os=-tpf ;; udi29k) basic_machine=a29k-amd os=-udi ;; ultra3) basic_machine=a29k-nyu os=-sym1 ;; v810 | necv810) basic_machine=v810-nec os=-none ;; vaxv) basic_machine=vax-dec os=-sysv ;; vms) basic_machine=vax-dec os=-vms ;; vpp*|vx|vx-*) basic_machine=f301-fujitsu ;; vxworks960) basic_machine=i960-wrs os=-vxworks ;; vxworks68) basic_machine=m68k-wrs os=-vxworks ;; vxworks29k) basic_machine=a29k-wrs os=-vxworks ;; w65*) basic_machine=w65-wdc os=-none ;; w89k-*) basic_machine=hppa1.1-winbond os=-proelf ;; xbox) basic_machine=i686-pc os=-mingw32 ;; xps | xps100) basic_machine=xps100-honeywell ;; ymp) basic_machine=ymp-cray os=-unicos ;; z8k-*-coff) basic_machine=z8k-unknown os=-sim ;; z80-*-coff) basic_machine=z80-unknown os=-sim ;; none) basic_machine=none-none os=-none ;; # Here we handle the default manufacturer of certain CPU types. It is in # some cases the only manufacturer, in others, it is the most popular. w89k) basic_machine=hppa1.1-winbond ;; op50n) basic_machine=hppa1.1-oki ;; op60c) basic_machine=hppa1.1-oki ;; romp) basic_machine=romp-ibm ;; mmix) basic_machine=mmix-knuth ;; rs6000) basic_machine=rs6000-ibm ;; vax) basic_machine=vax-dec ;; pdp10) # there are many clones, so DEC is not a safe bet basic_machine=pdp10-unknown ;; pdp11) basic_machine=pdp11-dec ;; we32k) basic_machine=we32k-att ;; sh[1234] | sh[24]a | sh[24]aeb | sh[34]eb | sh[1234]le | sh[23]ele) basic_machine=sh-unknown ;; sparc | sparcv8 | sparcv9 | sparcv9b | sparcv9v) basic_machine=sparc-sun ;; cydra) basic_machine=cydra-cydrome ;; orion) basic_machine=orion-highlevel ;; orion105) basic_machine=clipper-highlevel ;; mac | mpw | mac-mpw) basic_machine=m68k-apple ;; pmac | pmac-mpw) basic_machine=powerpc-apple ;; *-unknown) # Make sure to match an already-canonicalized machine name. ;; *) echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2 exit 1 ;; esac # Here we canonicalize certain aliases for manufacturers. case $basic_machine in *-digital*) basic_machine=`echo $basic_machine | sed 's/digital.*/dec/'` ;; *-commodore*) basic_machine=`echo $basic_machine | sed 's/commodore.*/cbm/'` ;; *) ;; esac # Decode manufacturer-specific aliases for certain operating systems. if [ x"$os" != x"" ] then case $os in # First match some system type aliases # that might get confused with valid system types. # -solaris* is a basic system type, with this one exception. -auroraux) os=-auroraux ;; -solaris1 | -solaris1.*) os=`echo $os | sed -e 's|solaris1|sunos4|'` ;; -solaris) os=-solaris2 ;; -svr4*) os=-sysv4 ;; -unixware*) os=-sysv4.2uw ;; -gnu/linux*) os=`echo $os | sed -e 's|gnu/linux|linux-gnu|'` ;; # First accept the basic system types. # The portable systems comes first. # Each alternative MUST END IN A *, to match a version number. # -sysv* is not here because it comes later, after sysvr4. 
-gnu* | -bsd* | -mach* | -minix* | -genix* | -ultrix* | -irix* \ | -*vms* | -sco* | -esix* | -isc* | -aix* | -cnk* | -sunos | -sunos[34]*\ | -hpux* | -unos* | -osf* | -luna* | -dgux* | -auroraux* | -solaris* \ | -sym* | -kopensolaris* \ | -amigaos* | -amigados* | -msdos* | -newsos* | -unicos* | -aof* \ | -aos* | -aros* \ | -nindy* | -vxsim* | -vxworks* | -ebmon* | -hms* | -mvs* \ | -clix* | -riscos* | -uniplus* | -iris* | -rtu* | -xenix* \ | -hiux* | -386bsd* | -knetbsd* | -mirbsd* | -netbsd* \ | -openbsd* | -solidbsd* \ | -ekkobsd* | -kfreebsd* | -freebsd* | -riscix* | -lynxos* \ | -bosx* | -nextstep* | -cxux* | -aout* | -elf* | -oabi* \ | -ptx* | -coff* | -ecoff* | -winnt* | -domain* | -vsta* \ | -udi* | -eabi* | -lites* | -ieee* | -go32* | -aux* \ | -chorusos* | -chorusrdb* | -cegcc* \ | -cygwin* | -pe* | -psos* | -moss* | -proelf* | -rtems* \ | -mingw32* | -linux-gnu* | -linux-newlib* | -linux-uclibc* \ | -uxpv* | -beos* | -mpeix* | -udk* \ | -interix* | -uwin* | -mks* | -rhapsody* | -darwin* | -opened* \ | -openstep* | -oskit* | -conix* | -pw32* | -nonstopux* \ | -storm-chaos* | -tops10* | -tenex* | -tops20* | -its* \ | -os2* | -vos* | -palmos* | -uclinux* | -nucleus* \ | -morphos* | -superux* | -rtmk* | -rtmk-nova* | -windiss* \ | -powermax* | -dnix* | -nx6 | -nx7 | -sei* | -dragonfly* \ | -skyos* | -haiku* | -rdos* | -toppers* | -drops* | -es*) # Remember, each alternative MUST END IN *, to match a version number. ;; -qnx*) case $basic_machine in x86-* | i*86-*) ;; *) os=-nto$os ;; esac ;; -nto-qnx*) ;; -nto*) os=`echo $os | sed -e 's|nto|nto-qnx|'` ;; -sim | -es1800* | -hms* | -xray | -os68k* | -none* | -v88r* \ | -windows* | -osx | -abug | -netware* | -os9* | -beos* | -haiku* \ | -macos* | -mpw* | -magic* | -mmixware* | -mon960* | -lnews*) ;; -mac*) os=`echo $os | sed -e 's|mac|macos|'` ;; -linux-dietlibc) os=-linux-dietlibc ;; -linux*) os=`echo $os | sed -e 's|linux|linux-gnu|'` ;; -sunos5*) os=`echo $os | sed -e 's|sunos5|solaris2|'` ;; -sunos6*) os=`echo $os | sed -e 's|sunos6|solaris3|'` ;; -opened*) os=-openedition ;; -os400*) os=-os400 ;; -wince*) os=-wince ;; -osfrose*) os=-osfrose ;; -osf*) os=-osf ;; -utek*) os=-bsd ;; -dynix*) os=-bsd ;; -acis*) os=-aos ;; -atheos*) os=-atheos ;; -syllable*) os=-syllable ;; -386bsd) os=-bsd ;; -ctix* | -uts*) os=-sysv ;; -nova*) os=-rtmk-nova ;; -ns2 ) os=-nextstep2 ;; -nsk*) os=-nsk ;; # Preserve the version number of sinix5. -sinix5.*) os=`echo $os | sed -e 's|sinix|sysv|'` ;; -sinix*) os=-sysv4 ;; -tpf*) os=-tpf ;; -triton*) os=-sysv3 ;; -oss*) os=-sysv3 ;; -svr4) os=-sysv4 ;; -svr3) os=-sysv3 ;; -sysvr4) os=-sysv4 ;; # This must come after -sysvr4. -sysv*) ;; -ose*) os=-ose ;; -es1800*) os=-ose ;; -xenix) os=-xenix ;; -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*) os=-mint ;; -aros*) os=-aros ;; -kaos*) os=-kaos ;; -zvmoe) os=-zvmoe ;; -dicos*) os=-dicos ;; -nacl*) ;; -none) ;; *) # Get rid of the `-' at the beginning of $os. os=`echo $os | sed 's/[^-]*-//'` echo Invalid configuration \`$1\': system \`$os\' not recognized 1>&2 exit 1 ;; esac else # Here we handle the default operating systems that come with various machines. # The value should be what the vendor currently ships out the door with their # machine or put another way, the most popular os provided with the machine. # Note that if you're going to try to match "-MANUFACTURER" here (say, # "-sun"), then you have to tell the case statement up towards the top # that MANUFACTURER isn't an operating system. 
Otherwise, code above # will signal an error saying that MANUFACTURER isn't an operating # system, and we'll never get to this point. case $basic_machine in score-*) os=-elf ;; spu-*) os=-elf ;; *-acorn) os=-riscix1.2 ;; arm*-rebel) os=-linux ;; arm*-semi) os=-aout ;; c4x-* | tic4x-*) os=-coff ;; # This must come before the *-dec entry. pdp10-*) os=-tops20 ;; pdp11-*) os=-none ;; *-dec | vax-*) os=-ultrix4.2 ;; m68*-apollo) os=-domain ;; i386-sun) os=-sunos4.0.2 ;; m68000-sun) os=-sunos3 # This also exists in the configure program, but was not the # default. # os=-sunos4 ;; m68*-cisco) os=-aout ;; mep-*) os=-elf ;; mips*-cisco) os=-elf ;; mips*-*) os=-elf ;; or32-*) os=-coff ;; *-tti) # must be before sparc entry or we get the wrong os. os=-sysv3 ;; sparc-* | *-sun) os=-sunos4.1.1 ;; *-be) os=-beos ;; *-haiku) os=-haiku ;; *-ibm) os=-aix ;; *-knuth) os=-mmixware ;; *-wec) os=-proelf ;; *-winbond) os=-proelf ;; *-oki) os=-proelf ;; *-hp) os=-hpux ;; *-hitachi) os=-hiux ;; i860-* | *-att | *-ncr | *-altos | *-motorola | *-convergent) os=-sysv ;; *-cbm) os=-amigaos ;; *-dg) os=-dgux ;; *-dolphin) os=-sysv3 ;; m68k-ccur) os=-rtu ;; m88k-omron*) os=-luna ;; *-next ) os=-nextstep ;; *-sequent) os=-ptx ;; *-crds) os=-unos ;; *-ns) os=-genix ;; i370-*) os=-mvs ;; *-next) os=-nextstep3 ;; *-gould) os=-sysv ;; *-highlevel) os=-bsd ;; *-encore) os=-bsd ;; *-sgi) os=-irix ;; *-siemens) os=-sysv4 ;; *-masscomp) os=-rtu ;; f30[01]-fujitsu | f700-fujitsu) os=-uxpv ;; *-rom68k) os=-coff ;; *-*bug) os=-coff ;; *-apple) os=-macos ;; *-atari*) os=-mint ;; *) os=-none ;; esac fi # Here we handle the case where we know the os, and the CPU type, but not the # manufacturer. We pick the logical manufacturer. vendor=unknown case $basic_machine in *-unknown) case $os in -riscix*) vendor=acorn ;; -sunos*) vendor=sun ;; -cnk*|-aix*) vendor=ibm ;; -beos*) vendor=be ;; -hpux*) vendor=hp ;; -mpeix*) vendor=hp ;; -hiux*) vendor=hitachi ;; -unos*) vendor=crds ;; -dgux*) vendor=dg ;; -luna*) vendor=omron ;; -genix*) vendor=ns ;; -mvs* | -opened*) vendor=ibm ;; -os400*) vendor=ibm ;; -ptx*) vendor=sequent ;; -tpf*) vendor=ibm ;; -vxsim* | -vxworks* | -windiss*) vendor=wrs ;; -aux*) vendor=apple ;; -hms*) vendor=hitachi ;; -mpw* | -macos*) vendor=apple ;; -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*) vendor=atari ;; -vos*) vendor=stratus ;; esac basic_machine=`echo $basic_machine | sed "s/unknown/$vendor/"` ;; esac echo $basic_machine$os exit # Local variables: # eval: (add-hook 'write-file-hooks 'time-stamp) # time-stamp-start: "timestamp='" # time-stamp-format: "%:y-%02m-%02d" # time-stamp-end: "'" # End: dar-2.4.8/Makefile.in0000644000175000017520000005550412023053152011265 00000000000000# Makefile.in generated by automake 1.11.1 from Makefile.am. # @configure_input@ # Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, # 2003, 2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation, # Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. 
@SET_MAKE@ VPATH = @srcdir@ pkgdatadir = $(datadir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ pkglibdir = $(libdir)/@PACKAGE@ pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = . DIST_COMMON = README $(am__configure_deps) $(dist_noinst_DATA) \ $(srcdir)/Makefile.am $(srcdir)/Makefile.in \ $(srcdir)/config.h.in $(top_srcdir)/configure ABOUT-NLS \ AUTHORS COPYING ChangeLog INSTALL NEWS THANKS TODO \ config.guess config.rpath config.sub depcomp install-sh \ ltmain.sh missing ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/iconv.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/nls.m4 $(top_srcdir)/m4/po.m4 \ $(top_srcdir)/m4/progtest.m4 $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) am__CONFIG_DISTCLEAN_FILES = config.status config.cache config.log \ configure.lineno config.status.lineno mkinstalldirs = $(install_sh) -d CONFIG_HEADER = config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = SOURCES = DIST_SOURCES = RECURSIVE_TARGETS = all-recursive check-recursive dvi-recursive \ html-recursive info-recursive install-data-recursive \ install-dvi-recursive install-exec-recursive \ install-html-recursive install-info-recursive \ install-pdf-recursive install-ps-recursive install-recursive \ installcheck-recursive installdirs-recursive pdf-recursive \ ps-recursive uninstall-recursive DATA = $(dist_noinst_DATA) RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ distclean-recursive maintainer-clean-recursive AM_RECURSIVE_TARGETS = $(RECURSIVE_TARGETS:-recursive=) \ $(RECURSIVE_CLEAN_TARGETS:-recursive=) tags TAGS ctags CTAGS \ distdir dist dist-all distcheck ETAGS = etags CTAGS = ctags DIST_SUBDIRS = $(SUBDIRS) DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) distdir = $(PACKAGE)-$(VERSION) top_distdir = $(distdir) am__remove_distdir = \ { test ! -d "$(distdir)" \ || { find "$(distdir)" -type d ! -perm -200 -exec chmod u+w {} ';' \ && rm -fr "$(distdir)"; }; } am__relativize = \ dir0=`pwd`; \ sed_first='s,^\([^/]*\)/.*$$,\1,'; \ sed_rest='s,^[^/]*/*,,'; \ sed_last='s,^.*/\([^/]*\)$$,\1,'; \ sed_butlast='s,/*[^/]*$$,,'; \ while test -n "$$dir1"; do \ first=`echo "$$dir1" | sed -e "$$sed_first"`; \ if test "$$first" != "."; then \ if test "$$first" = ".."; then \ dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ else \ first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ if test "$$first2" = "$$first"; then \ dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ else \ dir2="../$$dir2"; \ fi; \ dir0="$$dir0"/"$$first"; \ fi; \ fi; \ dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ done; \ reldir="$$dir2" DIST_ARCHIVES = $(distdir).tar.gz GZIP_ENV = --best distuninstallcheck_listfiles = find . -type f -print distcleancheck_listfiles = find . 
-type f -print ACLOCAL = @ACLOCAL@ AMTAR = @AMTAR@ AR = @AR@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DOXYGEN_PROG = @DOXYGEN_PROG@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GREP = @GREP@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LD = @LD@ LDFLAGS = @LDFLAGS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ NM = @NM@ NMEDIT = @NMEDIT@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ POSUB = @POSUB@ RANLIB = @RANLIB@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ STRIP = @STRIP@ UPX_PROG = @UPX_PROG@ USE_NLS = @USE_NLS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ bindir = @bindir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ datadir = @datadir@ datarootdir = @datarootdir@ docdir = @docdir@ doxygen = @doxygen@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ groff = @groff@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ localedir = @localedir@ localstatedir = @localstatedir@ lt_ECHO = @lt_ECHO@ mandir = @mandir@ mkdir_p = @mkdir_p@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ prefix = @prefix@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp = @tmp@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ upx = @upx@ SUBDIRS = man src doc misc po dist_noinst_DATA = INSTALL README THANKS TODO AUTHORS COPYING ChangeLog NEWS ABOUT-NLS ACLOCAL_AMFLAGS = -I m4 EXTRA_DIST = config.rpath m4/ChangeLog all: config.h $(MAKE) $(AM_MAKEFLAGS) all-recursive .SUFFIXES: am--refresh: @: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for 
dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ echo ' cd $(srcdir) && $(AUTOMAKE) --gnu'; \ $(am__cd) $(srcdir) && $(AUTOMAKE) --gnu \ && exit 0; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --gnu Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --gnu Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ echo ' $(SHELL) ./config.status'; \ $(SHELL) ./config.status;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) $(SHELL) ./config.status --recheck $(top_srcdir)/configure: $(am__configure_deps) $(am__cd) $(srcdir) && $(AUTOCONF) $(ACLOCAL_M4): $(am__aclocal_m4_deps) $(am__cd) $(srcdir) && $(ACLOCAL) $(ACLOCAL_AMFLAGS) $(am__aclocal_m4_deps): config.h: stamp-h1 @if test ! -f $@; then \ rm -f stamp-h1; \ $(MAKE) $(AM_MAKEFLAGS) stamp-h1; \ else :; fi stamp-h1: $(srcdir)/config.h.in $(top_builddir)/config.status @rm -f stamp-h1 cd $(top_builddir) && $(SHELL) ./config.status config.h $(srcdir)/config.h.in: $(am__configure_deps) ($(am__cd) $(top_srcdir) && $(AUTOHEADER)) rm -f stamp-h1 touch $@ distclean-hdr: -rm -f config.h stamp-h1 mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs distclean-libtool: -rm -f libtool config.lt # This directory's subdirectories are mostly independent; you can cd # into them and run `make' without going through this Makefile. # To change the values of `make' variables: instead of editing Makefiles, # (1) if the variable is set in `config.status', edit `config.status' # (which will cause the Makefiles to be regenerated when you run `make'); # (2) otherwise, pass the desired values on the `make' command line. $(RECURSIVE_TARGETS): @fail= failcom='exit 1'; \ for f in x $$MAKEFLAGS; do \ case $$f in \ *=* | --[!k]*);; \ *k*) failcom='fail=yes';; \ esac; \ done; \ dot_seen=no; \ target=`echo $@ | sed s/-recursive//`; \ list='$(SUBDIRS)'; for subdir in $$list; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ dot_seen=yes; \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done; \ if test "$$dot_seen" = "no"; then \ $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ fi; test -z "$$fail" $(RECURSIVE_CLEAN_TARGETS): @fail= failcom='exit 1'; \ for f in x $$MAKEFLAGS; do \ case $$f in \ *=* | --[!k]*);; \ *k*) failcom='fail=yes';; \ esac; \ done; \ dot_seen=no; \ case "$@" in \ distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ *) list='$(SUBDIRS)' ;; \ esac; \ rev=''; for subdir in $$list; do \ if test "$$subdir" = "."; then :; else \ rev="$$subdir $$rev"; \ fi; \ done; \ rev="$$rev ."; \ target=`echo $@ | sed s/-recursive//`; \ for subdir in $$rev; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done && test -z "$$fail" tags-recursive: list='$(SUBDIRS)'; for subdir in $$list; do \ test "$$subdir" = . || ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) tags); \ done ctags-recursive: list='$(SUBDIRS)'; for subdir in $$list; do \ test "$$subdir" = . 
|| ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) ctags); \ done ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES) list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ mkid -fID $$unique tags: TAGS TAGS: tags-recursive $(HEADERS) $(SOURCES) config.h.in $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) set x; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ include_option=--etags-include; \ empty_fix=.; \ else \ include_option=--include; \ empty_fix=; \ fi; \ list='$(SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test ! -f $$subdir/TAGS || \ set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ list='$(SOURCES) $(HEADERS) config.h.in $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: CTAGS CTAGS: ctags-recursive $(HEADERS) $(SOURCES) config.h.in $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) list='$(SOURCES) $(HEADERS) config.h.in $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) $(am__remove_distdir) test -d "$(distdir)" || mkdir "$(distdir)" @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test -d "$(distdir)/$$subdir" \ || $(MKDIR_P) "$(distdir)/$$subdir" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ $(am__relativize); \ new_distdir=$$reldir; \ dir1=$$subdir; dir2="$(top_distdir)"; \ $(am__relativize); \ new_top_distdir=$$reldir; \ echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ ($(am__cd) $$subdir && \ $(MAKE) $(AM_MAKEFLAGS) \ top_distdir="$$new_top_distdir" \ distdir="$$new_distdir" \ am__remove_distdir=: \ am__skip_length_check=: \ am__skip_mode_fix=: \ distdir) \ || exit 1; \ fi; \ done -test -n "$(am__skip_mode_fix)" \ || find "$(distdir)" -type d ! -perm -755 \ -exec chmod u+rwx,go+rx {} \; -o \ ! -type d ! -perm -444 -links 1 -exec chmod a+r {} \; -o \ ! -type d ! -perm -400 -exec chmod a+r {} \; -o \ ! -type d ! -perm -444 -exec $(install_sh) -c -m a+r {} {} \; \ || chmod -R a+r "$(distdir)" dist-gzip: distdir tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz $(am__remove_distdir) dist-bzip2: distdir tardir=$(distdir) && $(am__tar) | bzip2 -9 -c >$(distdir).tar.bz2 $(am__remove_distdir) dist-lzma: distdir tardir=$(distdir) && $(am__tar) | lzma -9 -c >$(distdir).tar.lzma $(am__remove_distdir) dist-xz: distdir tardir=$(distdir) && $(am__tar) | xz -c >$(distdir).tar.xz $(am__remove_distdir) dist-tarZ: distdir tardir=$(distdir) && $(am__tar) | compress -c >$(distdir).tar.Z $(am__remove_distdir) dist-shar: distdir shar $(distdir) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).shar.gz $(am__remove_distdir) dist-zip: distdir -rm -f $(distdir).zip zip -rq $(distdir).zip $(distdir) $(am__remove_distdir) dist dist-all: distdir tardir=$(distdir) && $(am__tar) | GZIP=$(GZIP_ENV) gzip -c >$(distdir).tar.gz $(am__remove_distdir) # This target untars the dist file and tries a VPATH configuration. Then # it guarantees that the distribution is self-contained by making another # tarfile. distcheck: dist case '$(DIST_ARCHIVES)' in \ *.tar.gz*) \ GZIP=$(GZIP_ENV) gzip -dc $(distdir).tar.gz | $(am__untar) ;;\ *.tar.bz2*) \ bzip2 -dc $(distdir).tar.bz2 | $(am__untar) ;;\ *.tar.lzma*) \ lzma -dc $(distdir).tar.lzma | $(am__untar) ;;\ *.tar.xz*) \ xz -dc $(distdir).tar.xz | $(am__untar) ;;\ *.tar.Z*) \ uncompress -c $(distdir).tar.Z | $(am__untar) ;;\ *.shar.gz*) \ GZIP=$(GZIP_ENV) gzip -dc $(distdir).shar.gz | unshar ;;\ *.zip*) \ unzip $(distdir).zip ;;\ esac chmod -R a-w $(distdir); chmod a+w $(distdir) mkdir $(distdir)/_build mkdir $(distdir)/_inst chmod a-w $(distdir) test -d $(distdir)/_build || exit 0; \ dc_install_base=`$(am__cd) $(distdir)/_inst && pwd | sed -e 's,^[^:\\/]:[\\/],/,'` \ && dc_destdir="$${TMPDIR-/tmp}/am-dc-$$$$/" \ && am__cwd=`pwd` \ && $(am__cd) $(distdir)/_build \ && ../configure --srcdir=.. 
--prefix="$$dc_install_base" \ $(DISTCHECK_CONFIGURE_FLAGS) \ && $(MAKE) $(AM_MAKEFLAGS) \ && $(MAKE) $(AM_MAKEFLAGS) dvi \ && $(MAKE) $(AM_MAKEFLAGS) check \ && $(MAKE) $(AM_MAKEFLAGS) install \ && $(MAKE) $(AM_MAKEFLAGS) installcheck \ && $(MAKE) $(AM_MAKEFLAGS) uninstall \ && $(MAKE) $(AM_MAKEFLAGS) distuninstallcheck_dir="$$dc_install_base" \ distuninstallcheck \ && chmod -R a-w "$$dc_install_base" \ && ({ \ (cd ../.. && umask 077 && mkdir "$$dc_destdir") \ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" install \ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" uninstall \ && $(MAKE) $(AM_MAKEFLAGS) DESTDIR="$$dc_destdir" \ distuninstallcheck_dir="$$dc_destdir" distuninstallcheck; \ } || { rm -rf "$$dc_destdir"; exit 1; }) \ && rm -rf "$$dc_destdir" \ && $(MAKE) $(AM_MAKEFLAGS) dist \ && rm -rf $(DIST_ARCHIVES) \ && $(MAKE) $(AM_MAKEFLAGS) distcleancheck \ && cd "$$am__cwd" \ || exit 1 $(am__remove_distdir) @(echo "$(distdir) archives ready for distribution: "; \ list='$(DIST_ARCHIVES)'; for i in $$list; do echo $$i; done) | \ sed -e 1h -e 1s/./=/g -e 1p -e 1x -e '$$p' -e '$$x' distuninstallcheck: @$(am__cd) '$(distuninstallcheck_dir)' \ && test `$(distuninstallcheck_listfiles) | wc -l` -le 1 \ || { echo "ERROR: files left after uninstall:" ; \ if test -n "$(DESTDIR)"; then \ echo " (check DESTDIR support)"; \ fi ; \ $(distuninstallcheck_listfiles) ; \ exit 1; } >&2 distcleancheck: distclean @if test '$(srcdir)' = . ; then \ echo "ERROR: distcleancheck can only run from a VPATH build" ; \ exit 1 ; \ fi @test `$(distcleancheck_listfiles) | wc -l` -eq 0 \ || { echo "ERROR: files left in build directory after distclean:" ; \ $(distcleancheck_listfiles) ; \ exit 1; } >&2 check-am: all-am check: check-recursive all-am: Makefile $(DATA) config.h installdirs: installdirs-recursive installdirs-am: install: install-recursive install-exec: install-exec-recursive install-data: install-data-recursive uninstall: uninstall-recursive install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-recursive install-strip: $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ `test -z '$(STRIP)' || \ echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-recursive clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-recursive -rm -f $(am__CONFIG_DISTCLEAN_FILES) -rm -f Makefile distclean-am: clean-am distclean-generic distclean-hdr \ distclean-libtool distclean-tags dvi: dvi-recursive dvi-am: html: html-recursive html-am: info: info-recursive info-am: install-data-am: install-dvi: install-dvi-recursive install-dvi-am: install-exec-am: install-html: install-html-recursive install-html-am: install-info: install-info-recursive install-info-am: install-man: install-pdf: install-pdf-recursive install-pdf-am: install-ps: install-ps-recursive install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-recursive -rm -f $(am__CONFIG_DISTCLEAN_FILES) -rm -rf $(top_srcdir)/autom4te.cache -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-recursive mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-recursive pdf-am: ps: ps-recursive ps-am: uninstall-am: .MAKE: $(RECURSIVE_CLEAN_TARGETS) $(RECURSIVE_TARGETS) all \ ctags-recursive install-am install-strip tags-recursive .PHONY: $(RECURSIVE_CLEAN_TARGETS) $(RECURSIVE_TARGETS) CTAGS GTAGS \ all all-am am--refresh check check-am clean clean-generic \ clean-libtool ctags ctags-recursive dist dist-all dist-bzip2 \ dist-gzip dist-lzma dist-shar dist-tarZ dist-xz dist-zip \ distcheck distclean distclean-generic distclean-hdr \ distclean-libtool distclean-tags distcleancheck distdir \ distuninstallcheck dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am install-man \ install-pdf install-pdf-am install-ps install-ps-am \ install-strip installcheck installcheck-am installdirs \ installdirs-am maintainer-clean maintainer-clean-generic \ mostlyclean mostlyclean-generic mostlyclean-libtool pdf pdf-am \ ps ps-am tags tags-recursive uninstall uninstall-am # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: dar-2.4.8/configure.ac0000444000175000017520000010217412023053123011476 00000000000000# Process this file with autoconf to produce a configure script. AC_PREREQ(2.59) AC_INIT([DAR], [2.4.8], [[http://sourceforge.net/tracker/?group_id=65612&atid=511612]]) AC_CONFIG_HEADERS([config.h]) AC_LANG([C++]) AC_CONFIG_SRCDIR([src/libdar/catalogue.cpp]) AC_DEFINE_UNQUOTED(DAR_VERSION, "AC_PACKAGE_VERSION", [dar and dar_suite version, definition in configure.ac]) AM_INIT_AUTOMAKE AM_GNU_GETTEXT([external]) AM_GNU_GETTEXT_VERSION XGETTEXT_EXTRA_OPTIONS='--keyword=dar_gettext' AM_ICONV # Checks for programs. AC_PROG_CXX AC_PROG_CC AC_PROG_LIBTOOL AC_PROG_MAKE_SET AC_PROG_RANLIB AC_MSG_CHECKING([for C++ compiler usability]) AC_COMPILE_IFELSE(AC_LANG_PROGRAM([], [ class test { public: int test; }; ]), [AC_MSG_RESULT(ok)], [AC_MSG_ERROR([No C++ compiler found])]) # Checks for libraries. AC_CHECK_LIB(socket, [socket], [], []) AC_CHECK_LIB(nsl, [endnetconfig], [], []) AC_ARG_ENABLE( [libdl-linking], AC_HELP_STRING(--disable-libdl-linking, [ignore any libdl and avoid linking against it]), [], [ AC_CHECK_LIB(dl, [dlsym], [], []) ]) # Checks for header files. 
AC_HEADER_DIRENT AC_HEADER_STDC AC_HEADER_SYS_WAIT AC_CHECK_HEADERS([fcntl.h netinet/in.h arpa/inet.h stdint.h stdlib.h string.h sys/ioctl.h sys/socket.h termios.h unistd.h utime.h sys/types.h signal.h errno.h sys/un.h sys/stat.h time.h fnmatch.h regex.h pwd.h grp.h stdio.h pthread.h ctype.h getopt.h limits.h stddef.h sys/utsname.h]) AC_SYS_LARGEFILE # Checks for typedefs, structures, and compiler characteristics. AC_C_CONST AC_C_INLINE AC_TYPE_OFF_T AC_TYPE_PID_T AC_TYPE_SIZE_T AC_CHECK_MEMBERS([struct stat.st_rdev]) AC_DECL_SYS_SIGLIST AC_CHECK_TYPE(size_t, [AC_CHECK_SIZEOF(size_t)], [AC_MSG_ERROR([Cannot find size_t type])], []) AC_CHECK_TYPE(time_t, [AC_CHECK_SIZEOF(time_t)], [AC_MSG_ERROR([Cannot find time_t type])], []) AC_CHECK_TYPE(off_t, [AC_CHECK_SIZEOF(off_t)], [AC_MSG_ERROR([Cannot find off_t type])], []) # Checks for library functions. AC_FUNC_FNMATCH AC_FUNC_FORK AC_PROG_GCC_TRADITIONAL AC_FUNC_LSTAT AC_HEADER_MAJOR AC_FUNC_MALLOC AC_TYPE_SIGNAL AC_FUNC_STAT AC_FUNC_UTIME_NULL AC_HEADER_TIME AC_CHECK_FUNCS([lchown mkdir regcomp rmdir strerror utime fdopendir]) AC_MSG_CHECKING([for getopt() in <unistd.h>]) AC_COMPILE_IFELSE(AC_LANG_PROGRAM([[extern "C" { #if HAVE_UNISTD_H #include <unistd.h> #endif }]], [ getopt(0, 0, 0); ]), [ AC_DEFINE(HAVE_GETOPT_IN_UNISTD_H, 1, [a getopt() call is declared in <unistd.h>]) AC_MSG_RESULT(present) ], [AC_MSG_RESULT(absent)]) AC_MSG_CHECKING([for getopt_long() in <unistd.h>]) AC_COMPILE_IFELSE(AC_LANG_PROGRAM([[extern "C" { #if HAVE_UNISTD_H #include <unistd.h> #endif }]], [ getopt_long(0, 0, 0, 0, 0); ]), [ AC_DEFINE(HAVE_GETOPT_LONG_IN_UNISTD_H, 1, [a getopt_long() call is declared in <unistd.h>]) AC_MSG_RESULT(present) ], [AC_MSG_RESULT(absent)]) AC_MSG_CHECKING([for optreset presence]) AC_COMPILE_IFELSE(AC_LANG_PROGRAM([[extern "C" { #if HAVE_STDIO_H #include <stdio.h> #endif #if HAVE_GETOPT_H #include <getopt.h> #else #if HAVE_UNISTD_H #include <unistd.h> #endif #endif }]], [ int x = optreset; return 0; ]), [ AC_DEFINE(HAVE_OPTRESET, 1, [the optreset external variable exists to reset getopt standard call]) AC_MSG_RESULT(available) ], [AC_MSG_RESULT([not available])]) AC_MSG_CHECKING([for Door file support]) AC_COMPILE_IFELSE(AC_LANG_PROGRAM([[extern "C" { #if HAVE_SYS_STAT_H #include <sys/stat.h> #endif #if HAVE_UNISTD_H #include <unistd.h> #endif }]], [ struct stat buf; if(S_ISDOOR(buf.st_mode)) return 0; else return 1; ]), [ AC_DEFINE(HAVE_DOOR, 1, [whether the system has the necessary routine to handle Door files]) AC_MSG_RESULT(available) ], [AC_MSG_RESULT([not available])]) # DAR's features AC_ARG_ENABLE( [libz-linking], AC_HELP_STRING(--disable-libz-linking, [disable linking with libz and disable libz compression support]), [ AC_MSG_WARN([libz compression support has been disabled by user]) local_libz="no" ], [ AC_CHECK_LIB(z, [deflate], [], [AC_MSG_WARN([library zlib not found])]) AC_CHECK_HEADER(zlib.h, [local_libz="yes" AC_DEFINE(HAVE_ZLIB_H, 1, [zlib.h header file is available]) ], [AC_MSG_WARN([Cannot find zlib.h header file]) local_libz="no" ]) if test "$local_libz" = "yes" ; then AC_LINK_IFELSE(AC_LANG_PROGRAM([[ extern "C" { #if HAVE_ZLIB_H #include <zlib.h> #endif }]], [[ z_stream *ptr = (z_stream *)0; deflate(ptr, 0); ]]), [ AC_DEFINE(LIBZ_AVAILABLE, 1, [header and linking is available to have libz functions])], [ local_libz="no" ]) else AC_MSG_WARN([libz compression support not available]) fi ] ) AC_ARG_ENABLE( [libbz2-linking], AC_HELP_STRING(--disable-libbz2-linking, [disable linking with libbz2 and disables libbz2 compression support]), [ AC_MSG_WARN([libbz2 compression support has been disabled by user]) local_libbz2="no" ], [
AC_CHECK_LIB(bz2, [BZ2_bzCompress], [], [AC_MSG_WARN([library libbz2 not found])]) AC_CHECK_HEADER(bzlib.h, [local_libbz2="yes" AC_DEFINE(HAVE_BZLIB_H, 1, [bzlib.h header file is available]) ], [AC_MSG_WARN([Cannot find bzlib.h header file]) local_libbz2="no" ]) if test "$local_libbz2" = "yes" ; then AC_LINK_IFELSE(AC_LANG_PROGRAM([[ extern "C" { #if HAVE_BZLIB_H #include <bzlib.h> #endif }]], [[ bz_stream *ptr = (bz_stream *)0; BZ2_bzCompress(ptr, 0); ]]), [ AC_DEFINE(LIBBZ2_AVAILABLE, 1, [header and linking is available to have libbz2 functions])], [ local_libbz2="no" ]) else AC_MSG_WARN([libbz2 compression support not available]) fi ]) AC_ARG_ENABLE( [liblzo2-linking], AC_HELP_STRING(--disable-liblzo2-linking, [disable linking with liblzo2 and disables lzo compression support]), [ AC_MSG_WARN([lzo compression support has been disabled by user]) local_liblzo2="no" ], [ AC_CHECK_LIB(lzo2, [lzo1x_1_compress], [], [AC_MSG_WARN([library liblzo2 not found])]) AC_CHECK_HEADER(lzo/lzo1x.h, [local_liblzo2="yes" AC_DEFINE(HAVE_LZO_LZO1X_H, 1, [lzo/lzo1x.h header file is available]) ], [AC_MSG_WARN([Cannot find lzo/lzo1x.h header file]) local_liblzo2="no" ]) if test "$local_liblzo2" = "yes" ; then AC_LINK_IFELSE(AC_LANG_PROGRAM([[ extern "C" { #if HAVE_LZO_LZO1X_H #include <lzo/lzo1x.h> #endif }]], [[ int x = lzo1x_1_compress(0, 0, 0, 0, 0); ]]), [ AC_DEFINE(LIBLZO2_AVAILABLE, 1, [header and linking is available to have lzo functions])], [ local_liblzo2="no" ]) else AC_MSG_WARN([lzo compression support not available]) fi ]) AC_ARG_ENABLE( [libgcrypt-linking], AC_HELP_STRING(--disable-libgcrypt-linking, [disable linking with libgcrypt which disables strong encryption support]), [ AC_MSG_WARN([strong encryption support has been disabled by user]) local_crypto="no" ], [ AC_CHECK_LIB(gpg-error, [gpg_err_init], [], []) AC_CHECK_LIB(gcrypt, [gcry_check_version], [], []) AC_CHECK_HEADER(gcrypt.h, [local_crypto="yes" AC_DEFINE(HAVE_GCRYPT_H, 1, [gcrypt.h header file is available]) ], [AC_MSG_WARN([Cannot find gcrypt.h header file]) local_crypto="no" ]) if test "$local_crypto" = "yes" ; then AC_DEFINE(MIN_VERSION_GCRYPT, "1.4.0", [libgcrypt minimum version]) AC_MSG_CHECKING([for libgcrypt usability]) AC_RUN_IFELSE(AC_LANG_PROGRAM([[ extern "C" { #if HAVE_GCRYPT_H #include <gcrypt.h> #endif } #include <iostream> using namespace std; ]], [[ if(!gcry_check_version(MIN_VERSION_GCRYPT)) { cout << "libgcrypt version too low, minimum version is " << MIN_VERSION_GCRYPT << endl; exit(1); } else exit(0); ]]), [ AC_DEFINE(CRYPTO_AVAILABLE, 1, [header and linking is available to have strong encryption work]) AC_MSG_RESULT([ok]) ], [ if test "$?" = "1" ; then AC_MSG_RESULT([failed: need libgcrypt >= MIN_VERSION_GCRYPT, disabling strong encryption support]) else AC_MSG_RESULT([failed: libgcrypt is unusable, cannot even call gcry_check_version().
Disabling strong encryption support]) fi local_crypto="no" ]) else AC_MSG_WARN([strong encryption support not available]) fi ]) AC_ARG_ENABLE( [ea-support], AC_HELP_STRING(--disable-ea-support,[disable Extended Attributes support]), [ AC_MSG_CHECKING([for Extended Attribute support]) AC_MSG_RESULT([disabled]) ], [ AC_CHECK_HEADERS([attr/xattr.h]) AC_CHECK_LIB(attr, [lgetxattr], [], []) AC_MSG_CHECKING([for Unix Extended Attribute support]) AC_LINK_IFELSE(AC_LANG_PROGRAM([[extern "C" { #if HAVE_SYS_TYPES_H #include <sys/types.h> #endif #if HAVE_ATTR_XATTR_H #include <attr/xattr.h> #endif }]], [ lgetxattr((char *)0, (char *)0, (void *)0, 0); ]), [ AC_DEFINE(EA_SUPPORT, [], [if defined, activates support for Extended Attributes]) local_ea_support="yes" AC_MSG_RESULT([yes]) ], [ AC_MSG_RESULT([no]) AC_CHECK_HEADERS([sys/xattr.h]) AC_CHECK_LIB(c, [fgetxattr]) AC_MSG_CHECKING([for Mac OS X Extended Attribute support]) AC_LINK_IFELSE(AC_LANG_PROGRAM([[extern "C" { #if HAVE_SYS_XATTR_H #include <sys/xattr.h> #endif }]], [ getxattr((char *)0, (char *)0, (void *)0, 0, 0, XATTR_NOFOLLOW); ]), [ AC_DEFINE(EA_SUPPORT, [], [if defined, activates support for Extended Attributes]) AC_DEFINE(OSX_EA_SUPPORT, [], [if defined, activates support for Mac OS X Extended Attributes]) local_ea_support="yes" AC_MSG_RESULT([yes]) ], [ AC_MSG_RESULT([no]) ], ) ] ) ] ) AC_MSG_CHECKING([ext2fs.h availability]) AC_ARG_ENABLE( [nodump-flag], AC_HELP_STRING(--disable-nodump-flag, [deactivate the "ext2/3 nodump flag" feature check]), [AC_MSG_RESULT([nodump flag disabled])], [AC_LINK_IFELSE(AC_LANG_PROGRAM([[extern "C" { #include <ext2fs/ext2_fs.h> #if HAVE_SYS_IOCTL_H #include <sys/ioctl.h> #endif }]],[[int fd, f; ioctl(fd, EXT2_IOC_GETFLAGS, &f);]]), [ AC_DEFINE(LIBDAR_NODUMP_FEATURE, [NODUMP_EXT2FS], [if defined, activates the ext2/3 nodump flag feature]) local_nodump_feature="yes" AC_MSG_RESULT([found <ext2fs/ext2_fs.h>]) ], [ AC_LINK_IFELSE(AC_LANG_PROGRAM([[extern "C" { #include <linux/ext2_fs.h> #if HAVE_SYS_IOCTL_H #include <sys/ioctl.h> #endif }]],[[int fd, f; ioctl(fd, EXT2_IOC_GETFLAGS, &f);]]), [ AC_DEFINE(LIBDAR_NODUMP_FEATURE, [NODUMP_LINUX], [if defined, activates the ext2/3 nodump flag feature]) local_nodump_feature="yes" AC_MSG_RESULT([found <linux/ext2_fs.h>]) ], [ AC_MSG_RESULT([NOT FOUND]) AC_MSG_WARN([cannot find ext2_fs.h header file, nodump-flag feature will not be activated]) ]) ]) ]) AC_ARG_ENABLE( [gnugetopt], AC_HELP_STRING(--disable-gnugetopt, [avoid linking with libgnugetopt]), [], AC_CHECK_LIB(gnugetopt, [getopt_long], [], []) ) AC_MSG_CHECKING([for getopt() availability]); AC_LINK_IFELSE(AC_LANG_PROGRAM([[extern "C" { #if HAVE_STDIO_H #include <stdio.h> #endif #if HAVE_GETOPT_H #include <getopt.h> #else #if HAVE_UNISTD_H #include <unistd.h> #endif #endif }]], [ getopt(0, 0, 0); ]), [ AC_MSG_RESULT([ok]) ], [AC_MSG_ERROR([absent but required])]) AC_MSG_CHECKING([for getopt_long() availability]); AC_LINK_IFELSE(AC_LANG_PROGRAM([[extern "C" { #if HAVE_STDIO_H #include <stdio.h> #endif #if HAVE_GETOPT_H #include <getopt.h> #else #if HAVE_UNISTD_H #include <unistd.h> #endif #endif }]], [ getopt_long(0, 0, 0, 0, 0); ]), [ local_have_getopt_long="yes" AC_DEFINE(HAVE_GETOPT_LONG, 1, [whether getopt_long() is available]) AC_MSG_RESULT([available]) ], [AC_MSG_RESULT([NOT AVAILABLE])]) AC_ARG_ENABLE( [examples], AC_HELP_STRING(--enable-examples, [build example and testing programs]), [examples="yes"], [examples="false"]) AC_ARG_ENABLE( [os-bits], AC_HELP_STRING(--enable-os-bits=arg, [arg is 32 or 64.
If for some reason, one wants to override the detected system value]), [AC_DEFINE_UNQUOTED(OS_BITS, $enableval, [manually set CPU's registers' size])], [ AC_CHECK_HEADER(inttypes.h, [ AC_COMPILE_IFELSE( [AC_LANG_PROGRAM([extern "C" { #include <inttypes.h> }], [ uint16_t a = 0; uint32_t b = 0; uint64_t c = 0; int16_t d = 0; int32_t e = 0; int64_t f = 0; return a+b+c+d+e+f; ]) ], [], [AC_MSG_ERROR([Cannot find *int*_t type declarations in header file, --enable-os-bits=... option must be used])] ) ], [AC_MSG_ERROR([Cannot find inttypes.h header file, --enable-os-bits=... option must be used])] ) ] ) AC_ARG_ENABLE( [mode], AC_HELP_STRING(--enable-mode=arg, [where arg is 32 or 64. Makes dar internally use 32 bits or 64 bits integers in place of infinint (which is the case if this option is not given)]), [build_mode=$enableval], []) AC_ARG_ENABLE( [furtive-read], AC_HELP_STRING(--disable-furtive-read, [Ignore furtive read mode availability on systems that support it]), [ local_furtive_read_mode="no" AC_MSG_WARN([Furtive read mode disabled]) ], [ AC_MSG_CHECKING([furtive read mode availability]) AC_LINK_IFELSE(AC_LANG_PROGRAM([[ extern "C" { #if HAVE_SYS_TYPE_H #include <sys/types.h> #endif #if HAVE_SYS_STAT_H #include <sys/stat.h> #endif #if HAVE_FCNTL_H #include <fcntl.h> #endif #if HAVE_DIRENT_H #include <dirent.h> #endif } ]], [[ int x = O_NOATIME; int fd = open("/",O_RDONLY|O_NOATIME); #if HAVE_FDOPENDIR (void)fdopendir(fd); #else syntactically incorrect statement here to force compilation to fail! #endif ]]), [ AC_DEFINE(FURTIVE_READ_MODE_AVAILABLE, 1, [furtive read mode is available]) AC_MSG_RESULT(available) local_furtive_read_mode="yes" ], [AC_MSG_RESULT(not available) local_furtive_read_mode="no" ]) ] ) AC_ARG_ENABLE( [debug], AC_HELP_STRING(--enable-debug, [build targets with debugging option and no optimization]), [ CXXFLAGS="-ggdb -Wall" CFLAGS="-ggdb -Wall" debug_static="yes" AC_DEFINE(LIBDAR_NO_OPTIMIZATION, 1, [if defined, informs the code that no optimization has been used for compilation]) ], [ debug_static="no" ]) AC_ARG_ENABLE( [pedantic], AC_HELP_STRING(--enable-pedantic, [enable pedantic syntactical check at compilation, use only for debugging purposes!]), [ CXXFLAGS="$CXXFLAGS -pedantic -Wno-long-long" ], []) AC_ARG_ENABLE( [build-usage], AC_HELP_STRING(--enable-build-usage, [re-build C++ generated usage files with Chris Martin's dar-help program, non-developers should not activate that option]), [ export CPPFLAGS="$CPPFLAGS -I/usr/include/libxml2" AC_CHECK_HEADER([libxml/tree.h], [AC_DEFINE(HAVE_LIBXML_TREE_H, [1], [defined if libxml/tree.h is present])], [AC_MSG_ERROR([Cannot find libxml/tree.h header file])]) AC_CHECK_HEADER([libxml/parser.h], [AC_DEFINE(HAVE_LIBXML_PARSER_H, [1], [defined if libxml/parser.h is present])], [AC_MSG_ERROR([Cannot find libxml/parser.h header file])]) AC_CHECK_LIB(xml2, [reference], [], [AC_MSG_ERROR([library xml2 not found])]) build_usage="yes" ], [build_usage="no"]) AC_ARG_ENABLE( [build-html], AC_HELP_STRING(--disable-build-html, [don't build programming documentation (in particular libdar API documentation) and html man page]), [ doxygen="no" groff="no" ], [ AC_CHECK_PROG(doxygen, doxygen, [yes], [no], [$PATH]) AC_MSG_CHECKING([for doxygen version]) if test "$doxygen" = "yes" ; then n1=`doxygen --version | cut -d '.' -f 1` n2=`doxygen --version | cut -d '.'
-f 2` if test $n1 -gt 1 -o $n2 -ge 3 ; then AC_MSG_RESULT([ >= 1.3]) else AC_MSG_RESULT([ too old (< 1.3) ignoring doxygen]) doxygen="no" fi fi AC_CHECK_PROG(tmp, man, [yes], [no], [$PATH]) if test "$tmp" = "yes" ; then AC_CHECK_PROG(groff, groff, [yes], [no], [$PATH]) else groff="no"; fi ] ) AC_ARG_ENABLE( [special-alloc], AC_HELP_STRING(--disable-special-alloc, [special memory allocation scheme makes dar use less memory and run a bit faster, in particular in conjunction with --enable-mode=32 or 64, you can disable it if you like]), [], [AC_DEFINE(LIBDAR_SPECIAL_ALLOC, [], [if set activates specific new and delete operator which do better allocation for a lot of small items]) local_special_alloc="yes" ]) AC_ARG_ENABLE( [upx], AC_HELP_STRING(--disable-upx, [by default configure looks for UPX and if available makes executables compressed at installation time, you can disable this feature]), [ AC_MSG_NOTICE([ignoring UPX]) upx="no" ], [ AC_CHECK_PROG(upx, upx, [yes], [no], [$PATH]) ] ) AC_ARG_ENABLE( [fast-dir], AC_HELP_STRING(--disable-fast-dir, [disable optimization for large directories, doing so has a little positive impact on memory requirement but a huge drawback on execution time]), [], [AC_DEFINE(LIBDAR_FAST_DIR, 1, [activation of speed optimization for large directories]) local_fast_dir="yes" ] ) AC_ARG_ENABLE( [thread-safe], AC_HELP_STRING(--disable-thread-safe, [libdar is thread safe if POSIX mutexes are available, you can manually disable the use of POSIX mutexes, the resulting libdar library will not be thread-safe anymore unless special-alloc is disabled too]), [ AC_MSG_NOTICE([thread-safe support disabled]) ], [ AC_CHECK_LIB(pthread, [pthread_mutex_init], [], []) AC_MSG_CHECKING([for POSIX mutex]) AC_LINK_IFELSE(AC_LANG_PROGRAM([[extern "C" { #if HAVE_PTHREAD_H #include <pthread.h> #endif }]], [[ pthread_mutex_t mutex; pthread_mutex_init(&mutex, (const pthread_mutexattr_t*)0); pthread_mutex_lock(&mutex); pthread_mutex_unlock(&mutex);]]), [ AC_DEFINE(MUTEX_WORKS, 1, [POSIX mutex (pthread_mutex_t) is available]) local_mutex_works="yes" AC_MSG_RESULT(yes) ], [ AC_MSG_RESULT(no)]) ] ) AC_ARG_ENABLE( [execinfo], AC_HELP_STRING(--disable-execinfo, [disable reporting stack information on self diagnostic bugs even if execinfo is available]), [AC_MSG_WARN([ignoring execinfo even if available])], [ AC_CHECK_LIB(execinfo, backtrace, [], []) AC_CHECK_HEADERS([execinfo.h]) ] ) AC_ARG_ENABLE( [profiling], AC_HELP_STRING(--enable-profiling, [enable executable profiling]), [ profiling="yes" ]) AC_ARG_ENABLE( [debug-memory], AC_HELP_STRING(--enable-debug-memory, [log memory allocations and releases to /tmp/dar_debug_mem_allocation.txt, this debugging option leads to a slow executable]), [AC_DEFINE(LIBDAR_DEBUG_MEMORY, 1, [if defined, builds a very slow executable])]) AC_ARG_ENABLE( [dar-static], AC_HELP_STRING(--disable-dar-static, [avoids building dar_static, a dar statically linked version]), [build_static="no"], [build_static="yes"]) MEM_LDFLAGS=$LDFLAGS LDFLAGS="-static" AC_LINK_IFELSE(AC_LANG_PROGRAM([[ extern "C" { #include <stdio.h> } ]], [ printf("Hello World!"); ]), [static_pb="no"], [static_pb="yes"]) LDFLAGS=$MEM_LDFLAGS AM_CONDITIONAL([MAKE_ALL_DIR], [test $examples = "yes"]) AM_CONDITIONAL([MAKE_USAGE], [test $build_usage = "yes"]) AM_CONDITIONAL([BUILD_DAR_STATIC], [test $build_static = "yes" -a $static_pb = "no"]) AM_CONDITIONAL([DEBUG_STATIC], [test $debug_static = "yes" -a $static_pb = "no"]) AM_CONDITIONAL([BUILD_MODE32], [test "$build_mode" = "32"]) AM_CONDITIONAL([BUILD_MODE64], [test "$build_mode" = "64"])
AM_CONDITIONAL([USE_UPX], [test "$upx" = "yes"]) AM_CONDITIONAL([USE_DOXYGEN], [test "$doxygen" = "yes"]) AM_CONDITIONAL([USE_GROFF], [test "$groff" = "yes"]) AM_CONDITIONAL([PROFILING], [test "$profiling" = "yes"]) AC_SUBST(UPX_PROG, [upx]) AC_SUBST(DOXYGEN_PROG, [doxygen]) # defaults AC_PREFIX_DEFAULT(/usr/local) # hack from litool mailing-list to know from source point of view whether we are compiling for dynamic or static way AC_CONFIG_COMMANDS([hack-libtool], [ sed 's,^pic_flag=,pic_flag=" -D__DYNAMIC__ ",' libtool > libtoolT \ && mv -f libtoolT libtool && chmod 755 libtool ]) AC_CONFIG_FILES([Makefile man/Makefile src/Makefile src/libdar/Makefile src/dar_suite/Makefile src/testing/Makefile src/examples/Makefile doc/Makefile doc/samples/Makefile misc/Makefile doc/mini-howto/Makefile src/libdar/libdar.pc.tmpl doc/man/Makefile src/check/Makefile po/Makefile.in]) AC_OUTPUT [echo "" echo "--" echo "dar and libdar have been successfully configured with the following parameters:" echo "" echo " LIBDAR parameters:" echo -n " Zlib compression (gzip) : " if [ "$local_libz" = "yes" ] ; then echo "YES" else echo "NO" fi echo -n " Libbz2 compression (bzip2) : " if [ "$local_libbz2" = "yes" ] ; then echo "YES" else echo "NO" fi echo -n " Liblzo2 compression (lzo) : " if [ "$local_liblzo2" = "yes" ] ; then echo "YES" else echo "NO" fi echo -n " Strong encryption support : " if [ "$local_crypto" = "yes" ] ; then echo "YES" else echo "NO" fi echo -n " Extended Attributes support: " if [ "$local_ea_support" = "yes" ] ; then echo "YES" else echo "NO" fi echo -n " Large files support (> 2GB): " if [ ! -z "$ac_cv_sys_file_offset_bits" -o ! -z "$ac_cv_sys_large_files" ] ; then echo "YES" else echo "NO" fi echo -n " ext2fs NODUMP flag support : " if [ "$local_nodump_feature" = "yes" ] ; then echo "YES" else echo "NO" fi echo -n " Special allocation scheme : " if [ "$local_special_alloc" = "yes" ] ; then echo "YES" else echo "NO" fi echo -n " Integer size used : " if [ -z "$build_mode" -o \( ! "$build_mode" = "32" -a ! "$build_mode" = "64" \) ] ; then echo "infinint" else echo "$build_mode" fi echo -n " Thread safe support : " if [ \( "$local_mutex_works" = "yes" -o ! "$local_special_alloc" = "yes" \) -a -z "$local_test_memory" ] ; then echo "YES" else echo "NO" fi echo -n " Furtive read mode : " if [ "$local_furtive_read_mode" = "yes" ]; then echo "YES" else echo "NO" fi echo -n " Large directory optim. : " if [ "$local_fast_dir" = "yes" ] ; then echo "YES" else echo "NO" fi echo "" echo " DAR SUITE command line programs:" echo -n " Long options available : " if [ "$local_have_getopt_long" = "yes" ] ; then echo "YES" else echo "NO" fi echo -n " Building examples : " if [ "$examples" = "yes" ] ; then echo "YES" else echo "NO" fi echo -n " Building dar_static : " if [ "$build_static" = "yes" ]; then if [ "$static_pb" = "yes" ]; then echo "NO (system does not support static linking, see note below)" else echo "YES" fi else echo "NO" fi echo -n " using upx at install : " if [ "$upx" = "yes" ] ; then echo "YES" else echo "NO" fi echo -n " building documentation : " if [ "$doxygen" = "yes" ] ; then echo "YES" else echo "NO" fi if [ "$static_pb" = "yes" -a "$build_static" = "yes" ]; then echo "" echo " Note:" echo "" echo " If you want to know which libraries are not available as static" echo " libraries check the logs in the config.log generated file. the command" echo "" echo " 'grep -e -static -A 2 config.log'" echo "" echo " should bring you to the essentials." 
echo "" echo "You also might want to speed up the compilation process running ./configure" echo "with the --disable-static option" echo "" fi echo "" ] dar-2.4.8/misc/0000755000175000017520000000000012023053205010221 500000000000000dar-2.4.8/misc/dar64_ea.rpm.proto0000644000175000017430000000767112003275604013432 00000000000000# GENERATE_COMMENT summary: DAR - Disk ARchive Name: dar Version: DAR_VERSION Release: 1 Copyright: GPL Icon: dar.gif Group: Applications/Archiving Source: http://dar.linux.free.fr/dar-DAR_VERSION.tar.gz URL: http://dar.linux.free.fr/ BuildRoot: %{_tmppath}/%{name}64_ea-%{version}-%{release}-root BuildRequires: zlib-devel >= 1.1.3, gcc-c++, bzip2-devel >= 1.0.2 %description DAR is a command line tool to backup a directory tree and files. DAR is able to make differential backups, split them over a set of disks or files of a given size, use compression, filter files or subtrees to be saved or not saved, directly access and restore given files. DAR is also able to handle extented attributes, and can make remote backups through an ssh session for example. Finally, DAR handles save and restore of hard and symbolic links. %prep %setup %clean make clean rm -rf %{buildroot} %build ./configure CXXFLAGS=-O --enable-mode=64 --enable-ea-support --prefix=/usr --mandir=/usr/share/man make %install rm -rf %{buildroot} mkdir -p %{buildroot}/usr make DESTDIR=%{buildroot} install-strip %post %files %defattr(-,root,root,-) /usr/share/man/man1/dar.1 /usr/share/man/man1/dar_manager.1 /usr/share/man/man1/dar_slave.1 /usr/share/man/man1/dar_xform.1 /usr/share/man/man1/dar_cp.1 /usr/lib/libdar64.so.LIBDAR_MAJOR.0.LIBDAR_MINOR /usr/lib/libdar64.so.LIBDAR_MAJOR /usr/lib/libdar64.so /usr/lib/libdar64.la /usr/lib/libdar64.a /usr/include/dar/config.h /usr/include/dar/libdar.hpp /usr/include/dar/path.hpp /usr/include/dar/mask.hpp /usr/include/dar/integers.hpp /usr/include/dar/real_infinint.hpp /usr/include/dar/statistics.hpp /usr/include/dar/user_interaction.hpp /usr/include/dar/erreurs.hpp /usr/include/dar/deci.hpp /usr/include/dar/limitint.hpp /usr/include/dar/infinint.hpp /usr/include/dar/compressor.hpp /usr/include/dar/special_alloc.hpp /usr/include/dar/generic_file.hpp /usr/include/dar/wrapperlib.hpp /usr/include/dar/storage.hpp /usr/include/dar/tuyau.hpp /usr/include/dar/tools.hpp /usr/include/dar/catalogue.hpp /usr/include/dar/scrambler.hpp /usr/include/dar/archive.hpp /usr/include/dar/header_version.hpp /usr/include/dar/ea.hpp /usr/include/dar/crypto.hpp /usr/include/dar/int_tools.hpp /usr/include/dar/thread_cancellation.hpp /usr/include/dar/tronconneuse.hpp /usr/bin/dar /usr/bin/dar_xform /usr/bin/dar_slave /usr/bin/dar_manager /usr/bin/dar_cp /usr/bin/dar_static /usr/share/dar/dar_par.dcf /usr/share/dar/dar_par_create.duc /usr/share/dar/dar_par_test.duc /usr/share/dar/dar-differential-backup-mini-howto.en.html /usr/share/dar/dar-differential-backup-mini-howto.it.html /usr/share/dar/dar-differential-backup-mini-howto.es.html /usr/share/dar/FEATURES /usr/share/dar/LIMITATIONS /usr/share/dar/NOTES /usr/share/dar/TUTORIAL /usr/share/dar/GOOD_BACKUP_PRACTICE /usr/share/dar/README /usr/share/dar/LINKS /usr/share/dar/FAQ /usr/share/dar/api_tutorial.html /usr/local/share/locale/fr/LC_MESSAGES/dar.mo %changelog * Wed Dec 29 2004 Denis Corbin - updated list of file installed by the package * Sat Nov 22 2003 Denis Corbin - removed the %doc in spec file, as documentation is now installed by "make" * Tue Oct 21 2003 Denis Corbin - added mini-howto and LINKS documentation * Thu Oct 9 2003 Denis 
Corbin - added sample scripts and dar_cp * Mon Sep 15 2003 Denis Corbin - added dependency libbz2 - see CHANGES file for more * Thu Jan 9 2003 Denis Corbin - removed the OS_BITS flag, which is no more necessary - added dar_static in %files * Thu Nov 7 2002 Axel Kohlmeyer - modified the spec file to comply with standard redhat rpms - allow building of rpm as non-root user - add build dependency on zlib and c++ - handle x86/alpha arch from specfile. * Thu Jun 27 2002 Denis Corbin - see file named "CHANGES" dar-2.4.8/misc/dar32.rpm.proto0000644000175000017430000000764412003275604012760 00000000000000# GENERATE_COMMENT summary: DAR - Disk ARchive Name: dar Version: DAR_VERSION Release: 1 Copyright: GPL Icon: dar.gif Group: Applications/Archiving Source: http://dar.linux.free.fr/dar-DAR_VERSION.tar.gz URL: http://dar.linux.free.fr/ BuildRoot: %{_tmppath}/%{name}32-%{version}-%{release}-root BuildRequires: zlib-devel >= 1.1.3, gcc-c++, bzip2-devel >= 1.0.2 %description DAR is a command line tool to backup a directory tree and files. DAR is able to make differential backups, split them over a set of disks or files of a given size, use compression, filter files or subtrees to be saved or not saved, directly access and restore given files. DAR is also able to handle extented attributes, and can make remote backups through an ssh session for example. Finally, DAR handles save and restore of hard and symbolic links. %prep %setup %clean make clean rm -rf %{buildroot} %build ./configure CXXFLAGS=-O --enable-mode=32 --prefix=/usr --mandir=/usr/share/man make %install rm -rf %{buildroot} mkdir -p %{buildroot}/usr make DESTDIR=%{buildroot} install-strip %post %files %defattr(-,root,root,-) /usr/share/man/man1/dar.1 /usr/share/man/man1/dar_manager.1 /usr/share/man/man1/dar_slave.1 /usr/share/man/man1/dar_xform.1 /usr/share/man/man1/dar_cp.1 /usr/lib/libdar32.so.LIBDAR_MAJOR.0.LIBDAR_MINOR /usr/lib/libdar32.so.LIBDAR_MAJOR /usr/lib/libdar32.so /usr/lib/libdar32.la /usr/lib/libdar32.a /usr/include/dar/config.h /usr/include/dar/libdar.hpp /usr/include/dar/path.hpp /usr/include/dar/mask.hpp /usr/include/dar/integers.hpp /usr/include/dar/real_infinint.hpp /usr/include/dar/statistics.hpp /usr/include/dar/user_interaction.hpp /usr/include/dar/erreurs.hpp /usr/include/dar/deci.hpp /usr/include/dar/limitint.hpp /usr/include/dar/infinint.hpp /usr/include/dar/compressor.hpp /usr/include/dar/special_alloc.hpp /usr/include/dar/generic_file.hpp /usr/include/dar/wrapperlib.hpp /usr/include/dar/storage.hpp /usr/include/dar/tuyau.hpp /usr/include/dar/tools.hpp /usr/include/dar/catalogue.hpp /usr/include/dar/scrambler.hpp /usr/include/dar/archive.hpp /usr/include/dar/header_version.hpp /usr/include/dar/ea.hpp /usr/include/dar/crypto.hpp /usr/include/dar/int_tools.hpp /usr/include/dar/thread_cancellation.hpp /usr/include/dar/tronconneuse.hpp /usr/bin/dar /usr/bin/dar_xform /usr/bin/dar_slave /usr/bin/dar_manager /usr/bin/dar_cp /usr/bin/dar_static /usr/share/dar/dar_par.dcf /usr/share/dar/dar_par_create.duc /usr/share/dar/dar_par_test.duc /usr/share/dar/dar-differential-backup-mini-howto.en.html /usr/share/dar/dar-differential-backup-mini-howto.it.html /usr/share/dar/dar-differential-backup-mini-howto.es.html /usr/share/dar/FEATURES /usr/share/dar/LIMITATIONS /usr/share/dar/NOTES /usr/share/dar/TUTORIAL /usr/share/dar/GOOD_BACKUP_PRACTICE /usr/share/dar/README /usr/share/dar/LINKS /usr/share/dar/FAQ /usr/share/dar/api_tutorial.html /usr/local/share/locale/fr/LC_MESSAGES/dar.mo %changelog * Wed Dec 29 2004 Denis Corbin - 
updated list of file installed by the package * Sat Nov 22 2003 Denis Corbin - removed the %doc in spec file, as documentation is now installed by "make" * Tue Oct 21 2003 Denis Corbin - added mini-howto and LINKS documentation * Thu Oct 9 2003 Denis Corbin - added sample scripts and dar_cp * Mon Sep 15 2003 Denis Corbin - adapted spec file to dar version 2 (configure script) * Wed May 14 2003 Denis Corbin - added dependency libbz2 - see CHANGES file for more * Thu Jan 9 2003 Denis Corbin - removed the OS_BITS flag, which is no more necessary - added dar_static in %files * Thu Nov 7 2002 Axel Kohlmeyer - modified the spec file to comply with standard redhat rpms - allow building of rpm as non-root user - add build dependency on zlib and c++ - handle x86/alpha arch from specfile. * Thu Jun 27 2002 Denis Corbin - see file named "CHANGES" dar-2.4.8/misc/dar.rpm.proto0000644000175000017430000000760612003275604012611 00000000000000# GENERATE_COMMENT summary: DAR - Disk ARchive Name: dar Version: DAR_VERSION Release: 1 Copyright: GPL Icon: dar.gif Group: Applications/Archiving Source: http://dar.linux.free.fr/dar-DAR_VERSION.tar.gz URL: http://dar.linux.free.fr/ BuildRoot: %{_tmppath}/%{name}-%{version}-%{release}-root BuildRequires: zlib-devel >= 1.1.3, gcc-c++, bzip2-devel >= 1.0.2 %description DAR is a command line tool to backup a directory tree and files. DAR is able to make differential backups, split them over a set of disks or files of a given size, use compression, filter files or subtrees to be saved or not saved, directly access and restore given files. DAR is also able to handle extented attributes, and can make remote backups through an ssh session for example. Finally, DAR handles save and restore of hard and symbolic links. %prep %setup %clean make clean rm -rf %{buildroot} %build ./configure CXXFLAGS=-O --prefix=/usr --mandir=/usr/share/man make %install rm -rf %{buildroot} mkdir -p %{buildroot}/usr make DESTDIR=%{buildroot} install-strip %post %files %defattr(-,root,root,-) /usr/share/man/man1/dar.1 /usr/share/man/man1/dar_manager.1 /usr/share/man/man1/dar_slave.1 /usr/share/man/man1/dar_xform.1 /usr/share/man/man1/dar_cp.1 /usr/lib/libdar.so.LIBDAR_MAJOR.0.LIBDAR_MINOR /usr/lib/libdar.so.LIBDAR_MAJOR /usr/lib/libdar.so /usr/lib/libdar.la /usr/lib/libdar.a /usr/include/dar/config.h /usr/include/dar/libdar.hpp /usr/include/dar/path.hpp /usr/include/dar/mask.hpp /usr/include/dar/integers.hpp /usr/include/dar/real_infinint.hpp /usr/include/dar/statistics.hpp /usr/include/dar/user_interaction.hpp /usr/include/dar/erreurs.hpp /usr/include/dar/deci.hpp /usr/include/dar/limitint.hpp /usr/include/dar/infinint.hpp /usr/include/dar/compressor.hpp /usr/include/dar/special_alloc.hpp /usr/include/dar/generic_file.hpp /usr/include/dar/wrapperlib.hpp /usr/include/dar/storage.hpp /usr/include/dar/tuyau.hpp /usr/include/dar/tools.hpp /usr/include/dar/catalogue.hpp /usr/include/dar/scrambler.hpp /usr/include/dar/archive.hpp /usr/include/dar/header_version.hpp /usr/include/dar/ea.hpp /usr/include/dar/crypto.hpp /usr/include/dar/int_tools.hpp /usr/include/dar/thread_cancellation.hpp /usr/include/dar/tronconneuse.hpp /usr/bin/dar /usr/bin/dar_xform /usr/bin/dar_slave /usr/bin/dar_manager /usr/bin/dar_cp /usr/bin/dar_static /usr/share/dar/dar_par.dcf /usr/share/dar/dar_par_create.duc /usr/share/dar/dar_par_test.duc /usr/share/dar/dar-differential-backup-mini-howto.en.html /usr/share/dar/dar-differential-backup-mini-howto.it.html /usr/share/dar/dar-differential-backup-mini-howto.es.html 
/usr/share/dar/FEATURES /usr/share/dar/LIMITATIONS /usr/share/dar/NOTES /usr/share/dar/TUTORIAL /usr/share/dar/GOOD_BACKUP_PRACTICE /usr/share/dar/README /usr/share/dar/LINKS /usr/share/dar/FAQ /usr/share/dar/api_tutorial.html /usr/local/share/locale/fr/LC_MESSAGES/dar.mo %changelog * Wed Dec 29 2004 Denis Corbin - updated list of file installed by the package * Sat Nov 22 2003 Denis Corbin - removed the %doc in spec file, as documentation is now installed by "make" * Tue Oct 21 2003 Denis Corbin - added mini-howto and LINKS documentation * Thu Oct 9 2003 Denis Corbin - added sample scripts and dar_cp * Mon Sep 15 2003 Denis Corbin - adapted spec file to dar version 2 (configure script) * Wed May 14 2003 Denis Corbin - added dependency libbz2 - see CHANGES file for more * Thu Jan 9 2003 Denis Corbin - removed the OS_BITS flag, which is no more necessary - added dar_static in %files * Thu Nov 7 2002 Axel Kohlmeyer - modified the spec file to comply with standard redhat rpms - allow building of rpm as non-root user - add build dependency on zlib and c++ - handle x86/alpha arch from specfile. * Thu Jun 27 2002 Denis Corbin - see file named "CHANGES" dar-2.4.8/misc/Makefile.in0000644000175000017520000003724312023053152012220 00000000000000# Makefile.in generated by automake 1.11.1 from Makefile.am. # @configure_input@ # Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, # 2003, 2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation, # Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. 
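# Illustrative note (not part of the automake output): this file is kept up to
# date automatically by the rules further down, which rebuild it whenever
# Makefile.am or the aclocal/configure inputs change.  Regenerating it by hand
# would amount to roughly:
#
#   cd dar-2.4.8 && automake --gnu misc/Makefile     # Makefile.am -> Makefile.in
#   cd <builddir> && ./config.status misc/Makefile   # Makefile.in -> Makefile
#
# where <builddir> stands for whatever directory ./configure was run from; the
# commands echoed by the rules below are the authoritative ones.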
@SET_MAKE@ VPATH = @srcdir@ pkgdatadir = $(datadir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ pkglibdir = $(libdir)/@PACKAGE@ pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ noinst_PROGRAMS = todos$(EXEEXT) subdir = misc DIST_COMMON = README $(dist_noinst_DATA) $(srcdir)/Makefile.am \ $(srcdir)/Makefile.in ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/iconv.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/nls.m4 $(top_srcdir)/m4/po.m4 \ $(top_srcdir)/m4/progtest.m4 $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = PROGRAMS = $(noinst_PROGRAMS) am_todos_OBJECTS = todos.$(OBJEXT) todos_OBJECTS = $(am_todos_OBJECTS) todos_LDADD = $(LDADD) DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir) depcomp = $(SHELL) $(top_srcdir)/depcomp am__depfiles_maybe = depfiles am__mv = mv -f COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) LTCOMPILE = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \ --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) CCLD = $(CC) LINK = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \ --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) \ $(LDFLAGS) -o $@ SOURCES = $(todos_SOURCES) DIST_SOURCES = $(todos_SOURCES) DATA = $(dist_noinst_DATA) ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) ACLOCAL = @ACLOCAL@ AMTAR = @AMTAR@ AR = @AR@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DOXYGEN_PROG = @DOXYGEN_PROG@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GREP = @GREP@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LD = @LD@ LDFLAGS = @LDFLAGS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ NM = @NM@ NMEDIT = @NMEDIT@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ 
PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ POSUB = @POSUB@ RANLIB = @RANLIB@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ STRIP = @STRIP@ UPX_PROG = @UPX_PROG@ USE_NLS = @USE_NLS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ bindir = @bindir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ datadir = @datadir@ datarootdir = @datarootdir@ docdir = @docdir@ doxygen = @doxygen@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ groff = @groff@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ localedir = @localedir@ localstatedir = @localstatedir@ lt_ECHO = @lt_ECHO@ mandir = @mandir@ mkdir_p = @mkdir_p@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ prefix = @prefix@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp = @tmp@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ upx = @upx@ LIBDAR_MAJOR = `grep LIBDAR_COMPILE_TIME_MAJOR ../src/libdar/libdar.hpp | cut -d '=' -f 2 | cut -d ';' -f 1 | cut -d ' ' -f 2` LIBDAR_MINOR = `grep LIBDAR_COMPILE_TIME_MINOR ../src/libdar/libdar.hpp | cut -d '=' -f 2 | cut -d ';' -f 1 | cut -d ' ' -f 2` dist_noinst_DATA = dar.gif make_cygwin_dist make_rpm batch_cygwin batch_linux batch_linux_ea README batch_solaris dar.rpm.proto dar32.rpm.proto dar32_ea.rpm.proto dar64.rpm.proto dar64_ea.rpm.proto dar_ea.rpm.proto # rm -f dar.rpm.spec dar32.rpm.spec dar32_ea.rpm.spec dar64.rpm.spec dar64_ea.rpm.spec dar_ea.rpm.spec todos_SOURCES = todos.c all: all-am .SUFFIXES: .SUFFIXES: .c .lo .o .obj $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --gnu misc/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --gnu misc/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): clean-noinstPROGRAMS: @list='$(noinst_PROGRAMS)'; test -n "$$list" || exit 0; \ echo " rm -f" $$list; \ rm -f $$list || exit $$?; \ test -n "$(EXEEXT)" || exit 0; \ list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \ echo " rm -f" $$list; \ rm -f $$list todos$(EXEEXT): $(todos_OBJECTS) $(todos_DEPENDENCIES) @rm -f todos$(EXEEXT) $(LINK) $(todos_OBJECTS) $(todos_LDADD) $(LIBS) mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/todos.Po@am__quote@ .c.o: @am__fastdepCC_TRUE@ $(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(COMPILE) -c $< .c.obj: @am__fastdepCC_TRUE@ $(COMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'` @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(COMPILE) -c `$(CYGPATH_W) '$<'` .c.lo: @am__fastdepCC_TRUE@ $(LTCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCC_TRUE@ $(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo @AMDEP_TRUE@@am__fastdepCC_FALSE@ source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(LTCOMPILE) -c -o $@ $< mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES) list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ mkid -fID $$unique tags: TAGS TAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) set x; \ here=`pwd`; \ list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: CTAGS CTAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do 
\ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(PROGRAMS) $(DATA) installdirs: install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ `test -z '$(STRIP)' || \ echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
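# Illustrative sketch (the exact header layout is an assumption, not verified
# against src/libdar/libdar.hpp): the LIBDAR_MAJOR and LIBDAR_MINOR pipelines
# defined near the top of this file expect a declaration of the form
#
#     const U_I LIBDAR_COMPILE_TIME_MAJOR = 5;
#
# grep selects the matching line, the first cut keeps the text after '='
# (" 5;"), the second cut drops the trailing ';' and the third keeps the bare
# number, which the %.spec rule below substitutes (together with DAR_VERSION)
# into the library file names of the generated RPM spec files.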
clean: clean-am clean-am: clean-generic clean-libtool clean-noinstPROGRAMS \ mostlyclean-am distclean: distclean-am -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: .MAKE: install-am install-strip .PHONY: CTAGS GTAGS all all-am check check-am clean clean-generic \ clean-libtool clean-noinstPROGRAMS ctags distclean \ distclean-compile distclean-generic distclean-libtool \ distclean-tags distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am install-man \ install-pdf install-pdf-am install-ps install-ps-am \ install-strip installcheck installcheck-am installdirs \ maintainer-clean maintainer-clean-generic mostlyclean \ mostlyclean-compile mostlyclean-generic mostlyclean-libtool \ pdf pdf-am ps ps-am tags uninstall uninstall-am %.spec : %.proto ../configure.ac sed -e "s%DAR_VERSION%$(PACKAGE_VERSION)%g" -e "s%GENERATE_COMMENT%THIS IS A GENERATED FILE DO NOT EDIT !%g" -e "s%LIBDAR_MAJOR%$(LIBDAR_MAJOR)%g" -e "s%LIBDAR_MINOR%$(LIBDAR_MINOR)%g" $< > $@ dar.rpm.spec : dar.rpm.proto dar.rpm.spec : dar.rpm.proto dar32.rpm.spec : dar32.rpm.proto dar32_ea.rpm.spec : dar32_ea.rpm.proto dar64.rpm.spec : dar64.rpm.proto dar64_ea.rpm.spec : dar64_ea.rpm.proto dar_ea.rpm.spec : dar_ea.rpm.proto # specs$(EXEEXT) : dar.rpm.spec dar32.rpm.spec dar32_ea.rpm.spec dar64.rpm.spec dar64_ea.rpm.spec dar_ea.rpm.spec # @echo "RPM spec files generated" clean_specs : # specs_SOURCES=dar.rpm.spec dar32.rpm.spec dar32_ea.rpm.spec dar64.rpm.spec dar64_ea.rpm.spec dar_ea.rpm.spec # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: dar-2.4.8/misc/dar64.rpm.proto0000644000175000017430000000764312003275604012764 00000000000000# GENERATE_COMMENT summary: DAR - Disk ARchive Name: dar Version: DAR_VERSION Release: 1 Copyright: GPL Icon: dar.gif Group: Applications/Archiving Source: http://dar.linux.free.fr/dar-DAR_VERSION.tar.gz URL: http://dar.linux.free.fr/ BuildRoot: %{_tmppath}/%{name}64-%{version}-%{release}-root BuildRequires: zlib-devel >= 1.1.3, gcc-c++, bzip2-devel >= 1.0.2 %description DAR is a command line tool to backup a directory tree and files. DAR is able to make differential backups, split them over a set of disks or files of a given size, use compression, filter files or subtrees to be saved or not saved, directly access and restore given files. DAR is also able to handle extented attributes, and can make remote backups through an ssh session for example. Finally, DAR handles save and restore of hard and symbolic links. 
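# Illustrative example (not part of the upstream prototype): a dar invocation
# matching the description above might look like
#
#   dar -c /backup/home_full -R /home -z -s 700M
#
# i.e. create a gzip-compressed archive of /home split into 700 MiB slices;
# the paths and slice size are placeholders, see the dar(1) man page installed
# by this package for the authoritative option list.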
%prep %setup %clean make clean rm -rf %{buildroot} %build ./configure CXXFLAGS=-O --enable-mode=64 --prefix=/usr --mandir=/usr/share/man make %install rm -rf %{buildroot} mkdir -p %{buildroot}/usr make DESTDIR=%{buildroot} install-strip %post %files %defattr(-,root,root,-) /usr/share/man/man1/dar.1 /usr/share/man/man1/dar_manager.1 /usr/share/man/man1/dar_slave.1 /usr/share/man/man1/dar_xform.1 /usr/share/man/man1/dar_cp.1 /usr/lib/libdar64.so.LIBDAR_MAJOR.0.LIBDAR_MINOR /usr/lib/libdar64.so.LIBDAR_MAJOR /usr/lib/libdar64.so /usr/lib/libdar64.la /usr/lib/libdar64.a /usr/include/dar/config.h /usr/include/dar/libdar.hpp /usr/include/dar/path.hpp /usr/include/dar/mask.hpp /usr/include/dar/integers.hpp /usr/include/dar/real_infinint.hpp /usr/include/dar/statistics.hpp /usr/include/dar/user_interaction.hpp /usr/include/dar/erreurs.hpp /usr/include/dar/deci.hpp /usr/include/dar/limitint.hpp /usr/include/dar/infinint.hpp /usr/include/dar/compressor.hpp /usr/include/dar/special_alloc.hpp /usr/include/dar/generic_file.hpp /usr/include/dar/wrapperlib.hpp /usr/include/dar/storage.hpp /usr/include/dar/tuyau.hpp /usr/include/dar/tools.hpp /usr/include/dar/catalogue.hpp /usr/include/dar/scrambler.hpp /usr/include/dar/archive.hpp /usr/include/dar/header_version.hpp /usr/include/dar/ea.hpp /usr/include/dar/crypto.hpp /usr/include/dar/int_tools.hpp /usr/include/dar/thread_cancellation.hpp /usr/include/dar/tronconneuse.hpp /usr/bin/dar /usr/bin/dar_xform /usr/bin/dar_slave /usr/bin/dar_manager /usr/bin/dar_cp /usr/bin/dar_static /usr/share/dar/dar_par.dcf /usr/share/dar/dar_par_create.duc /usr/share/dar/dar_par_test.duc /usr/share/dar/dar-differential-backup-mini-howto.en.html /usr/share/dar/dar-differential-backup-mini-howto.it.html /usr/share/dar/dar-differential-backup-mini-howto.es.html /usr/share/dar/FEATURES /usr/share/dar/LIMITATIONS /usr/share/dar/NOTES /usr/share/dar/TUTORIAL /usr/share/dar/GOOD_BACKUP_PRACTICE /usr/share/dar/README /usr/share/dar/LINKS /usr/share/dar/FAQ /usr/share/dar/api_tutorial.html /usr/local/share/locale/fr/LC_MESSAGES/dar.mo %changelog * Wed Dec 29 2004 Denis Corbin - updated list of file installed by the package * Sat Nov 22 2003 Denis Corbin - removed the %doc in spec file, as documentation is now installed by "make" * Tue Oct 21 2003 Denis Corbin - added mini-howto and LINKS documentation * Thu Oct 9 2003 Denis Corbin - added sample scripts and dar_cp * Mon Sep 15 2003 Denis Corbin - added dependency libbz2 - see CHANGES file for more * Thu Jan 9 2003 Denis Corbin - removed the OS_BITS flag, which is no more necessary - added dar_static in %files * Thu Nov 7 2002 Axel Kohlmeyer - modified the spec file to comply with standard redhat rpms - allow building of rpm as non-root user - add build dependency on zlib and c++ - handle x86/alpha arch from specfile. * Thu Jun 27 2002 Denis Corbin - see file named "CHANGES" dar-2.4.8/misc/dar32_ea.rpm.proto0000644000175000017430000000767112003275604013425 00000000000000# GENERATE_COMMENT summary: DAR - Disk ARchive Name: dar Version: DAR_VERSION Release: 1 Copyright: GPL Icon: dar.gif Group: Applications/Archiving Source: http://dar.linux.free.fr/dar-DAR_VERSION.tar.gz URL: http://dar.linux.free.fr/ BuildRoot: %{_tmppath}/%{name}32_ea-%{version}-%{release}-root BuildRequires: zlib-devel >= 1.1.3, gcc-c++, bzip2-devel >= 1.0.2 %description DAR is a command line tool to backup a directory tree and files. 
DAR is able to make differential backups, split them over a set of disks or files of a given size, use compression, filter files or subtrees to be saved or not saved, directly access and restore given files. DAR is also able to handle extented attributes, and can make remote backups through an ssh session for example. Finally, DAR handles save and restore of hard and symbolic links. %prep %setup %clean make clean rm -rf %{buildroot} %build ./configure CXXFLAGS=-O --enable-mode=32 --enable-ea-support --prefix=/usr --mandir=/usr/share/man make %install rm -rf %{buildroot} mkdir -p %{buildroot}/usr make DESTDIR=%{buildroot} install-strip %post %files %defattr(-,root,root,-) /usr/share/man/man1/dar.1 /usr/share/man/man1/dar_manager.1 /usr/share/man/man1/dar_slave.1 /usr/share/man/man1/dar_xform.1 /usr/share/man/man1/dar_cp.1 /usr/lib/libdar32.so.LIBDAR_MAJOR.0.LIBDAR_MINOR /usr/lib/libdar32.so.LIBDAR_MAJOR /usr/lib/libdar32.so /usr/lib/libdar32.la /usr/lib/libdar32.a /usr/include/dar/config.h /usr/include/dar/libdar.hpp /usr/include/dar/path.hpp /usr/include/dar/mask.hpp /usr/include/dar/integers.hpp /usr/include/dar/real_infinint.hpp /usr/include/dar/statistics.hpp /usr/include/dar/user_interaction.hpp /usr/include/dar/erreurs.hpp /usr/include/dar/deci.hpp /usr/include/dar/limitint.hpp /usr/include/dar/infinint.hpp /usr/include/dar/compressor.hpp /usr/include/dar/special_alloc.hpp /usr/include/dar/generic_file.hpp /usr/include/dar/wrapperlib.hpp /usr/include/dar/storage.hpp /usr/include/dar/tuyau.hpp /usr/include/dar/tools.hpp /usr/include/dar/catalogue.hpp /usr/include/dar/scrambler.hpp /usr/include/dar/archive.hpp /usr/include/dar/header_version.hpp /usr/include/dar/ea.hpp /usr/include/dar/crypto.hpp /usr/include/dar/int_tools.hpp /usr/include/dar/thread_cancellation.hpp /usr/include/dar/tronconneuse.hpp /usr/bin/dar /usr/bin/dar_xform /usr/bin/dar_slave /usr/bin/dar_manager /usr/bin/dar_cp /usr/bin/dar_static /usr/share/dar/dar_par.dcf /usr/share/dar/dar_par_create.duc /usr/share/dar/dar_par_test.duc /usr/share/dar/dar-differential-backup-mini-howto.en.html /usr/share/dar/dar-differential-backup-mini-howto.it.html /usr/share/dar/dar-differential-backup-mini-howto.es.html /usr/share/dar/FEATURES /usr/share/dar/LIMITATIONS /usr/share/dar/NOTES /usr/share/dar/TUTORIAL /usr/share/dar/GOOD_BACKUP_PRACTICE /usr/share/dar/README /usr/share/dar/LINKS /usr/share/dar/FAQ /usr/share/dar/api_tutorial.html /usr/local/share/locale/fr/LC_MESSAGES/dar.mo %changelog * Wed Dec 29 2004 Denis Corbin - updated list of file installed by the package * Sat Nov 22 2003 Denis Corbin - removed the %doc in spec file, as documentation is now installed by "make" * Tue Oct 21 2003 Denis Corbin - added mini-howto and LINKS documentation * Thu Oct 9 2003 Denis Corbin - added sample scripts and dar_cp * Mon Sep 15 2003 Denis Corbin - added dependency libbz2 - see CHANGES file for more * Thu Jan 9 2003 Denis Corbin - removed the OS_BITS flag, which is no more necessary - added dar_static in %files * Thu Nov 7 2002 Axel Kohlmeyer - modified the spec file to comply with standard redhat rpms - allow building of rpm as non-root user - add build dependency on zlib and c++ - handle x86/alpha arch from specfile. * Thu Jun 27 2002 Denis Corbin - see file named "CHANGES" dar-2.4.8/misc/make_cygwin_dist0000755000175000017430000000237212010477201013415 00000000000000#!/bin/tcsh -f if(! 
-f configure) then echo "run this script from the root directory of CVS working directory" echo "which is where the 'configure' script resides" exit 1 endif if( "$1" == "") then echo "usage : $0 " exit endif set dir_name="dar$2-$1-i386-windows" set build_dir=`pwd`/$dir_name echo "install directory is $build_dir" if( -e $build_dir) then echo "cannot create $build_dir file exists" exit 1 endif mkdir $build_dir make clean distclean || echo "ignoring error, tree already clean" setenv CXXFLAGS -O ./configure --prefix=$build_dir $3 $4 $5 $6 $7 $8 $9 make make install-strip mv $build_dir/bin/* $build_dir/lib/*.so.*.*.* $build_dir cp /bin/cygwin1.dll /bin/cygz.dll /bin/cygbz2-1.dll /bin/cygiconv-2.dll /usr/bin/cygintl-8.dll /bin/cyggcc_s-1.dll /bin/cygstdc++-6.dll /bin/cyggcrypt-11.dll /bin/cyggpg-error-0.dll /bin/cyglzo2-2.dll $build_dir cp -R doc $build_dir foreach fichier (README TODO INSTALL ChangeLog THANKS COPYING) cp $fichier "$build_dir/`basename $fichier`.txt" misc/todos "$build_dir/`basename $fichier`.txt" end rm -rf $build_dir/man $build_dir/bin $build_dir/lib $build_dir/include rm -rf $build_dir/share zip -9 -r "$dir_name".zip $dir_name rm -rf $build_dir dar-2.4.8/misc/dar_ea.rpm.proto0000644000175000017430000000763412003275604013255 00000000000000# GENERATE_COMMENT summary: DAR - Disk ARchive Name: dar Version: DAR_VERSION Release: 1 Copyright: GPL Icon: dar.gif Group: Applications/Archiving Source: http://dar.linux.free.fr/dar-DAR_VERSION.tar.gz URL: http://dar.linux.free.fr/ BuildRoot: %{_tmppath}/%{name}_ea-%{version}-%{release}-root BuildRequires: zlib-devel >= 1.1.3, gcc-c++, bzip2-devel >= 1.0.2 %description DAR is a command line tool to backup a directory tree and files. DAR is able to make differential backups, split them over a set of disks or files of a given size, use compression, filter files or subtrees to be saved or not saved, directly access and restore given files. DAR is also able to handle extended attributes, and can make remote backups through an ssh session for example. Finally, DAR handles save and restore of hard and symbolic links.
%prep %setup %clean make clean rm -rf %{buildroot} %build ./configure CXXFLAGS=-O --enable-ea-support --prefix=/usr --mandir=/usr/share/man make %install rm -rf %{buildroot} mkdir -p %{buildroot}/usr make DESTDIR=%{buildroot} install-strip %post %files %defattr(-,root,root,-) /usr/share/man/man1/dar.1 /usr/share/man/man1/dar_manager.1 /usr/share/man/man1/dar_slave.1 /usr/share/man/man1/dar_xform.1 /usr/share/man/man1/dar_cp.1 /usr/lib/libdar.so.LIBDAR_MAJOR.0.LIBDAR_MINOR /usr/lib/libdar.so.LIBDAR_MAJOR /usr/lib/libdar.so /usr/lib/libdar.la /usr/lib/libdar.a /usr/include/dar/config.h /usr/include/dar/libdar.hpp /usr/include/dar/path.hpp /usr/include/dar/mask.hpp /usr/include/dar/integers.hpp /usr/include/dar/real_infinint.hpp /usr/include/dar/statistics.hpp /usr/include/dar/user_interaction.hpp /usr/include/dar/erreurs.hpp /usr/include/dar/deci.hpp /usr/include/dar/limitint.hpp /usr/include/dar/infinint.hpp /usr/include/dar/compressor.hpp /usr/include/dar/special_alloc.hpp /usr/include/dar/generic_file.hpp /usr/include/dar/wrapperlib.hpp /usr/include/dar/storage.hpp /usr/include/dar/tuyau.hpp /usr/include/dar/tools.hpp /usr/include/dar/catalogue.hpp /usr/include/dar/scrambler.hpp /usr/include/dar/archive.hpp /usr/include/dar/header_version.hpp /usr/include/dar/ea.hpp /usr/include/dar/crypto.hpp /usr/include/dar/int_tools.hpp /usr/include/dar/thread_cancellation.hpp /usr/include/dar/tronconneuse.hpp /usr/bin/dar /usr/bin/dar_xform /usr/bin/dar_slave /usr/bin/dar_manager /usr/bin/dar_cp /usr/bin/dar_static /usr/share/dar/dar_par.dcf /usr/share/dar/dar_par_create.duc /usr/share/dar/dar_par_test.duc /usr/share/dar/dar-differential-backup-mini-howto.en.html /usr/share/dar/dar-differential-backup-mini-howto.it.html /usr/share/dar/dar-differential-backup-mini-howto.es.html /usr/share/dar/FEATURES /usr/share/dar/LIMITATIONS /usr/share/dar/NOTES /usr/share/dar/TUTORIAL /usr/share/dar/GOOD_BACKUP_PRACTICE /usr/share/dar/README /usr/share/dar/LINKS /usr/share/dar/FAQ /usr/share/dar/api_tutorial.html /usr/local/share/locale/fr/LC_MESSAGES/dar.mo %changelog * Wed Dec 29 2004 Denis Corbin - updated list of file installed by the package * Sat Nov 22 2003 Denis Corbin - removed the %doc in spec file, as documentation is now installed by "make" * Tue Oct 21 2003 Denis Corbin - added mini-howto and LINKS documentation * Thu Oct 9 2003 Denis Corbin - added sample scripts and dar_cp * Mon Sep 15 2003 Denis Corbin - added dependency libbz2 - see CHANGES file for more * Thu Jan 9 2003 Denis Corbin - removed the OS_BITS flag, which is no more necessary - added dar_static in %files * Thu Nov 7 2002 Axel Kohlmeyer - modified the spec file to comply with standard redhat rpms - allow building of rpm as non-root user - add build dependency on zlib and c++ - handle x86/alpha arch from specfile. * Thu Jun 27 2002 Denis Corbin - see file named "CHANGES" dar-2.4.8/misc/batch_cygwin0000755000175000017430000000057012003275573012546 00000000000000#!/bin/tcsh -f if(! -f configure) then echo "run this script from the root directory of CVS working directory" echo "which is where the 'configure' script resides" exit 1 endif if("$1" == "") then echo "usage: $0 " exit endif # misc/make_cygwin_dist $1 "" # misc/make_cygwin_dist $1 "32" --enable-mode=32 misc/make_cygwin_dist $1 "64" --enable-mode=64 dar-2.4.8/misc/batch_linux0000755000175000017430000000072112003275573012403 00000000000000#!/bin/tcsh -f if(! 
-f configure) then echo "run this script from the root directory of CVS working directory" echo "which is where the 'configure' script resides" exit 1 endif if ("$1" == "") then echo "usage: $0 " exit endif misc/make_rpm $1 misc/dar.rpm.spec mkdir rpm_dar mv *.rpm rpm_dar # misc/make_rpm $1 misc/dar32.rpm.spec # mkdir rpm_dar32 # mv *.rpm rpm_dar32 misc/make_rpm $1 misc/dar64.rpm.spec mkdir rpm_dar64 mv *.rpm rpm_dar64 dar-2.4.8/misc/Makefile.am0000644000175000017430000000255712003275604012215 00000000000000LIBDAR_MAJOR = `grep LIBDAR_COMPILE_TIME_MAJOR ../src/libdar/libdar.hpp | cut -d '=' -f 2 | cut -d ';' -f 1 | cut -d ' ' -f 2` LIBDAR_MINOR = `grep LIBDAR_COMPILE_TIME_MINOR ../src/libdar/libdar.hpp | cut -d '=' -f 2 | cut -d ';' -f 1 | cut -d ' ' -f 2` %.spec : %.proto ../configure.ac sed -e "s%DAR_VERSION%$(PACKAGE_VERSION)%g" -e "s%GENERATE_COMMENT%THIS IS A GENERATED FILE DO NOT EDIT !%g" -e "s%LIBDAR_MAJOR%$(LIBDAR_MAJOR)%g" -e "s%LIBDAR_MINOR%$(LIBDAR_MINOR)%g" $< > $@ dar.rpm.spec : dar.rpm.proto dar.rpm.spec : dar.rpm.proto dar32.rpm.spec : dar32.rpm.proto dar32_ea.rpm.spec : dar32_ea.rpm.proto dar64.rpm.spec : dar64.rpm.proto dar64_ea.rpm.spec : dar64_ea.rpm.proto dar_ea.rpm.spec : dar_ea.rpm.proto dist_noinst_DATA = dar.gif make_cygwin_dist make_rpm batch_cygwin batch_linux batch_linux_ea README batch_solaris dar.rpm.proto dar32.rpm.proto dar32_ea.rpm.proto dar64.rpm.proto dar64_ea.rpm.proto dar_ea.rpm.proto # noinst_PROGRAMS=todos specs noinst_PROGRAMS=todos # specs$(EXEEXT) : dar.rpm.spec dar32.rpm.spec dar32_ea.rpm.spec dar64.rpm.spec dar64_ea.rpm.spec dar_ea.rpm.spec # @echo "RPM spec files generated" clean_specs : # rm -f dar.rpm.spec dar32.rpm.spec dar32_ea.rpm.spec dar64.rpm.spec dar64_ea.rpm.spec dar_ea.rpm.spec todos_SOURCES=todos.c # specs_SOURCES=dar.rpm.spec dar32.rpm.spec dar32_ea.rpm.spec dar64.rpm.spec dar64_ea.rpm.spec dar_ea.rpm.spec dar-2.4.8/misc/batch_linux_ea0000755000175000017430000000075412003275573013056 00000000000000#!/bin/tcsh -f if(! 
-f configure) then echo "run this script from the root directory of CVS working directory" echo "which is where the 'configure' script resides" exit 1 endif if ("$1" == "") then echo "usage: $0 " exit endif misc/make_rpm $1 misc/dar_ea.rpm.spec mkdir rpm_dar_ea mv *.rpm rpm_dar_ea # misc/make_rpm $1 misc/dar32_ea.rpm.spec # mkdir rpm_dar32_ea # mv *.rpm rpm_dar32_ea misc/make_rpm $1 misc/dar64_ea.rpm.spec mkdir rpm_dar64_ea mv *.rpm rpm_dar64_ea dar-2.4.8/misc/dar.gif0000644000175000017430000007500112003275604011410 00000000000000
[binary GIF image data not reproduced here: the DAR logo, dar.gif; its embedded comment reads "DAR - Disk ARchive - http://dar.linux.free.fr"]
dar-2.4.8/misc/batch_solaris0000644000175000017430000000203212003275573012712 00000000000000#!/bin/tcsh -f if(! -f configure) then echo "run this script from the root directory of CVS working directory" echo "which is where the 'configure' script resides" exit 1 endif if("$1" == "" || "$2" == "" || "$3" == "") then echo "usage: $0 " echo "example : $0 2.0.0 sun4 Solaris" exit endif set filen="$1-$2-$3" ./configure CXXFLAGS=-O && make && mkdir dar-$filen && make DESTDIR=`pwd`/dar-$filen install-strip && tar -cf dar-$filen.tar dar-$filen && gzip -9 dar-$filen.tar || exit 1 make clean && make distclean || exit 1 # ./configure CXXFLAGS=-O --enable-mode=32 && make && mkdir dar32-$filen && make DESTDIR=`pwd`/dar32-$filen install-strip && tar -cf dar32-$filen.tar dar32-$filen && gzip -9 dar32-$filen.tar || exit 1 # make clean && make distclean || exit 1 ./configure CXXFLAGS=-O --enable-mode=64 && make && mkdir dar64-$filen && make DESTDIR=`pwd`/dar64-$filen install-strip && tar -cf dar64-$filen.tar dar64-$filen && gzip -9 dar64-$filen.tar || exit 1 make clean && make distclean || exit 1 dar-2.4.8/misc/README0000644000175000017430000000134212010477201011023 00000000000000What will you find in this directory? -------------------- | IMPORTANT ! | as there are so many distros today (yep, the Slackware hegemony is over...) | I no longer produce binary packages. Ask your preferred distro | maintainer team to upgrade their packages, or wait until they do so. | The following scripts are provided for convenience and are now obsolete; | they are thus no longer maintained nor much used.
-------------------- many script that help(ed) build the releases: - batch_cygwin - batch_linux - match_linux_ea - make_cygwin_dist - make_rpm the RPM spec files for releases - *.rpm.spec DAR's logo to be included in RPM packages: - dar.gif a helper program to translate documentation to DOS format - todos.c dar-2.4.8/misc/todos.c0000644000175000017430000000257012003275573011455 00000000000000#include #include #include #include #include #include #define TMP ".###tmp_file" #define MAX 10240 char c='\n'; int main(int argc,char *argv[]) { char buffer[MAX]; char sortie[2*MAX]; char *file; int p; register int i,lu,d; if(argc < 2) { printf("usage : %s liste de fichiers\n",argv[0]); return 1; } close(0);close(1); for(p=1;p " exit 1 endif cp "$1" misc/dar.gif /usr/src/rpm/SOURCES cp "$2" /usr/src/rpm/SPECS set old_dir = `pwd` cd /usr/src/rpm/SPECS rpm -ba --clean --rmsource --nodeps --rmsource `basename $2` mv /usr/src/rpm/RPMS/i386/dar* /usr/src/rpm/SRPMS/dar* $old_dir rm /usr/src/rpm/SPECS/`basename $2` dar-2.4.8/ABOUT-NLS0000644000175000017520000026713312023053122010447 000000000000001 Notes on the Free Translation Project *************************************** Free software is going international! The Free Translation Project is a way to get maintainers of free software, translators, and users all together, so that free software will gradually become able to speak many languages. A few packages already provide translations for their messages. If you found this `ABOUT-NLS' file inside a distribution, you may assume that the distributed package does use GNU `gettext' internally, itself available at your nearest GNU archive site. But you do _not_ need to install GNU `gettext' prior to configuring, installing or using this package with messages translated. Installers will find here some useful hints. These notes also explain how users should proceed for getting the programs to use the available translations. They tell how people wanting to contribute and work on translations can contact the appropriate team. 1.1 INSTALL Matters =================== Some packages are "localizable" when properly installed; the programs they contain can be made to speak your own native language. Most such packages use GNU `gettext'. Other packages have their own ways to internationalization, predating GNU `gettext'. By default, this package will be installed to allow translation of messages. It will automatically detect whether the system already provides the GNU `gettext' functions. Installers may use special options at configuration time for changing the default behaviour. The command: ./configure --disable-nls will _totally_ disable translation of messages. When you already have GNU `gettext' installed on your system and run configure without an option for your new package, `configure' will probably detect the previously built and installed `libintl' library and will decide to use it. If not, you may have to to use the `--with-libintl-prefix' option to tell `configure' where to look for it. Internationalized packages usually have many `po/LL.po' files, where LL gives an ISO 639 two-letter code identifying the language. Unless translations have been forbidden at `configure' time by using the `--disable-nls' switch, all available translations are installed together with the package. However, the environment variable `LINGUAS' may be set, prior to configuration, to limit the installed set. `LINGUAS' should then contain a space separated list of two-letter codes, stating which languages are allowed. 
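   For instance (a minimal sketch, not taken from this package's own documentation; the value `de fr' is only an example), an installer who wants just the German and French catalogs could run, in a Bourne compatible shell:

     LINGUAS="de fr"
     export LINGUAS
     ./configure

   Only the `de' and `fr' PO files would then be installed together with the package; the other available translations would be skipped.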
1.2 Using This Package ====================== As a user, if your language has been installed for this package, you only have to set the `LANG' environment variable to the appropriate `LL_CC' combination. If you happen to have the `LC_ALL' or some other `LC_xxx' environment variables set, you should unset them before setting `LANG', otherwise the setting of `LANG' will not have the desired effect. Here `LL' is an ISO 639 two-letter language code, and `CC' is an ISO 3166 two-letter country code. For example, let's suppose that you speak German and live in Germany. At the shell prompt, merely execute `setenv LANG de_DE' (in `csh'), `export LANG; LANG=de_DE' (in `sh') or `export LANG=de_DE' (in `bash'). This can be done from your `.login' or `.profile' file, once and for all. You might think that the country code specification is redundant. But in fact, some languages have dialects in different countries. For example, `de_AT' is used for Austria, and `pt_BR' for Brazil. The country code serves to distinguish the dialects. The locale naming convention of `LL_CC', with `LL' denoting the language and `CC' denoting the country, is the one use on systems based on GNU libc. On other systems, some variations of this scheme are used, such as `LL' or `LL_CC.ENCODING'. You can get the list of locales supported by your system for your language by running the command `locale -a | grep '^LL''. Not all programs have translations for all languages. By default, an English message is shown in place of a nonexistent translation. If you understand other languages, you can set up a priority list of languages. This is done through a different environment variable, called `LANGUAGE'. GNU `gettext' gives preference to `LANGUAGE' over `LANG' for the purpose of message handling, but you still need to have `LANG' set to the primary language; this is required by other parts of the system libraries. For example, some Swedish users who would rather read translations in German than English for when Swedish is not available, set `LANGUAGE' to `sv:de' while leaving `LANG' to `sv_SE'. Special advice for Norwegian users: The language code for Norwegian bokma*l changed from `no' to `nb' recently (in 2003). During the transition period, while some message catalogs for this language are installed under `nb' and some older ones under `no', it's recommended for Norwegian users to set `LANGUAGE' to `nb:no' so that both newer and older translations are used. In the `LANGUAGE' environment variable, but not in the `LANG' environment variable, `LL_CC' combinations can be abbreviated as `LL' to denote the language's main dialect. For example, `de' is equivalent to `de_DE' (German as spoken in Germany), and `pt' to `pt_PT' (Portuguese as spoken in Portugal) in this context. 1.3 Translating Teams ===================== For the Free Translation Project to be a success, we need interested people who like their own language and write it well, and who are also able to synergize with other translators speaking the same language. Each translation team has its own mailing list. The up-to-date list of teams can be found at the Free Translation Project's homepage, `http://translationproject.org/', in the "Teams" area. If you'd like to volunteer to _work_ at translating messages, you should become a member of the translating team for your own language. The subscribing address is _not_ the same as the list itself, it has `-request' appended. 
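   To make the Swedish example above concrete (a sketch for `sh'-like shells only; `csh' users would use `setenv' instead), the corresponding lines in `.profile' could read:

     LANG=sv_SE
     LANGUAGE=sv:de
     export LANG LANGUAGE

   With these settings, messages appear in Swedish whenever a Swedish translation exists, in German otherwise, and in English when neither translation is available.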
For example, speakers of Swedish can send a message to `sv-request@li.org', having this message body: subscribe Keep in mind that team members are expected to participate _actively_ in translations, or at solving translational difficulties, rather than merely lurking around. If your team does not exist yet and you want to start one, or if you are unsure about what to do or how to get started, please write to `coordinator@translationproject.org' to reach the coordinator for all translator teams. The English team is special. It works at improving and uniformizing the terminology in use. Proven linguistic skills are praised more than programming skills, here. 1.4 Available Packages ====================== Languages are not equally supported in all packages. The following matrix shows the current state of internationalization, as of June 2010. The matrix shows, in regard of each package, for which languages PO files have been submitted to translation coordination, with a translation percentage of at least 50%. Ready PO files af am an ar as ast az be be@latin bg bn_IN bs ca +--------------------------------------------------+ a2ps | [] [] | aegis | | ant-phone | | anubis | | aspell | [] [] | bash | | bfd | | bibshelf | [] | binutils | | bison | | bison-runtime | [] | bluez-pin | [] [] | bombono-dvd | | buzztard | | cflow | | clisp | | coreutils | [] [] | cpio | | cppi | | cpplib | [] | cryptsetup | | dfarc | | dialog | [] [] | dico | | diffutils | [] | dink | | doodle | | e2fsprogs | [] | enscript | [] | exif | | fetchmail | [] | findutils | [] | flex | [] | freedink | | gas | | gawk | [] [] | gcal | [] | gcc | | gettext-examples | [] [] [] [] | gettext-runtime | [] [] | gettext-tools | [] [] | gip | [] | gjay | | gliv | [] | glunarclock | [] [] | gnubiff | | gnucash | [] | gnuedu | | gnulib | | gnunet | | gnunet-gtk | | gnutls | | gold | | gpe-aerial | | gpe-beam | | gpe-bluetooth | | gpe-calendar | | gpe-clock | [] | gpe-conf | | gpe-contacts | | gpe-edit | | gpe-filemanager | | gpe-go | | gpe-login | | gpe-ownerinfo | [] | gpe-package | | gpe-sketchbook | | gpe-su | [] | gpe-taskmanager | [] | gpe-timesheet | [] | gpe-today | [] | gpe-todo | | gphoto2 | | gprof | [] | gpsdrive | | gramadoir | | grep | | grub | [] [] | gsasl | | gss | | gst-plugins-bad | [] | gst-plugins-base | [] | gst-plugins-good | [] | gst-plugins-ugly | [] | gstreamer | [] [] [] | gtick | | gtkam | [] | gtkorphan | [] | gtkspell | [] [] [] | gutenprint | | hello | [] | help2man | | hylafax | | idutils | | indent | [] [] | iso_15924 | | iso_3166 | [] [] [] [] [] [] [] | iso_3166_2 | | iso_4217 | | iso_639 | [] [] [] [] | iso_639_3 | | jwhois | | kbd | | keytouch | [] | keytouch-editor | | keytouch-keyboa... | [] | klavaro | [] | latrine | | ld | [] | leafpad | [] [] | libc | [] [] | libexif | () | libextractor | | libgnutls | | libgpewidget | | libgpg-error | | libgphoto2 | | libgphoto2_port | | libgsasl | | libiconv | [] | libidn | | lifelines | | liferea | [] [] | lilypond | | linkdr | [] | lordsawar | | lprng | | lynx | [] | m4 | | mailfromd | | mailutils | | make | | man-db | | man-db-manpages | | minicom | | mkisofs | | myserver | | nano | [] [] | opcodes | | parted | | pies | | popt | | psmisc | | pspp | [] | pwdutils | | radius | [] | recode | [] [] | rosegarden | | rpm | | rush | | sarg | | screem | | scrollkeeper | [] [] [] | sed | [] [] | sharutils | [] [] | shishi | | skencil | | solfege | | solfege-manual | | soundtracker | | sp | | sysstat | | tar | [] | texinfo | | tin | | unicode-han-tra... 
| | unicode-transla... | | util-linux-ng | [] | vice | | vmm | | vorbis-tools | | wastesedge | | wdiff | | wget | [] [] | wyslij-po | | xchat | [] [] [] [] | xdg-user-dirs | [] [] [] [] [] [] [] [] [] | xkeyboard-config | [] [] | +--------------------------------------------------+ af am an ar as ast az be be@latin bg bn_IN bs ca 6 0 1 2 3 19 1 10 3 28 3 1 38 crh cs da de el en en_GB en_ZA eo es et eu fa +-------------------------------------------------+ a2ps | [] [] [] [] [] [] [] | aegis | [] [] [] | ant-phone | [] () | anubis | [] [] | aspell | [] [] [] [] [] | bash | [] [] [] | bfd | [] | bibshelf | [] [] [] | binutils | [] | bison | [] [] | bison-runtime | [] [] [] [] | bluez-pin | [] [] [] [] [] [] | bombono-dvd | [] | buzztard | [] [] [] | cflow | [] [] | clisp | [] [] [] [] | coreutils | [] [] [] [] | cpio | | cppi | | cpplib | [] [] [] | cryptsetup | [] | dfarc | [] [] [] | dialog | [] [] [] [] [] | dico | | diffutils | [] [] [] [] [] [] | dink | [] [] [] | doodle | [] | e2fsprogs | [] [] [] | enscript | [] [] [] | exif | () [] [] | fetchmail | [] [] () [] [] [] | findutils | [] [] [] | flex | [] [] | freedink | [] [] [] | gas | [] | gawk | [] [] [] | gcal | [] | gcc | [] [] | gettext-examples | [] [] [] [] | gettext-runtime | [] [] [] [] | gettext-tools | [] [] [] | gip | [] [] [] [] | gjay | [] | gliv | [] [] [] | glunarclock | [] [] | gnubiff | () | gnucash | [] () () () () | gnuedu | [] [] | gnulib | [] [] | gnunet | | gnunet-gtk | [] | gnutls | [] [] | gold | [] | gpe-aerial | [] [] [] [] | gpe-beam | [] [] [] [] | gpe-bluetooth | [] [] | gpe-calendar | [] | gpe-clock | [] [] [] [] | gpe-conf | [] [] [] | gpe-contacts | [] [] [] | gpe-edit | [] [] | gpe-filemanager | [] [] [] | gpe-go | [] [] [] [] | gpe-login | [] [] | gpe-ownerinfo | [] [] [] [] | gpe-package | [] [] [] | gpe-sketchbook | [] [] [] [] | gpe-su | [] [] [] [] | gpe-taskmanager | [] [] [] [] | gpe-timesheet | [] [] [] [] | gpe-today | [] [] [] [] | gpe-todo | [] [] [] | gphoto2 | [] [] () [] [] [] | gprof | [] [] [] | gpsdrive | [] [] [] | gramadoir | [] [] [] | grep | [] | grub | [] [] | gsasl | [] | gss | | gst-plugins-bad | [] [] [] [] [] | gst-plugins-base | [] [] [] [] [] | gst-plugins-good | [] [] [] [] [] [] | gst-plugins-ugly | [] [] [] [] [] [] | gstreamer | [] [] [] [] [] | gtick | [] () [] | gtkam | [] [] () [] [] | gtkorphan | [] [] [] [] | gtkspell | [] [] [] [] [] [] [] | gutenprint | [] [] [] | hello | [] [] [] [] | help2man | [] | hylafax | [] [] | idutils | [] [] | indent | [] [] [] [] [] [] [] | iso_15924 | [] () [] [] | iso_3166 | [] [] [] [] () [] [] [] () | iso_3166_2 | () | iso_4217 | [] [] [] () [] [] | iso_639 | [] [] [] [] () [] [] | iso_639_3 | [] | jwhois | [] | kbd | [] [] [] [] [] | keytouch | [] [] | keytouch-editor | [] [] | keytouch-keyboa... 
   [PO file availability matrix, continued]

   Per-language totals of translated PO files for the columns shown:

     crh 5     cs 64     da 105    de 117    el 18     en 1      en_GB 8   en_ZA 0
     eo 28     es 89     et 18     eu 19     fa 0      fi 105    fr 121    ga 53
     gl 20     gu 4      he 8      hi 3      hr 5      hu 53     hy 2      id 120
     is 5      it 84     ja 67     ka 0      kn 4      ko 20     ku 5      ky 10
     lg 1      lt 13     lv 48     mk 4      ml 2      mn 2      mr 4      ms 24
     mt 10     nb 20     nds 3     ne 1      nl 135    nn 10     or 4      pa 7
     pl 105    ps 1      pt 29     pt_BR 62  ro 47     ru 91     rw 3      sk 54
     sl 46     sq 9      sr 37     sv 119    sw 1      ta 3      te 3      tg 0
     th 10     tr 65     uk 51     vi 155    wa 17     zh_CN 98  zh_HK 7   zh_TW 41

   In total: 85 teams, 178 domains, 2618 translated PO files.

   Some counters in the preceding matrix are higher than the number of visible blocks would let us expect. This is because a few extra PO files are used for implementing regional variants of languages, or language dialects. For a PO file in the matrix above to be effective, the package to which it applies should also have been internationalized and distributed as such by its maintainer. There might be an observable lag between the mere existence of a PO file and its wide availability in a distribution.
   If June 2010 seems to be old, you may fetch a more recent copy of this `ABOUT-NLS' file on most GNU archive sites. The most up-to-date matrix with full percentage details can be found at `http://translationproject.org/extra/matrix.html'.

1.5 Using `gettext' in new packages
===================================

   If you are writing a freely available program and want to internationalize it, you are welcome to use GNU `gettext' in your package. Of course you have to respect the GNU Library General Public License which covers the use of the GNU `gettext' library. This means in particular that even non-free programs can use `libintl' as a shared library, whereas only free software can use `libintl' as a static library or use modified versions of `libintl'.

   Once the sources are changed appropriately and the setup can handle the use of `gettext', the only thing missing are the translations. The Free Translation Project is also available for packages which are not developed inside the GNU project. Therefore the information given above applies also to every other Free Software Project. Contact `coordinator@translationproject.org' to make the `.pot' files available to the translation teams.

dar-2.4.8/doc/0000755000175000017520000000000012023053205010033 500000000000000
dar-2.4.8/doc/dar_s_doc.jpg0000644000175000017430000000706512003275573012416 00000000000000
dar-2.4.8/doc/api_tutorial.html0000644000175000017430000041747312010477201013357 00000000000000
Libdar API - Tutorial
Dar Documentation

LIBDAR

APPLICATION INTERFACE

TUTORIAL

for API version 5.x.x




Presentation

The Libdar library has been built from source code originally located directly in the dar command line application. Libdar provides a complete abstraction layer for handling Disk ARchive (dar)'s archives. The general operations provided are:

  • archive creation,
  • file extraction,
  • archive listing,
  • archive testing,
  • archive comparison,
  • catalogue isolation
  • archive merging
  • dar_manager database manipulations
Note that Disk ARchive and libdar have been released under the GNU General Public License (GPL). All code linked to libdar (statically or dynamically) must also be covered by the GPL.

This tutorial will show you how to use the libdar API. As dar since its release 2.0.0 also uses this API, looking at its code may also provide a good illustration. The file src/dar_suite/dar.cpp is the primary consumer of the libdar API.

The sample code provided here is solely illustrative and is not guaranteed to compile. More detailed API documentation is contained in the source code and can be compiled to the doc/html directory using Doxygen, which is also provided online.



Let's Start

Conventions

Language

Dar and libdar are written in C++, and so is the libdar API. While written in C++, libdar is easily usable by both C and C++ code. Access from other languages can be provided by specific bindings. I would only say that you are welcome to provide the necessary bindings yourself. :-)

Libdar namespace

All libdar symbols are defined under the libdar namespace. You can either add the using namespace libdar; line at the beginning of your source files:

using namespace libdar;

get_version(....);

 or, as shown below, you can explicitly use the namespace in front of libdar objects :


libdar::get_version(....);

Exceptions or no Exceptions

The library can be used with or without exceptions. For each example we will see sample code for both ways: first the version using exceptions, then the version without:


example code using exceptions


example code not using exceptions


All exceptions used by libdar inherit from the pure virtual class Egeneric. The only method you will need to know about for any exception is the get_message() call, which returns a string describing the error (in human language). The type of the error is defined by the class of the exception; a short example of dispatching on the exception class follows the list below. The possible exception types follow:

class libdar::Egeneric
the parent class of all exceptions (a pure virtual class)
class libdar::Ememory
memory has been exhausted
class libdar::Ebug
signals a bug, which is triggered when reaching some code that should never be executed
class libdar::Einfinint
arithmetic error detected when operating on infinint
class libdar::Elimitint
a limitint overflow is detected, indicating the maximum value of the limitint has been exceeded
class libdar::Erange
signals a range error
class libdar::Edeci
signals conversion problem between infinint and string (decimal representation)
class libdar::Efeature
a requested feature is not (yet) implemented
class libdar::Ehardware
hardware problem is found
class libdar::Euser_abort
signals that the user has aborted the operation
class libdar::Ethread_cancel
the program has requested the termination of the current thread while libdar was running
class libdar::Edata
an error concerning the treated data has been encountered
class libdar::Escript
the script executed between slices returned an error code
class libdar::Elibcall
signals an error in the arguments given to a libdar call of the API
class libdar::Ecompilation
a requested feature has not been activated at compilation time
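
Since the error type is carried by the exception class, you can catch specific classes before falling back to Egeneric. The following is a purely illustrative sketch (any libdar call could replace get_version() here; the headers are introduced in the next section):

#include <iostream>
#include <dar/libdar.h>

void my_checked_call()
{
    try
    {
        libdar::U_I maj, med, min;
        libdar::get_version(maj, med, min); // any libdar call may go here
    }
    catch(libdar::Ememory & e)
    {
        std::cout << "out of memory: " << e.get_message() << std::endl;
    }
    catch(libdar::Euser_abort & e)
    {
        // the user refused to continue; usually not reported as an error
    }
    catch(libdar::Egeneric & e)
    {
        std::cout << "libdar error: " << e.get_message() << std::endl;
    }
}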




 1 - First we *must* initialize libdar by checking the libdar version


  
    // we'll want to display some messages
#include <iostream>

    // we include this header to access the libdar API
#include <dar/libdar.h>

    // all sample code shown will be inside this
    // function for simplicity's sake
void my_sample_function()
{
   try
   {
      libdar::U_I maj, med, min;

        // first we MUST call get_version()

      libdar::get_version(maj, med, min);

      if(maj != libdar::LIBDAR_COMPILE_TIME_MAJOR ||
         med < libdar::LIBDAR_COMPILE_TIME_MEDIUM)
        throw libdar::Erange("initialization",
        "we are linking against a wrong libdar");
   }
   catch(libdar::Egeneric & e)
   {
      std::cout << e.get_message() << std::endl;
   }
}

  
  
    // we'll want to display some messages
#include <iostream>

    // we include this header to access the libdar API
#include <dar/libdar.h>

    // all sample code shown will be inside this
    // function for simplicity's sake
void my_sample_function()
{
   libdar::U_I maj, med, min;
   libdar::U_16 excode;
   std::string msg;

    // first we MUST call get_version()

   libdar::get_version_noexcept(maj, med, min,
                                excode, msg);

   if(excode != libdar::LIBDAR_NOEXCEPT)
   {
      std::cout << msg << std::endl;
      return;
   }

   if(maj != libdar::LIBDAR_COMPILE_TIME_MAJOR ||
      med < libdar::LIBDAR_COMPILE_TIME_MEDIUM)
   {
      std::cout <<
"we are linking against a wrong libdar" << std::endl;
      return;
   }
}

The get_version() function must be called for several reasons:
  • you must check that the library you've dynamically linked with is compatible with the features you will be using. The major number must be the same, for no compatibility is assured between two libdar versions of different major numbers. While run-time compatibility is assured between medium numbers, the medium number must be greater than or equal to the one used at compilation time, to be sure that all the features you want are available in the libdar library you dynamically linked with. Changes between minor versions correspond to bug fixes and do not imply any API change, thus no constraint is present there (except the presence of more bugs in lower numbers).
  • the get_version() call, as well as returning version information, does important initialization tasks for libdar. If it is not called first, the libdar library will not be initialized properly and its behavior will be unpredictable. Note that you may call get_version() several times if you wish; the second time only the version information is returned, the libdar library is not reset or the like.
  • Last, if strong encryption support is activated at compilation time, libdar will by default perform libgcrypt initialization when get_version() is called, if libgcrypt is not already initialized. You can avoid having libdar initialize libgcrypt by calling get_version() with an additional argument set to "false" :
get_version(maj, med, min, false)
  • Note that in a multi-threaded environment with strong encryption activated in libdar at compilation time, libgcrypt requires to be initialized by the application itself, thus you *must* avoid having libdar initialize libgcrypt, by using the get_version() form described just above, and proceed from the application to the initialization of libgcrypt before calling get_version(). For more details see the libgcrypt documentation; a hedged sketch of this sequence follows this list.
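
The following is a minimal sketch of that sequence, not taken from the libdar documentation: it assumes the libgcrypt development headers are installed, and the thread-callback setup (GCRYCTL_SET_THREAD_CBS) that some libgcrypt versions require is only hinted at in a comment; refer to the libgcrypt manual for the exact form needed by your libgcrypt version.

#include <gcrypt.h>
#include <dar/libdar.h>

  // hypothetical helper: the application initializes libgcrypt itself,
  // then asks libdar (via the extra "false" argument) not to touch it
void init_libgcrypt_then_libdar()
{
      // if your libgcrypt version requires it, install the thread
      // callbacks (GCRYCTL_SET_THREAD_CBS) here, before anything else

   if(gcry_check_version(GCRYPT_VERSION) == NULL)
      throw libdar::Erange("initialization",
                           "installed libgcrypt is too old");

   gcry_control(GCRYCTL_INITIALIZATION_FINISHED, 0);

   libdar::U_I maj, med, min;
   libdar::get_version(maj, med, min, false); // false = leave libgcrypt alone
}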

1 bis - We must prepare the end right now!


As we saw, libdar uses some data structures (mutexes, secured memory, etc.) that need to be released properly before ending the program. It is mandatory to invoke the following function before exiting your program if you invoked get_version() previously. It is a good idea to implement this right now so as not to forget it later:


libdar::close_and_clean()


In particular, close_and_clean() does what is necessary for memory to be released in the proper order. Not calling close_and_clean() at the end of your program may result in an uncaught exception message from libdar at the end of the execution. This depends on the compiler, the libc and the options activated in libdar at compilation time.
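
Putting the two calls together, a minimal sketch of a program skeleton could look like this (purely illustrative):

#include <iostream>
#include <dar/libdar.h>

int main()
{
   libdar::U_I maj, med, min;

   try
   {
      libdar::get_version(maj, med, min); // mandatory initialization

        // ... all libdar operations take place here ...
   }
   catch(libdar::Egeneric & e)
   {
      std::cout << e.get_message() << std::endl;
   }

   libdar::close_and_clean(); // mandatory cleanup once libdar is no longer needed
   return 0;
}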


2 - Let's see the available features

Once we have called one of the get_version*() functions, it is possible to access the list of features activated at compilation time:




void my_sample_function()
{
        // let's continue in the same function

bool ea = libdar::compile_time::ea();
bool largefile = libdar::compile_time::largefile();
bool nodump = libdar::compile_time::nodump();
bool special_alloc = libdar::compile_time::special_alloc();
U_I bits = libdar::compile_time::bits();
// bits is equal to zero for infinint,
// else it is equal to 32 or 64 depending on
// the compilation mode used.

bool thread = libdar::compile_time::thread_safe();
bool libz = libdar::compile_time::libz();
bool libbz2 = libdar::compile_time::libbz2();
bool liblzo = libdar::compile_time::liblzo();
bool libcrypto = libdar::compile_time::libgcrypt();
bool furtive_read = libdar::compile_time::furtive_read();

}



 // here there is no difference because no exceptions
 // are thrown by the compile_time:: routines

You can do what you want with the resulting values. It is possible to display the available libdar features or to terminate if you don't find a desired feature. However, verifying that features are available is not strictly necessary, because libdar will tell you if an operation you call requires a feature that has not been activated at compilation time, by throwing an Ecompilation exception (or returning the LIBDAR_ECOMPILATION error code if you are not using exceptions).
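
For instance, an illustrative early check could warn the user when a feature the application would like to use is missing:

   // optional: libdar would anyway report the missing feature at call time
if(!libdar::compile_time::libz())
   std::cout << "this libdar build has no gzip (libz) compression support"
             << std::endl;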


3 - User interaction

The generic user_interaction class

To be able to report messages to the user and prompt for feedback a special class called user_interaction has been introduced. Simply put, user_interaction is a virtual class which you can derive to provide user interaction (a GUI's graphical interaction, for example). There are four methods whose prototypes you must override:

void pause (const std::string &message);
this method is called by libdar when the library needs a yes or no ("continue" or "kill") answer to a question, which is provided by the string message. The question posed by pause() must be answered by returning normally (meaning "yes") or throwing a Euser_abort exception if the user refused the proposition. Don't worry about throwing an exception from your code; it will be trapped by libdar if you don't want to manage exceptions and are using libdar the "no exception" way. But if you really don't want to throw an exception from your code, see the next method:


bool pause2(const std::string &message);
This is an alternative to the pause() method seen above. In place of defining a pause() method in your inherited class, you can redefine the pause2() method. The only difference with pause() is that the user's answer to the question is returned as a boolean value, so your code no longer has to throw a Euser_abort exception to say "no". Note that you must not redefine both pause() and pause2().

void inherited_warning (const std::string &message);
libdar calls this protected method (through the public method named warning()) to display an informational message to the user. It is not always a warning as the name suggests, but sometimes just normal information. In API 3.0.x this method did not exist, but the public warning() method itself was pure virtual and thus needed to be overridden. Today, the warning() method is no longer pure virtual, nor even virtual, so the user-defined implementation of message display has to be done in the inherited_warning() method.

std::string get_string (const std::string &message, bool echo);
This call is used to get an arbitrary answer from the user. It is mainly used to get a password (when no password has been supplied for an encrypted archive); the echo argument indicates whether the user's response should be displayed back on the screen (again, very useful for handling password input). If echo is set to "false", the implementation of get_string() should hide the characters typed by the user.

user_interaction *clone() const;
A deep copy operation must be implemented here. This is because libdar stores the reference to the user_interaction class as a pointer but may want to keep a complete internal copy at some point. A simple implementation of this method should be something like this (even if you don't want to use exceptions):


user_interaction *my_own_class::clone() const
{
    my_own_class *ret = new my_own_class(*this);
    if(ret == NULL)
        throw Ememory("user_interaction_callback::clone");
    else
        return ret;
}


The callback interaction class

An inherited class of user_interaction called user_interaction_callback provides an implementation of the user interaction based on callback functions. This allows you to replace the three interaction methods (pause, warning and get_string) by three normal functions of your choice, which must be given to the user_interaction_callback's constructor. The clone() method is implemented internally, leaving only the three callback functions to be implemented. Look at dar's command line code for a practical example: dar's user interaction is implemented using an instance of user_interaction_callback and three static functions in the module dar_suite/shell_interaction.cpp

Pay attention to the contextual value present in the arguments of these callback functions :

 
  // our own callback functions.
  // for the illustration of what these 'context' arguments
  // can be used for we will imagine the situation where
  // multiple windows or multiple threads may each one use
  // libdar, but all share the same callback functions.
 
typedef class t_window_type t_win;

  // this is an arbitrary type that here we will say
  // points to a graphical window object wrapped in a C++
  // class.
  // Note that the method show() wait_for_click() and so on
  // attributed to the t_win class are absolutely
  // imaginary. Any link to an existing class is a pure
  // coincidence...

void warning_callback(const std::string &x, void *context)
{
    ((t_win *)context)->show(x);
}
 
bool answer_callback(const std::string &x, void *context)
{
    click_type ret;

    ((t_win *)context)->show(x);
    ret = ((t_win *)context)->wait_for_click();

    return ret == click_OK;
}

std::string string_callback(const std::string &x, bool echo, void *context)
{
    ((t_win *)context)->show(x);
    if(!echo)
      ((t_win *)context)->set_hide_typed_char();
    ((t_win *)context)->wait_for_click();
    return ((t_win *)context)->read_text();
}

---------8<-------8<-------8<-------

  // So now each window can have its user_interaction object based on the same
  // user_interaction_callback object pointing to the same functions.
  // user_interaction_callback objects can be shared among different window objects

libdar::user_interaction_callback dialog =
        libdar::user_interaction_callback(&warning_callback, &answer_callback, &string_callback,
                                          (void *)get_current_windows_id());

  // just the "context" argument changes, and will be passed as is from the constructor to the callback
  // functions



4 - Masks

Masks are used to define which files will be considered and which will not. Libdar implements masks as several classes that all inherit from a virtual class that defines the way masks are used. This root class is the class mask and provides the is_covered() method, which libdar uses to determine which files are considered. There are many different basic mask classes you can use to build fairly complex masks:

class libdar::mask
the generic class, parent of all masks (a pure virtual class)
class libdar::bool_mask
boolean mask, either always true or false, it matches either all files or no files at all
class libdar::simple_mask
matches as done by the shell on the command lines (see "man 7 glob")
class libdar::regular_mask
matches regular expressions (see "man 7 regex")
class libdar::not_mask
negation of another mask
class libdar::et_mask
makes an *AND* operator between two or more masks
class libdar::ou_mask
makes the *OR* operator between  two or more masks
class libdar::simple_path_mask
a string matches if it is a subdirectory of the mask or is a directory that contains the specified path itself

class libdar::same_path_mask
matches if the string is exactly the given mask (no wild card expression)
class libdar::exclude_dir_mask
matches if string is the given string or a sub directory of it
class libdar::mask_list
matches a list of files defined in a given file

Let's play with some masks :


      // all files will be elected by this mask
  libdar::bool_mask m1 = true;   

      // all files that match the glob expression "A*~" will match.
      // the second argument of the constructor tells whether the match is case sensitive, so here
      // any file beginning with 'A' or 'a' and ending with '~' will be selected by this mask
  libdar::simple_mask m2 = libdar::simple_mask(std::string("A*~"), false);

      // m3 is the negation of m2. This mask will thus match
      // any file that m2 does not select
  libdar::not_mask m3 = m2;

      // this mask matches any file that is a subdirectory of "/home/joe"
      // and any directory that contains /home/joe, meaning
      // "/", "/home", "/home/joe" and any of its subdirectories are matched.
      // here, the second argument is also case sensitivity (so
      // "/HoMe" will not be selected by this mask).
  libdar::simple_path_mask m4 = libdar::simple_path_mask("/home/joe", true);

      // now let's do some more complex things:
      // m5 will now match only files that are selected by both m2 AND m4
  libdar::et_mask m5;
  m5.add_mask(m2);
  m5.add_mask(m4);
     
      // we can make more silly things like this, where m5 will select files
      // that match m2 AND m4 AND m3. But m3 = not m2 so now m5 will never
      // match any file...
  m5.add_mask(m3);

      // but we could do the same with an "ou_mask" and would get a silly
      // counterpart of m1 (a mask that matches any files)
  libdar::ou_mask m6;
  m6.add_mask(m2);
  m6.add_mask(m4);
  m6.add_mask(m3);

      // lastly, the NOT, AND and OR operation can be used recursively.
      // Frankly, it's possible to have masks reference each other!
  libdar::not_mask m7 = m6;
  m6.add_mask(m7);


Now that you've seen the power of these masks, you should know that in libdar there are three masks that are required:
  • The first mask is used against the names of all files except directories. It is applied solely to the names themselves (not the file path). This mask may be any combination of the masks seen previously; it will only be applied to socket, named pipes, symbolic links, char or block devices, plain files, but again not to directories. This way you can filter by file type for save, restore, list, compare, compress, and other library operations.
  • The second mask is applied to any file including directories, including the path part of the filename. So with it you can prune directories, or in any other way restrict the operation to a particular subdirectory, as well as to a particular plain file for example. Important note about this second mask: what your own mask will be compared to by libdar is the filesystem root (as defined under the argument "fs_root" of the same call you will give your own mask to) plus the current file being processed:
Assuming you choose for example tmp/A as argument to fs_root (an argument present when creating an archive, for example), your mask will be used against strings like "tmp/A/some/file". This is true up to libdar version 3.0.x (alias release 2.2.x). Instead, since libdar 4.0.0 the fs_root argument is expanded to an absolute path, so if in the previous example your current directory was /var, your mask will be used against strings like "/var/tmp/A/some/file". Of course there is no difference between these two libdar revisions when the fs_root argument is an absolute path [this change was necessary to support masks based on a list of files]

An exception is the test operation, which has no fs_root argument (because the operation is not relative to an existing filesystem); however, the subtree argument exists to receive a mask selecting by path the files to include in or exclude from the test operation. In this case the situation is as if fs_root was set to the value "<ROOT>". For example, masks will be compared to <ROOT>/some/file when performing an archive test operation.
  • The third mask, which is not always needed, concerns Extended Attributes (EA). It is applied to the full EA name in the form <domain>.<name> where <domain> is any string value, like but not limited to the usual "user" or "system" domains. A sketch wiring these three masks into the archive options follows this list.
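
The following sketch wires the three masks into the options object used for archive creation. It is illustrative only: the setter names set_selection(), set_subtree() and set_ea_mask() are assumed here and should be checked against libdar/archive_options.hpp.

      // first mask: applied to file names only (not to directories)
  libdar::simple_mask name_mask = libdar::simple_mask(std::string("*.cpp"), true);

      // second mask: applied to paths, to restrict the operation to a subtree
  libdar::simple_path_mask tree_mask = libdar::simple_path_mask("/home/joe/work", true);

      // third mask: applied to Extended Attribute names
  libdar::simple_mask ea_mask = libdar::simple_mask(std::string("user.*"), true);

  libdar::archive_options_create options;
  options.set_selection(name_mask);   // assumed setter for the first mask
  options.set_subtree(tree_mask);     // assumed setter for the second mask
  options.set_ea_mask(ea_mask);       // assumed setter for the third mask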


5 - Let's create a simple archive

Now that we have seen masks and exceptions let's start the real thing:

All the operations on archives are handled by the archive class, which is defined in libdar/archive.hpp. Each operation requires some mandatory parameters and some optional parameters. Optional parameters are gathered in an auxiliary archive_options_* class, whose default constructor sets them to default values. We will see a bit further how to set these options, but for now let's keep things simple:

  // creating an archive is simple; it is just
  // a matter of calling the "create" constructor
  // of the archive class. It may be used for full or
  // differential archives. We'll see an example of
  // of differential archives later.

  // note that while this example uses a pointer to store
  // my_arch, it is perhaps better practice to use a plain
  // stack object. In your code, use an object instead of
  // a pointer to an object under normal circumstances.

libdar::user_interaction_callback dialog = libdar::user_interaction_callback(ptr1, ptr2, ptr3);
  // where ptr1, ptr2 and ptr3 are three callback
  // functions.
libdar::statistics ret;
  // we will see this structure a bit further

libdar::archive *my_arch =
     new libdar::archive(dialog,
     "/home",  // saving all under this "root"
     "/tmp",   // where the slices will go
     "my_archive", // the basename of the slices
     "dar",  // the extension used for slices
     archive_options_create(), // default options
     &ret); // this value is returned by libdar
            // if you don't want to have statistics of the
            // operation you can set this parameter to NULL

   // creating an archive is simple; it is just
  // a matter of calling the "create" constructor
  // of the archive class. It may be used for full or
  // differential archives. We'll see an example of
  // of differential archives later.

  // note that while this example uses a pointer to store
  // my_arch, it is perhaps better practice to use a plain
  // stack object. In your code, use an object instead of
  // a pointer to an object under normal circumstances.

libdar::user_interaction_callback dialog = libdar::user_interaction_callback(ptr1, ptr2, ptr3);
  // where ptr1, ptr2 and ptr3 are three callback
  // functions.
libdar::statistics ret;
 
// we will see this structure a bit further

libdar::U_16 exception;
std::string except_msg;


libdar::archive *my_arch =    
     libdar::create_archive_noexcept(dialog,
 
     "/home",  // saving all under this "root"
     "/tmp",   // where the slices will go
     "my_archive", 
            // the basename of the slices
     "dar", // dar's slice extensions
     archive_options_create(), // default options

     &ret,  // this value is returned by libdar
            // if you don't want to have statistics of the
            // operation you can set this parameter to NULL

     exception, // this gives the status of the call
     except_msg); // and in case of error the cause.

if(exception != LIBDAR_NOEXCEPT)
  std::cout << "an error occurred: " << except_msg
            << std::endl;


When creating an archive, the created archive object can only be used as a reference for an isolation or for a differential backup. You cannot use it for restoration, listing, or comparison, because the underlying file descriptors are opened in write-only mode. An implementation using file descriptors in read-write access is not possible and would not be a good idea anyway. Why? Because, for example, if you wanted to test the newly created archive, using the newly created object would make the test rely on information stored in virtual memory (the archive contents, the data location of a file, etc.), not on the archive file itself. If some corruption occurred in the file you would not notice it.

So to totally complete the archive creation we must destroy the archive object we have just created, which will also close any file descriptors used by the object :


     delete my_arch;


libdar::close_archive_noexcept(my_arch, exception,
                        except_msg);

if(exception != LIBDAR_NOEXCEPT)
  std::cout << "an error occurred: " << except_msg
            << std::endl;


Optional Arguments:


Back to the optional arguments. The archive constructor used above to create an archive takes an argument of type "archive_options_create". In the above example, we called the constructor of this class directly within the argument list of the archive constructor. This builds an anonymous temporary object of this class. Such a newly created object has all the options set to their default values (like default masks for example). If you want to use non-default options like compression, slicing, encryption, file filtering and so on, you must change the options to your needs using the appropriate methods provided by the archive_options_create class. Assuming we want to make a compressed and sliced archive, we would use the following code:

 
 // we define an options object to be able to use
   // non default options:
libdar::archive_options_create options;

  // so now we can modify only the option we wish to
options.set_slicing(1024); // 1024 bytes, well that's small
options.set_compression(bzip2); // default is no compression
options.set_compression_level(6); // default is 9

libdar::statistics ret;
  // we will see this structure a bit further

libdar::archive *my_arch =
     new libdar::archive(dialog,
     "/home",  // saving all under this "root"
     "/tmp",   // where the slices will go
     "my_archive", // the basename of the slices
     "dar",  // the extension used for slices
     options, // the object we modified above
     &ret); // this value is returned by libdar
            // if you don't want to have statistics of the
            // operation you can set this parameter to NULL

   // we define an options object to be able to use
   // non default options:
libdar::archive_options_create options;

  // so now we can modify only the option we wish to
options.set_slicing(1024);
// 1024 bytes, well that's small
options.set_compression(bzip2);  // default is no compression
options.set_compression_level(6); // default is 9

libdar::statistics ret;
 
// we will see this structure a bit further

libdar::U_16 exception;
std::string except_msg;


libdar::archive *my_arch =    
     libdar::create_archive_noexcept(dialog,
 
     "/home",  // saving all under this "root"
     "/tmp",   // where the slices will go
     "my_archive", // the basename of the slices
     "dar", // dar's slice extensions
     options, // the object we modified above

     &ret,  // this value is returned by libdar
            // if you don't want to have statistics of the
            // operation you can set this parameter to NULL

     exception, // this gives the status of the call
     except_msg); // and in case of error the cause.

if(exception != LIBDAR_NOEXCEPT)
  std::cout << "an error occurred: " << except_msg
            << std::endl;



In the same way, each other operation (diff, testing, extraction, merging, ...) has a specific class that gathers its optional parameters. These classes are defined in the file libdar/archive_options.hpp, which you are welcome to consult for a complete, up-to-date list of available options. The advantage of these classes is that they do not break backward compatibility of the API when new features get added, while they also improve the readability of your code. This way, the current major number '5' of the API should last longer than the previous ones, for which each new feature implementation broke backward compatibility by adding a new argument to an API call.
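
For example, restoration follows the same pattern as the operations shown in this tutorial: a default-constructed option object that you tune with its setters, then the corresponding archive method. The sketch below is illustrative only; the exact prototypes of op_extract() and archive_options_extract should be checked in libdar/archive.hpp and libdar/archive_options.hpp.

libdar::archive_options_extract options; // default restoration options
libdar::statistics rst;

   // restore the archive contents under /tmp/restore
   // (call shape assumed to mirror op_test(): dialog, fs_root, options, progressive report)
rst = my_arch->op_extract(dialog,
                          libdar::path("/tmp/restore"),
                          options,
                          NULL); // no progressive report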

6 - Testing the archive we have created


So, as explained previously, we must create a new archive object but this time with the "read" constructor:


my_arch = new
libdar::archive(dialog,     
        "/tmp",  // where is the archive
        "my_archive", // slice name
        "dar",   // dar's archive extensions
        archive_options_read()); // default options

my_arch =
libdar::open_archive_noexcept(dialog,     
        "/tmp",  // where is the archive
        "my_archive", // slice name
        "dar",   // dar's archive extensions
       archive_options_read(), // default options
       exception,// this gives the status of the call
       except_msg); // and in case of error the
                    // cause of the error

if(exception != LIBDAR_NOEXCEPT)
  std::cout << "an error occurred: " << except_msg
            << std::endl;




Now that we have opened the archive we can perform any operation on it. Let's thus start by testing the archive coherence:


   // for the exercice, we will change the default options:
archive_options_test options;

options.clear(); // this sets back all options to default
 // here this is not required as the object has just been
 // created, however it is used here for illustration that
 // you can recycle an archive_option_* object.
options.set_info_details(true); // to have a verbose output

ret = my_arch->op_test(dialog,
             options, // the non default options set above
             NULL);  // we don't want a progressive report



   // for the exercice, we will change the default options:
archive_options_test options;

options.clear(); // this sets back all options to default
 // here this is not required as the object has just been
 // created, however it is used here for illustration that
 // you can recycle an archive_option_* object.
options.set_info_details(true); // to have a verbose output


ret = libdar::op_test_noexcept(
dialog,
       my_arch,         // the archive to test
      
options, // the non default options set above
       NULL,  // we don't want a progressive report
       exception,// this gives the status of the call
       except_msg); // and in case of error the
                    // cause of the error

if(exception != LIBDAR_NOEXCEPT)
  std::cout << "an error occurred: " << except_msg
            << std::endl;


We have tested the archive, but have not yet seen the libdar::statistics variable. It can be used when creating an archive as well as when testing it. This object reports the number of files treated, as well as the number of files with errors and the type of error. You can have a look at the API reference guide concerning the archive class methods for more information about the use of these different fields. Here is an example, which relies on the class deci to display the value of an infinint variable:

     
     // we need the class deci to display the value of an infinint:
#include "deci.hpp"

 std::cout << std::string("Number of file treated :") << libdar::deci(ret.treated).human() << std::endl;

    // or much simpler (but totally equivalent):
 std::cout << std::string("Number of file treated :") libdar::<< ret.treated << std::endl;


Note that the use of the class deci may throw exceptions (in case of lack of memory, for example), and there is actually no wrapper available to trap the exceptions that may be thrown by the class deci. So you have to protect the code using a try {} catch {}  statement.
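
A minimal illustrative way to follow that advice is to wrap the display code itself:

 try
 {
    std::cout << std::string("Number of files treated: ") << libdar::deci(ret.treated).human() << std::endl;
 }
 catch(libdar::Egeneric & e)
 {
       // Edeci or Ememory would land here
    std::cout << e.get_message() << std::endl;
 }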

You may have noticed that we used NULL as argument for "progressive_report". This argument must receive either NULL or the address of a real allocated statistics object. This object will be updated by the libdar call and, if multi-threaded support is enabled, it lets a concurrent thread read its value to display the current number of files treated, for example. Note that there is a small overhead when passing a variable to progressive_report, due to the mutex needed to prevent one thread from reading the data while another updates it. Here follows an example of use of this progressive report feature:

     
        // we need a variable that will be visible to both threads:
    libdar::statistics report;

        // and we need to store the value returned by the libdar call
    libdar::statistics final_result;

        // we spawn a first task with a libdar call, passing &report as argument to "progressive_report"
    final_result = some_call_to_be_defined_to_call_op_test_in_another_thread(..., &report);

        // then we run an endless loop (assuming the current thread will be signaled or interrupted once the
        // libdar call running in the other thread ends)

     while(true)
     {
         sleep(1); // updating the display each second
         some_function_to_update_the_display_with(report);
     }
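
The function some_call_to_be_defined_to_call_op_test_in_another_thread() above is left to the application. As a purely illustrative sketch, assuming a C++11 compiler, it could be replaced by a plain std::thread; some_function_to_update_the_display_with() remains the same hypothetical display routine as above:

#include <iostream>
#include <thread>
#include <atomic>
#include <chrono>
#include <dar/libdar.h>

   // hypothetical display routine, provided by the application
void some_function_to_update_the_display_with(const libdar::statistics & s);

void test_with_progress(libdar::archive & my_arch, libdar::user_interaction & dialog)
{
    libdar::statistics report;            // shared progressive report
    libdar::statistics final_result;      // value returned by op_test()
    libdar::archive_options_test options; // default options
    std::atomic<bool> done(false);

        // run the test in a secondary thread; op_test() updates "report" as it goes
    std::thread worker([&]()
    {
        try
        {
            final_result = my_arch.op_test(dialog, options, &report);
        }
        catch(libdar::Egeneric & e)
        {
            std::cout << e.get_message() << std::endl;
        }
        done = true;
    });

        // meanwhile, refresh the display every second from the calling thread
    while(!done)
    {
        std::this_thread::sleep_for(std::chrono::seconds(1));
        some_function_to_update_the_display_with(report);
    }
    worker.join();
}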



7 - listing archive contents


The simple way:


my_arch->op_listing(dialog,
             archive_options_listing()); // default options

 
libdar::op_listing_noexcept(dialog,
       my_arch,   // the archive to list
       archive_options_listing(), // default options

       exception,// this gives the status of the call
       except_msg); // and in case of error the
                    // cause of the error

if(exception != LIBDAR_NOEXCEPT)
  std::cout << "an error occurred: " << except_msg
            << endl;


By default the library will complete the listing by calling the warning() method of the dialog object one time for each file listed. The warning text will consist of a string for each file with the relevant information in columns that would need to be parsed if individual information was desired. This may not be appropriate for you and as such there is another way to get listing information. This requires a simple reimplementation of the user_interaction object.

The user_interaction class has a listing() method which provides separate arguments for each piece of information that can be displayed:
  • filename,
  • permission,
  • user,
  • group,
  • file size,
  • last modification date,
  • if the file is a directory
  • if the file has children or is an empty dir
  • file type
  • flag about saved data / saved EA / compression used


Technical note: You may notice that file type is not explicitly given as a parameter in the listing method. File type is available as the first byte of the permissions string. This is standard POSIX stuff except for an extension: "h" for files hard linked several times (it has been removed after release 2.3.0 / API 4.0.0). See man 2 stat for more information about POSIX permissions. Note however that the last arguments of this call let you easily know whether a file is a directory or not and whether it is empty or not.
In the user_interaction class (a virtual class), the listing() method is not a pure virtual method, so you are not obliged to overwrite it, but it has just an empty implementation, so it does nothing. You understand now that, by default, this method is not used. To activate it, you must call the protected method set_use_listing(true), and of course you will have to overwrite the listing() method to have a more useful behavior:

    // here follows the definition of our own implementation of
    // of a user_interaction class

class my_user_interaction : public user_interaction
{
public :
     // the inherited pure virtual methods we must define
     // as seen at the beginning of this tutorial:
       void pause(const std::string & message);
       void inherited_warning(const std::string & message);
       std::string get_string(const std::string & message, bool echo);
       user_interaction *clone() const;

    // we can overwrite this method to have splitted fields for listing:
        void listing(const std::string & flag,
                            const std::string & perm,
                            const std::string & uid,
                            const std::string & gid,
                            const std::string & size,
                            const std::string & date,
                            const std::string & filename,
                            bool is_dir,
                            bool has_children);

     // but it will not get used by libdar unless we call the protected method set_use_listing()
     // for example this can be done in the class constructor :

     my_user_interaction() { set_use_listing(true); };
};


Now, assuming we have implemented the listing() method in the my_user_interaction class, we call op_listing() exactly as we did before, only replacing the dialog object by one of the my_user_interaction class. This listing() method will then be called for each file to be listed, in place of the warning() method.

As seen at the beginning of this tutorial, there is a child class of user_interaction based on callback functions which is called user_interaction_callback. The listing() method must also be activated here. This is done automatically when you give a callback function to the object, thanks to the set_listing_callback() method :

 
  // our mandatory callback functions:

void warning_callback(const std::string &x, void *context)
{
    ....
}
 
bool answer_callback(const std::string &x, void *context)
{
    ....
}

std::string string_callback(const std::string &x, bool echo, void *context)
{
    ....
}

  // let's build a user_interaction_callback object:

libdar::user_interaction_callback dialog =
        libdar::user_interaction_callback(&warning_callback, &answer_callback, &string_callback, NULL);

   // at this point our dialog object is perfectly operational for listing
   // but libdar will call the warning_callback function to list the archive
   // contents

   // a new callback function for listing :

void listing_callback(const std::string & flag,
                      const std::string & perm,
                      const std::string & uid,
                      const std::string & gid,
                      const std::string & size,
                      const std::string & date,
                      const std::string & filename,
                      bool is_dir,
                      bool has_children,
                      void *context)
{
    ....
}

dialog.set_listing_callback(&listing_callback);

   // now libdar will call the listing_callback function when we
   // use this dialog object for listing the archive contents.


Last point about listing: if you examine the definition of the archive_options_listing class in the file libdar/archive_options.hpp, you will notice the set_list_mode() method. It may receive either normal, tree or xml (normal being the default).
  • normal, produces (if the listing() method of the given user_interaction object is not overwritten) a listing like tar would do.
  • tree produces a tree like directory listing (this was the original listing format in dar version 1.0.0)
  • xml, produces an XML output as described in doc/dar-catalog-1.0.dtd
Note that for these two last formats (tree and xml) the listing() method is never used, so even if you provide an object whose listing() method is overridden, the archive::op_listing() method will still use the warning() method of this user_interaction object to report the archive contents. A short sketch selecting the xml output follows.
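
For illustration only, asking for the xml output could look like the following sketch; the exact spelling of the enum value is assumed here (archive_options_listing::xml) and should be checked in libdar/archive_options.hpp:

libdar::archive_options_listing options;
options.set_list_mode(libdar::archive_options_listing::xml); // assumed enum value

my_arch->op_listing(dialog, options); // output goes through dialog's warning() method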

7 bis - Dynamic archive contents listing

Well, in the previous chapter we saw how to list the archive contents. You can imagine that when you have a huge archive this op_listing() call may take a long time to complete and produces a long output. If your application uses some graphical components and you want a more interesting way of listing the archive contents, you may prefer to show just the first level of the directory tree and let the user open the subdirectories and list their contents when needed, obtaining a sort of iterative archive listing. This avoids having to wait for the long listing to complete, as well as having to allocate memory for all the graphical components representing each directory and file, entries that most of the time will not be read by the user.

First step, we need to use the listing() method of the user_interaction class as seen above.
Second step, we have to call the get_children_of() method of a given archive object, instead of the op_listing() method.
In the following example, we will use the user_interaction_callback class, but you can use your own class inherited from user_interaction, and its listing() method.

 
  // our mandatory callback functions:

void warning_callback(const std::string &x, void *context)
{
    ....
}
 
bool answer_callback(const std::string &x, void *context)
{
    ....
}

std::string string_callback(const std::string &x, bool echo, void *context)
{
    ....
}

  // Now the callback function implementing the listing() method of class user_interaction

void listing_callback(const std::string & flag,
                      const std::string & perm,
                      const std::string & uid,
                      const std::string & gid,
                      const std::string & size,
                      const std::string & date,
                      const std::string & filename,
                      bool is_dir,
                      bool has_children,
                      void *context)
{
    ....
}

  // Now that our callback functions are ready, let's create a user_interaction callback object named "dialog"

libdar::user_interaction_callback dialog =
        libdar::user_interaction_callback(&warning_callback, &answer_callback, &string_callback, NULL);

  // now we must assign the listing_callback() function to "dialog".
dialog.set_listing_callback(&listing_callback);

  // Let's open an archive:
archive some_archive = archive(dialog, "/some/dir", "basename", "dar", archive_options_read());
  // we are reading a new archive, but we could have created one instead...

  // now, instead of calling the op_listing() method of some_archive with our dialog object as argument, we can call:
some_archive.get_children_of(dialog, "");
  // the second argument is the directory of which we want to know the subdirectories and subfiles. Here "" means the
  // root of the archive (we cannot use absolute path here).
  // the get_children_of() method will call dialog's listing() method (here our listing_callback() function through the
  // user_interaction_callback implementation) for each entry of the root directory.

  // let's suppose that, thanks to listing_callback() during the previous call to get_children_of(), we know that the entry
  // "var" exists (filename == "var"), is a directory (is_dir == true) and has some children (has_children == true).
  // If the user wants to know what is inside this directory, we only have to call:

some_archive.get_children_of(dialog, "var");
  // assuming that through listing_callback() we know a subdirectory "tmp" exists and is not empty, and that the user
  // wants to know what is in it:

some_archive.get_children_of(dialog, "var/tmp");
  // and so on.
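If the listing is driven from a graphical application, a convenient pattern is to pass a small structure of your own through the context pointer (the last constructor argument, which was NULL in the examples above), so that each callback invocation knows which directory is being expanded and where to store what it receives. This is only a sketch under that assumption: the listing_context structure and gui_listing_callback() function below are illustrative helpers, not libdar types.

   // needs <string> and <vector> in addition to the libdar headers

struct listing_context     // illustrative structure, not part of libdar
{
    std::string expanded_dir;                 // the directory currently being expanded
    std::vector<std::string> subdirectories;  // children that can be expanded further
};

void gui_listing_callback(const std::string & flag,
                          const std::string & perm,
                          const std::string & uid,
                          const std::string & gid,
                          const std::string & size,
                          const std::string & date,
                          const std::string & filename,
                          bool is_dir,
                          bool has_children,
                          void *context)
{
    listing_context *ctx = static_cast<listing_context *>(context);

    if(is_dir && has_children)
        ctx->subdirectories.push_back(ctx->expanded_dir.empty()
                                      ? filename
                                      : ctx->expanded_dir + "/" + filename);

    // ... feed the graphical components with the other fields here ...
}

   // build the user_interaction_callback object with &ctx as context instead of NULL:
listing_context ctx;
libdar::user_interaction_callback gui_dialog =
        libdar::user_interaction_callback(&warning_callback, &answer_callback, &string_callback, &ctx);
gui_dialog.set_listing_callback(&gui_listing_callback);

   // expanding the archive root:
ctx.expanded_dir = "";
some_archive.get_children_of(gui_dialog, ctx.expanded_dir);

   // later, when the user opens one of the collected subdirectories:
   //     ctx.expanded_dir = ctx.subdirectories[i];
   //     some_archive.get_children_of(gui_dialog, ctx.expanded_dir);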


8 - comparing with filesystem

We can compare the files in an archive with the filesystem by calling the op_diff method of the class archive.


     ret = my_arch->op_diff(dialog,
                 "/home", // what directory to take
                          // as root we shall
                          // compare the archive
                          // contents to
                 archive_options_diff(), // default options

                 NULL);   // we don't use progressive report




           
ret = libdar::op_diff_noexcept(dialog,
                 my_arch, // the archive to use
                 "/home", // what directory to take
                          // as root we shall
                          // compare the archive
                          // contents to
                 archive_options_diff(), // default options
                 NULL,    // we don't use progressive report
                 exception, // this gives the
                          // status of the call
                 except_msg); // and in case of
                          // error the cause of the
                          // error
if(exception != LIBDAR_NOEXCEPT)
  std::cout << "an error occurred: " << except_msg
            << std::endl;


Simple, no?

Just a note about the set_what_to_check() method of the archive_options_diff class (a short sketch follows the list below). It may take several values:
  • cf_inode_type (default value): a file is considered changed if its inode type has changed (directory/plain file/symbolic link/...)
  • cf_mtime : permission changes are ignored, as well as ownership changes
  • cf_ignore_owner : ownership changes are ignored
  • cf_all : any change to one of a file's fields (ownership, permission, dates, inode type) triggers a "file has changed" status.
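As a minimal sketch (not part of the original tutorial), here is how one of these values could be used; it assumes the cf_* values are directly reachable in the libdar namespace, as the list above suggests (check libdar/archive_options.hpp otherwise):

   // only report content and date changes, ignoring ownership and permission changes
libdar::archive_options_diff diff_opt;
diff_opt.set_what_to_check(libdar::cf_mtime);

ret = my_arch->op_diff(dialog, "/home", diff_opt, NULL);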

9 - restoring files

Restoration of files is done by calling the op_extract method of class archive.


ret = my_arch->op_extract(dialog,
             "/tmp",   // where to restore files to
             archive_options_extract(), // default options

             NULL);// no progressive report used
 
ret = libdar::op_extract_noexcept(
       dialog,
       my_arch,  // the archive to restore from
       "/tmp",   // where to restore files to
       archive_options_extract(), // default options
       NULL,     // no progressive report used
       exception,   // this gives the status of the call
       except_msg); // and in case of error the
                    // cause of the error

if(exception != LIBDAR_NOEXCEPT)
  std::cout << "an error occurred: " << except_msg
            << std::endl;


Here, as we used default options, we restore all the files stored in the archive into the directory /tmp (we also restore there the directory structure stored in the archive), but we could also make a flat restoration (ignoring the directory structure), as well as restore only some of the files. By default too, dar asks for user confirmation before overwriting a file. You can change these options and many others using the methods of the class archive_options_extract. Here we will restore everything under usr/lib, but only files whose filename ends with ".a"; we want libdar to skip files whose restoration would overwrite an existing file, and also have libdar display the files that have been skipped from the restoration.

archive_options_extract options;

options.set_selection(simple_mask("*.a", true));
options.set_subtree(simple_path_mask("usr/lib", true));
options.set_allow_over(false);
options.set_display_skipped(true);

ret = my_arch->op_extract(dialog,
             "/tmp",   // where to restore files to
             options, // non default options set just above

             NULL);// no progressive report used
archive_options_extract options;

options.set_selection(simple_mask("*.a", true));
options.set_subtree(simple_path_mask("usr/lib", true));
options.set_allow_over(false);
options.set_display_skipped(true);

ret = libdar::op_extract_noexcept(dialog,
       my_arch,  // the archive to restore from
       "/tmp",   // where to restore files to
       options,  // non default options set just above
       NULL,     // no progressive report used
       exception,   // this gives the status of the call
       except_msg); // and in case of error the
                    // cause of the error

Last point about the optional parameters: the set_what_to_check() method. It serves two roles here (a short sketch follows this list):
  1. Which fields are to be ignored when checking whether a file is more recent than the one in the filesystem (if this feature is enabled)
  2. Which fields to avoid restoring (for example, when not running with root privileges, avoiding the restoration of ownership may be preferable to getting a plethora of "failed to restore ownership" messages).
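Here is what the second role could look like; this is a sketch only, assuming archive_options_extract::set_what_to_check() accepts the same cf_* values as described for archive_options_diff above:

   // restoring without root privileges: ignore (and do not try to restore) ownership
libdar::archive_options_extract restore_opt;
restore_opt.set_what_to_check(libdar::cf_ignore_owner);

ret = my_arch->op_extract(dialog, "/tmp", restore_opt, NULL);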


10 - Isolating the Catalogue

OK, I know, catalogue is not an English word (one would rather write catalog), but that's the name of the C++ class used in libdar, so we will keep using it here. Note that you don't have to directly access this class (if you really don't like French).

Isolating the catalogue creates a new archive that only contains the list of files and their attributes (ownership, dates, size, etc.); no data and no EA are stored in it. It is very similar to the archive one gets when making a differential backup of a filesystem that has not changed since the creation of the reference archive. The usage is very similar to archive creation, but it uses a different constructor that has fewer arguments:

archive_options_isolate options;

  // we just want to have a compressed isolated catalogue
options.set_compression(gzip);

libdar::archive *my_cat = new libdar::archive(dialog,
                "/tmp",  // where the extracted
                         // catalogue is saved
                my_arch, // the archive of reference
                         // is the one we have been
                         // playing with previously
                "my_catalogue", // slice name
                "dar",   // file extension
                options); // non default options set above

archive_options_isolate options;

  // we just want to have a compressed isolated catalogue
options.set_compression(gzip);
          
libdar::archive *my_cat =
    libdar::op_isolate_noexcept(dialog,
               "/tmp",  // where the extracted
                        // catalogue is saved
                my_arch, // the archive of reference
                         // is the one we have been
                         // playing with previously
                "my_catalogue", // slice name
                "dar",   // file extension
                options, // non default options set above

                exception,
                       // this gives the status
                       // of the call
                except_msg);
                       // and in case of error the
                       // cause of the error

if(exception != LIBDAR_NOEXCEPT)
  std::cout << "an error occurred: " << except_msg
            << std::endl;


Now we have two archive objects. my_arch is a read-only object created by the "read" constructor; you can do any operation with it, like file restoration, file comparison or archive testing, as we have done in the previous sections. The second archive object is my_cat, which is a write-only object. It can only be used as a reference for another backup (a differential backup) or as a reference for a subsequent catalogue isolation (which would just clone the already isolated catalogue here).

Note that once closed (object destruction) you can re-open the isolated catalogue and use it as a read-only object (you can then test its integrity as seen previously).
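As a sketch of what such a re-opening could look like (once the object destruction shown just below has been done), using the "read" constructor seen earlier and the op_test() method from the testing section; the exact op_test() arguments below are an assumption to check against the API reference:

libdar::archive *reopened_cat = new libdar::archive(dialog,
                "/tmp",          // where the isolated catalogue was written
                "my_catalogue",  // its slice name
                "dar",           // its extension
                archive_options_read());

reopened_cat->op_test(dialog, archive_options_test(), NULL);
   // testing the isolated catalogue's integrity, as mentioned above

delete reopened_cat;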

So for now we will just destroy the extracted catalogue object, so that all its file descriptors are closed:


delete my_cat;   


libdar::close_archive_noexcept(my_cat, exception,
                        except_msg);

if(exception != LIBDAR_NOEXCEPT)
  std::cout << "an error occurred: " << except_msg
            << std::endl;



and we keep the my_arch object for our last operation:

11 - creating a differential backup

This operation is the same as the first one we did (archive creation). We will just provide the archive of reference as an optional parameter. If we had not destroyed my_cat above, we could have used it in place of my_arch for exactly the same result.

archive_options_create options;

   // we provide here the reference to an
   // existing archive object, this implies that
   // the archive will be a differential backup
options.set_reference(my_arch);

   // as we are now used to options, we will set a more
   // complex set of other options:
options.set_selection(not_mask(simple_mask("*~")));
options.set_empty_dir(true);
options.set_compression(bzip2);
options.set_compr_mask(not_mask(simple_mask("*.bz2")));
options.set_cache_directory_tagging(true);
options.set_slice_permission("0600");
options.set_slice_user_ownership("root");
options.set_slice_group_ownership("bin");
options.set_crypto_algo(crypto_blowfish);
  // if not specified with set_crypto_pass() the password
  // will be asked interactively to the user
options.set_slicing(100000000, 20480);
 
libdar::archive *my_other_arch =
     new libdar::archive(dialog,
     "/home",  // saving all under this "root"
     "/tmp",   // where the slices will go
     "my_archive", // the basename of the slices
     "dar", // dar's slice extensions
     options, // the optional parameter as defined above

     NULL); // no progressive report
archive_options_create options;

   // we provide here the reference to an
   // existing archive object, this implies that
   // the archive will be a differential backup
options.set_reference(my_arch);

   // as we are now used to options, we will set a more
   // complex set of other options:
options.set_selection(not_mask(simple_mask("*~")));
options.set_empty_dir(true);
options.set_compression(bzip2);
options.set_compr_mask(not_mask(simple_mask("*.bz2")));
options.set_cache_directory_tagging(true);
options.set_slice_permission("0600");
options.set_slice_user_ownership("root");
options.set_slice_group_ownership("bin");
options.set_crypto_algo(crypto_blowfish);
  // if not specified with set_crypto_pass() the password
  // will be asked interactively to the user
options.set_slicing(100000000, 20480);


libdar::archive *my_other_arch =
     libdar::create_archive_noexcept(dialog,
     "/home",  // saving all under this "root"
     "/tmp",   // where the slices will go
     my_arch,  // differential backup
     "my_archive", // the basename of the slices
     "dar", // dar's slice extensions
     options, // the optional parameter as defined above
     exception, // this gives the status of the call
     except_msg); // and in case of error the cause.

if(exception != LIBDAR_NOEXCEPT)
  std::cout << "an error occurred: " << except_msg
            << std::endl;


As previously, my_other_arch is a write-only object that we won't need anymore, so we destroy it:


     delete my_other_arch;


libdar::close_archive_noexcept(my_other_arch,
                        exception,
                        except_msg);

if(exception != LIBDAR_NOEXCEPT)
  std::cout << "an error occurred: " << except_msg
            << std::endl;



We are at the end of this first part of the tutorial, where we have seen the general way to manipulate dar archives, as the dar command-line tool does. But we still have an object to destroy in order to cleanly release the memory used:


     delete my_arch;


libdar::close_archive_noexcept(my_arch, exception,
                        except_msg);

if(exception != LIBDAR_NOEXCEPT)
  std::cout << "an error occurred: " << except_msg
            << std::endl;


For more detailed information about the API, you can build the API documentation from the source code using Doxygen, or get it online from dar's home page or a mirror site.


12 - Compilation & Linking

Compilation

All the symbols found in the libdar API are defined via <dar/libdar.h> so you should only need to include this header.


> cat my_prog.cpp
#include <dar/libdar.h>


int main()
{
   libdar::get_version(...);
   ...
}
> gcc -c my_prog.cpp



Linking


Of course, you need to link your program with libdar. This is done by adding -ldar plus the other libraries libdar may have been built to use, like libz, libbzip2, liblzo or libgcrypt:


> gcc -ldar -lz -lbzip2 -llzo -lgcrypt my_prog.o -o my_prog


Libdar's different flavors


Well, all the compilation and linking steps described above assume you have a "full" libdar library. Besides the full (alias infinint) libdar flavor, libdar also comes in 32-bit and 64-bit versions. In these last ones, in place of internally relying on a special type (a C++ class called infinint) to handle arbitrarily large integers, libdar32 relies on 32-bit integers and libdar64 relies on 64-bit integers (there are limitations, which are described in doc/LIMITATIONS). But all these libdar versions (infinint, 32-bit, 64-bit) have the same interface and must be used the same way, except for compilation and linking.

These different libdar versions can coexist on the same system; they share the same include files. But the MODE macro must be set to 32 or 64 when compiling for linking with libdar32 or libdar64 respectively. The MODE macro defines the way the "class infinint" type is implemented in libdar, and thus changes the way the libdar header files are interpreted by the compiler.

> cat my_prog.cpp
#include <dar/libdar.h>

int main()
{
   libdar::get_version(...);
   ...
}
> gcc -c -DMODE=32 my_prog.cpp


> gcc -ldar32 my_prog.o -o my_prog


and replace 32 by 64 to link with libdar64.

Note that libdar*.pc files are installed in the $(PREFIX)/lib/pkgconfig directory, which should simplify (depending on the point of view) all these operations. For example, if you have all the different flavors of libdar installed, the $(PREFIX)/lib/pkgconfig directory will contain (among other files) the three following ones:
  • libdar.pc
  • libdar32.pc
  • libdar64.pc
Thus, if you want to build your application with libdar32 for example, you will have to call (assuming you have pkg-config installed)

> gcc `pkg-config --cflags libdar32` -c my_prog.cpp


> gcc `pkg-config --libs libdar32` my_prog.o -o my_prog






13 - Aborting an Operation

If POSIX thread support is available, libdar will be built in a thread-safe manner, thus you may have several threads using libdar calls at the same time. You may then wish to interrupt a given thread. But aborting a thread from the outside (like sending it a KILL signal) will most of the time leave some memory allocated, or even worse can lead to a deadlock situation, when the killed thread was in a critical section and did not get the opportunity to release a mutex. For that reason, libdar proposes a set of calls to abort any processing libdar call run by a given thread.

     // next is the thread ID in which we want to have libdar calls canceled
    // here for simplicity we don't describe the way the ID has been obtained
pthread_t thread_id = 161720;
  
    // the most simple call is :
libdar::cancel_thread(thread_id);
   // this will make any libdar call in this thread be canceled immediately

   // but you can use something a bit more interesting:
libdar::cancel_thread(thread_id, false);
   // this second argument is true for an immediate cancellation,
   // or false for a delayed cancellation, in which case libdar aborts the operation
   // but produces something usable. For example, if you were backing up something,
   // you get a real usable archive which only contains the files saved so far, in place
   // of having a broken archive which misses its catalogue at the end. Note that this
   // delayed cancellation needs a bit more time to complete, depending on the
   // size of the archive under process.


As seen above, cancellation can be very simple. Now what happens when you ask for a cancellation this way? Well, an exception of type Ethread_cancel is thrown. All along its path, memory is released and mutexes are freed. Last, the exception reaches the libdar caller, so you can catch it to define a specific behavior. And if you don't want to use exceptions, a special return code is used.

try
{
   
libdar::archive *my_arch =
             new libdar::archive(...);
    ...
}
catch(libdar::Ethread_cancel & e)
{
    ... do something when thread has been canceled;
}



U_16 ex;
std::string msg;
archive *my_arch =
   libdar::open_archive_noexcept(...,ex,msg);

switch(ex)
{
case ...
  ....
  break;
case LIBDAR_THREAD_CANCEL:
  ... do something when thread has been canceled
  break;
case ...
}


Some helper routines are available to know the cancellation status for a particular thread or to abort a cancellation process if it has not yet been engaged.

 pthread_t tid;
  
   // how to know if the thread tid is under cancellation process ?
if(libdar::cancel_status(tid))
     cout << "thread cancellation is under progress for thread : " << tid << endl;
else
     cout << "no thread cancellation is under progress for thread : " << tid << endl;

   // how to cancel a pending thread cancellation ?
if(libdar::cancel_clear(tid))
    cout << "pending thread cancellation has been reset, thread " << tid << " has not been canceled" << endl;
else
   cout << "too late, could not avoid thread cancellation for thread "<< tid << endl;


Last point, back to the Ethread_cancel exception: this class has two methods you may find useful when you catch it:

try
{
   ... some libdar calls
}
catch(libdar::Ethread_cancel & e)
{
   if(e.immediate_cancel())
       cout << "cancel_thread() has been called with \"true\" as second argument" << endl;
   else
       cout << "cancel_thread() has been called with \"false\" as second argument" << endl;

   U_64 flag = e.get_flag();
    ... do something with the flag variable...
}

    // what is this flag stored in this exception ?
    // You must consider that the complete definition of cancel_thread() is the following:
    // void cancel_thread(pthread_t tid, bool immediate = true, U_64 flag = 0);
   
    // thus, any argument given in third position is passed to the thrown Ethread_cancel exception,
    // a value which can be retrieved thanks to its get_flag() method. The value given to this
    // flag is not used by libdar itself, it is a facility for the user program to have the possibility
    // to include additional information about the thread cancellation.

    // supposing the thread cancellation has been invoked by :
libdar::cancel_thread(thread_id, true, 19);
   // then the flag variable in the catch() statement above would have received
   // the value 19.

A last and important point about multi-threaded environments: an object, like any other variable, cannot be modified or read (through its methods) by several threads at the same time without precaution. Care must be taken to avoid this situation, and the use of POSIX mutexes is recommended in your program if you plan to let an archive object be accessed by more than one thread. See the FAQ for more about this point.
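As an illustration of that recommendation, here is a minimal sketch (not part of the tutorial) where a POSIX mutex serializes the calls made on a shared archive object; the variable names and the choice of op_extract() as the protected call are arbitrary:

#include <pthread.h>

pthread_mutex_t shared_arch_lock = PTHREAD_MUTEX_INITIALIZER;
libdar::archive *shared_arch = NULL;  // opened by one thread, used by several

void restore_from_shared_archive(libdar::user_interaction & dialog)
{
    pthread_mutex_lock(&shared_arch_lock);
    try
    {
        shared_arch->op_extract(dialog, "/tmp", libdar::archive_options_extract(), NULL);
    }
    catch(...)
    {
        pthread_mutex_unlock(&shared_arch_lock);
        throw;   // do not keep the mutex locked if libdar throws (e.g. Ethread_cancel)
    }
    pthread_mutex_unlock(&shared_arch_lock);
}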




14 - Dar_manager API


For more about dar_manager, please read its man page, where the available features are described in detail. Note that for dar_manager there is no "without exception" flavor: your program must be able to handle exceptions, which by the way are the same as the ones described above.

To get dar_manager features you need to use the class database, which is defined in the libdar/database.hpp header file, so you first need to include that file. Most of the methods of the database class use options. For the same reason as previously seen for archive manipulation, these options are passed thanks to container classes. The container classes for options used by the database class are defined in the libdar/database_options.hpp file. Let's see the different methods of the class database:

Database object construction

Two constructors are available:

#include <dar/database.hpp>

void my_sample_function(user_interaction & dialog)
{
    database base;   // we have created an empty database (no archive in it) called "base"

    database other  = database(dialog, "/tmp/existing_base.dmd", database_open_options());
                            // we have created a database object called "other" which contains
                            // (in RAM) all information that were contained in the
                            // database file "/tmp/existing_base.dmd"
                            // I will explain below the last argument

    database_open_options opt;
    opt.set_partial(true);
    database other2 = database(dialog, "/tmp/existing_base.dmd", opt);
                           // we have created a database object called "other2" which differs
                           // from "other" in the option we used. While "other" is a fully loaded
                           // database, "other2" is a partial database. This notion is explained
                           // below
}



So far, this is not very complicated. You can build an empty database from nothing, or load a database into memory from a file using the second constructor. As you can see, besides the filename to give to this latter constructor, we need a user_interaction object to be able to inform the user of any problem that could be met, and an object of class database_open_options. This last object contains the options to use for this call (options are set to their default unless modified explicitly). Currently, the only available option is the "partial" option, which is a boolean argument:

Among the available methods of class database, some require loading the whole database into memory while others only require the database header. Loading just the database header is much faster than loading the whole database, of course, and as you can guess it requires much less memory. While you can perform any operation with a fully loaded database, only a subset of the available methods can be used with a partially loaded one. If you try a method that requires a completely loaded database, you will get an exception if the object you use has been loaded with "true" as the last argument (called "partial") of the constructor. Of course, an empty database (built with the first constructor) is a completely loaded database, so you have no restriction in using a new database object.
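Continuing the my_sample_function() example above, this is what such a failure could look like. This is an illustrative sketch only: the method used here (show_most_recent_stats(), listed below among the calls requiring a fully loaded database) and the exception class to expect should be checked against the API reference.

    try
    {
        other2.show_most_recent_stats(dialog);   // "other2" was loaded partially
    }
    catch(libdar::Egeneric & e)
    {
        dialog.warning(e.get_message());         // report why the call was refused
    }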

But now let's see the available methods for that class:

Database's methods

First we will see methods that work with both partially and completely loaded databases:
  • dump(...) : it is used to write back the database to a file.
  • change_name() : change the basename of the archive whose index is given in argument
  • set_path() : change the path to the archive whose index is given in argument
  • set_options() : change the default options to always pass to dar when performing restoration
  • set_dar_path() : specify the path to dar (use empty string to rely on the PATH variable)
  • show_contents() : list the archives used to build the database
  • get_options() : list the options that will be passed to dar (as defined with the set_options() method)
  • get_dar_path() : return the path to dar (or empty string if relying on the PATH variable)

Now let's see the database methods that only work with completely loaded databases (a small usage sketch follows this list):
  • add_archive() : add an archive to the database
  • remove_archive() : remove an archive from the database
  • set_permutation() : change archive relative order within the database
  • show_files() : list the files which are present in the given archive
  • show_version() : list the archive where the given file is saved
  • show_most_recent_stats() :  compute statistics about the location of most recent file versions
  • restore() : restore a set of files given in argument.
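Below is a purely illustrative sketch of a few of these calls put together: recording an existing archive in a new database, listing the database contents and writing the database to disk. It assumes "an_archive" is an open libdar::archive object (read as shown in the first part of this tutorial) and that the *_options classes come from libdar/database_options.hpp as mentioned above; double check the exact method signatures in the API reference before relying on it.

libdar::database base;        // an empty, thus completely loaded, database

base.add_archive(an_archive, "/tmp", "my_archive", libdar::database_add_options());
base.show_contents(dialog);   // should now list the single archive added above

base.dump(dialog, "/tmp/my_base.dmd", libdar::database_dump_options());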
Well, you might now say that, as a description, this is a bit light for a tutorial. Yes, but in fact these calls are really very simple to use, and you can find a complete description in the reference documentation of the API. This documentation is built if doxygen is available and is put under doc/html after calling make in the source package. It is also available from dar's homepage.



Thanks


I would like to thank Wesley Leggette and Johnathan Burchill for having given their feedback and having made grammar corrections to this document. Beyond this document, I would also like to thank them a second time for their work around dar and libdar (Johnathan is the author of kdar, the KDE front-end for dar).

Regards,
Denis Corbin.

dar-2.4.8/doc/Makefile.in0000644000175000017520000005145712023053151012034 00000000000000# Makefile.in generated by automake 1.11.1 from Makefile.am. # @configure_input@ # Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, # 2003, 2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation, # Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ pkgdatadir = $(datadir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ pkglibdir = $(libdir)/@PACKAGE@ pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = doc DIST_COMMON = README $(dist_noinst_DATA) $(dist_pkgdata_DATA) \ $(srcdir)/Makefile.am $(srcdir)/Makefile.in ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/iconv.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/nls.m4 $(top_srcdir)/m4/po.m4 \ $(top_srcdir)/m4/progtest.m4 $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = SOURCES = DIST_SOURCES = RECURSIVE_TARGETS = all-recursive check-recursive dvi-recursive \ html-recursive info-recursive install-data-recursive \ install-dvi-recursive install-exec-recursive \ install-html-recursive install-info-recursive \ install-pdf-recursive install-ps-recursive install-recursive \ installcheck-recursive installdirs-recursive pdf-recursive \ ps-recursive uninstall-recursive am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__installdirs = "$(DESTDIR)$(pkgdatadir)" DATA = $(dist_noinst_DATA) $(dist_pkgdata_DATA) RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ distclean-recursive maintainer-clean-recursive AM_RECURSIVE_TARGETS = $(RECURSIVE_TARGETS:-recursive=) \ 
$(RECURSIVE_CLEAN_TARGETS:-recursive=) tags TAGS ctags CTAGS \ distdir ETAGS = etags CTAGS = ctags DIST_SUBDIRS = $(SUBDIRS) DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) am__relativize = \ dir0=`pwd`; \ sed_first='s,^\([^/]*\)/.*$$,\1,'; \ sed_rest='s,^[^/]*/*,,'; \ sed_last='s,^.*/\([^/]*\)$$,\1,'; \ sed_butlast='s,/*[^/]*$$,,'; \ while test -n "$$dir1"; do \ first=`echo "$$dir1" | sed -e "$$sed_first"`; \ if test "$$first" != "."; then \ if test "$$first" = ".."; then \ dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ else \ first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ if test "$$first2" = "$$first"; then \ dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ else \ dir2="../$$dir2"; \ fi; \ dir0="$$dir0"/"$$first"; \ fi; \ fi; \ dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ done; \ reldir="$$dir2" ACLOCAL = @ACLOCAL@ AMTAR = @AMTAR@ AR = @AR@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DOXYGEN_PROG = @DOXYGEN_PROG@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GREP = @GREP@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LD = @LD@ LDFLAGS = @LDFLAGS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ NM = @NM@ NMEDIT = @NMEDIT@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ POSUB = @POSUB@ RANLIB = @RANLIB@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ STRIP = @STRIP@ UPX_PROG = @UPX_PROG@ USE_NLS = @USE_NLS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ bindir = @bindir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ datadir = @datadir@ datarootdir = @datarootdir@ docdir = @docdir@ doxygen = @doxygen@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ groff = @groff@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ install_sh = 
@install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ localedir = @localedir@ localstatedir = @localstatedir@ lt_ECHO = @lt_ECHO@ mandir = @mandir@ mkdir_p = @mkdir_p@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ prefix = @prefix@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp = @tmp@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ upx = @upx@ SUBDIRS = samples mini-howto man dist_noinst_DATA = COMMAND_LINE Doxyfile dist_pkgdata_DATA = README Features.html Limitations.html Notes.html Tutorial.html Good_Backup_Practice.html FAQ.html api_tutorial.html dar_doc.jpg dar_s_doc.jpg index.html dar-catalog.dtd Known_Bugs.html authentification.html dar_key.txt from_sources.html api_4_4.html downloading.html presentation.html usage_notes.html INSTALL_HTML_MAN = $(INSTALL) -d $(DESTDIR)$(pkgdatadir)/man ; $(INSTALL) -m 0644 man/*.html $(DESTDIR)$(pkgdatadir)/man UNINSTALL_HTML_MAN = rm -rf $(DESTDIR)$(pkgdatadir)/man @USE_DOXYGEN_TRUE@DOXYGEN = @DOXYGEN_PROG@ all: all-recursive .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --gnu doc/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --gnu doc/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-dist_pkgdataDATA: $(dist_pkgdata_DATA) @$(NORMAL_INSTALL) test -z "$(pkgdatadir)" || $(MKDIR_P) "$(DESTDIR)$(pkgdatadir)" @list='$(dist_pkgdata_DATA)'; test -n "$(pkgdatadir)" || list=; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; \ done | $(am__base_list) | \ while read files; do \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(pkgdatadir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(pkgdatadir)" || exit $$?; \ done uninstall-dist_pkgdataDATA: @$(NORMAL_UNINSTALL) @list='$(dist_pkgdata_DATA)'; test -n "$(pkgdatadir)" || list=; \ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ test -n "$$files" || exit 0; \ echo " ( cd '$(DESTDIR)$(pkgdatadir)' && rm -f" $$files ")"; \ cd "$(DESTDIR)$(pkgdatadir)" && rm -f $$files # This directory's subdirectories are mostly independent; you can cd # into them and run `make' without going through this Makefile. 
# To change the values of `make' variables: instead of editing Makefiles, # (1) if the variable is set in `config.status', edit `config.status' # (which will cause the Makefiles to be regenerated when you run `make'); # (2) otherwise, pass the desired values on the `make' command line. $(RECURSIVE_TARGETS): @fail= failcom='exit 1'; \ for f in x $$MAKEFLAGS; do \ case $$f in \ *=* | --[!k]*);; \ *k*) failcom='fail=yes';; \ esac; \ done; \ dot_seen=no; \ target=`echo $@ | sed s/-recursive//`; \ list='$(SUBDIRS)'; for subdir in $$list; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ dot_seen=yes; \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done; \ if test "$$dot_seen" = "no"; then \ $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ fi; test -z "$$fail" $(RECURSIVE_CLEAN_TARGETS): @fail= failcom='exit 1'; \ for f in x $$MAKEFLAGS; do \ case $$f in \ *=* | --[!k]*);; \ *k*) failcom='fail=yes';; \ esac; \ done; \ dot_seen=no; \ case "$@" in \ distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ *) list='$(SUBDIRS)' ;; \ esac; \ rev=''; for subdir in $$list; do \ if test "$$subdir" = "."; then :; else \ rev="$$subdir $$rev"; \ fi; \ done; \ rev="$$rev ."; \ target=`echo $@ | sed s/-recursive//`; \ for subdir in $$rev; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done && test -z "$$fail" tags-recursive: list='$(SUBDIRS)'; for subdir in $$list; do \ test "$$subdir" = . || ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) tags); \ done ctags-recursive: list='$(SUBDIRS)'; for subdir in $$list; do \ test "$$subdir" = . || ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) ctags); \ done ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES) list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ mkid -fID $$unique tags: TAGS TAGS: tags-recursive $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) set x; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ include_option=--etags-include; \ empty_fix=.; \ else \ include_option=--include; \ empty_fix=; \ fi; \ list='$(SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test ! 
-f $$subdir/TAGS || \ set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: CTAGS CTAGS: ctags-recursive $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test -d "$(distdir)/$$subdir" \ || $(MKDIR_P) "$(distdir)/$$subdir" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ $(am__relativize); \ new_distdir=$$reldir; \ dir1=$$subdir; dir2="$(top_distdir)"; \ $(am__relativize); \ new_top_distdir=$$reldir; \ echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ ($(am__cd) $$subdir && \ $(MAKE) $(AM_MAKEFLAGS) \ top_distdir="$$new_top_distdir" \ distdir="$$new_distdir" \ am__remove_distdir=: \ am__skip_length_check=: \ am__skip_mode_fix=: \ distdir) \ || exit 1; \ fi; \ done check-am: all-am check: check-recursive all-am: Makefile $(DATA) all-local installdirs: installdirs-recursive installdirs-am: for dir in "$(DESTDIR)$(pkgdatadir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-recursive install-exec: install-exec-recursive install-data: install-data-recursive uninstall: uninstall-recursive install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-recursive install-strip: $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ `test -z '$(STRIP)' || \ echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-recursive clean-am: clean-generic clean-libtool clean-local mostlyclean-am distclean: distclean-recursive -rm -f Makefile distclean-am: clean-am distclean-generic distclean-tags dvi: dvi-recursive dvi-am: html: html-recursive html-am: info: info-recursive info-am: install-data-am: install-dist_pkgdataDATA @$(NORMAL_INSTALL) $(MAKE) $(AM_MAKEFLAGS) install-data-hook install-dvi: install-dvi-recursive install-dvi-am: install-exec-am: install-html: install-html-recursive install-html-am: install-info: install-info-recursive install-info-am: install-man: install-pdf: install-pdf-recursive install-pdf-am: install-ps: install-ps-recursive install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-recursive -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-recursive mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-recursive pdf-am: ps: ps-recursive ps-am: uninstall-am: uninstall-dist_pkgdataDATA @$(NORMAL_INSTALL) $(MAKE) $(AM_MAKEFLAGS) uninstall-hook .MAKE: $(RECURSIVE_CLEAN_TARGETS) $(RECURSIVE_TARGETS) ctags-recursive \ install-am install-data-am install-strip tags-recursive \ uninstall-am .PHONY: $(RECURSIVE_CLEAN_TARGETS) $(RECURSIVE_TARGETS) CTAGS GTAGS \ all all-am all-local check check-am clean clean-generic \ clean-libtool clean-local ctags ctags-recursive distclean \ distclean-generic distclean-libtool distclean-tags distdir dvi \ dvi-am html html-am info info-am install install-am \ install-data install-data-am install-data-hook \ install-dist_pkgdataDATA install-dvi install-dvi-am \ install-exec install-exec-am install-html install-html-am \ install-info install-info-am install-man install-pdf \ install-pdf-am install-ps install-ps-am install-strip \ installcheck installcheck-am installdirs installdirs-am \ maintainer-clean maintainer-clean-generic mostlyclean \ mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \ tags tags-recursive uninstall uninstall-am \ uninstall-dist_pkgdataDATA uninstall-hook @USE_DOXYGEN_TRUE@all-local: Doxyfile.tmp @USE_DOXYGEN_TRUE@Doxyfile.tmp: @USE_DOXYGEN_TRUE@ sed -e "s%##VERSION##%@PACKAGE_VERSION@%g" Doxyfile > Doxyfile.tmp @USE_DOXYGEN_TRUE@ cd .. ; $(DOXYGEN) doc/Doxyfile.tmp @USE_DOXYGEN_TRUE@ if [ -d html/search ]; then chmod u+x html/search ; fi @USE_DOXYGEN_TRUE@clean-local: @USE_DOXYGEN_TRUE@ rm -rf html Doxyfile.tmp @USE_DOXYGEN_TRUE@install-data-hook: @USE_DOXYGEN_TRUE@ cp -dR --preserve=mode html $(DESTDIR)$(pkgdatadir) @USE_DOXYGEN_TRUE@ $(INSTALL_HTML_MAN) @USE_DOXYGEN_TRUE@uninstall-hook: @USE_DOXYGEN_TRUE@ rm -rf $(DESTDIR)$(pkgdatadir)/html @USE_DOXYGEN_TRUE@ $(UNINSTALL_HTML_MAN) @USE_DOXYGEN_TRUE@ rmdir $(DESTDIR)$(pkgdatadir) || true @USE_DOXYGEN_FALSE@all-local: @USE_DOXYGEN_FALSE@clean-local: @USE_DOXYGEN_FALSE@install-data-hook: @USE_DOXYGEN_FALSE@ $(INSTALL_HTML_MAN) @USE_DOXYGEN_FALSE@uninstall-hook: @USE_DOXYGEN_FALSE@ $(UNINSTALL_HTML_MAN) # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: dar-2.4.8/doc/Features.html0000644000175000017430000012604712010477201012433 00000000000000 DAR's FEATURES
Dar Documentation


DAR's FEATURES






Here follow the main features of the dar/libdar tools. For each feature, an overview is presented with some pointers you are welcome to follow for more detailed information.



FILTERS
references: man dar, command line usage notes

keywords: -I -X -P -g -[ -] -am
dar is able to back up anything from a whole file system down to a single file, thanks to its filter mechanism. This mechanism is dual headed: the first head lets one decide which part of a directory tree to consider for the operation (backup, restoration, etc.), while the second head defines which type of file to consider (a filter based only on the filename, for example the extension of the file).



DIFFERENTIAL BACKUP references: man dar/TUTORIAL

keywords: -A
When making a backup with dar, you have the possibility to make a full backup or a differential backup. A full backup, as expected, makes a backup of all files as specified on the command line (with or without filters). A differential backup, instead (on top of the filter mechanism), saves only files that have changed since a given reference backup. Additionally, files that existed in the reference backup and which no longer exist at the time of the differential backup are recorded in the backup as "having been removed". At recovery time (unless you deactivate it), restoring a differential backup will update changed files and new files, but also remove files that have been recorded as "having been removed". Note that the reference backup can be a full backup or another differential backup. This way you can make a first full backup, then many differential backups, each taking as reference the last backup made, for example.



SLICES references: man dar/TUTORIAL

keywords: -s -S -p -aSI -abinary
Dar stands for Disk ARchive. From the beginning it was designed to be able to split an archive over several removable media, whatever their number and whatever their size. To restore from such a split archive, dar will directly fetch the requested data in the correct slice(s). Thus dar is able to save and restore using old floppy disks, CD-R, DVD-R, CD-RW, DVD-RW, Zip, Jazz, etc. However, dar will not mount or unmount a removable medium by itself; it stays independent of the hardware. Given the size, it will split the archive into several files (called SLICES), optionally pausing before creating the next one, allowing this way the user to unmount or mount a medium, burn the file on CD-R, send it by email (if your mail system does not allow huge files in emails, dar can help you here also). By default (no size specified), dar will make one slice whatever its size is. Additionally, the size of the first slice can be specified separately, if for example you want to first fill a partially filled disk before starting to use empty ones. Last, at restoration time, dar will just pause and prompt the user, asking for a slice only if it is missing. Note that all these operations can be automated using the "user command between slices" feature (presented below), which lets dar do whatever you want it to do once a slice is created or before reading a slice.



DIRECTORY TREE SNAPSHOT references: man dar

keywords: -A +
Dar can make a snapshot of a directory tree and its files, recording the inode status of the files. This may be used to detect changes in the filesystem, by "diffing" the resulting archive with the filesystem at a later time. The resulting archive can also be used as a reference to save files that have changed since the snapshot was made. A snapshot archive is very small compared to the corresponding full backup, but it cannot be used to restore any data.



COMPRESSION references: man dar

keywords: -z
dar can use compression. By default no compression is used. Currently the gzip, bzip2 and lzo algorithms are implemented, and there is still some room available for other compression algorithms. Note that compression is applied before slicing, which means that using compression together with slices will not make slices smaller, but will probably result in fewer slices in the backup.



DIRECT ACCESS


even when using compression and/or encryption, dar does not have to read the whole backup to extract one file. This way, if you just want to restore one file from a huge backup, the process will be much faster than using tar. Dar first reads the catalogue (i.e. the contents of the backup), then it goes directly to the location of the saved file(s) you want to restore and proceeds to the restoration. In particular, when using slices, dar will ask only for the slice(s) containing the file(s) to restore.



SEQUENTIAL ACCESS
references: man dar
(suitable for tapes)
--sequential-read, -at
The direct access feature seen above is well adapted to random access media like disks, but not to tapes. Since release 2.4.0, dar provides a sequential mode in which dar reads and writes archives sequentially. It has the advantage of being efficient with tapes but suffers from the same drawback as tar archives: it is slow to restore a single file from a huge archive.


HARD LINK CONSIDERATION


hard links are properly saved in any case and properly restored if possible. For example, if restoring across a mounted file system, hard linking will fail, but dar will then duplicate the inode and file contents, issuing a warning. Hard link support includes the following inode types: plain files, char devices, block devices, symlinks (Yes, you can hard link symbolic links! Thanks to Wesley Leggette for the info ;-) )



SPARSE FILES
references: man dar

--sparse-file-min-size, -ah
By default Dar takes care of sparse files, even if the underlying filesystem does not support sparse files(!). When a long sequence of zeroed bytes is met in a file during backup, it is not stored into the archive; the number of zeroed bytes is stored instead (a structure known as a "hole"). When the time comes to restore that file, dar restores the normal data, but when a hole is met in the archive dar skips directly to the position of the data following that hole, which, if the underlying filesystem supports sparse files, will (re)create a hole in the restored file, making it a sparse file. Sparse files can report being several hundred gigabytes large while needing only a few bytes of disk space; being able to properly save and restore them avoids wasting disk space in archives as well as at restoration time.



EXTENDED ATTRIBUTES (EA)
references: man dar
MacOS X FILE FORKS / ACL
keywords: -u -U -am -ae --alter=list-ea
Dar is able to save and restore EA, all or just those matching a given pattern.

File forks (MacOS X) are implemented over EA, as are Linux's ACL; they are thus transparently saved, tested, compared and restored by dar. Note that ACL under MacOS do not seem to rely on EA; as they are marginally used, they are ignored by dar.



ARCHIVE TESTING references: man dar/TUTORIAL/ Good Backup Practice

keywords: -t
thanks to CRC (cyclic redundancy checks), dar is able to detect data corruption in the archive. Only the file where data corruption occurred will be impossible to restore; dar will restore the others, even when compression or encryption (or both) is used.



DATA PROTECTION references: man dar/Parchive integration

keywords: -al
dar relies on the Parchive program for data protection against media errors. Thanks to dar's ability to run a user command or script, and thanks to the ad hoc scripts provided, dar can use Parchive as simply as adding a word (par2) on the command-line. Depending on the context (archive creation, archive testing, ...), dar will by this mean create parity data for each slice, verify and if necessary repair the archive slices.

However, even without Parchive, dar can use an isolated catalogue as a backup of an archive's internal catalogue, whose corruption could otherwise render the whole archive unreadable. The other vital information (like the slice layout) is replicated in each slice; this lets dar overcome data corruption of that part too, and restore at least something in case of a major problem. As a last resort, dar also proposes a "lax" mode in which the user is asked questions (like the compression algorithm used, ...) to help dar recover very corrupted archives. However this does not replace using Parchive and has to be considered as the last resort option.



REMOTE OPERATIONS references: command line usage notes, man dar/dar_slave/dar_xform
USING PIPES keywords: -i -o -
dar is able to produce an archive on its standard output or to a named pipe; it is also able to read an archive from its standard input or from a named pipe, which lets one make remote backups easily.

However this would require reading the archive in sequential mode, which leads to transferring a whole archive just to restore a single file. For that reason, dar is also able to read an archive through a pair of pipes, using dar_slave at one end and dar at the other end. Of the pair of pipes, one pipe lets dar ask dar_slave which portion of the archive to send through the other pipe. This makes a remote restoration much more efficient, and it can still be protected: simply running dar_slave remotely through an ssh session, for example, will let all exchanges be encrypted.



ISOLATION references: man dar

keywords: -C -A -@
the catalogue (i.e. the contents of an archive) can be extracted (this operation is called isolation) to a small file that can in turn be used as a reference for a differential archive. There is then no need to provide an archive to be able to create a differential backup based on it; just its catalogue is necessary. Such an isolated catalogue can also be used to rescue the archive it has been isolated from, in case the archive's internal catalogue has been corrupted. An isolated catalogue can be created at the same time as the archive (an operation called on-fly isolation) or as a separate operation (called isolation).



RE-SHAPE SLICES OF AN EXISTING ARCHIVE references: man dar_xform


The provided program named "dar_xform" is able to change the size of the slices of a given archive. The resulting archive is totally identical to an archive directly created by dar. The source archive can be taken from a set of slices, from standard input or even from a named pipe. Note that dar_xform can work on encrypted and/or compressed data without having to decompress or even decrypt it.
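
For example (a sketch with made-up names), re-cutting an existing archive into 700 MB slices:

    # read old_archive and produce new_archive cut into 700 MB slices
    dar_xform -s 700M /backup/old_archive /backup/new_archive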



USER COMMAND BETWEEN SLICES references: man dar dar_slave dar_xform/command line usage notes

keywords: -E -F -~
Several hooks are provided for dar to call a given command once a slice has been written or before reading a slice. Several macros let the user command or script know the requested slice number, path and archive basename.
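
A sketch of such a hook, assuming the %p, %b and %n substitutions (slice path, basename and number) described in the man page; the command itself is only an example:

    # once each slice is completed, copy it to a remote vault
    dar -c /backup/full -R / -s 1G -E "scp %p/%b.%n.dar backuphost:/vault/"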



USER COMMAND BEFORE AND AFTER SAVING A DIRECTORY OR A FILE
references: man dar/command line usage notes

keywords: -< -> -=
It is possible to define a set of files that will have a command executed before dar starts saving them and again once dar has finished saving them. This is especially intended for backing up live databases. Before entering such a directory dar calls the specified user command, then proceeds to the backup of that directory; once the whole directory has been saved, dar calls the same user command again (with slightly different arguments) and then continues the backup process. Such a user command may, for example, stop a database before its files are saved and restart it afterward.
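
A sketch of such a hook (the script name and path are hypothetical; see the man page for the macros dar substitutes in the command string):

    # call a hook script before and after saving the database files
    dar -c /backup/full -R / -< var/lib/mysql -= "/usr/local/bin/db_hook.sh"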




STRONG ENCRYPTION references: man dar

keywords: -K -J -# -* blowfish, twofish, aes256, serpent256, camellia256
Dar can use the blowfish, twofish, aes256, serpent256 and camellia256 algorithms to encrypt the whole archive. Two "elastic buffers" are inserted and encrypted with the rest of the data, one at the beginning and one at the end of the archive, to hinder clear-text and codebook attacks.
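
A minimal sketch (archive name and passphrase are placeholders); the -K option takes an optional algorithm prefix in front of the passphrase:

    # create an AES-256 encrypted backup of /home
    dar -c /backup/secret -R /home -K aes256:some_passphrase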




SLICE HASHING
references: man dar

keywords: --hash, md5, sha1
When creating an archive, dar can compute an md5 or sha1 hash of each slice before it is written to disk, and produce a small file compatible with md5sum or sha1sum that lets you verify that each slice of the archive is not corrupted.
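
A sketch (names are examples); the hash file is named after the slice it covers:

    # produce one .sha1 file per slice while writing the backup
    dar -c /backup/full -R /home -s 1G --hash sha1
    # later, verify that a slice has not been corrupted on disk
    sha1sum -c /backup/full.1.dar.sha1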




CONFIGURATION FILE references: man dar, conditional syntax and user targets

keywords: -B
dar can read parameters from a file; this is a way to work around the limited length of the command-line. A configuration file can ask dar to read (or include) other configuration files. A simple but efficient mechanism forbids a file from including itself, directly or not, and there is no limit on the nesting depth of included configuration files.

Two special configuration files, $HOME/.darrc and /etc/darrc, are read if they exist. They share the same syntax as any other configuration file, which is the syntax used on the command-line, possibly completed by newlines and comments.

Any configuration file can also contain conditional statements, which describe which options are to be used under which conditions. Conditions include: "restoration", "listing", "testing", "difference", "saving", "isolation", "any operation", "none yet defined" (which may be useful in case of recursive inclusion of files), ...
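
A sketch of such a file and of its use with -B (the file name and its contents are examples; see the conditional syntax and user targets document for the exact keywords usable in conditional statements):

    # /etc/backup/common.dcf -- options shared by all backups
    -R /
    -P proc
    -P sys
    -Z "*.gz" -Z "*.bz2"

    # then, on the command-line
    dar -B /etc/backup/common.dcf -c /backup/full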



SELECTIVE COMPRESSION references: man dar/samples

keywords: -Y -Z -m -am
dar can be given a special filter that determines which files will be compressed and which will not. This way you can speed up the backup by not trying to compress *.mp3, *.mpg, *.zip, *.gz and other already compressed files, for example. Moreover, another mechanism lets you specify that files under a given size (whatever their name) will not be compressed.
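
A sketch (masks and threshold are examples) combining both mechanisms:

    # gzip everything except already-compressed files, and do not even try
    # to compress files smaller than 256 bytes
    dar -c /backup/full -R /home -z -Z "*.mp3" -Z "*.zip" -Z "*.gz" -m 256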



DAR MANAGER references: man dar_manager


The advantage of differential backups is that they take much less space to store and much less time to complete than always making full backups. On the other hand, since the reduced space requirement lets you keep a lot of them, when you want to restore a particular file you may spend time figuring out in which backup its most recent version is located. This is solved by dar_manager: this command-line program gathers the contents information of all your backups, and at restoration time it calls dar for you to restore the requested file(s) from the proper backup.
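
A minimal sketch of a dar_manager session (database and archive names are examples):

    # create a database, register two backups, then restore a file from the right one
    dar_manager -C /backup/base.dmd
    dar_manager -B /backup/base.dmd -A /backup/full
    dar_manager -B /backup/base.dmd -A /backup/diff1
    dar_manager -B /backup/base.dmd -r home/user/report.txt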



FLAT RESTORATION references: man dar

keywords: -f
It is possible to restore any file without restoring the directories and subdirectories it was in at the time of the backup. When this option is activated, all files are restored directly into the (-R) root directory, whatever their recorded position inside the archive.
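
A sketch (paths are examples) restoring a single file without its directory tree:

    # restore one file directly into /tmp/flat, ignoring its recorded path
    dar -x /backup/full -f -R /tmp/flat -g home/user/notes.txt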



NODUMP FLAG references: man dar

keywords: --nodump
The Linux ext2/3/4 filesystems provide a set of flags for each inode, among which is the "nodump" flag, which in substance says "don't save this file for backup". It is used by the so-called dump backup program. Dar can take care not to save the files that have this flag set.
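
A sketch (file names are examples); the flag is set with chattr and then honoured by dar:

    # mark a large cache file as "do not dump", then back up honouring the flag
    chattr +d /home/user/huge_cache.bin
    dar -c /backup/full -R /home --nodump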



ONE FILESYSTEM references: man dar

keywords: -M
dar can back up the files of a given filesystem only: even if some subdirectories in the scope are mount points for other filesystems, dar will not recurse into those directories.



ARCHIVE MERGING references: man dar

keywords: -+ -ak -A -@
Since version 2.3.0, dar supports merging two existing archives into a single one. The merging operation offers the same filtering mechanism used for archive creation, letting the user define which files will be part of the resulting archive.

By extension, archive merging can also take a single archive as input. This may sound a bit strange at first, but it lets you make a subset of a given archive without having to extract any file to disk. In particular, if your filesystem does not support Extended Attributes (EA), this feature still lets you clean an archive of files you no longer want to keep, without losing any EA or changing any standard file attribute (like modification dates) of the files that stay in the resulting archive.

Last, the merging feature also gives you the opportunity to change the compression level or algorithm, as well as the encryption algorithm and passphrase. Of course, from a pair of source archives you can do all of this at once: filter out files you do not want in the resulting archive, use a different compression level and algorithm or a different encryption password and algorithm than the source archive(s), and also use a different archive slicing, or no slicing at all (though dar_xform is more efficient if re-slicing is all you need; see "RE-SHAPE SLICES OF AN EXISTING ARCHIVE" above).
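
A sketch (archive names and mask are examples) merging two archives while filtering and recompressing:

    # merge two archives into one, dropping *.tmp files and compressing with gzip
    dar -+ /backup/merged -A /backup/full -@ /backup/diff1 -z -X "*.tmp"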



ARCHIVE SUBSETTING
references: man dar

keywords: -+ -ak
As seen above in the "archive merging" feature description, it is possible to define a subset of files from an archive and put them into a new archive without having to actually extract these files to disk. To speed up the process, it is also possible to avoid uncompressing/recompressing the files that are kept in the resulting archive, or instead to change their compression, as well as to change the encryption scheme used. Last, you can manipulate files and their EA this way even when EA support is not available on your system.


DECREMENTAL BACKUP references: man dar / Decremental backup

keywords: -+ -ad
As opposed to incremental backups, where the oldest one is a full backup and each subsequent backup only contains the changes since the previous one, decremental backups keep the full backup as the most recent one, while each older backup only contains the changes relative to the backup that immediately follows it. This has the advantage that a single archive is enough to restore a whole system (dar_manager is not necessary), while still reducing the overall amount of data needed to retain older versions of files (the same amount as with differential backups). It also means you do not have to keep several sets of backups: you just delete the oldest backup when you need storage space. The drawback is that each new backup requires creating a full backup, then transforming the previous full backup into a so-called decremental backup. Everything has a cost! ;-)


DRY-RUN EXECUTION
references: man dar

keywords: -e
You can run any operation without effectively performing the action: dar will report any problem but will not create, remove or modify any file.


DIRTY FILES
references: man dar

keywords: --dirty-behavior, --retry-on-change
At backup time, dar checks that each saved file did not change while it was being read. If a file did change in that situation, it is flagged as "dirty" in the archive and handled differently from other files at restoration time: the user can be warned before such files are restored, dirty files can be ignored and not restored at all, or the dirty flag can be ignored and the files restored normally. Dar can also retry saving a file that has been found dirty before actually setting the "dirty" flag for that file in the archive. The retry is limited to a maximum number of attempts per file, after which the file is definitively marked as dirty and the backup process continues with the next file.
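
A sketch (the retry count is an example value):

    # retry saving a file that changed while being read, up to 3 times,
    # before flagging it as dirty in the archive
    dar -c /backup/full -R /home --retry-on-change 3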


ARCHIVE USER COMMENTS
references: man dar

keywords: --user-comment, -l -v, -l -q
The archive header can embed a message from the user. This message is never ciphered nor compressed and is always available to anyone listing the archive summary (-l and -q options). Several macros add more comfort when using this option, such as the current date, the uid and gid used for archive creation, the hostname, and the command-line used to create the archive.


PADDED ZEROS TO SLICE NUMBER
references: man dar

keywords: --min-digits
Dar slices are numbered by integers starting at 1, which gives filenames of the form archive.1.dar, archive.2.dar, ..., archive.10.dar, etc. However, the lexicographical order used by many directory listing tools does not show these slices in order. For that reason, dar lets the user define how many digits (padded with leading zeros) to use for slice numbers, so that usual file browsers list the slices as expected. For example, with a minimum of 3 digits, the slice names become archive.001.dar, archive.002.dar, ..., archive.010.dar.
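
A sketch (names and sizes are examples):

    # produce slices named full.001.dar, full.002.dar, ...
    dar -c /backup/full -R /home -s 1G --min-digits 3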



CACHE DIRECTORY TAGGING STANDARD
references: man dar

keywords: --cache-directory-tagging
Many programs use cache directories (the Mozilla web browser for example): directories holding temporary data that is not worth backing up. The Cache Directory Tagging Standard provides a standard way for applications to identify this type of data, which lets dar take it into account and avoid saving it.

dar-2.4.8/doc/downloading.html0000644000175000017430000002157712010477201013164 00000000000000 Dar's Documentation - Downloading dar/libdar
Dar Documentation


DOWNLOADING DAR/LIBDAR





1 - Packages Signature


Source packages and binary packages for Windows are signed with the author's GPG key. You can find the signatures for the released packages on the home page and on its mirror site. Binary packages for your distro have to be fetched from your distro's site, which will probably provide electronic signatures for them.



2 - Source Packages


Source packages can be found here.


3 - Binary packages for Linux


All Linux distributions provide a binary package for dar/libdar. In fact, the dar source package is most of the time split into three or more packages: one for the libdar library, a second for the dar command-line tools and a third for the dar documentation.


4 - Binary package for Windows


Binary packages for Windows are provided here.

There are several points to be aware of when using dar under Windows:

the binary package is a *.zip file (so you need WinZip or another zip tool to unpack it). It contains a subdirectory (named dar) that you have to extract wherever you want in your directory tree. Optionally you can add the path to dar to the PATH variable in autoexec.bat. Assuming dar has been extracted under C:\dar, you can add the following line to autoexec.bat:

set PATH=%PATH%;C:\Dar

then you have to reboot. (Just kidding! This was to respect the Windows usage and way of life ;-) ) Otherwise, if you don't set up the PATH variable, you need to specify the full path to the dar executables to use them from the Windows command-line prompt.

IMPORTANT NOTES !

Note that paths given to the dar suite programs must follow the UNIX convention (use slashes "/", not backslashes "\"), thus you have to use /temp in place of \temp. Moreover, drive letters cannot be used the usual way, like c:\windows\system32; instead you have to give the path as /cygdrive/c/windows/system32. As you see, the /cygdrive directory is a virtual directory that has all the drives as child directories:

X:\some\file  has to be written  /cygdrive/X/some/file


for example:

c:\dar_win-1.2.1\dar -c /cygdrive/f/tmp/toto -s 2G -z1 -R "/cygdrive/c/My Documents"

  ^             ^         ^   ^                     ^
  |             |         |   |                     |
 ---------------         ---------------------------
here backslashes are used,   but here slashes are used
as usual under Windows,      in the arguments given to dar
to invoke the command



5 - Binary Packages for Mac OS

Binary packages for Mac OS can be fetched from softpedia.

6 - Portage for FreeBSD

A source package port for FreeBSD is available here.


dar-2.4.8/doc/COMMAND_LINE0000644000175000017430000001202512010477201011565 00000000000000Status flags: --------------- ! : not used - : used without arg + : used with argument * : used with optional argument --------------- letters usage: --------------- a * alteration of operation --alter argument: s[aved] | SI[-unit[s]] | binary[-unit[s]] | a[time] | c[time] | m[ask] | n[o-case] | case | r[egex] | g[lob] | e[rase_ea] | k[eep-compressed] | f[ixed_date] | d[ecremental] | l[axist] | t[ape-marks] | h[oles-recheck] | secu | list-ea | i[gnore-unknown-inode-type] | do-not-compare-symlink-mtime | test-self-reported-bug b - terminal bell --bell c + create archive --create d + difference with filesystem --diff e - simulate the operation --empty (aka dry-run) f - do not restore directory structure --flat g + recurse in this directory --go-into h - help usage --help i + path of the input pipe --input j - ask user to make room in virtual memory when it is exhausted --jog k + do not deleted files drop between two backups --no-delete / --deleted { ignore | only } l + list archive contents --list m + minimum size below which no compression will occur. --mincompr a default value is used. use -m 0 if you want to compress in any case. n - do not allow overwriting --no-overwrite o + path of the output pipe --output p + pause before creating new slice --pause q - suppress final statistics report --quiet r - do not overwrite more recent file --recent [=date] (to implement) s + size of slice --slice t + test archive structure --test u + exclude EA from operation (mask) --exclude-ea v + verbose output --verbose --verbose=skipped -vb w * do not warn before overwriting --no-warn x + extract archive --extract y * [DEPRECATED] bzip2 compression --bzip2 z * gzip compression --gzip --compress A + make a differential backup --ref B + filename taken as command-line extension --config C + extract catalogue in separated file --isolate D - store excluded dir as empty dir --empty-dir E + shell command to launch with slices --execute F + shell command to launch with slices of archive of reference --execute-ref G # [OBSOLETE] : catalogue isolation when creating an archive --on-fly-isolate H * if a file for differential backup or diff differs from exactly one hour it is assumed as identical (no change). I + include mask --include J + key for unscrambling the reference catalogue --key-ref K + key for un/scrambling --key L - license information M - only consider what is under the current mounting point. --no-mount-points N - do not read any configuration file ~/.darrc or /etc/darrc O + ignore user Id and group Id --ignore-owner P + prune a directory tree --prune Q + quite on stderr at startup no long option equivalent R + set the root directory --root S + size of the first slice --first-slice T + tree listing format --tree-format, --list-format= U + include EA for the operation (mask) --include-ea V - version information --version W - warranty disclosure information (POSIX RESERVED, sorry) X + exclude mask --exclude Y + included only filename for compression --include-compression Z + excluded filename from compression --exclude-compression - don't save files with nodump flag set --nodump # + encryption block size --crypto-block * + encryption block size for the archive of reference --crypto-block-ref , - cache directory tagging standard --cache-directory-tagging [ + include files listed in given file --include-from-file ] + exclude files listed in given file --exclude-from-file - x (forbidden by getopt) ? 
x (forbidden by getopt) : x (forbidden by getopt) + + merging operation --merge @ + second reference archive for merging --aux-ref $ + key for auxilliary refence archive --aux-key ~ + command to execute between slices --aux-execute % + encryption block size for the auxilliary archive of reference --aux-crypto-block / + policy to solve overwriting conflict --overwriting-policy ^ + ownership and permission of generated slices --slice-mode _ + retry on change --retry-on-change { ! } ! 0 * sequential read of the archive --sequential-read 1 + sparse-file detection tunning --sparse-file-min-size 2 + dirty file behavior --dirty-behavior {ignore|no-warn} 3 + create a hash algorithm (+algo) --hash 4 ! 5 ! 6 ! 7 ! 8 ! 9 ! " + anonymous pipe descriptor to read conf from. --pipe-fd . + user comment --user-comment ; + min_digits --min-digits archive[,ref[,aux]]; < + backup hook mask --backup-hook-include > + backup hook mask --backup-hook-exclude = + backup hook execute --backup-hook-execute \ ! dar-2.4.8/doc/mini-howto/0000755000175000017520000000000012023053205012125 500000000000000dar-2.4.8/doc/mini-howto/dar-differential-backup-mini-howto.en.html0000644000175000017430000013124412010477201022076 00000000000000 DAR differential backup mini-howto -EN-

DAR differential backup mini-howto -EN-

Author: Grzegorz Adam Hankiewicz
Contact: gradha@titanium.sabren.com
Date: 2006-10-07 19:22:40 +0200
Version: H5 (424)
Web site:http://gradha.sdf-eu.org/textos/backup.en.html
Copyright: This document has been placed in the public domain.
Translations:From the web site you can get this document in English, Italian and Spanish.

Introduction

We all should make backups of our important data. This omnipresent advice is usually ignored by most people. I ignored it too, until I lost a good deal of important data. Not having learned enough, I managed to keep losing data in a few later incidents, until I decided that it was enough. Then I browsed Freshmeat for backup solutions allowing differential backups and found DAR.

A complete backup means that all the files falling under your backup policy will be saved. A differential or incremental backup will contain only the files whose contents have changed since the previous backup, either full or differential.

DAR allows you to create easily a set of differential backups. The solution I've developed helps me have an automatic backup solution which runs every night. The first day of the month, a full backup is made. The rest of the month, only differential backups are made. In my situation, very few files change from day to day, sometimes the source code of the project I'm hacking on, and always my mailboxes.

The result is that I can restore the contents of my computer to a specific day with ease, if I ever need to. DAR is a command line program, and it can get slightly complex with a few options. This little mini-howto will explain my custom solution, which is very crude, but works fine for me. Yes, I've actually tested restoring the data from the backup. In fact, during the end of the year 2003 I moved to another country and I took just one CD ROM with me plus a bootable Knoppix, and I recovered the exact state of my Debian installation in a few hours. No customizing, no long installations, no missing files.

This document was written using version 1.3.0 of DAR. When I updated to DAR 2.0.3, everything kept working, I didn't even have to update my backup archives. So it looks like the interface and backup format are pretty stable, or at least backwards compatible. However, don't take everything said here for granted. Verify that the version of DAR you have installed works as expected and you can restore from the generated backup before you have to rely on it.

This version of the text uses reStructuredText (that's what the weird markup in the text version is for). See http://docutils.sourceforge.net/ for more information.

Simple DAR usage

DAR is very similar to tar in the number of options it has: there's plenty for every need, but way too much for beginners to handle. As usual, you can always get help from the program typing dar -h or man dar after you have installed it. Like tar, there's a set of mandatory switches which define the type of operation you are doing (create, extract, list, etc), and a set of switches which affect the selected option. Just for the sake of it, imagine that you want to backup one folder of your home directory. You would write something like this:

dar -c backup_file_without_extension -g file1 -g file2 ... -g fileN

The output should be similar to the following:

$ dar -c my_backup_file -g safecopy.py/ -g translate_chars.py/


 --------------------------------------------
 15 inode(s) saved
 with 0 hard link(s) recorded
 0 inode(s) not saved (no file change)
 0 inode(s) failed to save (filesystem error)
 4 files(s) ignored (excluded by filters)
 0 files(s) recorded as deleted from reference backup
 --------------------------------------------
 Total number of file considered: 19
$ ls
mailbox_date_trimmer/  my_backup_file.1.dar  sdb.py/
mailbox_reader/        safecopy.py/          translate_chars.py/

As you will notice, DAR will add a number and extension to your name. The purpose of the extension is clear, it helps to know visually that the file is a DAR backup. The number is called a slice, and this is related to DAR's built-in feature of splitting a backup over several media. If for example you wanted to make a backup to CD ROM, but your directories are bigger than the capacity of one CD ROM, you can tell DAR to split the archive across as many files as needed, which you can later burn to several units.

Would you like to recover that backup? Pretty easy, type the following:

$ mkdir temp
$ cd temp
$ dar -x ../my_backup_file
file ownership will not be restored as dar is not run as root.
to avoid this message use -O option [return = OK | esc = cancel]
Continuing...


 --------------------------------------------
 15 file(s) restored
 0 file(s) not restored (not saved in archive)
 0 file(s) ignored (excluded by filters)
 0 file(s) less recent than the one on filesystem
 0 file(s) failed to restore (filesystem error)
 0 file(s) deleted
 --------------------------------------------
 Total number of file considered: 15
$ ls
safecopy.py/  translate_chars.py/

The backup strategy

The first step to create a good backup is to determine what parts of your system need one. This doesn't necessarily mean that you can't create a full backup, but most likely splitting it in at least two parts is going to help DAR (or any backup tool) a lot.

My home system consists of two hard disks. The first hard disk is split into a 3.8 GB partition where my complete system lives, and another partition of 11 GB where all my music and other temporary files are stored, like a local Debian package repository I make for myself. The second hard disk has a 9.4 GB partition and its only purpose is to serve as backup of the primary disk. I have no interest in backing up my music, because I have all the original CDs lying around and have scripts to re-ogg them.

From the 3.8 GB I want to backup, usually between 1.3 and 1.5 GB are always empty. I will split logically the used 2.3 GB into system and home directories (at the moment of writing this my home is 588 MB). The reason for this split is that as a normal user, I can only change my home directory and other files from the partitions I won't be backing up. Meanwhile the system part of the partition remains pretty stable and unmodified because I rarely (un)install software. In fact, from my home directory the only things changing usually will be my Mail folder and projects, where I put documents like this one and other software I write/hack.

The basic distinction between home directories and system can be useful in organizations too. If you work for a university, usually all machines will have the same system configuration but depending on the machine their homes will have different data. You can make a system backup of a single machine, and home backups of each computer. Another common configuration is having a centralized server which exports home directories with NFS. Here you only have to backup the server. If you have users with high privileges, leave them the task of doing the system backup of their own machines, the exported home is something they can ignore because it will be done at the server machine.

Once you've decided what to backup, you want to decide how to configure DAR for the backups. You can use switches or configuration files. Switches are OK when you don't have many options. Configuration files are better when you want to make different complex inclusion/exclusion rules of what files you want to backup, and more importantly, you can use comments to document the switch, stating for example the reason why you included this or that directory. This can be useful if you come back several months later and you wonder why all those options are there.

For my setup, I'll be running the DAR commands inside shell scripts called periodically by cron (Setting up some scripts to automate the process), so I don't mind having long command lines, and this very same document serves for the purpose of documenting the scripts. If you prefer configuration files, read DAR's documentation to find out how to use them and the format they use.

Making a full backup with DAR

Here is the full command line I'll be using for my system backup, running as root. Don't worry about the high number of switches, I'll go on describing the purpose of each of them:

dar -m 256 -y -s 600M -D -R / -c `date -I`_data -Z "*.gz" \
   -Z "*.bz2" -Z "*.zip" -Z "*.png" -P home/gradha -P tmp \
   -P mnt -P dev/pts -P proc -P floppy -P burner -P cdrom
  • -m 256

    DAR can compress your backup. The compression is applied to individual files, and it can be bad for small files. By default files with 100 bytes or less won't be compressed. With the -m switch I increase this to 256, which seems to work better for all those little configuration files lying under /etc/ and /home. As you see this is a totally optional switch, basically for tuning freaks like me.

  • -y [level]

    This option activates Bzip2 archive compression, which by default is turned off. You can even specify a numeric compression level, which goes from 0 (no compression) to 9 (best compression, slow processing). Bzip2 by default uses 6, which is the best speed/compression ratio for most files. I don't specify compression level, 6 is fine for me.

  • -s 600M

    Here comes DAR's slice feature. The specified size of 600 Megabytes is the maximum file size DAR will create. If your backup is bigger, you will end up with different backup files each with a slice number before the file extension, so you can save each file to a different unit of your backup media (floppies, zip, CDROM, etc). My backups are much smaller than this size, and I keep this switch just to be safe if I happen to create a big file in my home directory and forget to delete it. If this switch is useful for you, check DAR's manual for the -S switch too.

  • -D

    Stores directories excluded by the -P option or absent from the command line path list as empty directories. This is helpful when you are recovering a backup from scratch, so you don't have to create manually all the excluded directories.

  • -R /

    Specifies the root directory for saving or restoring files. By default this points to the current working directory. We are doing a system backup here, so it will be the root directory.

  • -c `date -I`_data

    This is the mandatory switch I talked of before, and it means to create a backup archive. For those who don't understand what follows, `date -I` is the shell's back tick expansion. In short, date -I will provide a date as YYYY-MM-DD format. With back ticks and used as a parameter, the output of the command will be used as a string of the parent command. This way you can create backup archives with the creation date embedded in the name. If you still don't understand what I'm talking about, try to run the following from the command line:

    echo "Today's date is `date -I`"
    
  • -Z file_pattern

    Using normal file name globing you can specify patterns of files you want to store in your archive without compression. This only has sense if you use the -y switch. Compressing compressed files only yields bigger files and wasted CPU time.

  • -P relative_path

    With this switch you tell DAR which paths you don't want to store in your backup archive. Here you want to put the home directory (I'm the only user on this machine, there are a few more, but they are for testing/system purpose), system directories which aren't really physical files like proc, other drives you may have mounted under mnt (most notably the drive you are putting the backup file), etc, etc. Note that the paths you specify must be relative to the path specified by the -R switch.

That wasn't so hard. Check DAR's manual page for more useful switches you might want to use. And here's the command line I'll be running as a plain user inside my home directory:

dar -m 256 -y -s 600M -D -R /home/gradha -c `date -I`_data \
   -Z "*.gz" -Z "*.bz2" -Z "*.zip" -Z "*.png" \
   -P instalacion_manual -P Mail/mail_pa_leer

Nothing new under the sun. As you see, most of the command line is identical to the other one, I only change the name of the directories I want to exclude with -P and the root directory with the -R switch.

Making differential backups with DAR

Once you have a full backup you can create a differential backup. The first differential backup has to be done using the full backup as reference. The following differential backups use the latest differential backup as reference. Here's the command line for a system differential backup:

dar -m 256 -y -s 600M -D -R / -c `date -I`_diff -Z "*.gz" \
   -Z "*.bz2" -Z "*.zip" -Z "*.png" -P home/gradha -P tmp \
   -P mnt -P dev/pts -P proc -P floppy -P burner -P cdrom \
   -A previous_backup
  • -c `date -I`_diff

    I only change the name of the file, cosmetic purpose.

  • -A previous_backup

    This new switch tells DAR where to find the previous backup so it can create a differential backup instead of a full one. The only thing you have to take care of is not to specify the slice number or the extension in the file name, otherwise DAR will ask you an interactive question at the command line.

The user command line is exactly the same. Here it is for completeness:

dar -m 256 -y -s 600M -D -R /home/gradha -c `date -I`_diff \
   -Z "*.gz" -Z "*.bz2" -Z "*.zip" -Z "*.png" \
   -P instalacion_manual -P Mail/mail_pa_leer -A previous_backup

DAR has another nice feature we don't use here: catalogues. When you create a backup archive with DAR, internally it contains the data plus a catalogue. This catalogue contains information about what files were saved, their dates, their compressed size, etc. You can extract the catalogue and store it separately. Why would you want to do this? To set up networked differential backups.

In order to create a differential backup, you need to provide the previous backup so DAR can decide which files have changed or not. Doing this can be expensive in bandwidth if you work with a network. Instead, after you create a backup, you can extract the catalogue and send it to the machine doing the backups. Next time, you can use this file with the -A switch, and it will all work as if the complete file was there.

This can also be useful if you use slices, because the catalogue is created from the first and last slices. It's more comfortable to pass a single file to the backup command than to carry around the disks of your previous backup.
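
As a hedged sketch of that idea (not part of this howto's own scripts; the paths simply follow its naming scheme), the catalogue of a freshly created backup can be isolated like this and later passed to the -A switch in place of the full archive:

dar -C /var/backups/system/`date -I`_cat -A /var/backups/system/`date -I`_data

Only the small resulting catalogue file needs to be copied to the machine that will create the next differential backup.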

Setting up some scripts to automate the process

As said before, now it's the time to put our backup solution under cron. Place the following executable script for system backup under /root/dar_backup.sh:

#!/bin/sh

DIR=/var/backups/system
FILE=${DIR}/`/bin/date -I`_data
# Commands
/usr/local/bin/dar -m 256 -y -s 600M -D -R / -c $FILE -Z "*.gz" \
   -Z "*.bz2" -Z "*.zip" -Z "*.png" -P home/gradha -P tmp \
   -P mnt -P dev/pts -P proc -P floppy -P burner \
   -P cdrom -P var/backups > /dev/null
/usr/local/bin/dar -t $FILE > /dev/null
/usr/bin/find $DIR -type f -exec chown .gradha \{\} \;
/usr/bin/find $DIR -type f -exec chmod 440 \{\} \;

Some things to notice:

  • DIR is the variable which holds the destination directory.
  • FILE will hold the path to today's backup file.
  • I use full paths for the commands because my root account doesn't have all of them included in the default environment. This is potentially a security risk. Ideally you would like to compile DAR as root and keep your binaries where you make them so nobody can touch them. And run Tripwire over them too.
  • DAR generates statistics after each run. We don't want them in our cron because it will generate unnecessary mail. Only stdout is redirected to /dev/null. Errors will be reported and a mail generated if something goes wrong.
  • The last two find commands are optional. I use them to change file ownership to a normal user, which will later create the backup. Again, another security risk. root should backup that from root, and users should backup their stuff. But with a mono user system, I don't care. If some intruder is good enough to go through my firewall and account passwords to take a look at my backups, I'm already screwed.

Now place the following nearly identical script for differential backups under /root/dar_diff.sh:

#!/bin/sh

DIR=/var/backups/system
FILE=${DIR}/`/bin/date -I`_diff
PREV=`/bin/ls $DIR/*.dar|/usr/bin/tail -n 1`
/usr/local/bin/dar -m 256 -y -s 600M -D -R / -c $FILE -Z "*.gz" \
   -Z "*.bz2" -Z "*.zip" -Z "*.png" -P home/gradha -P tmp -P mnt \
   -P dev/pts -P proc -P floppy -P burner -P cdrom \
   -P var/backups -A ${PREV%%.*} > /dev/null
/usr/local/bin/dar -t $FILE > /dev/null
/usr/bin/find $DIR -type f -exec chown .gradha \{\} \;
/usr/bin/find $DIR -type f -exec chmod 440 \{\} \;

The only two changes are the addition of the -A switch and the generation of the PREV variable with a complicated command line. Let's see what this command line does:

  • First the ls command creates a list of the files with .dar extension in the backup directory. This output is piped to the next command.
  • By default ls displays files alphabetically. tail is used to get the last file with the -n 1 switch, which says to display only the last line.
  • DAR wants to operate on filenames without slice number and extension. This means that if we don't get rid of that trailing part, DAR will stop the operation and ask an interactive question to the user, defeating the purpose of automation. We separate the complete filename with a Bash feature called parameter expansion. There are several possible expansions, you can type man bash to see all of them. The one using %% will remove the longest trailing pattern that matches whatever goes after the %%. The result is the base name we want to pass to DAR.

We only have to put these two scripts under cron control. This is what we have to type after crontab -e:

15 0 2-31 * * ./dar_diff.sh
15 0 1    * * ./dar_backup.sh

Look up in man -S 5 crontab the syntax of the command. In short, those two lines tell cron to run the scripts 15 minutes past midnight. dar_backup.sh will be run only the first day of the month. The other script will be run all the other days.

Here are the backup scripts for your users. They are the same, changing only switches to the DAR command and paths:

#!/bin/sh
# dar_backup.sh

DIR=/var/backups/gradha
FILE=${DIR}/`/bin/date -I`_data
# Commands
/usr/local/bin/dar -m 256 -y -s 600M -D -R /home/gradha -c $FILE \
   -Z "*.gz" -Z "*.bz2" -Z "*.zip" -Z "*.png" \
   -P instalacion_manual -P Mail/mail_pa_leer > /dev/null
/usr/local/bin/dar -t $FILE > /dev/null
/usr/bin/find $DIR -type f -exec chmod 400 \{\} \;

#!/bin/sh
# dar_diff.sh

DIR=/var/backups/gradha
FILE=${DIR}/`/bin/date -I`_diff
PREV=`/bin/ls $DIR/*.dar|/usr/bin/tail -n 1`
/usr/local/bin/dar -m 256 -y -s 600M -D -R /home/gradha -c $FILE \
   -Z "*.gz" -Z "*.bz2" -Z "*.zip" -Z "*.zip" \
   -P instalacion_manual -P Mail/mail_pa_leer \
   -A ${PREV%%.*} > /dev/null
/usr/local/bin/dar -t $FILE > /dev/null
/usr/bin/find $DIR -type f -exec chmod 400 \{\} \;

Don't forget to add the required crontab entries for your user pointing to the appropriate path.

Recovering your backup to a clean machine

When the time comes to restore your backup, depending on what you saved you will have a full backup of one month plus differential backups up to the last time you managed to make. The restoration process is very simple, it's the same as described on the first chapter (Simple DAR usage), only you have to do it first for the full backup, and then for the differential ones. This can be boring, so here's another shell script you can save with your backup files:

#!/bin/sh

if [ -n "$3" ]; then
   CMD="$1"
   INPUT="$2_data"
   FS_ROOT="$3"
   $CMD -x "$INPUT" -w -R "$FS_ROOT"
   for file in ${INPUT:0:8}*_diff*; do
      $CMD -x "${file:0:15}" -w -R "$FS_ROOT"
   done
   echo "All done."
else
   echo "Not enough parameters.

Usage: script dar_location base_full_backup directory

Where dar_location is a path to a working dar binary, base_full_backup
is a date in the format 'YYYY-MM-DD', and directory is the place where
you want to put the restored data, usually '/' when run as root."
fi

The script is pretty self-explanatory. The only thing to note is the -w switch, which tells DAR to overwrite existing files; this is necessary for differential backups. Oh, and place the script in the same directory where you put your backup files. Here's a usage example:

./recover.sh /usr/local/bin/dar 2003-10-01 /tmp/temp_path/

Try to run that as a normal user with a few of your backup files. You can put the result in a temporary directory, so the nice thing is you don't have to wipe your hard disk to test it.

Adding checks to the backup scripts

Denis Corbin suggests that the scripts creating the backups could verify the exit status of the DAR command. For the purpose of these very simple scripts this is not critical because DAR itself will bail out with an error message, and cron will report any output through mail (something which doesn't happen if everything goes right).

However, testing the exit status can be useful if you are testing the scripts interactively and want to know which commands are executed:

#!/bin/sh

DIR=/var/backups/system
FILE=${DIR}/`/bin/date -I`_data
# Commands
if /usr/local/bin/dar -m 256 -y -s 600M -D -R / -c $FILE -Z "*.gz" \
      -Z "*.bz2" -Z "*.zip" -Z "*.png" -P home/gradha -P tmp \
      -P mnt -P dev/pts -P proc -P floppy -P burner \
      -P cdrom -P var/backups > /dev/null ; then
   if /usr/local/bin/dar -t $FILE > /dev/null ; then
      echo "Archive created and successfully tested."
   else
      echo "Archive created but test FAILED."
   fi
else
   echo "Archive creating FAILED."
fi
/usr/bin/find $DIR -type f -exec chown .gradha \{\} \;
/usr/bin/find $DIR -type f -exec chmod 440 \{\} \;

You can test this version easily running the script and killing the DAR process from another terminal or console with killall dar. That will force the termination of the DAR process and you will see that one of the failure branches is reached in the backup script.

Another possible use of testing the status code could be to remove incomplete archives from the hard disk if something went wrong, trigger additional external commands when something fails, or avoid testing the created archive when you know that the first command already failed. The latter can be done easily concatenating both the creation and testing commands with && in a single line. That will tell the shell to run both commands as a sequence and avoid running the second if the first failed.

However, if a power failure happens in the middle of a backup, this version of the script would still leave dangling invalid archives. To prevent this you could enhance the script to do a positive verification. This means creating the backup in a temporary directory along with a *.valid file if the successful branch of the script is reached.

With this strategy, another cron script monitoring the directory where the temporary backups are placed would move to the final backup directory those archives which have a *.valid file, deleting all other whose last modification timestamp is older than one hour.

Ideas for the future

I'm not going to implement these soon, because I'm very lazy, but if you are one of those hyperactive hackers, here are some things which would be nice:

  • Unify both the main and differential scripts into a single one, so if the script is run and there is no main backup for the current month, the main backup will be created. Useful if your machine happens to be down during the time the monthly backup is done.

  • Upgrade the scripts to generate daily a CDROM image with cdrecord and burn it automatically to a rewritable disc placed in your machine. So if your whole hard disk is trashed, you still have the last backup on removable media. Of course, this is limited and cannot be automated if your backup spans more than one CDROM. Do the same for ZIP/JAZZ/whatever you have.

  • Integration of generated backups with a mini Knoppix bootable distribution. Or any other floppy distribution which can be booted from CDROM. So you have a recovery CDROM with tools to format your hard disk, and near it you have a fresh backup to restore a working machine.

  • Synchronisation of backup directories through Internet with remote hosts. Even if the whole machine is burnt physically along with your house, you have up to date backups somewhere else. Could be done easily with programs like rsync through ssh running in a cron job.

  • Factor common parameters into a separate file and include it from your scripts using DAR's -B switch. For instance:

    $ cat > /var/backups/system/common.dcf
    -m 256 -y -s 600M -D -R / -Z "*.gz" -Z "*.bz2" -Z "*.zip" \
    -Z "*.png" -P home/gradha -P tmp -P mnt -P dev/pts \
    -P proc -P floppy -P burner -P cdrom -P var/backups
    

    Later on you could use this in the script:

    DIR=/var/backups/system
    FILE=${DIR}/`/bin/date -I`_data
    # Commands
    /usr/local/bin/dar -B ${DIR}/common.dcf -c $FILE > /dev/null
    /usr/local/bin/dar -t $FILE > /dev/null
    /usr/bin/find $DIR -type f -exec chown .gradha \{\} \;
    

    Which you can reuse in the differential version too!

In fact, clever people out there have already started making such scripts for themselves and are not afraid to share them. To avoid cluttering this mini-howto I'm going to store them as-is at my web page: http://gradha.sdf-eu.org/dar_scripts/.

Feel free to send me your own improvement and I'll add it to the directory. Whether you are sending a single script file or .tar.gz with a whole backup suite, please add a simple .txt file which I'll put near the file, so people can read what the file does before downloading. Please use English in your description, and don't forget to put your name and email so people can send you bugfixes or improvements!

The end

And that's the whole magic. If you have problems, something is unclear or wrong (which is worse), drop me an email. If you find this document useful and want to translate it, send me a translation of the file source.en.txt so I can distribute it along this version and users can find easily their localized version. Talking about locations, you should be able to get the source of this document from my personal home page (link at the beginning of the document).
Enjoy!
dar-2.4.8/doc/mini-howto/Makefile.in0000644000175000017520000003102512023053152014114 00000000000000# Makefile.in generated by automake 1.11.1 from Makefile.am. # @configure_input@ # Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, # 2003, 2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation, # Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ pkgdatadir = $(datadir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ pkglibdir = $(libdir)/@PACKAGE@ pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = doc/mini-howto DIST_COMMON = README $(dist_pkgdata_DATA) $(srcdir)/Makefile.am \ $(srcdir)/Makefile.in ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/iconv.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/nls.m4 $(top_srcdir)/m4/po.m4 \ $(top_srcdir)/m4/progtest.m4 $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = SOURCES = DIST_SOURCES = am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__installdirs = "$(DESTDIR)$(pkgdatadir)" DATA = $(dist_pkgdata_DATA) DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) ACLOCAL = @ACLOCAL@ AMTAR = @AMTAR@ AR = @AR@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DOXYGEN_PROG = @DOXYGEN_PROG@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = 
@FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GREP = @GREP@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LD = @LD@ LDFLAGS = @LDFLAGS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ NM = @NM@ NMEDIT = @NMEDIT@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ POSUB = @POSUB@ RANLIB = @RANLIB@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ STRIP = @STRIP@ UPX_PROG = @UPX_PROG@ USE_NLS = @USE_NLS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ bindir = @bindir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ datadir = @datadir@ datarootdir = @datarootdir@ docdir = @docdir@ doxygen = @doxygen@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ groff = @groff@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ localedir = @localedir@ localstatedir = @localstatedir@ lt_ECHO = @lt_ECHO@ mandir = @mandir@ mkdir_p = @mkdir_p@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ prefix = @prefix@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp = @tmp@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ upx = @upx@ dist_pkgdata_DATA = dar-differential-backup-mini-howto.en.html dar-differential-backup-mini-howto.it.html dar-differential-backup-mini-howto.es.html index.html all: all-am .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --gnu doc/mini-howto/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --gnu doc/mini-howto/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-dist_pkgdataDATA: $(dist_pkgdata_DATA) @$(NORMAL_INSTALL) test -z "$(pkgdatadir)" || $(MKDIR_P) "$(DESTDIR)$(pkgdatadir)" @list='$(dist_pkgdata_DATA)'; test -n "$(pkgdatadir)" || list=; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; \ done | $(am__base_list) | \ while read files; do \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(pkgdatadir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(pkgdatadir)" || exit $$?; \ done uninstall-dist_pkgdataDATA: @$(NORMAL_UNINSTALL) @list='$(dist_pkgdata_DATA)'; test -n "$(pkgdatadir)" || list=; \ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ test -n "$$files" || exit 0; \ echo " ( cd '$(DESTDIR)$(pkgdatadir)' && rm -f" $$files ")"; \ cd "$(DESTDIR)$(pkgdatadir)" && rm -f $$files tags: TAGS TAGS: ctags: CTAGS CTAGS: distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(DATA) installdirs: for dir in "$(DESTDIR)$(pkgdatadir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ `test -z '$(STRIP)' || \ echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . 
= "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." clean: clean-am clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-am -rm -f Makefile distclean-am: clean-am distclean-generic dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-dist_pkgdataDATA @$(NORMAL_INSTALL) $(MAKE) $(AM_MAKEFLAGS) install-data-hook install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-dist_pkgdataDATA @$(NORMAL_INSTALL) $(MAKE) $(AM_MAKEFLAGS) uninstall-hook .MAKE: install-am install-data-am install-strip uninstall-am .PHONY: all all-am check check-am clean clean-generic clean-libtool \ distclean distclean-generic distclean-libtool distdir dvi \ dvi-am html html-am info info-am install install-am \ install-data install-data-am install-data-hook \ install-dist_pkgdataDATA install-dvi install-dvi-am \ install-exec install-exec-am install-html install-html-am \ install-info install-info-am install-man install-pdf \ install-pdf-am install-ps install-ps-am install-strip \ installcheck installcheck-am installdirs maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-generic \ mostlyclean-libtool pdf pdf-am ps ps-am uninstall uninstall-am \ uninstall-dist_pkgdataDATA uninstall-hook install-data-hook: $(INSTALL) -d $(DESTDIR)$(pkgdatadir)/mini-howto $(INSTALL) -m 0644 $(dist_pkgdata_DATA) $(DESTDIR)$(pkgdatadir)/mini-howto uninstall-hook: rm -rf $(DESTDIR)$(pkgdatadir)/mini-howto # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: dar-2.4.8/doc/mini-howto/index.html0000644000175000017430000000351212003275573014057 00000000000000 Dar - Mini-Howto
Dar Documentation


Mini Howto



This Mini-Howto has been written by Grzegorz Adam Hankiewicz. For convenience you can find below a local copy of his work, but you can also fetch an updated version from the official site.

This Mini-Howto has been translated into several languages:


dar-2.4.8/doc/mini-howto/dar-differential-backup-mini-howto.es.html0000644000175000017430000013754012010477201022110 00000000000000 DAR differential backup mini-howto -ES-

DAR differential backup mini-howto -ES-

Author: Grzegorz Adam Hankiewicz
Contact: gradha@titanium.sabren.com
Translator: Grzegorz Adam Hankiewicz
Date: 2006-10-07 19:22:40 +0200
Version: H5 (424)
Web site: http://gradha.sdf-eu.org/textos/backup.es.html
Copyright: Este documento está bajo dominio público.
Translations: De la página web puede obtener este documento en inglés, italiano y español.

Introducción

Todos deberíamos hacer copias de seguridad de nuestros datos importantes. Este consejo omnipresente es habitualmente ignorado por la mayoría de las personas. Yo lo ignoré también, hasta que perdí una buena cantidad de datos importantes. Insatisfecho, continué perdiendo datos en algunos incidentes posteriores, hasta que decidí que era bastante. Entonces busqué programas de copias de seguridad en Freshmeat que permitiesen hacer copias de seguridad diferenciales y encontré DAR.

Una copia de seguridad completa significa que todos los ficheros bajo su política de seguridad serán guardados. Una copia de seguridad diferencial o incremental, sólo contendrá aquellos ficheros cuyos contenidos han cambiado desde la copia de seguridad anterior, ya sea esta completa o diferencial.

DAR le permite crear de forma sencilla un conjunto de copias de seguridad diferenciales. El método que he desarrollado me ayuda a tener copias de seguridad automáticas que se ejecutan cada noche. El primer día del mes, se realiza una copia de seguridad completa. El resto del mes, sólo se realizan copias de seguridad diferenciales. En mi situación, muy pocos ficheros cambian de un día a otro, algunas veces el código fuente del proyecto en el que estoy trabajando, y siempre mis buzones de correo.

El resultado es que puedo recuperar el contenido de mi ordenador a un día específico con facilidad, en caso de necesitarlo. DAR es un programa de línea de comando, y puede hacerse ligeramente complejo con algunas opciones. Este pequeño mini-howto le explicará mi solución personal, que es muy cruda, pero me da buenos resultados. Si, he verificado que puedo recuperar datos de las copias de seguridad. De hecho, a finales del año 2003 me trasladé a otro país y solamente llevé conmigo un CD ROM con una Knoppix autoarrancable, y recuperé el estado exacto de mi instalación Debian en cuestión de horas. Sin personalizaciones, sin largas instalaciones, sin ficheros perdidos.

Este documento fue escrito usando la versión 1.3.0 de DAR. Cuando me actualicé a DAR 2.0.3, todo seguía funcionando, ni siquiera tuve que actualizar mis archivos de copias de seguridad. Así que parece que la interfaz y el formato de copias de seguridad son bastante estables, o al menos compatibles hacia atrás. No obstante, no confíe a ciegas en este documento. Verifique que la versión de DAR que tiene instalada funciona como espera y que puede recuperar una copia de seguridad generada antes de tener que depender de ella.

Esta versión del texto usa reStructuredText (para eso son las marcas extrañas en la versión en modo texto). Lea más sobre esto en http://docutils.sourceforge.net/.

Uso simple de DAR

DAR es muy similar a tar en el número de opciones que tiene: hay suficiente para cada necesidad, pero demasiadas para un novato. Como es habitual, siempre puede obtener ayuda del programa tecleando dar -h o man dar tras su instalación. Al igual que tar, hay un conjunto de parámetros obligatorios que definen el tipo de operación que va a realizar (crear, extraer, listar, etc), y un conjunto de parámetros que afectan la opción seleccionada. Simplemente por probar, imagínese que quiere realizar una copia de seguridad de su directorio home. Escribiría algo así:

dar -c fichero_sin_extension -g file1 -g file2 ... -g fileN

La salida debería ser similar a esto:

$ dar -c mi_copia -g safecopy.py/ -g translate_chars.py/


 --------------------------------------------
 15 inode(s) saved
 with 0 hard link(s) recorded
 0 inode(s) not saved (no file change)
 0 inode(s) failed to save (filesystem error)
 4 files(s) ignored (excluded by filters)
 0 files(s) recorded as deleted from reference backup
 --------------------------------------------
 Total number of file considered: 19
$ ls
mailbox_date_trimmer/  mi_copia.1.dar        sdb.py/
mailbox_reader/        safecopy.py/          translate_chars.py/

Tal y como se habrá dado cuenta, DAR añade un número y extensión a su nombre. El propósito de la extensión es claro, ayuda a saber visualmente que el fichero es una copia de seguridad de DAR. El número es el número de trozo (slice), y está relacionado con la característica de DAR de repartir la copia de seguridad en varios dispositivos de almacenamiento. Si por ejemplo quisiese hacer una copia de seguridad en CD ROM, pero sus directorios son mayores que la capacidad de uno, puede decirle a DAR que reparta el archivo en tantos ficheros como sea necesario, que luego puede grabar en varios CD ROMs.
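
A modo de ejemplo, un boceto mínimo del troceado (el directorio datos/ y los nombres de fichero son hipotéticos; se supone que su contenido no cabe en un solo CD ROM): limitando cada trozo a 650 MB obtendría varios ficheros numerados, cada uno grabable en un CD ROM distinto:

$ dar -c copia_cd -R datos -s 650M
$ ls copia_cd.*
copia_cd.1.dar  copia_cd.2.dar  copia_cd.3.dar

Para extraer o listar después basta con indicar el nombre base (copia_cd); si falta algún trozo, DAR lo pedirá de forma interactiva.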

¿Quiere recuperar su copia de seguridad? Muy sencillo, teclee lo siguiente:

$ mkdir temp
$ cd temp
$ dar -x ../mi_copia
file ownership will not be restored as dar is not run as root.
to avoid this message use -O option [return = OK | esc = cancel]
Continuing...


 --------------------------------------------
 15 file(s) restored
 0 file(s) not restored (not saved in archive)
 0 file(s) ignored (excluded by filters)
 0 file(s) less recent than the one on filesystem
 0 file(s) failed to restore (filesystem error)
 0 file(s) deleted
 --------------------------------------------
 Total number of file considered: 15
$ ls
safecopy.py/  translate_chars.py/
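
Además de crear y extraer, puede listar el contenido de un archivo sin extraerlo, usando la opción -l con el mismo nombre base (aquí se reutiliza el archivo del ejemplo anterior):

$ dar -l mi_copia

La salida muestra, para cada fichero guardado, sus permisos, propietario, tamaño y fecha.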

La estrategia de copias de seguridad

El primer paso para crear una buena copia de seguridad es determinar qué partes de su sistema necesitan una. Esto no significa necesariamente que no puede crear una copia de seguridad completa, sólo que repartir la copia en al menos dos partes puede ayudar mucho a DAR (y cualquier otra herramienta de copias de seguridad).

Mi sistema en casa se compone de dos discos duros. El primero está partido en una partición de 3.8 GB donde vive mi sistema completo, y otra partición de 11 GB donde almaceno mi música y otros ficheros temporales, como un repositorio local de paquetes Debian que hago para mí mismo. El segundo disco duro tiene una partición de 9.4 GB cuyo único propósito es servir de copia de seguridad del disco primario. No tengo interés en realizar copias de seguridad de mi música, porque tengo todos los CDs originales y scripts para recomprimirlos en formato ogg.

De las 3.8 GB que quiero hacer copia de seguridad, normalmente entre 1.3 y 1.5 GB están vacías. Repartiré las 2.3 GB usadas a nivel lógico entre directorios de sistema y home (en el momento de escribir esto, mi home ocupa 588 MB). La razón de esta separación es que como usuario normal sólo puedo cambiar cosas en mi directorio home y otros ficheros de las particiones que no hago copias de seguridad. Mientras, la parte sistema de la partición es bastante estable y no se modifica porque (des)instalo software muy de vez en cuando. De hecho, de mi directorio home las únicas cosas que cambian normalmente son mis directorios Mail y projects, donde pongo este documento y otro software que escribo/hackeo.

La diferenciación básica entre directorios home y de sistema también puede ser útil en organizaciones. Si trabaja para una universidad, normalmente todas las máquinas tendrán la misma configuración de sistema, pero dependiendo de la máquina sus directorios home contendrán datos diferentes. Puede hacer una copia de seguridad de sistema de una sola máquina, y copias de seguridad del home de cada máquina. Otra configuración común es tener un servidor central que exporta los directorios home por NFS. Aquí sólo tiene que hacer copia de seguridad del servidor. Si tiene usuarios con privilegios altos, déjeles la tarea de hacer una copia de seguridad de sistema de sus propias máquinas, el directorio home exportado es algo que pueden ignorar dado que será realizado en el servidor.

Una vez haya decidido qué quiere guardar en su copia de seguridad, debe decidir cómo configurar DAR. Puede usar parámetros o ficheros de configuración. Los parámetros están bien cuando no tiene muchas opciones. Los ficheros de configuración son mejores cuando quiere añadir complejas reglas de inclusión/exclusión de ficheros, y además, puede usar comentarios para documentar los parámetros, indicando por ejemplo la razón por la que incluye tal o cual directorio. Esto puede ser útil si vuelve dentro de unos meses y se pregunta qué hacen todas estas opciones.

Con mi configuración, ejecutaré comandos DAR desde scripts shell llamados periódicamente por cron (Configurando algunos scripts para automatizar el proceso), así que no me importa tener largas líneas de comando, y este mismo documento tiene doble propósito para documentar esos scripts. Si prefiere ficheros de configuración, lea la documentación de DAR para aprender su formato y cómo usarlos.

Copia de seguridad completa con DAR

Aquí está la línea de comando completa que usaré para mi copia de seguridad de sistema, ejecutada como root. No se preocupe por el gran número de parámetros, iré describiendo su propósito uno a uno:

dar -m 256 -y -s 600M -D -R / -c `date -I`_data -Z "*.gz" \
   -Z "*.bz2" -Z "*.zip" -Z "*.png" -P home/gradha -P tmp \
   -P mnt -P dev/pts -P proc -P floppy -P burner -P cdrom
  • -m 256

    DAR puede comprimir su copia de seguridad. La compresión se aplica a ficheros individuales, y puede ser perjudicial para pequeños ficheros. Por defecto los ficheros con 100 bytes o menos no serán comprimidos. Con el parámetro -m incremento este valor a 256, el cual parece funcionar mejor para esos pequeños ficheros de configuración que se almacenan en /etc/ y /home. Como puede ver, esta opción es completamente opcional, básicamente para fanáticos del ajuste como yo.

  • -y [nivel]

    Esta opción activa la compresión Bzip2 del archivo, que por defecto está desactivada. Incluso puede especificar un nivel numérico de compresión, que va de 0 (no compresión) hasta 9 (mejor compresión, procesado lento). Bzip2 por defecto usa 6, que es la mejor relación velocidad/compresión para la mayoría de los ficheros. Yo no uso nivel de compresión, el 6 me va bien.

  • -s 600M

    Aquí está la característica de DAR de trocear. El tamaño especificado de 600 Megabytes es el tamaño máximo de fichero que DAR creará. Si su copia de seguridad es mayor, obtendrá varios ficheros de copia de seguridad, cada uno con su número de trozo antes de la extensión del fichero, para que pueda salvar cada uno en una unidad diferente de almacenamiento (disquetes, zip, CDROM, etc). Mis copias de seguridad son mucho más pequeñas que este tamaño, y mantengo este parámetro sólo por si acaso se me ocurre crear un fichero grande en mi directorio home y olvido borrarlo. Si este parámetro le resulta útil, lea también en el manual de DAR sobre el parámetro -S.

  • -D

    Almacena directorios como directorios vacíos aquellos excluidos por la opción -P o aquellos ausentes en la línea de comando como parámetros. Esto es útil cuando recupera una copia de seguridad desde cero, para que no tenga que crear manualmente todos los directorios que fueron excluidos.

  • -R /

    Especifica el directorio raíz para salvar o recuperar ficheros. Por defecto esto apunta al directorio de trabajo actual. Estamos realizando una copia de seguridad de sistema, así que apuntará al directorio raíz.

  • -c `date -I`_data

    Este es uno de los parámetros obligatorios de los que hablé antes, y significa crear una copia de seguridad. Para aquellos que no entienden lo que sigue, `date -I` es la expansión de comillas de la shell de línea de comando. En pocas palabras, date -I proporcionará la fecha en formato AAAA-MM-DD. Con comillas y usado como parámetro, la salida del comando será usada como cadena del comando padre. De este modo puede crear copias de seguridad con la fecha de creación empotrada en el nombre. Si todavía no sabe de lo que hablo, intente ejecutar lo siguiente desde la línea de comando:

    echo "La fecha de hoy es `date -I`"
    
  • -Z patrón_fichero

    Usando las reglas normales de meta caracteres en ficheros puede especificar patrones de ficheros que quiere almacenar en la copia de seguridad sin compresión. Esto sólo tiene sentido si usa el parámetro -y. Comprimir ficheros comprimidos únicamente crea ficheros mayores y malgasta tiempo de la CPU.

  • -P ruta_relativa

    Con este parámetro le dice a DAR qué rutas no quiere almacenar en su copia de seguridad. Aquí posiblemente quiere poner el directorio home (soy el único usuario de la máquina, hay algunos más, pero con el propósito de pruebas/sistema), directorios de sistema que no son realmente ficheros físicos como proc, otras unidades que pueda tener montadas bajo mnt (destacando la unidad donde va a poner la copia de seguridad), etc, etc. Tenga en cuenta que las rutas que especifique aquí deben ser relativas a la ruta especificada por el parámetro -R.

Eso no fue tan difícil. En el manual de DAR puede leer sobre más parámetros que pueda querer usar. Y aquí está la línea de comando que ejecutaré como usuario dentro de mi directorio home:

dar -m 256 -y -s 600M -D -R /home/gradha -c `date -I`_data \
   -Z "*.gz" -Z "*.bz2" -Z "*.zip" -Z "*.png" \
   -P instalacion_manual -P Mail/mail_pa_leer

Nada nuevo bajo el sol. Como puede ver, la mayoría de la línea de comando es idéntica a la anterior, únicamente cambio el nombre de los directorios que quiero excluir con -P y el directorio raíz con el parámetro -R.

Haciendo copias de seguridad diferenciales con DAR

Un vez tenga una copia de seguridad completa puede crear una copia de seguridad diferencial. La primera copia de seguridad diferencial debe ser realizada usando la copia de seguridad completa como referencia. Las siguientes copias de seguridad diferenciales usan la última copia de seguridad diferencial como referencia. Aquí está la línea de comando para una copia de seguridad diferencial de sistema:

dar -m 256 -y -s 600M -D -R / -c `date -I`_diff -Z "*.gz" \
   -Z "*.bz2" -Z "*.zip" -Z "*.png" -P home/gradha -P tmp \
   -P mnt -P dev/pts -P proc -P floppy -P burner -P cdrom \
   -A copia_previa
  • -c `date -I`_diff

    Sólo cambio el nombre del fichero, por razones cosméticas.

  • -A copia_previa

    Este nuevo parámetro se usa para decirle a DAR dónde puede encontrar la copia de seguridad anterior para que pueda crear una copia de seguridad diferencial en lugar de una completa. La única cosa con la que debe tener cuidado es no especificar ni trozo ni extensión en el nombre del fichero, de lo contrario DAR le realizará una pregunta interactiva en la línea de comando.

La línea de comando de usuario es exactamente igual. Aquí está:

dar -m 256 -y -s 600M -D -R /home/gradha -c `date -I`_diff \
   -Z "*.gz" -Z "*.bz2" -Z "*.zip" -Z "*.png" \
   -P instalacion_manual -P Mail/mail_pa_leer -A copia_previa

DAR tiene otra buena característica que no usamos: catálogos. Cuando crea una copia de seguridad con DAR, internamente contiene todos los datos más un catálogo. Este catálogo contiene información sobre qué ficheros fueron guardados, sus fechas, su tamaño comprimido, etc. Puede extraer un catálogo y almacenarlo por separado. ¿Para qué querría hacer esto? Para configurar copias de seguridad diferenciales por red.

Para poder crear una copia de seguridad diferencial, necesita proporcionar a DAR la copia de seguridad previa para que pueda decidir qué ficheros han cambiado. Realizar esto puede consumir mucho ancho de banda en una red. En su lugar, tras crear la copia de seguridad, puede extraer el catálogo y enviarlo a la máquina que realiza las copias de seguridad. La siguiente vez, puede usar este fichero con el parámetro -A, y funcionará como si el fichero completo estuviese ahí.

Esto también puede ser útil si usa trozos, porque el catálogo se crea a partir del primer y último trozo. Es mucho más cómodo usar un solo fichero con el comando de copia de seguridad en lugar de tener que llevar consigo los discos de la copia de seguridad anterior.
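
A modo de boceto, así podría aislarse el catálogo de la copia completa de un día con la opción -C y usarse después como referencia para la diferencial (las rutas son hipotéticas y se omiten aquí las opciones -Z y -P por brevedad):

$ dar -C catalogo_2003-10-01 -A /var/backups/system/2003-10-01_data
$ dar -m 256 -y -s 600M -D -R / -c `date -I`_diff -A catalogo_2003-10-01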

Configurando algunos scripts para automatizar el proceso

Tal y como se mencionó anteriormente, es hora de configurar las copias de seguridad bajo cron. Ponga el siguiente script ejecutable para copias de seguridad de sistema bajo /root/dar_backup.sh:

#!/bin/sh

DIR=/var/backups/system
FILE=${DIR}/`/bin/date -I`_data
# Commands
/usr/local/bin/dar -m 256 -y -s 600M -D -R / -c $FILE -Z "*.gz" \
   -Z "*.bz2" -Z "*.zip" -Z "*.png" -P home/gradha -P tmp \
   -P mnt -P dev/pts -P proc -P floppy -P burner \
   -P cdrom -P var/backups > /dev/null
/usr/local/bin/dar -t $FILE > /dev/null
/usr/bin/find $DIR -type f -exec chown .gradha \{\} \;
/usr/bin/find $DIR -type f -exec chmod 440 \{\} \;

Algunas cosas a destacar:

  • DIR es la variable que contiene el directorio destino.
  • FILE contendrá la ruta a la copia de seguridad del día.
  • Uso rutas completas para los comandos porque mi cuenta root no las tiene incluidas en el entorno por defecto. Esto es un riesgo de seguridad potencial. Idealmente querría compilar DAR como root y guardar los binarios donde los cree para que nadie pueda tocarlos. Y también ejecutar Tripwire sobre ellos.
  • DAR genera estadísticas tras cada ejecución. No las queremos en nuestro cron porque generarían emails innecesarios. Sólo stdout (la salida estándar) es redireccionada a /dev/null. Los errores serán mostrados y un email enviado si algo va mal.
  • Los últimos dos comandos find son opcionales. Los uso para cambiar el propietario a un usuario normal, quien creará posteriormente las copias de seguridad. De nuevo, otro riesgo de seguridad. El usuario root debería hacer copias de seguridad como root, y los usuarios deberían realizar sus propias copias. Pero en un sistema monousuario me da igual. Si algún intruso es lo suficientemente bueno para atravesar el cortafuegos y las palabras claves de mis cuentas de usuarios para poder leer las copias de seguridad, ya la he fastidiado.

Ahora ponga el siguiente script casi idéntico para copias de seguridad diferenciales en /root/dar_diff.sh:

#!/bin/sh

DIR=/var/backups/system
FILE=${DIR}/`/bin/date -I`_diff
PREV=`/bin/ls $DIR/*.dar|/usr/bin/tail -n 1`
/usr/local/bin/dar -m 256 -y -s 600M -D -R / -c $FILE -Z "*.gz" \
   -Z "*.bz2" -Z "*.zip" -Z "*.png" -P home/gradha -P tmp -P mnt \
   -P dev/pts -P proc -P floppy -P burner -P cdrom \
   -P var/backups -A ${PREV%%.*} > /dev/null
/usr/local/bin/dar -t $FILE > /dev/null
/usr/bin/find $DIR -type f -exec chown .gradha \{\} \;
/usr/bin/find $DIR -type f -exec chmod 440 \{\} \;

Los únicos dos cambios son la adición del parámetro -A y la generación de la variable PREV con una complicada línea de comando. Veamos qué es lo que hace esta línea de comando:

  • Primero el comando ls crea un listado de los ficheros con la extensión .dar en el directorio de copias de seguridad. La salida se pasa por una tubería al siguiente comando.
  • Por defecto ls muestra los ficheros en orden alfabético. Usamos tail para obtener el último fichero con el parámetro -n 1, el cual hace que sólo se muestre la última línea.
  • DAR quiere operar siempre con nombres de fichero sin número de trozo ni extensión. Esto significa que si no nos deshacemos de ellos, DAR detendrá la operación para realizar una pregunta interactiva al usuario, fastidiando toda la automatización. Separamos el nombre completo del fichero con una característica de Bash llamada expansión de parámetros. Hay varios tipos de expansiones posibles, puede teclear man bash para verlas todas. Aquella que usa %% eliminará el patrón final más largo que coincida con lo que va tras %%. El resultado es el nombre base que queremos pasar a DAR, como muestra el ejemplo a continuación.
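
Un pequeño ejemplo de esa expansión de parámetros con un nombre de fichero hipotético:

$ PREV=/var/backups/system/2003-10-01_data.1.dar
$ echo ${PREV%%.*}
/var/backups/system/2003-10-01_data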

Ahora sólo tenemos que poner estos dos scripts bajo cron. Esto es lo que tenemos que teclear tras crontab -e:

15 0 2-31 * * ./dar_diff.sh
15 0 1    * * ./dar_backup.sh

Puede informarse sobre la sintaxis con man -S 5 crontab. En pocas palabras, estas dos líneas le dicen a cron que ejecute los scripts 15 minutos tras medianoche. dar_backup.sh se ejecutará sólo el primer día del mes. El otro script se ejecutará el resto de los días.

Aquí están los scripts de copia de seguridad para sus usuarios. Son iguales, cambiando únicamente los parámetros del comando DAR y algunas rutas:

#!/bin/sh
# dar_backup.sh

DIR=/var/backups/gradha
FILE=${DIR}/`/bin/date -I`_data
# Commands
/usr/local/bin/dar -m 256 -y -s 600M -D -R /home/gradha -c $FILE \
   -Z "*.gz" -Z "*.bz2" -Z "*.zip" -Z "*.png" \
   -P instalacion_manual -P Mail/mail_pa_leer > /dev/null
/usr/local/bin/dar -t $FILE > /dev/null
/usr/bin/find $DIR -type f -exec chmod 400 \{\} \;

#!/bin/sh
# dar_diff.sh

DIR=/var/backups/gradha
FILE=${DIR}/`/bin/date -I`_diff
PREV=`/bin/ls $DIR/*.dar|/usr/bin/tail -n 1`
/usr/local/bin/dar -m 256 -y -s 600M -D -R /home/gradha -c $FILE \
   -Z "*.gz" -Z "*.bz2" -Z "*.zip" -Z "*.zip" \
   -P instalacion_manual -P Mail/mail_pa_leer \
   -A ${PREV%%.*} > /dev/null
/usr/local/bin/dar -t $FILE > /dev/null
/usr/bin/find $DIR -type f -exec chmod 400 \{\} \;

No olvide añadir las entradas crontab requeridas por su usuario apuntando a la ruta adecuada.
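
Por ejemplo, el crontab del usuario podría contener algo así (la hora y las rutas son suposiciones, ajústelas a su caso):

45 0 2-31 * * /home/gradha/bin/dar_diff.sh
45 0 1    * * /home/gradha/bin/dar_backup.sh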

Recuperando su copia de seguridad desde cero

Cuando llegue el momento de recuperar su copia de seguridad, dependiendo de lo que haya guardado tendrá una copia de seguridad completa del mes más copias de seguridad diferenciales hasta la última vez que las pudo realizar. El proceso de recuperación es muy simple, es el mismo descrito en el primer capítulo (Uso simple de DAR), sólo que debe hacerlo primero con la copia de seguridad completa, y entonces con las copias de seguridad diferenciales. Esto puede ser muy aburrido, así que aquí tiene otro script que puede guardar junto con sus ficheros de copia de seguridad:

#!/bin/sh

if [ -n "$3" ]; then
   CMD="$1"
   INPUT="$2_data"
   FS_ROOT="$3"
   $CMD -x "$INPUT" -w -R "$FS_ROOT"
   for file in ${INPUT:0:8}*_diff*; do
      $CMD -x "${file:0:15}" -w -R "$FS_ROOT"
   done
   echo "All done."
else
   echo "Not enough parameters.

Usage: script dar_location base_full_backup directory

Where dar_location is a path to a working dar binary, base_full_backup
is a date in the format 'YYYY-MM-DD', and directory is the place where
you want to put the restored data, usually '/' when run as root."
fi

Este script es auto explicativo. La única cosa por la que debe preocuparse es el parámetro -w, que le dice a DAR que sobreescriba los ficheros que encuentre. Esto es necesario para copias de seguridad diferenciales. Oh, y ponga el script en el mismo directorio que sus ficheros de copia de seguridad. Aquí tiene un ejemplo de uso:

./recover.sh /usr/local/bin/dar 2003-10-01 /tmp/temp_path/

Pruebe ejecutar eso como un usuario normal con algunos ficheros de copias de seguridad. Puede poner el resultado en un directorio temporal, así que lo bueno es que no necesita borrar su disco duro para probarlo.

Añadiendo verificaciones a los scripts

Denis Corbin sugiere que los scripts que crean las copias de seguridad podrían verificar el código de salida del comando DAR. Para el propósito de estos scripts tan simples esto no es crítico porque el propio DAR abortará la operación con un mensaje de error, y cron informará de cualquier salida de error por email (algo que no ocurre si todo va bien).

No obstante, verificar el código de salida puede ser útil si está probando los scripts de forma interactiva y quiere saber qué comandos están siendo ejecutados:

#!/bin/sh

DIR=/var/backups/system
FILE=${DIR}/`/bin/date -I`_data
# Commands
if /usr/local/bin/dar -m 256 -y -s 600M -D -R / -c $FILE -Z "*.gz" \
      -Z "*.bz2" -Z "*.zip" -Z "*.png" -P home/gradha -P tmp \
      -P mnt -P dev/pts -P proc -P floppy -P burner \
      -P cdrom -P var/backups > /dev/null ; then
   if /usr/local/bin/dar -t $FILE > /dev/null ; then
      echo "Archive created and successfully tested."
   else
      echo "Archive created but test FAILED."
   fi
else
   echo "Archive creating FAILED."
fi
/usr/bin/find $DIR -type f -exec chown .gradha \{\} \;
/usr/bin/find $DIR -type f -exec chmod 440 \{\} \;

Puede probar esta versión fácilmente ejecutando el script y matando el proceso DAR desde otra terminal o consola con killall dar. Esto forzará la terminación del proceso DAR y verá que una de las ramas de error es alcanzada en el script.

Otro posible uso de la verificación del código de retorno del comando sería borrar archivos incompletos del disco duro si algo falla, ejecutar comandos externos adicionales si algo falla, o evitar verificar el archivo creado cuando sabe que el primer comando falló. Esto último se puede hacer fácilmente concatenando los comandos de creación y verificación con && en una sola línea. Esto le dice a la shell que ejecute ambos comandos como una secuencia para evitar ejecutar el segundo si el primero falla.
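
Un boceto mínimo de esa concatenación con && (se abrevian las opciones de creación; use las mismas de sus scripts):

DIR=/var/backups/system
FILE=${DIR}/`/bin/date -I`_data
/usr/local/bin/dar -R / -D -c $FILE > /dev/null && \
   /usr/local/bin/dar -t $FILE > /dev/null && \
   echo "Copia creada y verificada."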

No obstante, si falla la corriente eléctrica durante una copia de seguridad, esta versión del script todavía dejaría a medio escribir archivos inválidos. Para prevenir esto podría mejorar el script para realizar una verificación positiva. Esto significa crear el fichero de copia de seguridad en un directorio temporal junto con un fichero *.valid si se alcanza la rama adecuada del script con éxito.

Continuando esta estrategia, otro script cron monitorizando el directorio donde se crean los ficheros temporales de copias de seguridad movería al directorio final aquellos archivos con un fichero *.valid correspondiente, borrando todos los demás cuya última fecha de modificación fuese mayor que una hora.
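
Un posible boceto de esta idea (los nombres de los directorios son suposiciones y las opciones de creación de dar se abrevian; el primer script crea la copia en un directorio temporal y el segundo, ejecutado también desde cron, mueve lo validado y limpia el resto):

#!/bin/sh
# Crea la copia en un directorio temporal y la marca como válida
# sólo si la creación y la verificación terminan bien.
TMP=/var/backups/en_curso
FILE=${TMP}/`/bin/date -I`_data
if /usr/local/bin/dar -R / -D -c $FILE > /dev/null && \
      /usr/local/bin/dar -t $FILE > /dev/null ; then
   touch ${FILE}.valid
fi

#!/bin/sh
# Mueve al directorio final los archivos con marca .valid y borra
# los restos con más de una hora de antigüedad (requiere el find de GNU).
TMP=/var/backups/en_curso
DEST=/var/backups/system
for marca in ${TMP}/*.valid ; do
   [ -e "$marca" ] || continue
   base=`/usr/bin/basename "$marca" .valid`
   mv ${TMP}/${base}.*.dar ${DEST}/ && rm -f "$marca"
done
/usr/bin/find ${TMP} -type f -mmin +60 -exec rm -f \{\} \;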

Ideas para el futuro

No voy a implementar estas pronto, porque soy muy vago, pero si usted es uno de esos hackers hiper activos, aquí tiene algunas cosas que estaría bien tener:

  • Unificar tanto el script principal como el diferencial en uno, por lo que si el script se ejecuta y no hay fichero de copia de seguridad principal para el mes actual, será creado, y de lo contrario se creará uno diferencial (vea el boceto tras esta lista). Útil si su máquina está apagada por alguna razón durante el día del mes que realiza la copia de seguridad no diferencial.

  • Mejorar los scripts para generar una imagen CDROM diaria con cdrecord y grabarla automáticamente en un disco regrabable colocado en su máquina. Por lo que si su disco duro entero resulta dañado, todavía tiene la última copia de seguridad en otro medio de almacenamiento. Por supuesto, esto es limitado y no puede ser automático si su copia de seguridad necesita más de un CDROM. Haga lo mismo para ZIP/JAZZ/loquesea.

  • Integrar las copias de seguridad generadas con una mini distribución Knoppix autoarrancable. O cualquier otra distribución basada en disquetes que puede arrancar desde CDROM. Así tendría un CDROM de rescate con las herramientas para formatear su disco duro, y justo al lado una copia de seguridad fresca con la cual restablecer su máquina a un estado funcional.

  • Sincronización de los directorios con copias de seguridad a través de Internet con máquinas remotas. Así, si su máquina acaba quemándose físicamente junto con su casa, todavía tiene copias de seguridad seguras en alguna otra parte. Podría hacerse de forma sencilla con programas como rsync funcionando por ssh como tarea del cron.

  • Extraer parámetros comunes en un fichero separado e incluirlo en sus scripts usando el parámetro -B de DAR. Por ejemplo:

    $ cat > /var/backups/system/common.dcf
    -m 256 -y -s 600M -D -R / -Z "*.gz" -Z "*.bz2" -Z "*.zip" \
    -Z "*.png" -P home/gradha -P tmp -P mnt -P dev/pts \
    -P proc -P floppy -P burner -P cdrom -P var/backups
    

    Más tarde puede usar esto en el script:

    DIR=/var/backups/system
    FILE=${DIR}/`/bin/date -I`_data
    # Commands
    /usr/local/bin/dar -B ${DIR}/common.dcf -c $FILE > /dev/null
    /usr/local/bin/dar -t $FILE > /dev/null
    /usr/bin/find $DIR -type f -exec chown .gradha \{\} \;
    

    ¡Que también puede reusar en la versión diferencial!
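
Volviendo a la primera idea de esta lista, un boceto mínimo del script unificado (se asume el mismo directorio DIR de los scripts anteriores y el fichero de opciones comunes common.dcf que se acaba de describir; los nombres son orientativos):

#!/bin/sh
DIR=/var/backups/system
MES=`/bin/date +%Y-%m`
# ¿Existe ya una copia completa para el mes actual?
FULL=`/bin/ls $DIR/${MES}-*_data.1.dar 2> /dev/null | /usr/bin/tail -n 1`
if [ -z "$FULL" ]; then
   FILE=${DIR}/`/bin/date -I`_data
   /usr/local/bin/dar -B ${DIR}/common.dcf -c $FILE > /dev/null
else
   FILE=${DIR}/`/bin/date -I`_diff
   PREV=`/bin/ls $DIR/*.dar | /usr/bin/tail -n 1`
   /usr/local/bin/dar -B ${DIR}/common.dcf -c $FILE -A ${PREV%%.*} > /dev/null
fi
/usr/local/bin/dar -t $FILE > /dev/null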

De hecho, hay personas listas que han comenzado a hacer scripts de este estilo para sí mismas y no les asusta compartirlos. Para evitar engordar este mini-howto, voy a guardarlos tal y como son en mi página web: http://gradha.sdf-eu.org/dar_scripts/.

Siéntase libre de enviarme sus propias mejoras y las añadiré al directorio. Ya sea un fichero único o un .tar.gz con una suite de copias de seguridad completa, por favor añada un fichero simple .txt que pondré al lado del fichero. Por favor use inglés en su descripción, ¡y no olvide poner su nombre y dirección de correo para que la gente pueda enviarle correcciones o mejoras!

El fin

Y esa es toda la magia. Si tiene problemas, algo no está claro o es incorrecto (lo cual es peor), mándeme un email. Si encuentra este documento útil y quiere traducirlo, mándeme una traducción del fichero source.en.txt para que pueda distribuirla junto con esta versión y otros usuarios puedan encontrar fácilmente su versión traducida. Hablando de localizar, debería ser capaz de obtener el código fuente de este documento de mi página personal (enlace al comienzo del documento).
¡Disfrute!
dar-2.4.8/doc/mini-howto/Makefile.am0000644000175000017430000000055612003275573014123 00000000000000dist_pkgdata_DATA = dar-differential-backup-mini-howto.en.html dar-differential-backup-mini-howto.it.html dar-differential-backup-mini-howto.es.html index.html install-data-hook: $(INSTALL) -d $(DESTDIR)$(pkgdatadir)/mini-howto $(INSTALL) -m 0644 $(dist_pkgdata_DATA) $(DESTDIR)$(pkgdatadir)/mini-howto uninstall-hook: rm -rf $(DESTDIR)$(pkgdatadir)/mini-howto dar-2.4.8/doc/mini-howto/dar-differential-backup-mini-howto.it.html0000644000175000017430000013771612010477201022122 00000000000000 DAR differential backup mini-howto -IT-

DAR differential backup mini-howto -IT-

Author: Grzegorz Adam Hankiewicz
Contact: gradha@titanium.sabren.com
Translator: David Gervasoni
Contact: davidgerva@gmail.com
Date: 2006-10-07 19:22:40 +0200
Version: H5 (424)
Web site: http://gradha.sdf-eu.org/textos/backup.en.html
Copyright: This document has been placed in the public domain.
Translations: From the web site you can get this document in English, Italian and Spanish.

Introduzione

"Chiunque dovrebbe fare le copie di backup dei suoi dati importanti". Questo avviso presente ovunque è generalmente ignorato da molta gente. Anche io l'ho ignorato, fino al giorno in cui ho perso una considerevole mole di dati. Non abbastanza contento ho fatto in modo di perderne ancora in una serie di successivi incidenti, per poi decidere che ne avevo abbastanza. Ho cercato quindi su Freshmeat qualche programma per la creazione di backup che supportasse anche la creazione di backup differenziali e ho trovato DAR.

Fare un backup completo (o base) significa salvare tutti i files che ricadono sotto le cartelle interessate dalla politica di backup. Un backup differenziale o incrementale conterrà invece solo i files il cui contenuto è cambiato rispetto al precedente backup, fosse esso completo o differenziale.

DAR permette di creare facilmente una serie di backup differenziali. Una soluzione che ho sviluppato esegue ogni notte dei backup automatici. Il primo giorno del mese viene fatto un backup completo. Il resto del mese vengono fatti solo backup differenziali. Per quanto mi riguarda i files che cambiano giornalmente non sono molti: il codice sorgente del progetto a cui sto lavorando e, più spesso, le e-mail.

Così, quando mi serve, posso recuperare con facilità il contenuto che presentava il mio computer uno specifico giorno. DAR si presenta come un programma semplice ed essenziale eseguibile da linea di comando, ma si può rendere un po' più complicato con poche opzioni. Questo piccolo mini-howto vi illustrerà la mia specifica configurazione, molto grossolana, ma, nel mio caso, funzionale. Ho già sperimentato il recupero dei dati dalle copie di backup. Infatti verso la fine del 2003 mi sono trasferito in un altro paese e ho portato con me giusto un CD ROM e una Knoppix bootable e ho recuperato l'esatto stato della mia vecchia installazione Debian in poche ore. Senza modifiche, senza alcuna ulteriore installazione e senza perdere alcun file.

Questo documento è stato scritto usando la versione 1.3.0 di DAR. Quando sono passato alla 2.0.3 tutto funzionava. Non ho nemmeno dovuto aggiornare i miei backup. Quindi sembra che l'interfaccia e i formati di backup siano stabili o al limite compatibili con le versioni precedenti. Comunque non prendete tutto ciò che dico (scrivo) quì come garantito. Verificate prima che la versione di DAR che avete installato funzioni come dovrebbe e potrete, in futuro, recuperare i files dai backup senza problemi.

Per questa versione del testo ho usato reStructuredText (ecco spiegato il misterioso markup nella versione txt). Vedi http://docutils.sourceforge.net/ per maggiori informazioni.

Utilizzo essenziale di DAR

DAR è molto simile a tar nel numero di opzioni che ha: ce n'è una per ogni necessità, ma questo comporta una maggiore difficoltà iniziale per il nuovo utente. Come sempre, in qualsiasi momento, potete avere degli aiuti relativamente ai comandi disponibili scrivendo dar -h o man dar dopo che l'avete installato. Come nel programma tar, esistono una serie di opzioni obbligatorie che definiscono il tipo di operazione che intendete fare (creare, estrarre, listare etc) e un'ulteriore serie di opzioni che modificano la scelta prima effettuata. Giusto per esempio immaginate di voler fare un backup di una cartella della vostra directory /home. Dovrete scrivere qualcosa di simile a questo:

dar -c backup_file_without_extension -g file1 -g file2 ... -g fileN

L'output dovrebbe essere simile al seguente:

$ dar -c my_backup_file -g safecopy.py/ -g translate_chars.py/


 --------------------------------------------
 15 inode(s) saved
 with 0 hard link(s) recorded
 0 inode(s) not saved (no file change)
 0 inode(s) failed to save (filesystem error)
 4 file(s) ignored (excluded by filters)
 0 file(s) recorded as deleted from reference backup
 --------------------------------------------
 Total number of file considered: 19
$ ls
mailbox_date_trimmer/  my_backup_file.1.dar  sdb.py/
mailbox_reader/        safecopy.py/          translate_chars.py/

Come avrete notato DAR aggiunge al nome del file un numero e un'estensione. Il motivo dell'estensione è chiaro, aiutare a capire che il file è un backup fatto con DAR. Il numero è chiamato slice ed è connesso alla possibilità di DAR di dividere il file di backup in base a grandezze specificate, in modo da poterle memorizzare su diversi supporti. Se per esempio voleste avere i backup su CD ROM, ma i backup delle vostre directory sono più grandi della capacità del CD ROM, potete chiedere a DAR di dividere l'archivio in tanti files che potrete poi memorizzare su diverse unità.

Volete recuperare questo backup? Scrivete semplicemente i seguenti comandi:

$ mkdir temp
$ cd temp
$ dar -x ../my_backup_file
file ownership will not be restored as dar is not run as root.
to avoid this message use -O option [return = OK | esc = cancel]
Continuing...


 --------------------------------------------
 15 file(s) restored
 0 file(s) not restored (not saved in archive)
 0 file(s) ignored (excluded by filters)
 0 file(s) less recent than the one on filesystem
 0 file(s) failed to restore (filesystem error)
 0 file(s) deleted
 --------------------------------------------
 Total number of file considered: 15
$ ls
safecopy.py/  translate_chars.py/

La politica di backup

Il primo passo per creare backup funzionali è determinare quali parti del vostro sistema necessitano di essere archiviate. Questo non sta a significare che non potete semplicemente fare un backup del vostro intero sistema, ma dividerlo in almeno due parti aiuterà molto DAR (o qualsiasi altro tool di backup) nel suo lavoro.

Il sistema implementato in casa mia conta di due hard disk. Il primo hard disk è diviso in una partizione da 3.8 GB, dove risiede il mio intero sistema, e un'altra partizione da 11 GB dove sono memorizzati tutta la mia musica e altri file temporanei, ad esempio alcuni pacchetti Debian fatti da me. Il secondo hard disk ha una partizione da 9.4 GB e il suo unico scopo è di servire come backup del disco primario. Non mi interessa fare il backup dei file musicali perchè ho tutti i cd originali e uno script per estrarre di nuovo le tracce e riconvertirle in ogg.

Della partizione da 3.8 GB di cui voglio fare il backup generalmente sono liberi all'incirca 1.3 - 1.5 Gb. Ho diviso "logicamente" i 2.3 GB occupati in system e home directories (mentre scrivo, la mia home è di 588 MB). La ragione di questa divisione è che, come un normale utente, posso esclusivamente modificare il contenuto della mia home directory e alcuni file della partizione di cui non ho intenzione di fare il backup. Contemporaneamente il settore della partizione in cui risiede il sistema rimane abbastanza stabile e immutato perchè raramente (dis)installo software. Infatti anche nella mia home directory le sole cose che cambiano sono abitualmente la mia cartella Mail e progetti, dove metto documenti come questo e altri software che scrivo/modifico.

La distinzione di base fra home directories e system può essere anche utile nella normale organizzazione. Se lavori per una università spesso tutte le macchine hanno la stessa configurazione di base, ma ogni macchina avrà i suoi dati memorizzati. Puoi fare un singolo system backup di una singola macchina e più home backup per ogni computer. Un'altra configurazione comune è l'esistenza di un server centrale che condivide le home directories in NFS. In questo modo dovete solo fare il backup del server. Se vi sono utenti con privilegi di alto livello permettete loro di fare il backup del sistema delle loro proprie macchine, il backup delle home lo possono ignorare visto che se ne occuperà il server.

Una volta deciso di quali parti del sistema volete fare il backup, dovete decidere come configurare DAR. Potete usare le opzioni o i file di configurazione. Le opzioni sono utili quando non ne avete troppe da specificare. I file di configurazione sono invece meglio quando volete fare backup differenti, complessi, con inclusioni/esclusioni; inoltre potete usare commenti per documentare le opzioni specificate spiegando per esempio perchè includete/escludete questa o quella directory. Ciò può essere utile se tornate ad utilizzare il computer dopo molto tempo e volete sapere il perchè di ogni opzione.

La mia configurazione fa partire il programma DAR con una script shell richiamato periodicamente da cron (Qualche script per automatizzare i processi), così non devo digitare ogni volta lunghe stringhe di comando. Questo breve documento vuole anche introdurre brevemente alla creazione di tali scripts. Se preferite utilizzare i file di configurazione leggete la documentazione allegata a DAR per sapere come e quale sintassi utilizzare.

Eseguire backup di base (full backup) con DAR

Ecco qua sotto l'intera linea di comando che, da root, devo utilizzare per il backup del mio sistema. Non dovete preoccuparvi vedendo il gran numero di opzioni inserite, successivamente descriverò il motivo di ognuna di esse:

dar -m 256 -y -s 600M -D -R / -c `date -I`_data -Z "*.gz" \
   -Z "*.bz2" -Z "*.zip" -Z "*.png" -P home/gradha -P tmp \
   -P mnt -P dev/pts -P proc -P floppy -P burner -P cdrom
  • -m 256

    DAR può comprimere i backup. La compressione è applicata a ogni file e può essere anche inutile per file di ridotte dimensioni. Di default, file di 100 bytes o meno non vengono compressi. Con l'opzione -m si porta questo limite a 256, cosa che sembra funzionare meglio per tutti quei piccoli file di configurazione che stanno sotto /etc/ e /home. Come potete notare questa è un'opzione assolutamente facoltativa, quasi un "capriccio".

  • -y [level]

    Questa opzione attiva la compressione Bzip2 che di default non è attiva. Potete anche specificare un livello di compressione tramite un numero che può andare da 0 (nessuna compressione, processo veloce) a 9 (miglior compressione, processo lento). Bzip2 di default usa il livello 6 che è il rapporto migliore velocità/compressione per la maggior parte dei file. Personalmente non specifico il livello di compressione, 6 mi va più che bene.

  • -s 600M

    Ecco quà l'opzione di DAR che vi permette di definire la dimensione dei file di backup o, meglio, delle slice. La grandezza specificata, in questo caso di 600 MB, sarà il massimo spazio occupato dai file creati. Se il vostro backup è più grande, ritroverete differenti file di backup con un numero di progressione inserito appena prima dell'estensione, cosìcchè potrete salvare ogni file su differenti supporti (floppies, zip, CDROM, etc). I miei backup sono molto più piccoli di questa dimensione e mantengo questa opzione giusto per tranquillità, nel caso i file diventassero più grandi. Se pensate che questa opzione possa esservi utile potete leggere il manuale di dar per saperne di più.

  • -D

    Memorizza il nome e il percorso delle directory escluse dall'opzione -P o che non ci sono fra quelle specificate alla linea di comando. Questa è un'opzione utile quando state recuperando un backup dal nulla; in questo modo non dovete creare manualmente tutte le directory escluse.

  • -R /

    Specifica la directory di root (directory radice) in cui salvare o dalla quale 'leggere' i file interessati dal backup. Di default questa è la directory in cui si sta lavorando (./). Se stiamo facendo un backup di sistema dalla cartella x, ecco che questa sarà la directory di root.

  • -c `date -I`_data

    Questa è l'opzione obbligatoria di cui vi ho parlato prima e definisce la creazione del backup. Per chi non capisce ciò che segue `date -I` è un trucchetto della shell. Brevemente, date -I restituisce una data con formato YYYY-MM-DD. L'output del comando fra gli apici singoli sarà usato come input dell'opzione -c. In questo modo potete creare backup con la data di creazione direttamente nel nome del file. Se ancora non capite di cosa sto parlando, provate la seguente istruzione dalla linea di comando:

    echo "La data di oggi è `date -I`"
    
  • -Z file_pattern

    Usando come argomento normali estensioni di file potete decidere quali file volete memorizzare nel vostro backup senza che siano compressi. Questo ha senso solo se usate anche l'opzione -y. Comprimendo file compressi otterrete al massimo file più grandi, nonchè spreco di risorse e occupazione della CPU.

  • -P relative_path

    Con questa opzione dite a DAR quali directory non volete memorizzare nel vostro backup. Quì potreste mettere ad esempio la /home (Sono l'unico utilizzatore di questa macchina, ce ne sono pochi altri, ma solo per testare alcune funzioni), directory di sistema che non sono realmente dei file, come proc, altri file che potreste aver montati sotto mnt (come, ovviamente, il drive in cui metterete i file di backup) etc, etc. Notate che i percorsi che inserite devono essere relativi a quello specificato con l'opzione -R.

Tutto ciò non è poi così difficile. Controllate le pagine di manuale di DAR per maggiori informazioni sulle opzioni che vi interessa usare. Ed ecco quì il comando che uso all'interno della mia home:

dar -m 256 -y -s 600M -D -R /home/gradha -c `date -I`_data \
   -Z "*.gz" -Z "*.bz2" -Z "*.zip" -Z "*.png" \
   -P instalacion_manual -P Mail/mail_pa_leer

Nulla di nuovo sotto il sole. Come potete vedere molti dei comandi sono identici a quelli 'di cui sopra', ho solo cambiato il nome delle directories che voglio escludere utilizzando l'opzione -P e la directory radice con l'opzione -R.

Eseguire backup differenziali con DAR

Una volta che avete creato un backup base, potete creare quelli differenziali. Il primo backup differenziale deve essere creato usando quello di base come riferimento. I backup differenziali successivi useranno come riferimento l'ultimo backup differenziale disponibile. Ecco quì il comando per un backup differenziale del sistema:

dar -m 256 -y -s 600M -D -R / -c `date -I`_diff -Z "*.gz" \
   -Z "*.bz2" -Z "*.zip" -Z "*.png" -P home/gradha -P tmp \
   -P mnt -P dev/pts -P proc -P floppy -P burner -P cdrom \
   -A previous_backup
  • -c `date -I`_diff

    Ho solo cambiato il nome del file, per un motivo... "pratico".

  • -A previous_backup

    Questa nuova opzione viene usata per dire a DAR dove trova il file di backup precedente in modo da creare un backup differenziale invece di uno base. L'unica cosa alla quale fare attenzione è che voi non dovete specificare nè il numero progressivo nè l'estensione, diversamente DAR porrebbe una richiesta alla linea di comando.

La linea di comando dell'utente è esattamente la stessa. Ecco quà per completezza:

dar -m 256 -y -s 600M -D -R /home/gradha -c `date -I`_diff \
   -Z "*.gz" -Z "*.bz2" -Z "*.zip" -Z "*.png" \
   -P instalacion_manual -P Mail/mail_pa_leer -A previous_backup

DAR ha un'altra interessante caratteristica che quì non usiamo: i cataloghi. Quando create un backup con DAR questo contiene i dati e un catalogo. Questo catalogo contiene informazioni inerenti i file che sono stati salvati: la loro data, la loro dimensione dopo la compressione, etc. Potete estrarre il catalogo e memorizzarlo separatamente. Perchè dovreste farlo? Per implementare backup differenziali in rete, ad esempio.

Al fine di creare un backup differenziale dovete procurare a DAR il backup precedente in modo che il programma possa decidere quali file sono stati modificati e quali no. Facendo questo lavoro su di una rete ciò può occupare molta banda. Invece, dopo aver creato il backup, potete estrarre il catalogo e inviarlo alla macchina designata alla creazione dei backup. Successivamente potete usare questo file con l'opzione -A, in questo modo DAR lavorerà come se il file del backup base fosse quello.

Questo può essere anche utile se usate le slices perchè il catalogo è creato per la prima e l'ultima slice. E' più semplice passare al comando un singolo file piuttosto che dover utilizzare tutti i dischi del vostro precedente backup.

Qualche script per automatizzare i processi

Come ho detto prima è venuto il momento di mettere la nostra procedura di backup sotto cron. Mettendo il seguente script eseguibile per il backup del sistema sotto /root/dar_backup.sh:

#!/bin/sh

DIR=/var/backups/system
FILE=${DIR}/`/bin/date -I`_data
# Commands
/usr/local/bin/dar -m 256 -y -s 600M -D -R / -c $FILE -Z "*.gz" \
   -Z "*.bz2" -Z "*.zip" -Z "*.png" -P home/gradha -P tmp \
   -P mnt -P dev/pts -P proc -P floppy -P burner \
   -P cdrom -P var/backups > /dev/null
/usr/local/bin/dar -t $FILE > /dev/null
/usr/bin/find $DIR -type f -exec chown .gradha \{\} \;
/usr/bin/find $DIR -type f -exec chmod 440 \{\} \;

Alcune cose da notare:

  • DIR è la variabile che rappresenta la directory di destinazione.
  • FILE rappresenta il percorso del file di backup di oggi.
  • Uso percorsi assoluti nei comandi perchè il mio account di root non li ha tutti inclusi nell'ambiente di default. Questo è potenzialmente un rischio in ambito di sicurezza. Idealmente dovreste compilare DAR come root e mantenere i binari dove li avete creati, così nessuno potrà toccarli o eseguirvi Tripwire.
  • DAR genera statistiche dopo ogni esecuzione. A noi non servono se eseguite in cron perchè produrrebbero solo mail inutili. Lo stdout è rediretto a /dev/null. Gli errori saranno invece riportati in una mail nel caso qualcosa andasse storto.
  • Gli ultimi due comandi find sono opzionali. Li uso per cambiare i permessi dei file per un normale utente che creerà successivamente i backup. Un ulteriore rischio in fatto di sicurezza. Root dovrebbe eseguire il backup dei file da root e gli utenti i loro. Ma con un sistema mono-user questo non è importante. Se un ipotetico intruso è capace di passare attraverso il mio firewall, inserire la mia password e quindi guardare tutti i miei backup: sono fregato.

Ora ponete il seguente script per i backup differenziali, quasi identico al precedente, sotto /root/dar_diff.sh:

#!/bin/sh

DIR=/var/backups/system
FILE=${DIR}/`/bin/date -I`_diff
PREV=`/bin/ls $DIR/*.dar|/usr/bin/tail -n 1`
/usr/local/bin/dar -m 256 -y -s 600M -D -R / -c $FILE -Z "*.gz" \
   -Z "*.bz2" -Z "*.zip" -Z "*.png" -P home/gradha -P tmp -P mnt \
   -P dev/pts -P proc -P floppy -P burner -P cdrom \
   -P var/backups -A ${PREV%%.*} > /dev/null
/usr/local/bin/dar -t $FILE > /dev/null
/usr/bin/find $DIR -type f -exec chown .gradha \{\} \;
/usr/bin/find $DIR -type f -exec chmod 440 \{\} \;

Gli unici due cambiamenti sono le aggiunte dell'opzione -A e la generazione della variabile PREV con una linea di comando un po' complicata. Vediamo cosa fa questa linea di comando:

  • Prima di tutto, il comando ls crea una lista dei file con estensione .dar presenti nella directory di backup; questo output è rediretto al comando successivo.
  • Di default ls elenca i file in ordine alfabetico. tail è usato per ottenere l'ultimo file con l'opzione -n 1 che ordina di mostrare solo l'ultima riga.
  • DAR necessita di lavorare con filenames senza il numero di slice e senza estensione. Se non correggiamo noi il nome del file, DAR fermerà il processo e chiederà all'utente se effettuare l'operazione in modo automatico o meno. Separiamo quindi il nome del file con una feature Bash, chiamata parametro d'espansione. Ci sono diverse possibili espansioni, potete digitare man bash per vederle tutte. Usando %% rimuoviamo la più lunga "coda" di caratteri che si trova dopo il %%. Il risultato è il nome base che vogliamo passare a DAR.

Ora dobbiamo solo mettere questi due script sotto il controllo di cron. Questo è ciò che dobbiamo scrivere dopo il comando crontab -e:

15 0 2-31 * * ./dar_diff.sh
15 0 1    * * ./dar_backup.sh

Controllate in man -S 5 crontab la sintassi del comando. In breve queste due linee dicono a cron di far partire i processi 15 minuti dopo la mezzanotte. dar_backup.sh verrà eseguito solo il primo giorno del mese. L'altro script verrà eseguito tutti gli altri giorni.

Ecco quì gli scripts di backup per i vostri utenti. Essi sono identici, cambiano solo alcune opzioni di DAR e i percorsi:

#!/bin/sh
# dar_backup.sh

DIR=/var/backups/gradha
FILE=${DIR}/`/bin/date -I`_data
# Commands
/usr/local/bin/dar -m 256 -y -s 600M -D -R /home/gradha -c $FILE \
   -Z "*.gz" -Z "*.bz2" -Z "*.zip" -Z "*.png" \
   -P instalacion_manual -P Mail/mail_pa_leer > /dev/null
/usr/local/bin/dar -t $FILE > /dev/null
/usr/bin/find $DIR -type f -exec chmod 400 \{\} \;

#!/bin/sh
# dar_diff.sh

DIR=/var/backups/gradha
FILE=${DIR}/`/bin/date -I`_diff
PREV=`/bin/ls $DIR/*.dar|/usr/bin/tail -n 1`
/usr/local/bin/dar -m 256 -y -s 600M -D -R /home/gradha -c $FILE \
   -Z "*.gz" -Z "*.bz2" -Z "*.zip" -Z "*.zip" \
   -P instalacion_manual -P Mail/mail_pa_leer \
   -A ${PREV%%.*} > /dev/null
/usr/local/bin/dar -t $FILE > /dev/null
/usr/bin/find $DIR -type f -exec chmod 400 \{\} \;

Non dimenticate di aggiungere a crontab le stringhe richieste per i votri utenti.

Estrarre i backup su macchine vuote

Venuto il momento di recuperare i vostri backup, in base a quello che avete salvato, avrete il backup completo del mese e tanti backup differenziali quanti quelli che avete fatto. Il processo di recupero dei dati è molto semplice: è uguale a quello descritto nel primo paragrafo (Utilizzo essenziale di DAR), l'importante è che prima recuperiate il backup base e solo successivamente quelli differenziali. Questo può essere noioso, così ecco quà un'altro script che potete salvare fra i vostri file di backup:

#!/bin/sh

if [ -n "$3" ]; then
   CMD="$1"
   INPUT="$2_data"
   FS_ROOT="$3"
   $CMD -x "$INPUT" -w -R "$FS_ROOT"
   for file in ${INPUT:0:8}*_diff*; do
      $CMD -x "${file:0:15}" -w -R "$FS_ROOT"
   done
   echo "All done."
else
   echo "Not enough parameters.

Usa: script dar_location base_full_backup directory

Dove dar_location è un percorso alla directory con i binari di dar,
base_full_backup è una data in formato 'YYYY-MM-DD' e directory è
il posto dove volete mettere i file recuperati, solitamente '/'
quando eseguito come root."
fi

Lo script si spiega da solo. L'unica cosa alla quale dovete fare attenzione è l'opzione -w che dice a DAR di sovrascrivere i file trovati. Questo è obbligatorio per i backup differenziali. Ricordate di mettere lo script nella stessa directory dove mettete i file di backup. Ecco un'utilizzo di esempio:

./recover.sh /usr/local/bin/dar 2003-10-01 /tmp/temp_path/

Provate ad utilizzare questo come utente normale con pochi file di backup. Potete mettere i file recuperati in una directory temporanea, così non dovete svuotare il vostro hard disk per provarlo.

Aggiungere dei controlli allo script di backup

Denis Corbin suggerisce che lo script di creazione dei backup verifichi anche l'exit status dei comandi di DAR. Per quanto riguarda questo script così semplice, ciò non è di importanza critica perchè DAR stesso stamperebbe a schermo un messaggio d'errore e cron lo riporterebbe via mail (cosa che normalmente non succede se tutto va per il verso giusto)

Comunque testare l'exit status può essere utile se state verificando il funzionamento dello script e volete sapere quali comandi sono eseguiti:

#!/bin/sh

DIR=/var/backups/system
FILE=${DIR}/`/bin/date -I`_data
# Commands
if /usr/local/bin/dar -m 256 -y -s 600M -D -R / -c $FILE -Z "*.gz" \
      -Z "*.bz2" -Z "*.zip" -Z "*.png" -P home/gradha -P tmp \
      -P mnt -P dev/pts -P proc -P floppy -P burner \
      -P cdrom -P var/backups > /dev/null ; then
   if /usr/local/bin/dar -t $FILE > /dev/null ; then
      echo "Archive created and successfully tested."
   else
      echo "Archive created but test FAILED."
   fi
else
   echo "Archive creating FAILED."
fi
/usr/bin/find $DIR -type f -exec chown .gradha \{\} \;
/usr/bin/find $DIR -type f -exec chmod 440 \{\} \;

Potete testare facilmente questa versione facendo partire lo script e killando i processi di DAR manualmente da un'altro terminale o un'altra console con killall dar, che forzerà la fine dei processi DAR e vedrete che uno dei rami di fallimento sarà raggiunto nello script di backup.

Un'ulteriore possibile utilizzo per testare il codice può essere la rimozione di archivi incompleti dall'hard disk se qualcosa andasse male o evitare di testare l'archivio creato quando sapete che il primo comando è già fallito. Successivamente si possono facilmente concatenare i comandi di creazione e di test con && in una singola linea di testo. Ciò indica alla shell di eseguire entrambi i comandi in sequenza e impedisce l'esecuzione del secondo se il primo è fallito.

Comunque, se viene a mancare la corrente elettrica durante una procedura di backup, questa versione dello script lascerà in giro archivi incompleti e non validi. Per prevenire ciò potete fare in modo che lo script esegua una positive verification, cioè che crei il backup in una directory temporanea insieme con un file *.valid se il ramo corretto dello script viene raggiunto con successo.

Proseguendo con questa strategia, un altro script sotto cron monitora la directory dove vengono creati i file temporanei e sposta nella directory definitiva gli archivi accompagnati dal corrispondente file *.valid, eliminando tutti gli altri la cui ultima modifica risale a più di un'ora prima.

Idee per il futuro

Non ho programmato di aggiornare questo testo presto perchè sono molto pigro, ma se voi siete fra quegli hackers iperattivi, ecco qua qualcosa che mi piacerebbe inserire:

  • Unificare gli script dei backup di base e differenziali in uno unico, cosicchè se all'esecuzione dello script non esistono backup base per il mese corrente questo venga creato. Utile per macchine che rimangono spente molto tempo dopo che il backup mensile è stato fatto.

  • Aggiornare lo script in modo che crei giornalmente un immagine per CD ROM con cdrecord e la masterizzi automaticamente su un cd riscrivibile presente nel drive. Così nel caso l'intero hard disk si guasti sarebbe disponibile l'ultimo backup su un media rimovibile. Certo la cosa è limitata e non può essere automatica nel caso i backup occupino più spazio di un CDROM. La stessa cosa vale per ZIP/JAZZ/qualsiasi cosa vogliate.

  • Integrazione dei backup generati con una mini Knoppix bootable o qualsiasi altra distribuzione che possa essere avviata da CDROM. Così avreste un CDROM per recuperare i dati che può partire automaticamente e formattare il vostro hard disk.

  • Sincronizzazione delle directory di backup attraverso internet con hosts remoti. In questo modo se l'intera macchina è bruciata fisicamente, ad esempio con la vostra casa, voi avete i vostri backup in qualche altro posto. Potrebbe essere fatto facilmente con programmi come rsync attraverso ssh eseguiti tramite cron.

  • Inserimento dei parametri comuni in un file separato da includere nello script utilizzando l'opzione di DAR, -B. Per esempio:

    $ cat > /var/backups/system/common.dcf
    -m 256 -y -s 600M -D -R / -Z "*.gz" -Z "*.bz2" -Z "*.zip" \
    -Z "*.png" -P home/gradha -P tmp -P mnt -P dev/pts \
    -P proc -P floppy -P burner -P cdrom -P var/backups
    

    You can then use it in the script like this:

    DIR=/var/backups/system
    FILE=${DIR}/`/bin/date -I`_data
    # Commands
    /usr/local/bin/dar -B ${DIR}/common.dcf -c $FILE > /dev/null
    /usr/local/bin/dar -t $FILE > /dev/null
    /usr/bin/find $DIR -type f -exec chown .gradha \{\} \;
    

    Which can also be reused in the differential version!
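Regarding the remote synchronization idea mentioned above, here is a minimal sketch run from cron (the host name, account and destination path are assumptions):

#!/bin/sh
# push the local backup directory to a remote host over ssh
rsync -az -e ssh /var/backups/system/ backup@remote.example.org:/srv/backups/`hostname`/

A crontab entry such as "0 4 * * * /usr/local/bin/push_backups.sh" (the script name is also an assumption) would then run it daily at 04:00.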

In fact, some people have already started to create scripts for their own use and have no problem sharing them. To avoid cluttering this mini-howto, I intend to archive them as they are on my web space: http://gradha.sdf-eu.org/dar_scripts/.

Feel free to send in your work and your updates, and I will add them to the directory. If you plan to send a single script file or a .tar.gz with a whole backup suite, include a simple descriptive .txt file that I will put alongside the other files, so people can read what the files are and what they do before downloading them. Use English in your description, and don't forget to include your name and e-mail address so people can send you bug fixes or improvements.

The end

And that's the whole magic. If you have any problem, or something is unclear or wrong (which is worse), send me an e-mail. If you find this document useful and want to translate it, send me a translation of the source.en.txt file so that I can distribute it along with this version, and users will find it easier to read it in their own language. You should easily find the source of this document on my home page (link at the beginning of the document).
Enjoy!

To conclude

A slightly revised version, with somewhat more fluent Italian: that was the goal I had set myself for this revision. I do not know whether it has been reached, but I had not imagined that translating from English would be so hard. Terms you do not know how to render in Italian, plurals of English nouns that stay singular in Italian, idioms that, once translated literally, never get out of your head. I hope, in any case, that this revision is easier to understand, and I remain, as always, at your disposal. David (link at the beginning of the document)
dar-2.4.8/doc/mini-howto/README0000644000175000017430000000044312003275573012742 00000000000000 Mini Howto What you will find here is a mini howto compiled by Grzegorz Adam Hankiewicz. David Gervasoni has made a italian translation. An up to date version of these howto can be find on the web site as well as pdf formated versions. http://gradha.sdf-eu.org/textos/backup.en.html dar-2.4.8/doc/authentification.html0000644000175000017430000000356412003275573014223 00000000000000 DAR - Disk ARchive - Authentification page

Dar Documentation


DAR's Authentication




PGP/ GnuPG key


All released packages are signed with the following PGP public key. The signature only proves that I have personally released the source or binary package, and thus that there is no malicious code inside the signed packages (if you trust me, of course).

Fingerprint is: 3D7F 383C B41E 33D7 0250 A9AC A42E 4223 C818 1A52




dar-2.4.8/doc/FAQ.html0000644000175000017430000035621712010477201011270 00000000000000 DAR - Frequently Asked Questions
Dar Documentation


DAR's - Frequently Asked Questions


Questions:

I restore/save all files but dar reported some files have been ignored, what are those ignored files?
Dar hangs when using it with pipes, why?
Why, when I restore 1 file, does dar report that 3 files have been restored?
While compiling dar I get the following message : " g++: /lib/libattr.a: No such file or directory", what can I do?
I cannot find the binary package for my distro, where to look for?
Can I use different filters between a full backup and a differential backup? Would not dar consider some file not included in the filter to be deleted?
Once in action dar makes all the system slower and slower, then it stops with the message "killed"! How to overcome this problem?
I have a backup, how can I change the size of its slices?
I have a backup in one slice, how can I split it in several slices?
I have a backup in several slices, how can I stick them all into a single file?
I have a backup, how can I change its encryption scheme?
I have a backup, how can I change its compression algorithm?
Which options can I use with which options?
Why does dar report corruption for the archive I have transferred with FTP?
Why does DAR save UID/GID instead of plain user names and group names?
Dar_Manager does not accept encrypted archives, how to work around this?
How to overcome the lack of static linking on MacOS X?
Why cannot I test, extract files from, or list the contents of a given slice of an archive?
Why cannot I merge two isolated catalogues?
Why cannot dar use the full power of my multi-processor computer?
Is libdar thread-safe, which way do you mean it is?
How to solve "configure: error: Cannot find size_t type"?
Why has dar become much slower since release 2.4.0?
How to search for questions (and their answers) about known problems similar to mine?
Why does dar tell me that it failed to open a directory, while I have excluded this directory?
Dar reports a "SECURITY WARNING! SUSPICIOUS FILE" what does that mean!?
Can dar help copy a large directory tree?


Answers:

I restore/save all files but dar reported some files have been ignored, what are those ignored files?
When restoring/saving, all files are considered by default. But if you specify some files to restore or save, all other files are "ignored", this is the case when using -P -X -I or -g.

Dar hangs when using it with pipes, why?
Dar can produce an archive on its standard output if you give '-' as basename, but it cannot read an archive from its standard input in direct access mode. To feed an archive to dar through pipes, you need dar_slave and two pipes, or you can use the sequential mode (--sequential-read option, which is very slow compared to the default direct access mode). To use dar with dar_slave over pipes in direct access mode (which is the most efficient way to proceed), see the detailed notes or more precisely the dar and ssh note.

Why, when I restore 1 file, does dar report that 3 files have been restored?
if you restore for example the file usr/bin/emacs dar will first restore usr (if the directory already exists, it will get its date and ownership restored, all existing files will be preserved), then /usr/bin will be restored, and last usr/bin/emacs will be restored. Thus 3 inodes have been restored or modified while only one file has been asked for restoration.

While compiling dar I get the following message : " g++: /lib/libattr.a: No such file or directory", what can I do?
The problem comes from an incoherence in your distro (Redhat and Slackware seem(ed) concerned at least): Dar (Libtool) finds the /usr/lib/gcc-lib/i386-redhat-linux/3.3.3/../../../libattr.la file to link with. This file defines where the static and dynamic libattr libraries are located, and in this file both the static and dynamic libraries are expected to be found under /lib. While the dynamic libattr is there, the static version has been moved to /usr/lib. A workaround is to make a symbolic link:

ln -s /usr/lib/libattr.a /lib/libattr.a


I cannot find the binary package for my distro, where to look for?
For any binary package, ask your distro maintainer to include dar (if not already done), and check on the web site of your preferred distro for a dar package

Can I use different filters between a full backup and a differential backup? Would not dar consider some file not included in the filter to be deleted?
Yes, you can. No, there is no risk to have dar deleting the files that were not selected for the differential backup. Here is the way dar works:

During a backup process, when a file is ignored due to filter exclusion, an "ignored" entry is added to the catalogue. At the end of the backup, dar compares both catalogues, the one of reference and the new one built during the backup process, and adds a "detruit" (destroyed in English) entry, when an entry of the reference is not present in the new catalogue. Thus, if an "ignored" is present no "detruit" will be added for that name. Then all "ignored" entries are removed and the catalogue is dumped in the archive.


Once in action dar makes all the system slower and slower, then it stops with the message "killed"! How to overcome this problem?
Dar needs virtual memory to work. Virtual memory is the RAM + SWAP space. Dar's memory requirement grows with the number of files saved, not with the amount of data saved. If you have a few huge files you will have little chance to see any memory limitation problem. On the opposite, saving a plethora of files (either big or small) will make dar request a lot of virtual memory. Dar needs this memory to build the catalogue (the contents) of the archive it creates. Same thing for differential backup, except that dar also needs to load into memory the catalogue of the archive of reference, which most of the time will make it use twice as much memory for a differential backup as for a full backup.

Anyway, the solution is:
  1. Read the limitations file to understand the problem and be aware of the limitations you will bring at step 3, below.
  2. If you can, add swap space to your system (under Linux, you can either add a swap partition or a swap file, which is less constraining but also a bit less efficient; see the sketch after this list). Bob Barry provided a script that can give you a rough estimation of the required virtual memory (doc/samples/dar_rqck.bash).
  3. If this is not enough, or if you don't want to or cannot add swap space, recompile dar giving the --enable-mode=64 argument to the configure script.
  4. If this is not enough, and you have some money, you can add some RAM to your system
  5. If all that fails, ask for support on the dar-support mailing-list.
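As an illustration of step 2, here is a minimal sketch of adding a 2 GiB swap file under Linux (run as root; the path and size are just an example):

dd if=/dev/zero of=/swapfile bs=1M count=2048
chmod 600 /swapfile
mkswap /swapfile
swapon /swapfile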
There is still a workaround, which is to make several smaller archives of the files to backup. For example, make a backup for everything in /usr/local, another for everything in /var, and so on. These backups can be full or differential. The drawback is not big, as you can store these archives side by side and use them at will. Moreover, you can feed a unique dar_manager database with all these different archives, which will hide from you the fact that there are several full archives and several differential archives concerning different sets of files.
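For example, a minimal sketch of this workaround (the archive basenames and paths are arbitrary):

# several smaller archives instead of a single big one
dar -c /var/backups/usr_local -R /usr/local -z
dar -c /var/backups/var -R /var -z

# feed a single dar_manager database with them
dar_manager -C /var/backups/catalog.dmd
dar_manager -B /var/backups/catalog.dmd -A /var/backups/usr_local
dar_manager -B /var/backups/catalog.dmd -A /var/backups/var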


I have a backup, how can I change the size of its slices?
dar_xform is your friend!

dar_xform -s <size> original_archive new_archive

dar_xform will create a new archive with slices of the requested size (you can also make use of the -S option for the first slice). Note that you neither need to decrypt the archive nor will dar uncompress it; this is thus a very fast processing. See the dar_xform man page for more.


I have a backup in one slice, how can I split it in several slices?
dar_xform is your friend!

dar_xform -s <size> original_archive new_archive

see above for more.

I have a backup in several slices, how can I stick them all into a single file?
dar_xform is your friend!

dar_xform original_archive new_archive

dar_xform without -s option creates a single sliced archive. See dar_xform man page for more.


I have a backup, how can I change its encryption scheme?
The merging feature lets you do that. Merging has two roles: putting the contents of two archives into one, and at the same time filtering file contents so as not to copy certain files into the resulting archive. The merging feature can take two, but also only one, archive as input, so we will use it in that special way here:
  • a single input (our original archive)
  • no file filtering (so we keep all the files)
  • Keep files compressed (no decompression/re compression) to speed up the process
dar -+ new_archive -A original_archive -K "<new_algo>:new pass" -ak

If the original archive was not in clear you need to add the -J option to provide the encryption key, and if you don't want to have password in clear on the command line (command that can be seen with top or ps by other users), simply provide "<algo>:" then dar will ask you on the fly the password, if using blowfish you can then just provide ":" for the keys:

dar -+ new_archive -A original_archive -K ":" -J ":" -ak

Note that you can also change slicing of the archive at the same time thanks to -s and -S options:

dar -+ new_archive -A original_archive -K ":" -J ":" -ak -s 1G

I have a backup, how can I change its compression algorithm?
Same thing as above : we will use the merging feature :

to use bzip2 compression:

dar -+ new_archive -A original_archive -zbzip2

to use gzip compression

dar -+ new_archive -A original_archive -zgzip

to use lzo compression

dar -+ new_archive -A original_archive -zlzo

to use no compression at all:

dar -+ new_archive -A original_archive

Note that you can also change encryption scheme and slicing at the same time you change compression:

dar -+ new_archive -A original_archive -zbzip2 -K ":" -J ":" -s 1G

Which options can I use with which options?
DAR provides seven commands:

-c   to create a new archive
-x   to extract files from a given archive
-l    to list the contents of a given archive
-d   to compare the contents of an archive with filesystem
-t    to test the coherence of a given archive
-C  to isolate an archive (extract its catalogue to a usually small file)
-+   to merge two archives in one or create a sub archive from one or two other archives

Below are listed, for each command, the available options (those marked OK):


short option  long option                           -c   -x   -l   -d   -t   -C   -+
-v            --verbose                             OK   OK   OK   OK   OK   OK   OK
-vs           --verbose=s                           OK   OK   --   OK   OK   --   OK
-b            --beep                                OK   OK   OK   OK   OK   OK   OK
-n            --no-overwrite                        OK   OK   --   --   --   OK   OK
-w            --no-warn                             OK   OK   --   --   --   OK   OK
-wa           --no-warn=all                         --   OK   --   --   --   --   --
-A            --ref                                 OK   OK   --   OK   OK   OK   OK
-R            --fs-root                             OK   OK   --   OK   --   --   --
-X            --exclude                             OK   OK   OK   OK   OK   --   OK
-I            --include                             OK   OK   OK   OK   OK   --   OK
-P            --prune                               OK   OK   OK   OK   OK   --   OK
-g            --go-into                             OK   OK   OK   OK   OK   --   OK
-]            --exclude-from-file                   OK   OK   OK   OK   OK   --   OK
-[            --include-from-file                   OK   OK   OK   OK   OK   --   OK
-u            --exclude-ea                          OK   OK   --   --   --   --   OK
-U            --include-ea                          OK   OK   --   --   --   --   OK
-i            --input                               OK   OK   OK   OK   OK   OK   OK
-o            --output                              OK   OK   OK   OK   OK   OK   OK
-O            --comparison-field                    OK   OK   --   OK   --   --   --
-H            --hour                                OK   OK   --   --   --   --   --
-E            --execute                             OK   OK   OK   OK   OK   OK   OK
-F            --ref-execute                         OK   --   --   --   --   OK   OK
-K            --key                                 OK   OK   OK   OK   OK   OK   OK
-J            --ref-key                             OK   --   --   --   --   OK   OK
-#            --crypto-block                        OK   OK   OK   OK   OK   OK   OK
-*            --ref-crypto-block                    OK   --   --   --   --   OK   OK
-B            --batch                               OK   OK   OK   OK   OK   OK   OK
-N            --noconf                              OK   OK   OK   OK   OK   OK   OK
-e            --empty                               OK   --   --   --   --   OK   OK
-aSI          --alter=SI                            OK   OK   OK   OK   OK   OK   OK
-abinary      --alter=binary                        OK   OK   OK   OK   OK   OK   OK
-Q                                                  OK   OK   OK   OK   OK   OK   OK
-aa           --alter=atime                         OK   --   --   OK   --   --   --
-ac           --alter=ctime                         OK   --   --   OK   --   --   --
-am           --alter=mask                          OK   OK   OK   OK   OK   OK   OK
-an           --alter=no-case                       OK   OK   OK   OK   OK   OK   OK
-acase        --alter=case                          OK   OK   OK   OK   OK   OK   OK
-ar           --alter=regex                         OK   OK   OK   OK   OK   OK   OK
-ag           --alter=glob                          OK   OK   OK   OK   OK   OK   OK
-j            --jog                                 OK   OK   OK   OK   OK   OK   OK
-z            --compression                         OK   --   --   --   --   OK   OK
-y            --bzip2                               deprecated   --   --   --   --   deprecated   deprecated
-s            --slice                               OK   --   --   --   --   OK   OK
-S            --first-slice                         OK   --   --   --   --   OK   OK
-p            --pause                               OK   --   --   --   --   OK   OK
-@            --aux                                 OK   --   --   --   --   --   OK
-$            --aux-key                             --   --   --   --   --   --   OK
-~            --aux-execute                         --   --   --   --   --   --   OK
-%            --aux-crypto-block                    --   --   --   --   --   --   OK
-D            --empty-dir                           OK   OK   --   --   --   --   OK
-Z            --exclude-compression                 OK   --   --   --   --   --   OK
-Y            --include-compression                 OK   --   --   --   --   --   OK
-m            --mincompr                            OK   --   --   --   --   --   OK
-ak           --alter=keep-compressed               --   --   --   --   --   --   OK
-af           --alter=fixed-date                    OK   --   --   --   --   --   --
              --nodump                              OK   --   --   --   --   --   --
-M            --no-mount-points                     OK   --   --   --   --   --   --
-,            --cache-directory-tagging             OK   --   --   --   --   --   --
-k            --deleted                             --   OK   --   --   --   --   --
-r            --recent                              --   OK   --   --   --   --   --
-f            --flat                                --   OK   --   --   --   --   --
-ae           --alter=erase_ea                      --   OK   --   --   --   --   --
-T            --list-format                         --   --   OK   --   --   --   --
-as           --alter=saved                         --   --   OK   --   --   --   --
-ad           --alter=decremental                   --   --   --   --   --   --   OK
-q            --quiet                               OK   OK   OK   OK   OK   OK   OK
-/            --overwriting-policy                  --   OK   --   --   --   --   OK
-<            --backup-hook-include                 OK   --   --   --   --   --   --
->            --backup-hook-exclude                 OK   --   --   --   --   --   --
-=            --backup-hook-execute                 OK   --   --   --   --   --   --
-ai           --alter=ignore-unknown-inode-type     OK   --   --   --   --   --   --
-at           --alter=tape-marks                    OK   --   --   --   --   --   OK
-0            --sequential-read                     OK   OK   OK   OK   OK   OK   --
-;            --min-digits                          OK   OK   OK   OK   OK   OK   OK
-1            --sparse-file-min-size                OK   --   --   --   --   --   OK
-ah           --alter=hole-recheck                  --   --   --   --   --   --   OK
-^            --slice-mode                          OK   --   --   --   --   OK   OK
-_            --retry-on-change                     OK   --   --   --   --   --   --
-asecu        --alter=secu                          OK   --   --   --   --   --   --
-.            --user-comment                        OK   --   --   --   --   OK   OK
-3            --hash                                OK   --   --   --   --   OK   OK
-2            --dirty-behavior                      --   OK   --   --   --   --   --
-al           --alter=lax                           --   OK   --   --   --   --   --
-alist-ea     --alter=list-ea                       --   --   OK   --   --   --   --



Why does dar report corruption for the archive I have transferred with FTP?

Dar archives are binary files; they must be transferred in binary mode when using FTP. This is done in the following way for the ftp command-line client:

ftp <somewhere>
<login>
<password>
bin
put <file>
get <file>
bye

If you transfer an archive (or any other binary file) in ascii mode (the opposite of binary mode), the 8th bit of each byte will be lost and the archive will become impossible to recover (due to the destruction of this information). Be very careful to test your archive after transferring it back to your host, to be sure you can delete the original file.


Why does DAR save UID/GID instead of plain user names and group names?

A file's properties do not contain the name of the owner nor the name of the owning group; instead they contain two numbers, the user ID and the group ID (UID & GID for short). In the /etc/passwd file these numbers are associated with names and other properties, like the login shell, the home directory, the password (see also /etc/shadow). Thus, when you list a directory (with the 'ls' command for example, or with any GUI program for another example), the listing application opens each directory, finds there a list of names with an associated inode number, then fetches the inode attributes of each file and looks, among other information, for the UID and the GID. To be able to display the real user name and group name, the listing application uses a well-defined standard C library call that does the lookup in /etc/passwd, possibly in NIS if configured, and in any other additional system [this way applications do not have to bother with the many possible system configurations, the same API is used whatever the system is]; the lookup returns the name if it exists, and the listing application displays, for each file found in a directory, the attributes plus the user name and group name as returned by the system.

As you can see, the user name and group name are not part of any file attribute, but the UID and GID *are*. Dar is mainly a backup tool: it preserves as much as possible the files' properties, to be able to restore them as close as possible to their original state. Thus a file saved with UID=3 will be restored with UID=3. The name corresponding to UID 3 may or may not exist, may exist and be the same, or may exist and be different; the file will anyway be restored with UID 3.

Scenario with dar's way of restoring

Thus, when doing backup and restoration of a crashed system, you can be confident that the restoration will not interfere with the bootable system you have used to launch dar to restore your disk. Assuming you have UID 1 labeled 'bin' in your real crashed system, but this UID 1 is labeled 'admin' in the boot system, while UID 2 is labeled 'bin' in that boot system, files owned by bin in the system to restore will be restored under UID 1, not UID 2 which is used by the temporary boot system. At that time, after restoration, still running from the boot system, if you do an 'ls' you will see that the original files owned by 'bin' are now owned by user 'admin'.

This is really a mirage: in your restoration you will also restore the /etc/passwd file and other system configuration files (like NIS configuration files if they have been used), so at reboot time on the newly restored real system, UID 1 will be back associated with user 'bin' as expected, and files originally owned by user bin will again be listed as owned by bin, as expected.

Scenario with plain name way of restoring

If dar had done otherwise, restoring the files owned by 'bin' to the UID corresponding to 'bin', these files would have been given UID 2 (the one used by the temporary bootable system used to launch dar). But once the real restored system had been launched, this UID 2 would have belonged to some other user and not to 'bin', which is mapped to UID 1 in the restored /etc/passwd.

Now, if you want to change some UID/GID when moving a set of files from one live system to another, there is no problem as long as you are not running dar under the 'root' account for the restoration. Accounts other than 'root' are usually not allowed to modify UID/GID, thus files restored by dar will get the user and group ownership of the dar process, which is that of the user who launched dar.

But if you really need to move a directory tree containing a set of files with different ownerships, and you want to preserve these different ownerships from one live system to another while the corresponding UID/GID do not match between the two systems, dar can still help you:

  • Save your directory tree on the source live system
  • From the root account in the destination live system do the following:
  • restore the archive in an empty directory
  • change the UID of files according to the one used by the destination filesystem with the command:
find /path/to/restored/archive -uid <old UID>  -print -exec chown <new name> {} \;

find /path/to/restored/archive -gid <old GID> -print -exec chgrp <new name> {} \;

The first command will let you remap an UID to another for all files under the /path/to/restored/archive directory
The second command will let you remap a GID to another for all files under the /path/to/restored/archive directory

Example on how to globally modify ownership of a directory tree user by user

For example, you have on the source system three users: Pierre (UID 100), Paul (UID 101), Jacques (UID 102)
but on the destination system, these same users are mapped to different UID: Pierre has UID 101, Paul has UID 102 and Jacques has UID 100.

We temporarily need an unused UID on the destination system; we will assume UID 680 is not used. Then, after the archive restoration in the directory /tmp/A, we will do the following:

find /tmp/A -uid 100 -print -exec chown 680 {} \;
find /tmp/A -uid 101 -print -exec chown pierre {} \;
find /tmp/A -uid 102 -print -exec chown paul {} \;
find /tmp/A -uid 680 -print -exec chown jacques  {} \;

which is:
change files of UID 100 to UID 680 (the files of Jacques are now under the temporary UID 680 and UID 100 is now freed)
change files of UID 101 to UID 100 (the files of Pierre get their UID of the destination live system, UID 101 is now freed)
change files of UID 102 to UID 101 (the files of Paul get their UID of the destination live system, UID 102 is now freed)
change files of UID 680 to UID 102 (the files of Jacques which had been temporarily moved to UID 680 are now set to their UID on the destination live system, UID 680 is no more used).

You can then move the modified files to the appropriate destination, or make a new dar archive to be restored in the appropriate place if you want to use some of dar's features, like for example restoring only the files that are more recent than those present on the filesystem.



Dar_Manager does not accept encrypted archives, how to work around this?

Yes, that's true, dar_manager does not accept encrypted archives. The first reason is that, as long as dar_manager databases cannot be encrypted, it would not be very consistent to add encrypted archives to them. The second reason is that the dar_manager database would have to hold the key of each encrypted archive, making that database the weakest point of your data security: breaking the database encryption would then provide access to every encryption key and, given access to the original archives, to the data of any archive added to the database.

OK, there is however a feature in the pipe to let dar_manager encrypt its databases, then another feature to let dar_manager store the different archive keys, then yet another feature to pass keys from dar_manager to dar outside of the command-line (which would otherwise expose the keys to the sight of other users on a multi-user system), and then one more feature to feed the database with the archive keys, also without using the command-line... well, there are a lot of features to add and test before you can expect to find this in a released version of dar.

In the meanwhile, you can proceed as follows:
  • isolate your encrypted archive into an unencrypted 'extracted catalogue': do not use the -K option while isolating; you will however need to use the -J option so that dar is able to read the encrypted archive. Note that, still for key protection, you are encouraged to use a DCF file (Dar Command File, which is a plain file with a list of options to be passed to dar) with restricted permissions and containing the '-J <key>' option to be passed to dar. Dar's -B option would then receive this filename. This will prevent other users of your system from having a chance to read the key you have used for your archives (see the sketch after this list),
  • add these extracted catalogue to the dar_manager database of your choice,
  • change the name and path of the added catalogue to point to your real encrypted archives (-b and -p options of dar_manager).
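Here is a minimal sketch of these three steps (the file names are arbitrary, and the archive number 1 assumes it is the first archive added to the database):

# keyfile.dcf contains a line like: -J "bf:your pass"  and is readable by you only
dar -C clear_cata -A encrypted_archive -B keyfile.dcf
dar_manager -C base.dmd
dar_manager -B base.dmd -A clear_cata
dar_manager -B base.dmd -b 1 encrypted_archive
dar_manager -B base.dmd -p 1 /path/to/archive/directory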
Note that, as the database is not encrypted, this will expose the file listing (not the files' contents) of your encrypted archives to anyone able to read the database; thus it is recommended to set restrictive permissions on this database file.

When the time comes to use dar_manager to restore some file, you will have to make dar_manager pass the key to dar for it to be able to restore the needed files from the archive. This can be done in several ways: dar_manager's command-line, the dar_manager database, or a DCF file.
  1. dar_manager's command-line: simply pass the -e "-K <key>" to dar_manager . Note that this will expose the key twice: on dar_manager's command-line and on dar's command-line.
  2. dar_manager database: the database can store some constant command to be passed to dar. This is done using the -o option or the -i option. The -o option exposes the arguments you want to pass to dar, because they appear on dar_manager's command-line, while the -i option lets you do the same thing in an interactive manner, which is a better choice. However, even if the -i option is a safe way to feed the dar_manager database with the '-K <key>' option to be passed to dar, this option will still be received by dar on its command-line, thus the key will remain visible to other users of the same system.
  3. A better way is to use a DCF file with restrictive permissions. This file will hold the '-K <key>' option for dar to be able to read the encrypted archives, and dar_manager will ask dar to read this file thanks to the '-B <filename>' option you will have given either on dar_manager's command-line (-e -B <filename> ...) or from the stored options in the database (-o -B <filename>). See the sketch at the end of this answer.
  4. The best way is to let dar_manager pass the -K option to dar, but without a password: simply pass the -e "-K :" option to dar_manager. When dar gets the -K option with the ":" argument, it will dynamically ask for the password and store it in secured memory.
Note that you must prevent other users from reading any file holding the archive key; this covers the dar_manager database as well as the DCF files you may temporarily use. Second note: in this workaround approach we have assumed that all encrypted archives share the same key.
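To illustrate option 3 above, here is a minimal sketch (the database name, DCF path, cipher and password are placeholders):

# the DCF file holds the key and is readable by you only
echo '-K "bf:your pass"' > /root/.dar_key.dcf
chmod 600 /root/.dar_key.dcf

# restore a file, letting dar_manager tell dar to read that DCF file
dar_manager -B base.dmd -e "-B /root/.dar_key.dcf" -r home/gradha/some_file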


How to overcome the lack of static linking on MacOS X?

The answer comes from Dave Vasilevsky in an email to the dar-support mailing-list. I let him explain how to do:

Pure-static executables aren't used on OS X. However, Mac OS X does have other ways to build portable binaries. HOWTO build portable binaries on OS X?

First, you have to make sure that dar only uses operating-system libraries that exist on the oldest version of OS X that you care about.
You do this by specifying one of Apple's SDKs, for example:

export CPPFLAGS="-isysroot /Developer/SDKs/MacOSX10.2.8.sdk"
export LDFLAGS="-Wl,-syslibroot,/Developer/SDKs/MacOSX10.2.8.sdk"


Second, you have to make sure that any non-system libraries that dar links to are linked in statically. To do this edit dar/src/dar_suite/Makefile, changing LDADD to '../libdar/.libs/libdar.a'. If any other non-system libs are used (such as gettext), change the makefiles so they are also linked in statically. Apple should really give us a way to force the linker to do this automatically!

Some caveats:

* If you build for 10.3 or lower, you will not get EA support, and therefore you will not be able to save special Mac information like
resource forks.
* To work on both ppc and x86 Macs, you need to build a universal binary. For instructions, use Google :-)
* To make a 10.2-compatible binary, you must build with GCC 3.3.
* These instructions won't work for the 10.1 SDK, that one is harder to use.


Why cannot I test, extract files from, or list the contents of a given slice of an archive?

Well, this is due to dar's design. Since release 2.4.0, two features can help you get close to that point, namely sequential reading, which asks dar to read the archive sequentially, and the -al option, which asks dar to be relaxed on sanity and coherence checks. You can put a single slice into a given directory and create as many empty files as necessary to simulate the slices with lower numbers than the real slice(s) that remain of a partially lost archive. Then, using sequential reading (--sequential-read option) and the laxist mode (-al option), you will get to the requested information:

mkdir tempo
cd tempo
ln -s ../somewhere/backup.3.dar
touch backup.1.dar
touch backup.2.dar
dar -l backup --sequential-read -al

Note however that using the laxist mode skips a lot of sanity checks. This method is to be used as a last resort upon heavy archive corruption. It is still a good idea to test your archive once on the destination medium and, if possible, to add redundancy data using Parchive, in order to be able to repair an archive corrupted by a media problem.
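As a sketch of the Parchive suggestion (assuming the par2 command-line tool is installed; the 10% redundancy level is arbitrary):

par2 create -r10 backup.1.dar.par2 backup.1.dar   # create redundancy data next to the slice
par2 verify backup.1.dar.par2                     # later, check the slice
par2 repair backup.1.dar.par2                     # and repair it if corruption was detected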

Why cannot I merge two isolated catalogues?

Since version 2.4.0, an isolated catalogue can also be used to rescue a corrupted internal catalogue of the archive it has been isolated from. For that feature to be possible, a mechanism lets dar know whether a given isolated catalogue and a given archive correspond to the same contents. Merging two isolated catalogues would break this feature, as the resulting archive would not match any real archive and could only be used as reference for a differential backup.

Why cannot dar use the full power of my multi-processor computer?

Parallel computing programming is a science by itself. Having done a specialization in that area during my studies, I can briefly explain the constraints here. A program can use several processors if the algorithm it uses can be parallelized. Such an algorithm can be cut, either statically (at programming time) or dynamically (at execution time), into several independent execution threads. These different execution threads must be as autonomous as possible from each other if you don't want to have one thread waiting for another (which is not what we want). The constraint is this: if you cannot have different threads with no or very little communication and dependence between them, then parallelization is not worth it.

Back to dar. From a very abstracted point of view, dar works by fetching files from the filesystem and by appending their data in a single file (the archive). For each file, dar records in memory the location of the data and once all files have been treated, this location information (contained in the so called "catalogue") is added at the end of the archive.

One could say that to parallelize file treatment, instead of proceeding file by file, let's do all files at the same time (or rather, let's say, N files at the same time). OK, but first you would have an important loss of performance at disk level, as the disk heads would spend most of the time seeking from one of the N files' data to another. The second point is that to add a file to the archive you must know the position of the end of the last added file, which is not possible to know in advance because of compression and/or encryption. Thus a given thread would have to wait for another to finish before being able to drop in turn the data of the file it owns... As you can guess, parallelizing this way would bring worse performance than the sequential algorithm.

Another possibility is to have several threads doing:
  • file lookup (report which file are present on filesystem)
  • file filtering (determine which file to save, which file to compress, and so on)
  • file compression
  • file encryption
This would be a bit better, but: file lookup is very fast and does not consume much CPU, and the same goes for file filtering. Instead, file compression and file encryption are very CPU intensive. Thus, first, if you only use compression OR encryption, parallelizing this way will not bring you much extra power, as the encryption or the compression itself cannot be parallelized (compressing a file is done sequentially, same thing when encrypting it). Roughly, you will get the same execution time as with the sequential execution. Second, if you use no compression and no encryption, your CPU will stay idle most of the time and the time to execute dar will only depend on the speed of your hard disk, so you will not get any improvement here either. Last, only if you use both encryption and compression could you gain some performance from parallelization, but dar could only use at most two CPUs! No more! And the gain of time will be less than 2 (it will not be twice as fast, but much less), as for a given amount of data compression needs much more time than encryption. Thus the encryption thread will most of the time wait for compressed data.

OK, you have maybe also found another possibility: having N threads for compression and M threads for encryption. Assuming encryption is faster than compression, we could choose N > M. We could also have a fixed value for N and a dynamic value for M depending on how fast compression is running. Well, this would let dar compress and encrypt several files at the same time; assuming that the time to read and write data is negligible compared to compression time (which must be demonstrated, as several files potentially have to be read at the same time), we could maybe have a real performance gain. But... while several files can now be compressed at the same time, only one can be written to disk at a given time. Thus, between the time the compression of a file has started and the time it has finished, all other threads have to keep their compressed data in memory. Then the next thread can drop its data to the archive while all others keep compressing to memory (RAM). We will quickly run out of RAM! Either your computer will start to swap, or you have to store the data back to disk in a temporary file, which will then have to be read again and written back to the archive. Doing so would bring a huge disk performance degradation, as the disk would serve to read the file's data, write its compressed data to a temporary file, read back its compressed data, and write its compressed data to the archive.

Last, when using parallelization there is always a cost due to inter-process communication and concurrent I/O operations on the hardware (here, the hard disk is used at the same time to read the files to backup and to write them into the archive). This cost becomes negligible when the number of parallel threads increases, assuming all threads are kept busy... and here there is a bottleneck, the archive creation, which seems to prevent a really impressive parallelization.

Conclusion, unless you can find another way to parallelize dar, it will not bring noticeable improvement to have a parallelized version of dar. Parallelization is strongly related to the algorithm used, some algorithms are well adapted to this operation some others are not.

Is libdar thread-safe, which way do you mean it is?

libdar is the part of dar's source code that has been rewritten to be used by external programs (like kdar). It has been modified to be used in a multi-threaded environment, thus, *yes*, libdar is thread-safe. However, thread-safe does not mean that you do not have to take some precautions in your programs while using libdar (or any other library).

Let's take an example, considering a simple library that provides two functions that both receive the address of an integer as argument. The first increments the given integer until a specific key is pressed by the user, while the second decrements the given integer until another key is pressed. This library is thread-safe in the sense that there is no static variable in it, nor does it hold any particular state at a given time. It is just a set of two functions.

Now, your multi-threaded program is the following: at a given time you have one thread running the first library function while another runs the other library function. All will work fine unless you provided to both threads the same integer. One thread would then increment it while the other would decrement it, and you would not have the expected behavior you could get if you were not using multi-threaded environment. The problem would be the same if instead of using an external library you were accessing this same integer from two different threads at the same time.

Care must thus be taken for two different threads not acting on the same variables at the same time. This is however possible with the use of posix mutex, which would define a portion of code (known as a critical section) that cannot be entered by a thread while another one is accessing it (such a thread is suspended until the other thread exits the critical section).

For libdar, this is the same, you must pay attention not having two or more different threads acting on the same data. Libdar provides a set of classes, which can be seen as a set of type (like a C struct) with associated functions (known as methods in the object oriented world). From these classes, your program will create objects: each object *is* a variable. Technically, invoking a method on an object is exactly the same as invoking a function giving it as hidden argument a pointer to the object ; while semantically, invoking a method is a way to read or modify this variable (= the object). Thus, if you plan to act on a given object from several threads at the same time, you must use posix mutex or any other mean to mutually exclude the access to this object between all your threads, this way only one thread may read or modify this variable (=this object) at a given time.

Note that internally libdar uses some static variables. By static variables, I mean variables that exist even when no thread is running a libdar function or method. These variables are enclosed in critical sections so that libdar's users can use them normally. In other words, this is transparent to you. For example, the cancellation mechanism uses an array holding the tid (thread id) of each thread whose libdar call must be cancelled: if you wish to cancel a libdar call run by thread 10, another thread will add the tid 10 to this list. At regular checkpoints, all libdar functions check whether this same list contains the tid the call is run from. If so, the call aborts/returns and the thread can continue its execution out of libdar code. As you can see, several threads may read or write this array of tids at the same time; thanks to a set of mutexes this is transparent to you, and for this reason libdar can be said to be thread-safe.

How to solve "configure: error: Cannot find size_t type"?

This error shows when you lack support for C++ compilation. Check the gcc compiler has been compiled with C++ support activated, or if you are using gcc binary from a distro, double check you have installed the C++ support for gcc.

Why has dar become much slower since release 2.4.0?

This is the drawback of new features!
  • Especially, to be able to read a dar archive through pipes in sequential mode, dar inserts so-called "escape sequences" to know, for example, when a new file starts. This way dar can skip to the next mark upon archive corruption or when a given file does not have to be restored. However, if such a sequence of bytes is found inside a file's data, it must be modified so as not to collide with real escape sequences. This requires dar to inspect all the data added to an archive for such byte sequences, instead of just copying the data to the archive (possibly compressing and ciphering it).
  • The other feature that brings an important overhead is the sparse file detection mechanism. To be able to detect a hole in a file and store it into the archive, dar here too needs to inspect each file's data.
You can disable both of these features using, respectively, the -at option, which suppresses "tape marks" (just another name for escape sequences) but no longer allows the generated archive to be read in sequential mode, and the -1 0 option, which completely disables sparse file detection. The execution time then becomes the same as with the dar 2.3.x releases.
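For example, a full backup created with both features disabled could look like this (the archive name and paths are arbitrary):

dar -c /var/backups/full_backup -R /home -z -at -1 0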

How to search for questions (and their answers) about known problems similar to mine?

Before sending an email to the dar-support mailing-list, you are welcome to first look at the already sent emails to check whether your problem has not yet been exposed and solved. This will be, for you, the fastest way to get an answer to your problem, and for me a way to preserve time for development.

But yes, there are now tons of email subjects to read in order to have a chance to find the answer to your problem. The simplest way is to use the search engine at gmane.

The dar-support mailing-list is archived at sourceforge *and* at gmane.org. Only this second archive has a search engine (look there for the green box at the bottom of the page).

This search engine is available for all the mailing-lists used around dar that are archived at gmane.

Why does dar tell me that it failed to open a directory, while I have excluded this directory?

Reading the contents of a directory is done using the usual system calls (opendir/readdir/closedir). The first call (opendir) lets dar designate which directory to inspect, then dar calls readdir to get the next entry of the opened directory. Once nothing remains to be read, closedir is called. The problem here is that dar cannot start reading a directory, do some treatment, and then start reading another directory. In brief, the opendir/readdir/closedir system calls are not re-entrant.

This is particularly critical for dar, as it does a depth-first lookup of the directory tree. In other words, if from the root we have two directories A and B, dar reads A's contents, then the contents of its subdirectories; once finished, it reads the next entry of the root directory (which is B), then reads the contents of B and of each of its subdirectories; once finished with B, it must go back to the root again and read the next entry. In the meantime dar has had to open many directories to get their contents.

For this reason dar caches the directory contents (when it first meets a directory, it reads its whole contents and stores it in RAM). Only afterwards does dar decide whether or not to include a given directory. But at that point its contents have already been read, thus you may get the message that dar failed to read a given directory's contents even though you explicitly specified not to include that particular directory in the backup.


Dar reports a "SECURITY WARNING! SUSPICIOUS FILE" what does that mean!?

When dar reports the following message:

SECURITY WARNING! SUSPICIOUS FILE <filepath>: ctime changed since archive of reference was done, while no inode or data changed

You should be concerned with finding an explanation for the root cause that triggered dar to raise this alarm. As you probably know, a Unix file has three dates:
  1. atime is changed anytime you read the file's contents or write to it (this is the last access time)
  2. mtime is changed anytime you write to the file's data (this is the last modification time)
  3. ctime is changed anytime you modify the file's attributes (this is the last change time)
In other words:
  • if you only read the data of file, only its atime will be updated
  • if you write some data to a file, its mtime will change, and so will its ctime
  • if you change ownership, permission, extended attributes, etc, only ctime will change
  • if you write to a file and then modify its atime or mtime to make it look like the file has not been read or modified, its ctime will change in any case.
Yes, the point is that on most (if not all) Unix systems, beyond the kernel itself, user programs can also manually set the atime and mtime to any arbitrary value (see the "touch" command for example), but to my knowledge no system provides a means to manually set the ctime of a file. This value thus cannot be faked.

However, some rootkits and other nasty programs that tend to hide themselves from the system administrator use this trick and modify the mtime to become more difficult to detect; the ctime, however, keeps track of the date and time of their infamy. Yet ctime may also change while neither mtime nor atime does, in several rather rare but normal situations. Thus, if you are faced with this message, you should first verify the following points before concluding that your system has been infected by a rootkit:
  • have you added or removed a hardlink pointing to that file and this file's data has not been modified since last backup?
  • have you changed this file's extended attributes (including Linux ACL and MacOS file forks) while the file's data has not been modified since the last backup?
  • have you recently restored your data and are now performing a differential backup taking as reference the archive used to restore that same data? Or in other words, does that particular file has just been restored from a backup (was removed by accident for example)?
  • have you just moved from a dar version older than release 2.4.0 to dar version 2.4.0 or more recent?
How to know atime/mtime/ctime of a file?
  • mtime is provided by the command: ls -l
  • atime is provided by the command : ls -l --time=atime
  • ctime is provided by the command : ls -l --time=ctime
Note: With dar versions older than 2.4.0 (by default, unless the -aa option is used), once a file had been read for backup, dar set the atime back to the value it had before dar read it. This trick was used to accommodate some programs like leafnode (an NNTP caching program) that base their cache purging scheme on the atime of files. When you do a backup using dar 2.3.11 for example, files that had their mtime modified are saved as expected and their atime is set back to its original value (the value it had just before dar read them), which has the side effect of modifying the ctime. If you then upgrade to dar 2.4.0 or more recent and do a differential backup, and that same file has not been modified since, dar will see that the ctime has changed while no other metadata did (user, ownership, group, mtime), thus this alarm message will show for all files saved in the last archive made with 2.3.11. At the next differential backup made using dar 2.4.0 (or more recent), the problem will not show anymore.

Well, if you cannot find a valid explanation among the ones presented above, you'd better consider that your system has been infected by a rootkit or a virus, and use all the necessary tools (see below for examples) to find some evidence of it.

Unhide
clam anti-virus
and others...

Last point: if you can explain the cause of the alarm and are annoyed by it (you have hundreds of files concerned, for example), you can disable this feature by adding the "-asecu" switch to the command-line.

Can dar help copy a large directory tree?

The answer is "yes" and even for more than one reason:
  1. Many backup/copy tools do not take care of hard linked inode (hard linked plain files, named pipes, char devices, block devices, symlinks)... dar does,
  2. Many backup/copy tools do not take care of sparse files... dar does,
  3. Many backup/copy tools do not take care of Extended Attributes... dar does,
  4. Many backup/copy tools do not take care of Posix ACL (Linux)... dar does,
  5. Many backup/copy tools do not take care of file forks (MacOS X)... dar does,
  6. Many backup/copy tools do not take any precautions while working on a live system... dar does.
Using the following command will do the trick:

dar -c - -R <srcdir> -N --retry-on-change 3 | dar -x - --sequential-read -N -R <dstdir>

<srcdir> contents will be copied to <dstdir>; both must exist before running this command, and <dstdir> should be an empty dir.

Here is an example: we will copy the content of /home/my to /home2/my:

first we create the destination directory:
mkdir /home2/my

then we run dar:
dar -c - -R /home/my -N --retry-on-change 3 | dar -x - --sequential-read -N -R /home2/my

The "--retry-on-change" let dar retry the copy of a file up to three times if that file has changed at the time dar was reading it. You can increase this number at will. If a file fails to be copied correctly after more than the allowed retry, a warning is issued about that file.

"piping" ('|' shell syntax) the first dar's output to the second dar's input makes the operation not requiering any temporary storage, only virtual memory is used to perform this copy. Compression is thus not requested as it would only slow down the whole process.

Last point: you should compare the copied data to the original before removing it, as no backup file has been dropped to the filesystem. This can simply be done using:

    diff -r <srcdir> <dstdir>

But no, diff will not check Extended Attributes, file forks, POSIX ACL, hard linked inodes, etc. If you want a more controllable way of copying a large directory, simply use dar with a real archive file: compare the archive against the original filesystem, restore the archive contents to its new place, and compare the restored filesystem against the original archive. A sketch of this procedure follows.
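A minimal sketch of that more controllable procedure (the archive path is arbitrary):

dar -c /tmp/copy -R /home/my -z --retry-on-change 3    # create a real archive
dar -d /tmp/copy -R /home/my                           # compare the archive against the original tree
dar -x /tmp/copy -R /home2/my                          # restore it into the new location
dar -d /tmp/copy -R /home2/my                          # compare the archive against the restored tree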

Any better idea? Feel free to contact dar's author for an update of this documentation!


dar-2.4.8/doc/index.html0000644000175000017430000003214312010477201011755 00000000000000 Dar's Documentation
DAR's Documentation



1 - Presentation

2 - Obtaining dar/libdar

3 - Command-line tools usage

Dar's command-line tools are a set of six command-line programs. Since release 2.0.0, dar's source code has been split in two parts: one is command-line specific, the second is the libdar library with its API (application interface). Both parts are still released in the same package. The command-line tools rely on libdar to manage dar archives. Some other external tools also use dar or libdar to provide additional features or other interfaces, like a Graphical User Interface. Here we will only see how to use the dar command-line tools.

You are invited to read the following documents:
If you lack some information from the previous documents, you are welcome to ask for support

4 - Libdar and its API

Libdar has (I hope) a well documented interface that lets you access dar's features from your program. Libdar is written in C++, thus you either have to use C or C++, or use bindings for the language of your choice (bindings you are welcome to make public, but which cannot be maintained beside libdar due to the work overhead it would generate to the detriment of dar's evolution; you are welcome to inform me about any existing binding so that I can add a link to it from dar's homepage).

You are invited to read the following documents in order, until you get enough information to address your need. Most of the time the first two sources of information should be enough, but don't hesitate to subscribe to the libdar-api mailing-list if some points about the API and the way to use it are not clear.

5 - Dar's internals

If you want to know more about the way dar is implemented, you can read the dar/libdar internals notes. The dar-discussion mailing-list is also a place where discussions about the current implementation, alternative ways of doing things and the future evolution of dar are welcome.

6 - Actions

Important: I no longer answer support requests made by email directly addressed to me. The reason is simple: posting your request in a public area (like the dar-support mailing-list) makes it visible to anyone. Answers to your problem might concern other people, and so a public area is the best place for answers to reside as well. I do not have as much time as I wish to develop DAR (adding new features and porting to new systems), so keeping support public will save me a little time, since it avoids me repeating the same answers to the same questions.
 Sharing must go both ways.




Stay informed about dar/libdar events

You can subscribe to the dar-news mailing-list, which is a read-only mailing-list with a very low email volume (on average, less than one email a month), to be informed about major events like new releases or security issues.


Asking for support

If the FAQ, tutorial, mini-howto, usage notes, or man pages do not answer your problem, you can read the dar-support mailing-list archive at gmane and use the search engine (the green text box at the bottom of the page) to look for keywords that match your problem. If that still does not solve your problem, you can subscribe to the dar-support mailing-list and send an email to the mailing-list that explains your problem.


Reporting a Bug


First, check that the problem has not already been seen and addressed on the dar-support mailing-list. Then, check the Bug Tracker to see whether the bug has not already been reported (thanks for avoiding duplicates). If found, you can monitor the corresponding item to be informed of any update concerning this bug. If the item is fixed, you can grab the source code with the fix from CVS. But if the bug you met is not yet reported, feel free to create a new report, providing as much information and detail as you can to ease reproducing and fixing the bug.
    Note that you need to register at Sourceforge to be able to open a bug (subscription to Sourceforge is free, and won't spam your email: I have used it since 2002 and never got spam on the email I gave, which is only known by sourceforge). Giving a real email address is beneficial, because you'll get a notification when the status of the bug changes (like when it has been resolved); it also avoids me wasting time trying to reproduce a bug reported anonymously that is impossible to reproduce (a real bug with very little information, a user mistake, ...).


Asking for a new feature


Use the New Feature tracker, first checking that no one has already thought about the request you are about to make (thanks for avoiding duplicates). Note that the ability to ask for a feature does not imply an obligation for me to implement it, in particular if the feature is not compatible with an already existing feature.

Submitting a patch

Feel free to use the Patch Tracker at Sourceforge.






dar-2.4.8/doc/man/0000755000175000017520000000000012023053205010606 500000000000000dar-2.4.8/doc/man/Makefile.in0000644000175000017520000002502012023053151012572 00000000000000# Makefile.in generated by automake 1.11.1 from Makefile.am. # @configure_input@ # Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, # 2003, 2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation, # Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ pkgdatadir = $(datadir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ pkglibdir = $(libdir)/@PACKAGE@ pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = doc/man DIST_COMMON = $(dist_noinst_DATA) $(srcdir)/Makefile.am \ $(srcdir)/Makefile.in ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/iconv.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/nls.m4 $(top_srcdir)/m4/po.m4 \ $(top_srcdir)/m4/progtest.m4 $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = SOURCES = DIST_SOURCES = DATA = $(dist_noinst_DATA) DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) ACLOCAL = @ACLOCAL@ AMTAR = @AMTAR@ AR = @AR@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DOXYGEN_PROG = @DOXYGEN_PROG@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GREP = @GREP@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LD = @LD@ LDFLAGS = @LDFLAGS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ NM = @NM@ NMEDIT = @NMEDIT@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = 
@PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ POSUB = @POSUB@ RANLIB = @RANLIB@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ STRIP = @STRIP@ UPX_PROG = @UPX_PROG@ USE_NLS = @USE_NLS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ bindir = @bindir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ datadir = @datadir@ datarootdir = @datarootdir@ docdir = @docdir@ doxygen = @doxygen@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ groff = @groff@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ localedir = @localedir@ localstatedir = @localstatedir@ lt_ECHO = @lt_ECHO@ mandir = @mandir@ mkdir_p = @mkdir_p@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ prefix = @prefix@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp = @tmp@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ upx = @upx@ dist_noinst_DATA = index.html @USE_GROFF_TRUE@TARGET = dar.html dar_slave.html dar_xform.html dar_manager.html dar_cp.html all: all-am .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --gnu doc/man/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --gnu doc/man/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs tags: TAGS TAGS: ctags: CTAGS CTAGS: distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am @USE_GROFF_FALSE@all-local: all-am: Makefile $(DATA) all-local installdirs: install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ `test -z '$(STRIP)' || \ echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
@USE_GROFF_FALSE@clean-local: clean: clean-am clean-am: clean-generic clean-libtool clean-local mostlyclean-am distclean: distclean-am -rm -f Makefile distclean-am: clean-am distclean-generic dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: .MAKE: install-am install-strip .PHONY: all all-am all-local check check-am clean clean-generic \ clean-libtool clean-local distclean distclean-generic \ distclean-libtool distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am install-man \ install-pdf install-pdf-am install-ps install-ps-am \ install-strip installcheck installcheck-am installdirs \ maintainer-clean maintainer-clean-generic mostlyclean \ mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \ uninstall uninstall-am @USE_GROFF_TRUE@%.1: @USE_GROFF_TRUE@ cp ../../man/$@ . @USE_GROFF_TRUE@%.html: %.1 @USE_GROFF_TRUE@ sed -e 's%\-%\\-%g' < ./$< | groff -man -Thtml | sed -e 's% $@ @USE_GROFF_TRUE@all-local: $(TARGET) @USE_GROFF_TRUE@clean-local: @USE_GROFF_TRUE@ rm -f $(TARGET) *.1 # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: dar-2.4.8/doc/man/index.html0000644000175000017430000000506012010477201012526 00000000000000 Dar - Dynamically Generated Documentation
Dar Documentation


Dynamically Generated Documentation





The following documentation has not been built manually. If you cannot access the following links while reading this page from a source package, this may be because you have not typed 'make' or because you lack some requirements like Groff and Doxygen. You can also access the Dar Documentation on dar's homepage or mirror page, with all dynamically generated documentation available.


Libdar's API Documentation:

dar-2.4.8/doc/man/Makefile.am0000644000175000017430000000051612003275573012600 00000000000000dist_noinst_DATA=index.html if USE_GROFF TARGET=dar.html dar_slave.html dar_xform.html dar_manager.html dar_cp.html %.1: cp ../../man/$@ . %.html: %.1 sed -e 's%\-%\\-%g' < ./$< | groff -man -Thtml | sed -e 's% $@ all-local: $(TARGET) clean-local: rm -f $(TARGET) *.1 endif dar-2.4.8/doc/usage_notes.html0000644000175000017430000026701712010477201013174 00000000000000 DAR's Usage Notes
Dar Documentation


Command-line Usage Notes





Introduction

You will find here a collection of examples showing how to use several features of the dar suite command-line tools.

Contents




Dar and remote backup server

The situation is the following: you have a host (called "local" in the following) running an operational system, which you want to back up regularly without disturbing users. For security reasons you want to store the backup on another host (called "remote host" in the following) that is only used for backups. Of course, you do not have much space on the local host to store the archive.

Between these two hosts, you could use NFS and nothing more would be necessary to use dar as usual. But if for security reasons you don't want to use NFS (insecure network, local users must not have access to backups) and prefer to communicate through an encrypted session (using ssh for example), then you need the dar features brought by version 1.1.0:

dar can output its archive to stdout instead of a given file. To activate it, use "-" as basename. Here is an example :

dar -c - -R / -z | some_program
or
dar -c - -R / -z > named_pipe_or_file

Note that slice splitting is not available here, as it does not make much sense when writing to a pipe (a pipe has no name, and there is no way to seek in a pipe, while dar needs to go back and set a flag in a slice header when it is not the last slice of the set). At the other end of the pipe (on the remote host), the data can be redirected to a file with a proper filename (something that matches "*.1.dar").

some_other_program > backup_name.1.dar

It is also possible to redirect the output to dar_xform which can in turn on the remote host split the data flow in several files, pausing between them if necessary, exactly as dar is able to do:

some_other_program | dar_xform -s 100M - backup_name

this will create backup_name.1.dar and so on. The resulting archive is totally compatible with those directly generated by dar. OK, you are happy: you can back up the local filesystem to a remote server through a secure socket session, into a full-featured dar archive, without using NFS. But now you want to make a differential backup taking this archive as reference. How to do that? The simplest way is to use the feature called "isolation", which extracts the catalogue from the archive and stores it in a little file. On the remote backup server you would type:

dar -A backup_name -C CAT_backup_name -z

if the catalogue is too big to fit on a floppy, you can split it as usual using dar:

dar -A backup_name -C CAT_backup_name -z -s 1440k

the generated archive (CAT_backup_name.1.dar, and so on) only contains the catalogue, but can still be used as reference for a new backup (or as a backup of the archive's internal catalogue, using -x and -A at the same time). You just need to transfer it back to the local host, either using floppies, or through a secured socket session, or even by directly isolating the catalogue to a pipe that goes from the remote host to the local host:

on remote host:
dar -A backup_name -C - -z | some_program

on local host:
some_other_program > CAT_backup_name.1.dar

or use dar_xform as previously if you need splitting :
some_other_program | dar_xform -s 1440k CAT_backup_name

then you can make your differential backup as usual:
dar -A CAT_backup_name -c - -z -R / | some_program

or if this time you prefer to save the archive locally:
dar -A CAT_backup_name -c backup_diff -z -R /

For differential backups, instead of isolating the catalogue, it is also possible to read an archive or its extracted catalogue through pipes. Yes, two pipes are required for dar to be able to read an archive: the first goes from dar to the external program "dar_slave" and carries orders (requests for some portions of the archive), and the other goes from "dar_slave" back to "dar" and carries the requested data.

By default, if you specify "-" as basename for -l, -t, -d, -x, or for -A (used with -C or -c), dar and dar_slave will use their standard input and output to communicate. Thus you need an additional program to connect the output of the first to the input of the second, and vice versa. Warning: you cannot use named pipes directly that way, because dar and dar_slave would block upon opening the first named pipe, waiting for the peer to open it too, even before they have started (dead lock at shell level). For named pipes, the -i and -o options help: they receive a filename as argument, which may be a named pipe. The -i argument is used instead of stdin and -o instead of stdout. Note that for dar, -i and -o are only available if "-" is used as basename. Let's take an example:

You now want to restore an archive from your remote backup server. Thus on it you have to run dar_slave this way

on remote server:
some_prog | dar_slave backup_name | some_other_prog
or
dar_slave -o /tmp/pipe_todar -i /tmp/pipe_toslave backup_name

and on the local host you have to run dar this way:

some_prog | dar -x - -v ... | some_other_prog
or
dar -x - -i /tmp/pipe_todar -o /tmp/pipe_toslave -v ...

It does not matter whether dar or dar_slave is started first, nor which of them uses -i and/or -o. What is important here is to connect their inputs and outputs to one another, no matter how. The only restriction is that the communication channel must be perfect: no data loss, no duplication, no reordering; thus communication over TCP is fine.

Of course, you can also isolate a catalogue through pipes, test an archive, make a comparison, use a reference catalogue this way, etc., and even output the resulting archive to a pipe! If -C or -c is used with "-" while -A is also used with "-", it is then mandatory to use -o: the output catalogue or archive will be generated on standard output, so to send orders to dar_slave you must use another channel, given by -o:

       LOCAL HOST                                   REMOTE HOST
   +-----------------+                     +-----------------------------+
   |   filesystem    |                     |     backup of reference     |
   |       |         |                     |            |                |
   |       |         |                     |            |                |
   |       V         |                     |            V                |
   |    +-----+      | backup of reference |      +-----------+          |
   |    | DAR |--<-]=========================[-<--| DAR_SLAVE |          |
   |    |     |-->-]=========================[->--|           |          |
   |    +-----+      | orders to dar_slave |      +-----------+          |
   |       |         |                     |      +-----------+          |
   |       +--->---]=========================[->--| DAR_XFORM |--> backup|
   |                 |        saved data   |      +-----------+ to slices|
   +-----------------+                     +-----------------------------+

on local host :
dar -c - -A - -i /tmp/pipe_todar -o /tmp/pipe_toslave | some_prog

on the remote host :

dar_slave -i /tmp/pipe_toslave -o /tmp/pipe_todar full_backup
dar_slave provides the full_backup for -A option

some_other_prog | dar_xform - diff -s 140M -p ...
while dar_xform makes slices of the output archive provided by dar

See below an example with netcat and another using ssh.



dar and ssh

As reported by "DrMcCoy" in the historical forum "Dar Technical Questions", the netcat program can be very helpful if you plan to back up over the network.

The context in which the following examples take place is this: a "local" host named "flower" has to be backed up to, or restored from, a remote host called "honey" (OK, the names of the machines are silly...).

Example of use with netcat. Note that the netcat command name is "nc".

Creating a full backup of "flower" saved on "honey"
on honey:
nc -l -p 5000 > backup.1.dar

then on flower:
dar -c - -R / -z | nc -w 3 honey 5000

but this will produce only one slice, instead you could use the following to have several slices on honey:

on honey:
nc -l -p 5000 | dar_xform -s 10M -S 5M -p - backup

on flower:
dar -c - -R / -z | nc -w 3 honey 5000

By the way, note that dar_xform can also launch a user script between slices exactly the same way as dar does, thanks to the -E and -F options.

Testing the archive
Testing the archive can be done on honey, but you could also do it remotely, even if it is not very interesting to do it that way!

on honey:
nc -l -p 5000 | dar_slave backup | nc -l -p 5001

on flower:
nc -w 3 honey 5001 | dar -t - | nc -w 3 honey 5000

Note also that dar_slave can run a script between slices: if for example you need to load slices from a tape robot, this can be done automatically, or if you just want to mount/unmount a removable medium, eject or load it and ask the user to change it...

Comparing with original filesystem
on honey:
nc -l -p 5000 | dar_slave backup | nc -l -p 5001

on flower:
nc -w 3 honey 5001 | dar -d - -R / | nc -w 3 honey 5000

Making a differential backup
Here the problem is that dar needs two pipes to send orders and read data coming from dar_slave, and a third pipe to write out the new archive. This cannot be realized only with stdin and stdout as previously. Thus we will need a named pipe (created by the mkfifo command). 

on honey:
nc -l -p 5000 | dar_slave backup | nc -l -p 5001
nc -l -p 5002 | dar_xform -s 10M -p - diff_backup

on flower:
mkfifo toslave
nc -w 3 honey 5000 < toslave &
nc -w 3 honey 5001 | dar -A - -o toslave -c - -R / -z | nc -w 3 honey 5002


With netcat the data goes in the clear over the network. You could use ssh instead if you want encryption over the network. The principle is the same.

Example of use with ssh

Creating full backup of "flower" saved on "honey"
we assume you have an sshd daemon running on flower.
on honey:
ssh flower dar -c - -R / -z > backup.1.dar

or still on honey:
ssh flower dar -c - -R / -z | dar_xform -s 10M -S 5M -p - backup

Testing the archive
on honey:
dar -t backup

or from flower: (assuming you have a sshd daemon on honey)

ssh honey dar -t backup

Comparing with original filesystem
on flower:
mkfifo todar toslave
ssh honey dar_slave backup > todar < toslave &
dar -d - -R / -i todar -o toslave


Important. Depending on the shell you use, it may be necessary to invert the order in which "> todar" and "< toslave" are given on the command line. The problem is that the shell hangs trying to open the pipes. Thanks to "/PeO" for his feedback.
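
For instance, with such a shell, the same line would simply be written with the two redirections swapped:

ssh honey dar_slave backup < toslave > todar &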

or on honey:
mkfifo todar toslave
ssh flower dar -d - -R / > toslave < todar &
dar_slave -i toslave -o todar backup


Making a differential backup
on flower:
mkfifo todar toslave
ssh honey dar_slave backup > todar < toslave &

and on honey:
ssh flower dar -c - -A - -i todar -o toslave > diff_linux.1.dar
or
ssh flower dar -c - -A - -i todar -o toslave | dar_xform -s 10M -S 5M -p - diff_linux


Bytes, bits, kilo, mega etc.


You probably know the metric system a bit: a dimension is expressed by a base unit (the meter for distance, the liter for volume, the joule for energy, the volt for electrical potential, the bar for pressure, the watt for power, the second for time, etc.), and declined using prefixes:

      prefix (symbol) = ratio
    ================
deci  (d) = 0.1
centi (c) = 0.01
milli (m) = 0.001
micro (u) = 0.000,001 (symbol is not "u" but the "mu" Greek letter)
nano  (n) = 0.000,000,001
pico  (p) = 0.000,000,000,001
femto (f) = 0.000,000,000,000,001
atto  (a) = 0.000,000,000,000,000,001
zepto (z) = 0.000,000,000,000,000,000,001
yocto (y) = 0.000,000,000,000,000,000,000,001
deca (da) = 10
hecto (h) = 100
kilo  (k) = 1,000  (yes, this is a lower case letter, not an upper case!)
mega  (M) = 1,000,000
giga  (G) = 1,000,000,000
tera  (T) = 1,000,000,000,000
peta  (P) = 1,000,000,000,000,000
exa   (E) = 1,000,000,000,000,000,000
zetta (Z) = 1,000,000,000,000,000,000,000
yotta (Y) = 1,000,000,000,000,000,000,000,000

This way two milliseconds (noted "2 ms") are 0.002 second, and 5 kilometers (noted "5 km") are 5,000 meters. All was fine and nice until computer science appeared: in that discipline, the need arose to measure the size of information storage. The smallest unit is the bit (contraction of binary digit), binary because it has two possible states: "0" and "1". Grouping bits by 8, computer scientists called the result a byte. A byte has 256 different states (2 to the power 8). The ASCII (American Standard Code for Information Interchange) code arrived and assigned a letter, or more generally a character, to certain values of a byte (A is assigned to 65, space to 32, etc.). And as most text is composed of a set of characters, people started to count sizes in bytes. Time after time, following technology evolution, memory sizes approached 1000 bytes.

But as memory is accessed through a bus made of a fixed number of cables (or integrated circuits), on which only two possible voltages are allowed to mean 0 or 1, the total number of bytes that a bus can address is always a power of 2. With a two-cable bus, you can have 4 values (00, 01, 10 and 11, where a digit is the state of a cable), so you can address 4 bytes. Giving a value to each cable defines an address to read or write in the memory. Unfortunately 1000 is not a power of 2, so when sizes approached 1000 bytes it was decided that a "kilobyte" would be 1024 bytes, which is 2 to the power 10. Some time after, and by extension, a megabyte was defined to be 1024 kilobytes, a gigabyte to be 1024 megabytes, and so on, with the exception of the "1.44 MB" floppy, whose capacity is 1440 kilobytes, so there "mega" means 1000 kilo...

In parallel, in the telecommunications domain, the move from analog to digital signals also brought the bit into use. In place of the analog signal came a flow of bits representing the samples of the original signal. For telecommunications the problem was more a matter of flow rate: how many bits could be transmitted per second. In ancient times appeared 1200 bits per second, then 64000, also designated as 64 kbit/s. Thus here, kilo keeps its usual meaning of 1000 times the base unit. You can also find 10 Mbit/s Ethernet, which is 10,000,000 bits per second, same thing with Token-Ring that had rates of 4, 16 or 100 Mbit per second (4,000,000, 16,000,000 or 100,000,000 bits/s). But even for telecommunications, kilo is not always 1000 times the base unit: the E1 bandwidth at 2 Mbit/s, for example, is in fact 32*64 kbit/s, thus 2048 kbit/s... not 2000 kbit/s.

Anyway, back to dar: you have the possibility to give sizes in bytes or using a single letter as suffix (k, M, G, T, P, E, Z, Y, the base unit being implicitly the byte), thus the possibility to provide a size in kilo, mega, giga, tera, peta, exa, zetta or yotta bytes, with the computer science definition of these terms (powers of 1024) by default.

These suffixes are here for simplicity, so that you do not have to compute powers of 1024 yourself. For example, if you want to fill a CD-R you can use the "-s 650M" option, which is equivalent to "-s 681574400"; choose the one you prefer, the result is the same :-). Now, if you want 2 megabyte slices in the sense of the metric system, simply use "-s 2000000" or read below:

Starting version 2.2.0, you can alter the meaning of all the suffixes used by dar, the

--alter=SI-units

(which can be shortened to -aSI or -asi) changes the meaning of the suffixes that follow on the command-line to the metric system (or Système International), up to the end of the line or up to a

--alter=binary-units

argument (which can be shortened to -abinary), after which we are back to the computer science meaning of kilo, mega, etc., up to the end of the line or up to the next --alter=SI-units. Thus in place of -s 2000000 one could use:

   -aSI -s 2M
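
To check what these suffixes mean, here is the same slice size computed with both conventions (just applying the two definitions given above):

   -s 2M        : 2 x 1,048,576 = 2,097,152 bytes (binary units, the default)
   -aSI -s 2M   : 2 x 1,000,000 = 2,000,000 bytes (SI units)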


Yes, and to make things more confusing, marketing arrived and made sellers count gigabytes a third way: I remember that some time ago I bought a hard disk described as "2.1 GB" (OK, that was quite a few years ago!), but in fact it had only 2097152 kilobytes available. This is far from 2202009 kilobytes (= 2.1 GiB in the computer science meaning), and a bit more than 2,000,000 kilobytes (metric system). OK, if it really had those 2202009 kilobytes (the computer science meaning of 2.1 GB), this hard disk would have been sold under the label "2.5 GB"! ... just kidding :-)

Note that to distinguish the power-of-1024 meaning of kilo, mega, giga and so on, new binary prefixes are officially defined, but they are not used within dar:
Ki = 1024
Mi = 1024*1024
Gi = 1024*1024*1024
Ti, Pi, Ei, Zi, Yi = and so on...

For example, we have 1 KiB for 1 kibibyte (= 1024 bytes) and 1 Kibit for 1 kibibit (= 1024 bits), while 1 kB = 1000 bytes and 1 kbit = 1000 bits, ...



Running DAR in background


DAR can be run in background:

dar [command-line arguments] < /dev/null &
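
If you also want to keep a trace of dar's output while it runs unattended, a possible variant (the log file location is just an example to adapt) is:

nohup dar [command-line arguments] < /dev/null > /tmp/dar_output.log 2>&1 &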



Files' extension used

dar suite programs use several types of files:
  • slices (dar, dar_xform, dar_slave, dar_manager)
  • configuration files (dar, dar_xform, dar_slave)
  • databases  (dar_manager)
  • user commands for slices (dar, dar_xform, dar_slave, using -E, -F or -~ options)
  • user commands for files (dar only, during the backup process using -= option)
  • filter lists (dar's -[ and -] options)
While for slices the extension and even the filename format cannot be customized (basename.slicenumber.dar), there is no mandatory rule for the other types of files.

In case you have no idea how to name these, here are the extensions I use:
"*.dcf": Dar Configuration file, aka DCF files (used with dar's -B option)
"*.dmd": Dar Manager Database, aka DMD files (used with dar_manager's -B and -C options)
"*.duc": Dar User Command, aka DUC files (used with dar's -E, -F, -~ options)
"*.dbp": Dar Backup Preparation, aka DBP files (used with dar's -= option)
"*.dfl": Dar Filter List, aka DFL files (used with dar's -[ or -] options)

but, you are totally free to use the filename you want !   ;-)




Running command or scripts from DAR


You can run commands from dar at two different places:
  • when dar has finished writing a slice (in backup, isolation or merging modes), or just before dar needs a slice in reading mode (testing, diffing, extracting, ...) and when reading an archive of reference (DUC files)
  • before and after saving a given file during the backup process (DBP files)

A - Between slices:

This concerns the -E, -F and -~ options. They all receive a string as argument. Thus, if the argument must be a command with its own arguments, you have to put these between quotes so that they appear as a single string to the shell that interprets the dar command-line. For example, if you want to call

df .

[these are two words: "df" (the command) and "." (its argument)], then you have to use the following on the DAR command-line:

-E "df ."
or
-E 'df .'


DAR provides several substitution strings in that context:
  • %% is replaced by a single %. Thus if you need a % in your command line you MUST replace it by %% in the argument string of -E, -F or -~
  • %p is replaced by the path to the slices
  • %b is replaced by the basename of the slices
  • %n is replaced by the number of the slice
  • %N is replaced by the number of the slice with padded zeros (it may differ from %n only when --min-digits option is used)
  • %c is replaced by the context, which is "init", "operation" or "last_slice" depending on the situation.
The number of the slice (%n) is either the slice just written or the next slice to be read. For example, if you create a new archive (either using -c, -C or -+), in the -E option the %n macro is the number of the last completed slice. Otherwise (using -t, -d, -A (with -c or -C), -l or -x), this is the number of the slice that will be required very soon. The %c macro (the context) is substituted by "init", "operation" or "last_slice":

  • init : when the slice is asked before the catalogue is read
  • operation : once the catalogue is read and/or data treatment has begun.
  • last_slice : when the last slice has been written (archive creation only)

What is the use of this feature? For example, you may want to burn the brand-new slices to CD as soon as they are available.

let's build a little script for that:

%cat burner
#!/bin/bash

if [ "$1" == "" -o "$2" == "" ] ; then
  echo "usage: $0 <filename> <number>"
  exit 1
fi

f=`basename "$1"`
mkdir T
mv "$1" T
mkisofs -o /tmp/image.iso -r -J -V "archive_$2" T
cdrecord dev=0,0 speed=8 -data /tmp/image.iso
rm /tmp/image.iso
# Now assuming an automount will mount the just newly burnt CD
# (note that $1 may contain a path, so we compare using its basename only):
if diff "/mnt/cdrom/$f" "T/$f" ; then
  rm -rf T
else
  exit 2
fi
%

This little script receives the slice filename and its number as arguments; what it does is burn a CD with the slice and compare the resulting CD with the original slice. Upon failure, the script returns 2 (or 1 if the syntax is not correct on the command-line). Note that this script is only here for illustration; there are many more interesting user scripts made by several dar users, available in the examples part of the documentation.

One could then use it this way:

-E "./burner %p/%b.%n.dar %n"

which can lead to the following DAR command-line:

dar -c ~/tmp/example -z -R / usr/local -s 650M -E "./burner %p/%b.%n.dar %n" -p

First note that as our script does not change the CD in the device, we need to pause between slices (-p option). The pause takes place after the execution of the command (-E option). Thus we could add to the script a command to send a mail or play some music to inform us that the slice has been burnt. The advantage here is that we don't have to come back twice per slice: once when the slice is ready, and once when it is burnt.

Another example:

You want to send a huge file by email. (OK, it would be better to use FTP, but sometimes people think that the less you can do the more they control you, and thus they disable many services, either by fear of the unknown or by stupidity.) So let's suppose that you only have mail available to transfer your data:

dar -c toto -s 2M my_huge_file -E "uuencode %b.%n.dar %b.%n.dar | mail -s 'slice %n' your@email.address ; rm %b.%n.dar ; sleep 300"

Here we make an archive with slices of 2 megabytes, because our mail system does not allow larger emails. We save only one file: "my_huge_file" (but we could even save the whole filesystem, it would also work). The command we execute each time a slice is ready is:

  1. uuencode the file and send the output by email to our address.
  2. remove the slice
  3. wait 5 minutes, so as not to overload the mail system too much. This is also useful if you have a small mailbox, from which it takes time to retrieve mail.
Note that we did not use the %p substitution string, as the slices are saved in the current directory.

A last example is while extracting: in the case the slices cannot all be present in the filesystem, you need a script or a command to fetch the slice that is about to be requested. It could use ftp, lynx, ssh, etc. I leave the script to you as an exercise. :-) Note: if you plan to share your DUC files, please follow the convention for DUC files (see below).
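
For those who want a starting point anyway, here is a minimal sketch of such a fetching command; the host name and remote directory are pure assumptions to adapt to your context, and scp is just one possible transfer tool:

-E "scp backup_host:/var/backups/%b.%n.dar %p/"

Remember that in reading mode the command given to -E is run just before dar needs the slice, so each slice is fetched on demand.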

B - Before and after saving a file:

This concerns the -=, -< and -> options. The -< (include) and -> (exclude) options let you define which files need a command to be run before and after their backup, while the -= option lets you define which command to run for those files.

Let's suppose you have a very large, frequently changing file located in /home/my/big/file, and several databases, each consisting of several files under /home/*/database/data, that need to have a coherent status and are also changing very often.

Saving them without precaution will most probably get your big file flagged as "dirty" in dar's archive, which means that the saved status of the file may be a status that never existed for that file: when dar saves a file it reads the first byte, then the second, etc., up to the end of the file. While dar is reading the middle of the file, an application may change the very beginning and then the very end of that file, but only the modified end of the file will be saved, leading the archive to contain a copy of the file in a state it never had.

For a database this is even worse: two or more files may need to have a coherent status. If dar saves a first file while another file is modified at the same time, this will not get the currently saved file flagged as "dirty", but it may lead the database to have its files saved in states incoherent with each other, thus leaving you with a backup of the database in a corrupted state.

For that situation not to occur, we will use the following options:

-R / "-<" home/my/big/file  "-<" "home/*/database/data"

First, you must pay attention to quote the -< and -> options so that the shell does not consider you are asking for redirection to stdout or from stdin. Back to the example: it says that for the file /home/my/big/file and for any "database/data" directory (or file) in the home directory of a user, a command will be run before and after saving that directory or file. We thus need to define the command to run, using the following option:

-= "/root/scripts/before_after_backup.sh %f %p %c"

Well, as you see, here too we may (and should) use substitution macros:
  • %% is replaced by a literal %
  • %p is replaced by the full path (including filename) of the file/directory to be saved
  • %f is replaced by the filename (without path) of the file/directory to be saved
  • %u is replaced by the uid of the file's owner
  • %g is replaced by the gid of the file's group
  • %c is replaced by the context, which is either "start" or "end" depending on whether the file/directory is about to be saved or has been completely saved.

 And our script here could look like this:

cat /root/scripts/before_after_backup.sh
#!/bin/sh


if [ "$1" = "" ]; then
   echo "usage: $0 <filename> <dir+filename> <context>"
   exit 1
fi

# for better readability:
filename="$1"
path_file="$2"
context="$3"

if [ "$filename" = "data" ]; then
   if [ "$context" = "start" ]; then
       : # action to stop the database located in "$path_file"
   else
       : # action to restart the database located in "$path_file"
   fi
else
   if [ "$path_file" = "/home/my/big/file" ]; then
     if [ "$context" = "start" ]; then
       : # suspend the application that writes to that file
     else
       : # resume the application that writes to that file
     fi
   else
     : # do nothing, or warn that no action is defined for that file
   fi
fi


So now, if we run dar with all these options, dar will execute our script once before entering any database/data directory located in the home directory of some user, and once again after all files of that directory have been saved. It will also run our script before and after saving our /home/my/big/file file.

If you plan to share your DBP files, please follow the DBP convention (see below).



Convention for DUC files

Since version 1.2.0, dar users can have dar call a command or script between slices, thanks to the -E, -F and -~ options; such commands or scripts are called DUC files. To be able to easily share your DUC commands or scripts, I propose the following convention:

- use the ".duc" extension to show anyone that the script/command respects the following convention
- it must be callable from dar with the following arguments:

example.duc %p %b %n %e %c [other optional arguments]

- when called without arguments, it must provide brief help on what it does and what the expected arguments are. This is the standard "usage:" convention.
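
As an illustration only, here is a skeleton respecting this convention (the actions themselves are intentionally left as comments):

#!/bin/sh
# example.duc - skeleton of a DUC file
if [ $# -lt 5 ]; then
   echo "usage: $0 <path> <basename> <slice number> <extension> <context>"
   exit 1
fi
path="$1"; base="$2"; num="$3"; ext="$4"; context="$5"
# do something useful with "$path/$base.$num.$ext" here
exit 0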

Then any user could share their DUC files without having to bother much about how to use them. Moreover, it would be easy to chain them:

If for example two persons created their own scripts, one "burn.duc" which burns a slice on DVD-R(W) and one "par.duc" which makes a Parchive redundancy file from a slice, anybody could use both at a time by giving the following argument to dar:

-E "par.duc %p %b %n %e %c 1 ; burn.duc %p %b %n %e %c"

or since version 2.1.0 with the following argument:

-E "par.duc %p %b %n %e %c 1" -E "burn.duc %p %b %n %e %c"

Of course, a script does not have to use all its arguments: in the case of burn.duc for example, the %c (context) is probably useless and not used inside the script, while it is still possible to give it all the "normal" arguments of a DUC file; extra unused arguments are simply ignored.

If you have interesting DUC scripts, you are welcome to contact me by email, so that I can add them to the web site and to the following releases. For now, check the doc/samples directory for a few examples of DUC files.

Note that all DUC scripts are expected to return an exit status of zero, meaning that the operation has succeeded. If another exit status is returned, dar asks the user for a decision (or aborts if no user can be asked, for example when dar is not run under a controlling terminal).



Convention for DBP files

Same as above, the following convention is proposed to ease the sharing of Dar Backup Preparation files:


- use the ".dbp" extension to show anyone that the script/command respects the following convention
- it must be callable from dar with the following arguments:

example.dbp %p %f %u %g %c [other optional arguments]

- when called without arguments, it must provide brief help on what it does and what the expected arguments are. This is the standard "usage:" convention.

Identically to DUC files, DBP files are expected to return an exit status of zero, else the backup process is suspended for the user to decide whether to retry, ignore the failure or abort the whole backup process.



User targets in DCF

Since release 2.4.0, a DCF file (one given to the -B option) can contain user targets. A user target is an extension of the conditional syntax, so we will first make a brief review of the conditional syntax.

Conditional syntax in DCF files:

The conditional syntax gives the possibility to have options in a DCF file that are only active in a certain context:
  • archive extraction
  • archive creation
  • archive listing
  • archive testing
  • archive isolation
  • archive merging
  • no action yet defined
  • all context
  • when a archive of reference is used
  • when an auxiliary archive of reference is used
These work with corresponding reserved keywords (see dar's man page for an exhaustive list). Let's take an example:

cat sample.dcf
# this is a comment

all:
-K aes:

extract:
-R /

reference:
-J aes: 

auxilliary:
-~ aes:

create:
-ac
-Z "*.mp3"
-Z "*.avi"

default:
-V

This way, the -Z options are only used when creating an archive, while the -K option is used in any case. Well, now that we have briefly reviewed the conditional syntax, you may have guessed that new "targets" (or keywords, if you prefer) can be added. Let's add the following to our DCF file:

compress:
-z lzo:5

In the usual situation all that follows the target "compress" up to the next target or the end of the file will not be used to configure dar, unless you provide the "compress" keyword on command-line:

dar -c test -B sample.dcf compress

Which will do exactly the same as if you have typed:

dar -c test -z lzo:5

Of course, you can use as many user targets as you wish in your files; the only constraint is that they must not have the name of a reserved keyword of the conditional syntax. You can also mix conditional syntax and user targets. Here follows an example:

cat sample.dcf
# this is a comment

all:
-K aes:

extract:
-R /

reference:
-J aes: 

auxilliary:
-~ aes:

create:
-ac
-Z "*.mp3"
-Z "*.avi"

default:
-V

# our first user target named "compress":
compress:
-z lzo:5

# a second user target named "verbose":
verbose:
-v
-vs

# a third user target named "ring":
ring:
-b

# a last user target named "hash":
hash:
--hash sha1

So now, you can use dar and activate a set of commands by simply adding the name of the target on the command-line:

dar -c test -B sample.dcf compress ring verbose hash

which is equivalent to:

dar -c test -K aes:
-ac -Z "*.mp3" -Z "*.avi" -z lzo:5 -v -vs -b --hash sha1

Last, for those who like complicated things, you can recursively use DCF files inside user targets, which may contain conditional syntax and the same or other user targets of your own.





 Using data protection with DAR & Parchive

Parchive (PAR in the following) is a very nice program that makes it possible to recover a file which has been corrupted. It creates redundancy data stored in a separate file (or set of files), which can be used to repair the original file. This additional data may also be damaged: PAR will still be able to repair the original file as well as the redundancy files, up to a certain point, of course. This point is defined by the percentage of redundancy you defined for a given file. For more details, check the official PAR site.
Since version 2.4.0, dar is provided with a default /etc/darrc file. It contains a set of user targets, among which is "par2". This user target invokes the dar_par.dcf file provided beside dar, which automatically creates a parity file for each slice during backup, and verifies and if necessary repairs slices when testing an archive. So now you only need to use dar this way to activate Parchive with dar:

dar [options] par2

Simple no?
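
For example, a backup command using this target could look like the following (the archive name, slice size and compression settings are arbitrary here):

dar -c my_backup -R /home -s 1G -z par2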



Examples of file filtering

File filtering is what defines which files are saved, listed, restored, compared, tested, and so on. In brief, in the following we will say which files are elected for the operation, where "operation" means either a backup, a restoration, an archive contents listing, an archive comparison, etc.

File filtering is done using the following options: -X, -I, -P, -R, -[, -] or -g.

OK, let's start with some concrete examples:

dar -c toto

this will back up the current directory and everything located in it, building the toto archive, also located in the current directory. Usually you should get a warning telling you that you are about to back up the archive itself.

Now let's see something less obvious:

dar -c toto -R / -g home/ftp

the -R option tells dar to consider all files under the / root directory, while the -g "home/ftp" argument tells dar to restrict the operation to the home/ftp subdirectory of the given root directory, thus here /home/ftp.

But this is a little bit different from the following:

dar -c toto -R /home/ftp

here dar will save any file under /home/ftp without any restriction. So what is the difference? Yes, exactly the same files will be saved as just above, but the file /home/ftp/welcome.msg, for example, will be stored as <ROOT>/welcome.msg, where <ROOT> will be replaced by the argument given to the -R option (which defaults to ".") at restoration or comparison time, while in the previous example the same file would have been stored with the following path: <ROOT>/home/ftp/welcome.msg.

dar -c toto -R / -P home/ftp/pub -g home/ftp -g etc

as previously, but the -P option excludes all files under /home/ftp/pub from the operation. Additionally, the /etc directory and its subdirectories are saved.

dar -c toto -R / -P etc/password -g etc

here we save all of /etc except the /etc/password file. Arguments given to -P can be plain files too, but when they are directories the exclusion applies to the directory itself and its contents. Note that using -X to exclude "password" does not have exactly the same effect:

dar -c toto -R / -X "password" -g etc

will save all the /etc directory except any file whose name equals "password". Thus, of course, /etc/password will not be saved, but if it exists, /etc/rc.d/password will not be saved either, provided it is not a directory. Yes, if a directory /etc/rc.d/password exists, it will not be affected by the -X option. Like the -I option, the -X option does not apply to directories. The reason is to be able to filter out some kinds of files without excluding a particular directory. For example, suppose you want to save all mp3 files and only mp3 files:

dar -c toto -R / -I "*.mp3" -I "*.MP3" home/ftp

will save any file ending in .mp3 or .MP3 under the /home/ftp directory and its subdirectories. If instead -I (or -X) applied to directories, we would only be able to recurse into subdirectories ending in ".mp3" or ".MP3". If you had a directory named "/home/ftp/Music" for example, full of mp3 files, you would not have been able to save it.

Note that glob expressions (the shell-like wildcards '*', '?' and so on) can do much more complicated things, like "*.[mM][pP]3". You could thus replace the previous example by:

dar -c toto -R / -I "*.[mM][pP]3" home/ftp

this would cover all .mp3, .mP3, .Mp3 and .MP3 files. One step further, the -acase option makes the filtering arguments that follow it case sensitive (which is the default), while -ano-case (-an for short) makes the filtering arguments that follow it case insensitive. In short we could have:

dar -c toto -R / -an -I "*.mp3" home/ftp

And instead of using glob expressions, you can use regular expressions (regex) thanks to the -aregex option. You can also alternate both of them, using -aglob to return back to glob expressions. Each -aregex / -aglob option defines the expected type of expression in the -I/-X/-P/-g/-u/-U/-Z/-Y options that follow, up to the end of the line or to the next -aregex / -aglob option.

Last a more complete example:

dar -c toto -R / -P "*/.mozilla/*/[Cc]ache" -X "*~" -X ".*~" -I "*.[Mm][pP][123]" -g home/ftp -g "fake"

so what ?

OK, here we save everything under /home/ftp and /fake, but we do not save the contents of directories matching "*/.mozilla/*/[Cc]ache", like for example the "/home/ftp/.mozilla/ftp/abcd.slt/Cache" directory and its contents. In the directories we do traverse, we save any file matching "*.[Mm][pP][123]" except those ending with a tilde (~ character); thus for example a file whose name is "toto.mp3" or ".bloup.Mp2".

Now the inside algorithm:

 a file is elected for the operation if
 1 - its name does not match any -X option, or it is a directory
*and*
 2 - if some -I options are given, the file is either a directory or matches at least one of the -I options given
*and*
 3 - its path and filename do not match any -P option
*and*
 4 - if some -g options are given, the path to the file matches at least one of the -g options.
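
For illustration, let's apply this algorithm to the file /home/ftp/Music/toto.mp3 with the last example above: its name does not match any -X mask, as it does not end with a tilde (rule 1); it matches the -I "*.[Mm][pP][123]" mask (rule 2); its path does not match the -P "*/.mozilla/*/[Cc]ache" mask (rule 3); and it is located under home/ftp, which is one of the -g arguments (rule 4). The file is thus elected for the operation.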

The algorithm we detailed above is the default one, which is historical and called the unordered method. Since version 2.2.x there is also an ordered method (activated by adding the -am option) which gives even more power to filters; the dar man page will give you all the details.

In parallel with file filtering, you will find Extended Attributes filtering thanks to the -u and -U options (they work the same as the -X and -I options but apply to EA). You will also find file compression filtering (-Z and -Y options), which defines which files to compress or not to compress; here too they work the same way as the -X and -I options, and the -ano-case / -acase options also apply here, as well as the -am option. Last, all these filters (file, EA, compression) can also use regular expressions in place of glob expressions (thanks to the -ag / -ar options).

Note, as a very last point, that the --backup-hook-include and --backup-hook-exclude options act the same as the -P and -g options but apply to the files about to be saved, and provide the user the possibility to perform an action (--backup-hook-execute) before and after saving the files matching these masks. The dar man page will give you all the necessary details to use this feature.




Decremental Backup


Well, you have already heard about the "full" backup, in which all files are completely saved in such a way that this backup alone lets you completely restore your data. You have also probably heard about the "differential" backup, in which only the changes that occurred since an archive of reference was made are stored. There is also the "incremental" backup, which, in substance, is the same as the "differential" one. The difference resides in the nature of the archive of reference: "differential" backups use only a "full" backup as reference, while "incremental" backups may use a "full" backup, a "differential" backup or another "incremental" backup as reference (well, in dar's documentation the term "differential" is commonly used in place of "incremental", since there is no conceptual difference from the point of view of the dar software).

Here we will describe what is meant by "decremental" backup. It all started with a feature request from Yuraukar on the dar-support mailing-list:

In the full/differential backup scheme, for a given file you have as many versions as changes that were detected from backup to backup. That's fair in terms of storage space required, as you do not store the same file in the same state twice, as you would do if you were doing only full backups. But the drawback is that you do not know in advance in which backup to find the latest version of a given file. So, if you want to restore your entire system to the latest state available from your backup set, you need to restore the most ancient backup (the latest full backup), then the others one by one in chronological order (the incremental/differential backups). This may take some time, yes. It is moreover inefficient, because for a file that has changed often you will restore N old revisions before restoring the last and most recent version.

Yuraukar's idea was to have the latest version of every file in the latest backup made. Thus the most recent archive would always stay a full backup. But to still be able to restore a file in an older state than the most recent one (in case of accidental suppression), we need a so-called decremental backup. This backup's archive of reference is in the future (a more recent decremental backup, or the latest backup made, which is a full backup in this scheme). This so-called "decremental" backup stores all the differences from this archive of reference that let you go from the reference state back to an older state.

Assuming it is more probable that you will restore the latest version of a filesystem than any older state available, decremental backups seem an interesting alternative to incremental backups, as in that case you only have to use one archive (the latest) and each file gets restored only once (old data does not get overwritten at each archive restoration as is the case with incremental restoration).

Let's take an example: we have 4 files in the system, named f1, f2, f3 and f4. We make backups at four different times t1, t2, t3 and t4, in chronological order. We will also perform some changes in the filesystem along this period: f1 will be removed from the system between t3 and t4, while f4 will only appear between t3 and t4. f2 will be modified between t2 and t3, while f3 will be changed between t3 and t4.

All this can be represented this way, where lines are the state at a given date while each column represents a given file.
 
time
   ^
   |                       * represents the version 1 of a file
t4 +         #    #    *   # represents the version 2 of a file
   |
t3 +    *    #    *  
   |
t2 +    *    *    *
   |
t1 +    *    *    *
   |
   +----+----+----+----+---
        f1   f2   f3   f4  


Now we will represent the contents of the backups at these different times, first using only full backups, then using incremental backups and at last using decremental backups. We will use the symbol '0' in place of data if a given file's data is not stored in the archive because it has not changed since the archive of reference was made. We will also use an 'x' to represent the information that a given file has been recorded in an archive as deleted since the archive of reference was made. This information is used at restoration time to remove a file from the filesystem, in order to get the exact state of files seen at the date the backup was made.

FULL BACKUPS

   ^
   |
t4 +         #    #    *           
   |
t3 +    *    #    *  
   |
t2 +    *    *    *
   |
t1 +    *    *    *
   |
   +----+----+----+----+---
        f1   f2   f3   f4  

Yes, this is easy: each backup contains all the files that existed at the time the backup was made. To restore the state the system had at a given date, we only use one backup, the one that best corresponds to the date we want. The drawback is that we saved version 1 of files f1 and f3 three times, and f2's version 2 twice, which corresponds to a waste of storage space.


FULL/INCREMENTAL BACKUPS


   ^
   |
t4 +    x    0    #    *     0 represents a file whose state only is recorded;
   |                         as such, no data is stored in the archive and
t3 +    0    #    0          very little space is consumed by such an entry
   |
t2 +    0    0    0
   |
t1 +    *    *    *
   |
   +----+----+----+----+---
        f1   f2   f3   f4  

Now we see that the archive made at date t2 does not contain any data, as no changes have been detected between t1 and t2. This backup is quite small and needs only little storage. The archive made at t3 only stores f2's new version, and at t4 the archive stores the new file f4 and f3's new version. We also see that in the t4 archive f1 is marked as removed from the filesystem, as it no longer exists in the filesystem but exists in the archive of reference made at t3.

As you see, restoring to the latest state is more complicated compared to only using full backups; nor is it simple to know in which backup to look for a given file's data at date t3, for example. But yes, we do not waste storage space anymore. The restoration process the user has to follow is to restore in turn:
- the archive made at t1, which will restore old versions of the files, including f1 which was removed at t4
- the archive made at t2, which will do nothing at all
- the archive made at t3, which will replace f2's old version by its new one
- the archive made at t4, which will remove f1, add f4 and replace f3's old version by its latest version.

The latest version of files is scattered over the two last archives here, but in common systems, much of the data does not change at all and can only be found in the first backup (the full backup).

FULL/DECREMENTAL BACKUP

Here are represented the contents of the backups using the decremental approach. The most recent backup (t4) is always a full backup. Older backups are decremental backups based on the immediately more recent one (t3 is a difference based on t4, t1 is a difference based on t2). Unlike incremental backups, the archive of reference is in the future, not in the past.

   ^
   |
t4 +         #    #    *           
   |
t3 +    *    0    *    x
   |
t2 +    0    *    0
   |
t1 +    0    0    0
   |
   +----+----+----+----+---
        f1   f2   f3   f4  

Thus obtaining the latest version of the system is as easy as when using only full backups. And you also see that the space required to store these decremental backups is equivalent to what is needed to store the incremental backups. However, there is still the problem of locating the archive in which to find a given file's data at a given date. But you may also see that the backup made at time t1 can safely be removed as it has become useless (it does not store any data), and losing the archives made at t1 and t2 is not a big problem: you just lose old state data.

Now, if we want to restore the filesystem in the state it had at time t3, we have to restore the archive made at t4, then restore the archive made at t3. This last step will create f1, replace f3 by its older version and delete f4, which did not exist at time t3 (the file is marked 'x', meaning that it has to be removed). If we want to go further in the past, we will restore the decremental backup t2, which will only replace f2's new version by the older version 1. Last, restoring t1 will have no effect, as no changes were made between t1 and t2.

That was the theory. Now let's see in practice how to build these decremental backups.

Assuming you have a full backup describing your system at date t1, can we, in one shot, both build the new full backup for time t2 and transform the full backup of time t1 into a decremental backup relative to time t2? In theory, yes. But there is a risk in case of failure (filesystem full, power outage, bug, ...): you may lose both backups, the one which was under construction as well as the one taken as reference, which was in the process of being transformed into a decremental backup.

Another point is that you cannot shrink a given file in place: many (all?) operating systems provide hooks to create/append/overwrite data in an existing file, but not to remove data from it and get a smaller file as a result. When needed, this operation is usually emulated by applications: a temporary file is created, into which the part to retain from the original file is copied; once the copy is finished, the original file is deleted and the temporary file is renamed in place of the original one. Thus here, the process of transforming a full backup into a decremental backup will not simply remove data from the filesystem, but will copy (thus add) a portion of the data to a new file and then remove the old data. So whatever the method used to build a decremental backup, you will end up at some point with two full archives, and will require disk space to store both of them.

Given this, the dar implementation lets the user do a normal full backup at each step [doing just a differential backup sounds better at first, but this would end up in more archive manipulation, as we would have to generate both the decremental backup and the new full backup, and we would manipulate at least the same amount of data]. Then, with the two full backups, the user uses archive merging to create the decremental backup (using the -ad option). Last, once the resulting (decremental) archive has been tested and the user is sure this decremental backup is viable, he can remove the older full backup and store the new decremental backup beside the older ones and the new full backup. Only this last step saves disk space, while letting you easily recover your system using the latest (full) backup.

Can one use an extracted catalogue instead of the old full backup to perform a decremental backup? No. The full backup to transform must contain the whole data in order to create a decremental backup with data in it. Only the new full backup can be replaced by its extracted catalogue.

Now, let's look at the implementation used in dar to build a decremental backup. The operations the merging must follow to transform a full backup into a decremental backup are the following:
We assume the archive of reference (-A option) is the old full backup (the one to be transformed into a decremental backup) and the auxiliary archive of reference (-@ option) is the new full backup.
- if a file is found in both archives and has the same modification date, we store it as "unchanged since the archive of reference was done"; if the dates differ, we keep the file from the old archive. The same goes for EA: if both have the same date, we mark the EA as "unchanged", else we keep the EA from the old archive.
- if a file is found only in the old archive, we keep its data/EA from the old archive
- if a file is found only in the new archive, we store an entry in the resulting archive to record that this file did not exist at the time of the old backup and that it must be removed from the filesystem when this decremental backup is restored.

The only thing the pure merging operation cannot do is the last point. This point is out of the scope of the overwriting policy, as there is no conflict between files found in both archives. However, as this is very close to a merging operation, and to avoid code duplication, a special command-line switch -ad (or --alter=decremental) has been designed that modifies the merging operation to address this need. This switch also ignores any overwriting policy provided and uses its own, which corresponds to what is needed for building a decremental backup.

In brief, the operations to follow to build a set of decremental backups are the following:

dar -c <new full backup t3> -R /filesystem [...options]
dar -+ <decremental backup t2> -A <old full backup t2> -@ <new full backup t3> -ad [...options]
dar -t <decremental backup t2>    (this is optional but strongly recommended).
rm <old full backup t2>
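
As noted above, the new full backup (the -@ argument) may be replaced by its isolated catalogue, while the old full backup may not. A sketch using the same placeholder notation, assuming you created an isolated catalogue of the new full backup (for example with dar -C or on-fly isolation):

dar -+ <decremental backup t2> -A <old full backup t2> -@ <CAT of new full backup t3> -ad [...options]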



What about dar_manager? Well, by nature, there is no difference between a decremental backup and a differential/incremental backup. The only difference resides in the way (the order) they have to be used.

So, even though you can add decremental backups to a dar_manager database, it is not designed to handle them correctly. It is thus better to use dar_manager only with incremental/differential/full backups.


Door inodes (Solaris)

A door inode is a dynamic object created on top of an empty file; it exists only while a process has a reference to it, so it is not possible to restore it. But the empty file it is mounted on can be restored instead. As such, dar restores a door inode as an empty file having the same parameters as the door inode.

If a door inode is hard-linked several times in the filesystem, dar will restore a plain file with as many hard links at the corresponding locations.

Dar is also able to handle Extended Attributes associated with a door file, if any. Last, if you list an archive containing door inodes, you will see the letter 'D' as their type (as opposed to 'd' for directories); this conforms to what the 'ls' command displays for such entries.



dar-2.4.8/doc/dar_key.txt0000644000175000017430000000270112003275573012146 00000000000000-----BEGIN PGP PUBLIC KEY BLOCK----- Version: GnuPG v1.0.6 (GNU/Linux) Comment: For info see http://www.gnupg.org mQGiBDyl5ZwRBACkz8Pj10JsE5sLlyB0wlhsWa1TKVtpcj1d7R0S+OifT4jw4E9c 36c+zGIH4bIXhk/kwADhWH6caFOcAN5YWcauUCUMcdKHf1s3Pr0V4XNwuWa3jaIJ IAvTNxPYgMvTYvK2MrE1DnYuIZkjrpR/XOgCSSQpIM8qENUhhHIgcTkrswCgzAhM oq+QQk29gQ6K+YuZp7Qz8bMEAJJHkCt3nGHfU+vtFiNQstJ6usu62qv2AJD5Cmkk lYW5HsiE26w7w2JDrCAxs24YYIGAZKkV/05dJxWsiF/c87/XtZNsdQASSFAsKvr+ yNSxRPYvpfbLP5IX3KmKGdgX/3H1e/tZb6iLwpeKGnuMo3xOI900Vzloi8cXMz0x iy6eBACP6bjjgYjQ1E5olJKl4C4dkDVVArH7Gpa0N+bH1idESA+VkqNVz2Ydfarp k35SQ0UZJ9j432QeUrPsU/2JHqII/WyLTANFqmiUWkYFzmjS5bI/AlVyUo07mRCG qAkjBHNpd1clwifrrr15UykN3v9zgJHSEkEBD97jPmDmEGhnE7Q8RGVuaXMgQ29y YmluIChodHRwOi8vZGFyLmxpbnV4LmZyZWUuZnIvKSA8ZGFyLmxpbnV4QGZyZWUu ZnI+iFcEExECABcFAjyl5ZwFCwcKAwQDFQMCAxYCAQIXgAAKCRCkLkIjyBgaUsFT AJ4w/Lz7BSsiO5Bm+YpTnQhbTV8avwCeL4xD+0u0xhIgjNqW4FQNj9D2g8y5AUUE PKXlnhAE4Kgg3USjhKvjPL7BUbpw5/hI45vC8SWQKAfYCSjQ30i8DWa+GabVMiCy 8mwgpwTJm/9FPx0JRlS5h6fCMfhth6zSRdK0AFToThTFPXtAp+DliAy1k6e2LDwk bh6AH4rbaix+NoOxhvwVeJ0ivL4Evua9ZCJlqcb4kWTO+jwAI3nHMC5AVmB3AcpB nHT+2wcDmJpKzuOLnd2w8z2EJwADBQTfdNskxTuLvBXBqDgtHWTRnZZIRAfoLHwU zgbwVvbZpYpVXjMd+oyGKA1mz21bhZLZUDusN7TRMQFDE3KdG3hvsaVqOvuMlCEY EyHqF11DcYl4beZ1nj2tBlzT0Gc5GUx2d2lCa3xltDTULbnoxqxzxM9rLII3CBvz VRpnprCZ5x8lI9FsVXgUVQSNTJ3KFDzLXPOn7rHAo121tii/iEYEGBECAAYFAjyl 5Z4ACgkQpC5CI8gYGlIwWwCeLkW80F06uS4PljUD1ctugOHteygAn0UwSHxTeSFd Bo0ZNpPxfhXbIQKq =k+H0 -----END PGP PUBLIC KEY BLOCK----- dar-2.4.8/doc/Tutorial.html0000644000175000017430000012567712010477201012470 00000000000000 DAR - Tutorial
Dar Documentation


TUTORIAL




Introduction

This tutorial shows you how to back up your file system (partially or totally) onto ZIP drives (you can replace ZIP disks with floppies or a USB key if you prefer) and, most importantly, how to restore your system from scratch in case of hard disk failure (or other cataclysm).

GUIs over libdar are nice for making a one-off backup. Scripting with dar is better as it can run in the background and be scheduled every day; this is a matter of taste. But you need a robust solution that also works within a minimal environment to be able to restore your system. Dar provides a lot of features for backup (compression settings, etc.), so a GUI or a script is welcome in this process; however, only very few features are necessary to restore, and in that situation dar_static (the statically linked version of dar) is, from my point of view, the best solution to use.

STEP 1: The FULL backup

We need first to make a full backup, let's go:

The size of a zip drive is 100 MB,
*** here comes the option: -s 100M
This tells dar not to create a single backup file but to split it into several files of at most 100 megabytes each.

On the first ZIP disk we want to copy the dar binary outside the backup, to be able to restore in case of hard disk failure, for example. IMPORTANT: the dar binary relies on several libraries which must also be available on the rescue system or copied alongside the dar binary. If you don't want to worry about needed libraries, there is a static version of dar whose only difference is that it has all required libraries included in it (thus it is a slightly larger binary). Its name is "dar_static", and its main reason for existence is to be placed beside backups in case something goes wrong on your system. Note that dar_static is useless on Windows; you will always need the Cygwin DLL.

(You could also add the man pages or a copy of this tutorial, if you are afraid you won't remember all the many features of dar ;-) and find the -h option output too terse). Note that all the dar documentation is available on the web; OK, you need Internet access to read it.

This makes the free space on the first ZIP disk a bit smaller: 95 MB.
*** Here comes the option: -S 95M
(Note that lowercase '-s' applies to all the slices, while UPPERCASE '-S' applies to the initial slice only).

We need to pause between slices to change the ZIP floppy when it is full
*** here comes the option: -p -b
telling dar to pause before writing a new slice (-p) and to ring the terminal bell (-b) when user action is needed.

We will compress data inside the backup
*** here comes the option: -z
by default the -z option uses the gzip compression algorithm (bzip2 and lzo are also available). Optionally, if speed is more important than archive size, you can lower the compression level: -z1 for example.
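
For instance (a sketch; the exact spelling of the algorithm argument may vary between dar releases, so check the man page of your version), selecting an algorithm and level could look like:
-zbzip2:9    for bzip2 at maximum compression
-zlzo:5      for lzo at level 5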

Now, we want to backup the whole file system.
*** here comes the option: -R /
This tells dar that no files outside of this directory tree will be saved (here it means that no files will be excluded from the backup, unless a filter is specified, see below); "R" stands for "Root".

There are some files you don't want to back up, like the backup files generated by emacs: "*~" and ".*~".
*** here comes the options: -X "*~" -X ".*~"
Note that you have to quote the masks so they are not interpreted by the shell. The -X options (X for eXclude) do not apply to directories nor to paths, they just apply to filenames (see also the -I option (I for Include) in the man page for more information).

Besides individual files, there are several sub-trees you must not save: the /proc filesystem for example, as well as /dev/pts and /sys. These are virtual filesystems; saving them would only make your backup bigger, filled with useless data.
*** here come the options: -P dev/pts -P proc -P sys
Note that paths must be relative to the -R option (thus no leading '/' must be used). Unlike the -X/-I options, the -P option (P for "prune") can apply to a directory. If a directory matches a -P option, all its subdirectories will also be excluded. Note also that -P accepts wildcards, which must be quoted so they are not interpreted by the shell: -P "home/*/.mozilla/cache" for example. Lastly, -P can also be used to exclude a plain file (if you don't want to exclude all files of a given name using the -X option): -P home/joe/.bashrc for example would only exclude joe's .bashrc file, not any other file, while -X .bashrc would exclude any file of that name, including joe's (see also the -g, -[ and -] options in the man page, as well as the "file selection in brief" paragraph).

More importantly we must not save the backup itself:
*** here comes the option: -P mnt/zip
assuming that your ZIP drive is mounted under /mnt/zip. We could also have excluded all files with the "dar" extension (backups generated by dar) using -X "*.*.dar", but this would also have excluded other dar archives from the backup, which may not always fit your needs.

Now, as we don't save the /dev/pts, /proc and /mnt/zip directories, we would have to recreate these mount points by hand at recovery time to be able to mount the corresponding filesystems. Better, we can use the -D option, which does not totally ignore excluded directories but rather stores them as empty.
*** here comes the option -D
thus at recovery time they will be generated automatically

Lastly, we have to give a name to this full backup. Let's call it "linux_full"; as it is supposed to be stored on the ZIP drive, its path is /mnt/zip/linux_full
*** here comes the option: -c /mnt/zip/linux_full
Note that linux_full is not a complete filename; it is a "basename", to which dar will add a number and the ".dar" extension. This way, the first slice will be a file named linux_full.1.dar located in /mnt/zip

Now, as we will have to mount and unmount the /mnt/zip filesystem, we must not have any process using it; in particular, dar's current directory must not be /mnt/zip, so we change to / for example.

All together we follow this procedure for our example:

Put an empty ZIP floppy in the device, and mount it according to your /etc/fstab file.

mount /mnt/zip

Copy the dar binary to the first ZIP disk (to be able to restore in case of a big problem, like a hard disk failure) and optionally the man pages and/or this tutorial.

cp `which dar_static` /mnt/zip

then, type the following:

cd /
dar -c /mnt/zip/linux_full -s 100M -S 95M -p -b -z -R / -X "*~" -X ".*~" -P dev/pts -P sys -P proc -P mnt/zip -D

Note that option order does not matter. Some options may be used several times (-X, -I, -P), some others cannot (see the man page for more). When the first slice is done, dar will pause, ring the terminal bell and display a message. You will then have to unmount the floppy

umount /mnt/zip

eject and replace the floppy by an empty new one and mount it

mount /mnt/zip

To be able to do that, you can switch to another virtual console by pressing ALT+F? (under Linux), open another xterm if under X Window, or suspend dar by typing CTRL-Z and reactivate it after mounting/unmounting by typing `fg' (without the quotes).

Then proceed with dar for the next slice, pressing the <enter> key.

Dar will label slices this way:
slice 1: linux_full.1.dar
slice 2: linux_full.2.dar
and so on.

That's it! We have finished the first step; it may take a long time depending on the amount of data to back up. The following step (differential backup), however, can be done often, and it will stay fast every time (OK, except if a big part of your system has changed, in which case you may consider making another full backup).

Just a little check on the archive you've just made: to read the contents of the backup, you would have to run:

dar -l /mnt/zip/linux_full

It is recommended to either check the archive contents, or compare what's stored in it with the current file system before relying on it:

dar -t /mnt/zip/linux_full

will check the whole archive.

It is recommended to first unmount and remount the removable disk to flush the cache; otherwise you may read data from the cache (in memory) and not detect an error on your disk. dar -t cannot check a single slice; it checks the whole archive. If you need to check a single slice (for example after burning it to CD), you can use the diff command. For example, you burn the last completed slice to CD-R, but have just enough free space to store one slice on the hard disk; you can then check the slice by typing something like:

diff /mnt/cdrom/linux_full.132.dar /tmp/linux_full.132.dar

where 132 has to be replaced by the real slice number.

You can also add the --hash option when you create the archive (for example --hash md5); it will produce for each slice a small hash file named after the slice: "linux_full.1.dar.md5", "linux_full.2.dar.md5", etc. Then, using the standard Unix command "md5sum", you can check the integrity of the slice:

md5sum -c linux_full.1.dar.md5

If all is OK for the slice on the ZIP disk (that is, when diff does not complain or md5sum returns "OK"), you can delete the slice from the hard disk (/tmp/slice.x.dar) and continue with dar. Otherwise, you will have to burn/write the slice to a new disk or retry on the same one.

Instead of testing the whole archive, you could also compare it with the freshly saved system:

dar -d /mnt/zip/linux_full -R /

will compare the archive with the filesystem tree located at /. As noted previously, it is recommended to first unmount and remount the disk to flush the system cache.

STEP 2: DIFFERENTIAL BACKUP

The only thing to add is the base name of the backup we take as reference
*** here comes the option: -A /mnt/zip/linux_full

Of course, we have to choose another name for that new backup, let's call it linux_diff1
*** here comes the option: -c /mnt/zip/linux_diff1

Last point: if you want to put the new backup right after the full backup, you will have to adjust the -S option according to the remaining space on the last disk. Suppose the last slice of linux_full takes 24 MB: you then have 76 MB available for the first slice of the differential backup (and the full 100 MB for the following ones),
*** here comes the option: -S 76M
but if you want to put the backup on a new floppy, just forget the -S option.

here we also want to produce a hash file to check each slice's integrity before removing it from the hard disk:
*** here comes the option: --hash md5

All together we get:

dar -c /mnt/zip/linux_diff1 -A /mnt/zip/linux_full -s 100M -S 76M -p -b -z -R / -X "*~" -X ".*~" -P dev/pts -P proc -P mnt/zip -P sys -D --hash md5

The only new point is that, just before actually starting the backup, dar will ask for the last slice of the archive of reference (linux_full); then dar will pause (thanks to the -p option) for you to change the disk and insert the one where you want to write the new backup's first slice, then pause again for you to change the disk for the second slice, and so on.

STEP 3: ENDLESS DIFFERENTIAL BACKUP

You can make another differential backup, taking linux_diff1 as reference; in this case you would only change the following:

-c /mnt/zip/linux_diff2 -A /mnt/zip/linux_diff1

You could also decide to change device, using a 1.44 MB floppy or a CD-R; this would not cause any problem at all. After some time, when you have accumulated many differential backups for a single full backup, you will have to make a new full backup, depending on the time available for doing it, or on your patience if one day you have to recover all the data after a disk crash: you would then have to restore the full backup, then all the following differential backups up to the most recent one. This requires more user intervention than restoring a single full backup; it is all a matter of balance between the time it takes to back up and the time it takes to restore.

Note that starting with release 1.2.0, a new command appeared that helps restore a small set of files from many differential backups. Its name is dar_manager. See the end of this tutorial and the man page for more.

Another solution, when you have too many differential backups, is to make the next differential backup taking the last full backup as reference instead of the last differential backup made. This will still take less time than doing a full backup, and you will not have to restore all the intermediate differential backups. Some people make a distinction between "incremental" backups and "differential" backups; for dar they look the same, it just depends on the nature of the reference backup you take.
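
For example (a sketch reusing the options seen earlier in this tutorial; adapt slice sizes and filters to your own setup), a third backup taking the full backup as reference instead of linux_diff2 would look like:

dar -c /mnt/zip/linux_diff3 -A /mnt/zip/linux_full -s 100M -p -b -z -R / -X "*~" -X ".*~" -P dev/pts -P proc -P sys -P mnt/zip -D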

Of course, a given backup can be used as reference for several differential backups; there is no limitation in number nor in nature (the reference can be a full or a differential backup).


STEP 4: RECOVER AFTER A DISK CRASH

Sorry, it happened: your old disk has crashed. OK, you are happy because you now have a good argument to buy the very fast, very enormous, very latest hard disk available. Usually, you also cry because you have lost data and will have to reinstall your whole system, which had been working for so long!

If, however, the last backup you made is recent, then keep smiling! OK, you have installed your new hard disk and configured your BIOS for it (if necessary). You will need a bootable floppy with a minimal Linux system on it that allows you to access your ZIP drive and your new empty hard disk (in the case your backup resides on ZIP disks). For example, use the Slackware floppy disks, they are nicely done. You don't need to install anything on your brand-new disk, just create partitions and format them as you want. We suppose your new disk is /dev/hda and /dev/sga is your ZIP drive.

1. Create the partition table as you wish, using
fdisk /dev/hda

2. Format the partition which will receive your data. dar is filesystem independent: you can use ext2 (as in this example), ext3, ext4, ReiserFS, Minix, UFS, HFS Plus, XFS, whatever Unix-like filesystem you need, even if the backed-up data did not reside on such a filesystem at backup time!
mke2fs /dev/hda1

3. Additionally format the swap partition (if needed)
mkswap -c /dev/hda2

3bis. If you have a lot of files to restore, you can activate swap on the corresponding partition of your new hard drive:
swapon /dev/hda2

4. Now we must mount the hard disk, somewhere.

cd /
mkdir disk
mount -t ext2 /dev/hda1 /disk

would do the trick

4bis. If you want to restore your system over several partitions, like /usr, /var, /home and /, you must create the partitions and format them. Then create the directories that will be used as mount points and mount the partitions on them:

mkdir /disk/usr /disk/var /disk/home
mount -t ext2 /dev/hda2 /disk/usr
mount -t ext2 /dev/hda3 /disk/var
mount -t ext2 /dev/hda4 /disk/home

for example if you have / , /usr , /var and /home partitions.

5. We need to copy the dar binary from the ZIP disk to your hard disk: insert the ZIP disk containing the dar_static binary, so that we can freely change ZIP disks later on:

cd /

mkdir /zip
mount -t ext2 /dev/sga /zip
cp /zip/dar_static /disk

where /dev/sga points to your ZIP drive. We will remove dar_static from your new hard drive at the end of the restoration.

6. Now we can restore the archive. The data has to go into the /disk directory.
*** here comes the option: -R /disk

7. The process may be long, thus it might be useful to be notified when user action is required by dar.
*** here comes the option: -b
Note that the -p option is not required here, because if a slice is missing dar will pause and ask for it. If slice "0" is requested, it means the "last" slice of the backup.

let's go restoring!
/disk/dar_static -x /zip/linux_full -R /disk -b

... and when the next zip floppy is needed,
umount /zip

change the floppy and mount it:
mount -t ext2 /dev/sga /zip

As previously, use another xterm / virtual console, or suspend dar with CTRL-Z and wake it up with the 'fg' command. Then press <enter> to proceed with dar.


8. Once the restoration of linux_full is finished, we have to do the same with any following differential/incremental backup. However, doing so will warn you any time dar restores a more recent file (file overwriting) or any time a file that has been removed since the backup of reference has to be removed from the filesystem (suppression). If you don't want to press the <enter> key several thousand times:
*** here comes the option: -w
(don't warn). All files will be overwritten without warning. You may also use the -r option, which avoids overwriting files that are more recent on the filesystem than in the archive. It is not of great use here, as you restore a differential backup after its reference backup on an initially empty disk (files stored in the differential archive are more recent than those in the reference), but it might be useful in some other situations.

All together it makes:
/disk/dar_static -x /zip/linux_diff1 -R /disk -b -w

Then any additional archive:
/disk/dar_static -x /zip/linux_diff2 -R /disk -b -w
...
/disk/dar_static -x /zip/linux...    -R /disk -b -w


9. Finally, remove the dar binary from the disk:
rm /disk/dar_static

10. And launch lilo for your Linux box to boot properly (if needed):
lilo -r /disk

If your boot loader is grub, simply launch grub and type at the prompt something like this:

grub> root (hd0,0)

11. You can reboot your machine and be happy with your brand-new hard disk with your old precious data on it:
shutdown -r now

OK, one day I will make something like a bootable floppy image with dar inside, maybe with a simple script for user interaction... If you have already done it, you can tell me or send it to me (or give me the URL where to get it, so I can add a link to it from DAR's homepage). [Note: Knoppix seems to include dar]

STEP 4(bis): recover only some files

Gosh, you have removed an important file by mistake. Thus, you just need to restore that file, not the whole content of the full and differential backups.

a) First method:

We could, as previously, try all the archives starting from the full backup up to the most recent differential backup, and restore just the file if it is present in the archive:

dar -R / -x /zip/linux_full -g home/denis/my_precious_file

This would restore only the file /home/denis/my_precious_file from the full backup.

OK, now we would also have to restore from all the differential backups in the same way, as the file may have changed since the full backup.

dar -R / -x /zip/linux_diff1 -g home/denis/my_precious_file

and so on, up to the last differential archive.

dar -R / -x /zip/linux_diff29 -g home/denis/my_precious_file


b) Second method (more efficient):

We will restore our lost file starting from the most recent differential backup and going back, *maybe* up to the full backup. Our file may or may not be present in a given differential archive, as it may or may not have changed since the previous version, thus we have to check whether our file got restored, using the -v option (verbose):

dar -R / -x /zip/linux_diff29 -v -g home/denis/my_precious_file

If we can see a line like

restoring file: /home/denis/my_precious_file

then we stop here, because we got the most recent backed-up version of our lost file. Otherwise we have to continue with the previous differential backup, up to the full backup if necessary. This method has an advantage over the first one: it does not *always* require using all the backups done since the full backup.

If, on the other hand, you are lazy (as I am), have a look at dar_manager (at the end of this tutorial).

OK, now suppose you have two files to restore. No problem, just use the second method but add the -r option so as not to overwrite any more recent file already restored in a previous step:

dar -x /zip/linux_diff29 -R / -r -v -g home/denis/my_precious_file -g etc/fstab

Check the output to see whether one or both of your files got restored. If not, continue with the previous backup, until you have seen for each file a line indicating it has been restored. Note that the most recent version of each file may not be located in the same archive: you might get /etc/fstab restored from linux_diff28 and /home/denis/my_precious_file restored from linux_diff27. If /etc/fstab is also present in linux_diff27, it would not have been overwritten by an older version, thanks to the -r option.

This option is very important when restoring more than one file using the second method. When the first method is used instead (restoring first from the full backup, then from all the following differential backups), the -r option is not so important, because if overwriting occurs when restoring lost files, you would only overwrite an older version with a newer one.

Same thing here: even if you are not lazy, dar_manager can help you a lot to automate the restoration of a set of files.

ISOLATING A CATALOGUE

We have seen previously how to do differential backups. Doing so, dar asks for the last slice of the archive of reference. This is required to read the table of contents (also known as the "catalogue" [this is a French word meaning "catalog" in English; I will keep the French spelling in the following because it is also the name of the C++ class used in libdar]), which is located at the end of the archive (thus on the last slice(s)). You have the possibility to isolate (that is, to extract) a copy of this table of contents into a small file. This small file is almost exactly the same as a differential archive that holds no data. Let's take the full backup we did previously as an example and see how to extract its catalogue:

    dar -C /root/CAT_linux_full -A /mnt/zip/linux_full


Note that here we used the UPPERCASE 'C', as opposed to the lowercase 'c' which is used for archive creation: we just created an isolated catalogue, which is usually a small archive. In addition, you can use the -z option to have it compressed, the -s and -S options to have it split into slices, the -p option, the -b option, but for an isolated catalogue this is not often necessary as it is usually rather small. The only thing we have seen for backup that you will not be able to do for isolation is filtering files (the -X, -I, -g, -P, -[ and -] options are not available for that operation).
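
For instance, to get the isolated catalogue compressed and split into slices, a sketch combining options already presented above would be:

    dar -C /root/CAT_linux_full -A /mnt/zip/linux_full -z -s 100M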

So what, now we have our extracted catalogue, what can we do with it? Two things:

First, we can use the extracted catalogue in place of the archive, as reference for a differential backup. No need to manipulate the old ZIP disks: you can store the last backup's isolated catalogue on your hard disk instead. If we had used an isolated catalogue in the previous examples, we would have built our first differential backup this way (note that here we chose to use the CAT_ prefix to indicate that the archive is an isolated catalogue, but the choice is yours to label isolated catalogues the way you want):

    dar -c linux_diff1 -A /root/CAT_linux_full ... (other options seen above stay the same)

Second, we can use the isolated catalogue as a backup of the internal catalogue if it gets corrupted. Well, to face data corruption the best solution ever invented is Parchive, a standalone program that builds a parity file (the same mechanism as used for RAID disks) for a given file; here we can use Parchive to create a parity file for each slice. So, assuming you lack Parchive and you fail reading the full backup because the ZIP disk is corrupted in the part used by the internal catalogue, you can use an isolated catalogue as rescue:

    dar -x linux_full -A /root/CAT_linux_full ...
    dar -d linux_full -A /root/CAT_linux_full ...
    dar -t linux_full -A /root/CAT_linux_full ...
    dar -l /root/CAT_linux_full

An isolated catalogue can be built for any type of archive (full, differential or incremental archive, even for an already isolated catalogue, which I admit is rather useless). You can also create an isolated catalogue at the same time you do a backup, thanks to the -@ option:

    dar -c linux_diff1 -A /mnt/zip/linux_full -@ CAT_linux_diff1 ... (other options...)
    dar -c linux_full -@ CAT_linux_full ... (other options seen above stay the same as for backup)

This is known as "on-fly" isolation.

DAR_MANAGER TUTORIAL

dar_manager builds a database of all your archives' contents, to automatically restore the latest version of a given set of files. dar_manager is not targeted at restoring a whole filesystem; the best way to restore a whole filesystem has been described above and does not use dar_manager. So let's use dar_manager to restore a set of files. First, we have to create a "database" file:

dar_manager -C my_base.dmd

This creates a file "my_base.dmd", where dmd stands for Dar Manager Database, but you are free to use any other extension.

The database is created empty. Each time you make a backup, be it full or differential, you will have to add its table of contents (aka "catalogue") to this database using the following command:

dar_manager -B my_base.dmd -A /mnt/zip/linux_full

This will add ("A" stands for "add") the archive's contents to the database. In some cases you may not have the archive available, but its extracted catalogue instead. Of course, you can use the extracted catalogue in place of the archive!

dar_manager -B my_base.dmd -A ~/Catalogues/CAT_linux_full

The problem, however, is that when dar_manager needs to recover a file located in this archive, it will try to open the archive ~/Catalogues/CAT_linux_full for restoration, which does not contain any data because it is just the catalogue of the archive.

No problem if you made this mistake: thanks to the -b option we can change the basename of the archive, and thanks to the -p option we can change the path at any time. But first let's list the database contents:

dar_manager -B my_base.dmd -l

It shows the following:


dar path    :
dar options :

archive #   |    path      |    basename
------------+--------------+---------------
        1       /home/denis/Catalogues      CAT_linux_full

We should change the path of archive number 1 so that dar_manager looks on the ZIP drive:

dar_manager -B my_base.dmd -p 1 /mnt/zip

and also replace the name of the extracted catalogue by the real archive name

dar_manager -B my_base.dmd -b 1 linux_full

Now we have exactly the same database as if we had used the real archive instead of its catalogue:

dar_manager -B my_base.dmd -l


dar path    :
dar options :

archive #   |    path      |    basename
------------+--------------+---------------
        1       /mnt/zip     linux_full


Instead of using the -b and -p options, you can also give the path and name of the real archive to use at restoration time when you add the catalogue to the database:

dar_manager -B my_base.dmd -A ~/Catalogues/CAT_linux_full /mnt/zip/linux_full

This is done by adding an optional argument. The first (~/Catalogues/...) is the archive from which to read the catalogue, and the second (/mnt/zip/...) is the name to record for it. The second archive is not accessed at the time of the addition, so it may be unavailable when the command is typed.

You can add up to 65534 archives to a given database, and have as many databases as you want.

Note that we have not yet recorded in the database the important options to be passed to dar. For example, you will likely restore from the root of your filesystem, therefore dar, when called from dar_manager, must receive the "-R /" option. This is done with:

dar_manager -B my_base.dmd -o -R /

Everything that follows -o is passed to dar as-is. You can see the options passed to dar when listing the database contents (-l option).
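
Several options can be recorded at once; for example (a sketch using dar options already seen in this tutorial, here also disabling overwriting warnings and ringing the terminal bell):

dar_manager -B my_base.dmd -o -R / -w -b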

Let's suppose that after each backup you took the time to update your database, and that you have just removed an important file by mistake.


Now, we can restore our /home/denis/my/precious/file:

dar_manager -B my_base.dmd -r home/denis/my/precious/file

dar_manager will find the proper archive to use, and call dar with the following options:

dar -x <archive> -R / home/denis/my/precious/file

This, in turn, will ask you for the corresponding slices. If you want to restore several files at once, or even a directory tree, you can add several arguments after the -r option of dar_manager:

dar_manager -B my_base.dmd -r home/denis/my/precious/file etc/fstab home/joe

Once an archive becomes obsolete, you can delete it from the database with the -D option. You can also change the archive order (-m option), get the list of archives in which a given file is located (-f option), get the list of most recent files in a given archive (-u option), and get overall statistics per archive (-s option). Lastly, you can specify which dar command to use by giving its path (-d option); by default, dar_manager uses the PATH shell variable to locate the dar command.
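
For example (a sketch with hypothetical archive numbers and paths; the option letters are the ones listed above):

dar_manager -B my_base.dmd -D 1                  (delete archive number 1 from the database)
dar_manager -B my_base.dmd -f etc/fstab          (list the archives holding a version of etc/fstab)
dar_manager -B my_base.dmd -u 2                  (list the most recent files held in archive number 2)
dar_manager -B my_base.dmd -s                    (show overall statistics per archive)
dar_manager -B my_base.dmd -d /usr/local/bin/dar (use this dar binary for restoration)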

A new feature for those who are really very lazy: dar_manager has an interactive mode, so you don't have to remember any of these command-line switches except one:

dar_manager -B my_base.dmd -i

Interactive mode lets you do every operation except restoration, which is done as previously explained.

TO GO FURTHER WITH DAR/LIBDAR

Well, we have reached the end of this tutorial, but dar/libdar still has a lot of features to discover:
- strong encryption
- archive merging
- decremental backup
- dar command-line files (DCF)
- user commands between slices (and DUC files)
- Extended Attribute manipulations
- hard links
- Sparse files
- remote backup over ssh
- suspending/resuming a database from dar before/after backing it up
- using regex in place of glob expressions in masks
- using dar with tape thanks to the sequential reading mode
- having dar add padded zeros to slice numbers
- excluding some files from compression
- asking dar to retry saving a file if it changes at the time of the backup
- what a "dirty" file is in a dar archive
- listing an archive's contents in XML format
- using conditional syntax in DCF files
- using user targets
- adding user comments in dar archive
- using DAR_DCF_PATH and DAR_DUC_PATH environment variables

All this is described in much more detail in the following documents:
FAQ, mini-howto, command-line usage notes, man pages. You can find out more precisely where by using the feature description page. However, if you find something unclear, feel free to report it or ask for help on the dar-support mailing-list.

Well, English is not my mother tongue and I do not pretend to speak or write it perfectly, though I do my best to produce something correctly written. Thus, if you find some weird English sentences, spelling mistakes or typos, feel free to send me your feedback. You can use the dar-support mailing-list or contact me directly (read the AUTHOR file from the dar source package to find out how).

Denis Corbin



dar-2.4.8/doc/api_4_4.html0000644000175000017430000001717312010477201012073 00000000000000 DAR's FEATURES
Dar Documentation


Compatible API 4.4.x within libdar 5.0.x






The reasons for a new API


The libdar API (version 5.0.0) released with dar 2.4.0 is very different from the API of older releases. The main reason for this major redesign is that in the past, each new feature added to libdar broke the backward compatibility of the API, mainly because a new argument had to be added to a particular method of class archive.

The new API makes use of new "option" classes that all have a constructor without arguments. Objects of these classes carry all the current and future arguments used to manage dar archives through the libdar API. This way, if a new option has to be added in the future, no change will be necessary to programs that use the libdar API through an API version 5.x.x. The option class's constructor will set the new parameter to its default value, and a new method of that class will be added to manage the new parameter's value, available to programs that are aware of it.

Any program can then, at will, take into consideration the new options or simply ignore them (and use default values for them), while new features keep getting added to libdar release after release.

The old API is kept available beside the new one

To ease the transition to this new API, beside the libdar namespace (which in 2.3.x and older releases contained all the symbols of the API, and which now contains the symbols of API 5.0.x) you can find the libdar_4_4 namespace. As you may guess, it provides the 4.4.x API of libdar, which is the one used since release 2.3.5. However, some small changes still have to be made for your program to compile and work with the new libdar using the old API. Describing them is the purpose of this document. This backward compatible API will probably disappear at the next major release (which will probably be version 2.5.0; in the meantime, it should stay available under versions 5.x.x and thus remain usable beside the current new API).

Changes to make to use the old API

  1. The first point is to no longer include "libdar.hpp" but "libdar_4_4.hpp" instead. This file contains the libdar_4_4 namespace symbols.
  2. The second point, of course, is to no longer use the libdar namespace but the libdar_4_4 namespace instead.
The following code examples show an old program before and after the changes needed to use libdar 5.0.x through the backward compatible API; in each case, the changes are limited to the included header and the namespace.

First example, original code (old program using the old libdar):

#include "libdar.hpp"

using namespace libdar;

int example()
{
   archive *arch = new archive(....);
   [...]
}

First example, modified code (old program using libdar 5.0.x):

#include "libdar_4_4.hpp"

using namespace libdar_4_4;

int example()
{
   archive *arch = new archive(....);
   [...]
}

Second example, original code (old program using the old libdar):

#include "libdar.hpp"

int example()
{
   libdar::archive *arch = new libdar::archive(....);
   [...]
}

Second example, modified code (old program using libdar 5.0.x):

#include "libdar_4_4.hpp"

int example()
{
   libdar_4_4::archive *arch = new libdar_4_4::archive(....);
   [...]
}

Compilation and linking stay unchanged, thus running sed on your code using the following script-like code should do the trick:


for file in *.c *.h *.cpp *.hpp ; do
    mv "$file" "$file.bak"
    sed -r -e 's/libdar::/libdar_4_4::/g' -e 's/using namespace libdar/using namespace libdar_4_4/' -e 's|#include <dar/libdar.hpp>|#include <dar/libdar_4_4.hpp>|' "$file.bak" > "$file"
done

Want to try the new API instead?

If you want to go one step further and, instead of using the backward compatible API, directly use the new libdar API, the first step is to read the API tutorial. Then, if more detailed information is required, check the API documentation. Finally, you can subscribe to the libdar-api mailing-list for any problem, question or suggestion about the API.




dar-2.4.8/doc/samples/0000755000175000017520000000000012023053205011477 500000000000000dar-2.4.8/doc/samples/PN_backup-storage.options0000644000175000017430000000103412003275573016352 00000000000000### Options that are appended to the dar command: # No warning when not run from a terminal -Q # Don't try to read darrc files -N # Be verbose (so everything can be logged) -v # No warn on overwrite (should not happen anyway) -w # Compression level -z1 # Keep empty directories as such, so they can be restored -D # Blowfish encryption -K bf:secretpassword # Directory to backup -R "/mnt/storage/" # Excludes (must be specified as relative paths to the directory # that is to be backed up) -P "backup" -P "tmp" -P "winhome" -P "ftp/cisco" dar-2.4.8/doc/samples/PN_backup-root.sh0000644000175000017430000000014512003275573014612 00000000000000#!/bin/bash dar -c "/mnt/storage/backup/root_$(date +%Y-%m-%d-%H%M%S)" -B /root/backup-root.options dar-2.4.8/doc/samples/Makefile.in0000644000175000017520000002651612023053152013477 00000000000000# Makefile.in generated by automake 1.11.1 from Makefile.am. # @configure_input@ # Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, # 2003, 2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation, # Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ pkgdatadir = $(datadir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ pkglibdir = $(libdir)/@PACKAGE@ pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = doc/samples DIST_COMMON = README $(dist_noinst_DATA) $(srcdir)/Makefile.am \ $(srcdir)/Makefile.in ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/iconv.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/nls.m4 $(top_srcdir)/m4/po.m4 \ $(top_srcdir)/m4/progtest.m4 $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = SOURCES = DIST_SOURCES = DATA = $(dist_noinst_DATA) DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) ACLOCAL = @ACLOCAL@ AMTAR = @AMTAR@ AR = @AR@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DOXYGEN_PROG = @DOXYGEN_PROG@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = 
@GETTEXT_MACRO_VERSION@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GREP = @GREP@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LD = @LD@ LDFLAGS = @LDFLAGS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ NM = @NM@ NMEDIT = @NMEDIT@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ POSUB = @POSUB@ RANLIB = @RANLIB@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ STRIP = @STRIP@ UPX_PROG = @UPX_PROG@ USE_NLS = @USE_NLS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ bindir = @bindir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ datadir = @datadir@ datarootdir = @datarootdir@ docdir = @docdir@ doxygen = @doxygen@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ groff = @groff@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ localedir = @localedir@ localstatedir = @localstatedir@ lt_ECHO = @lt_ECHO@ mandir = @mandir@ mkdir_p = @mkdir_p@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ prefix = @prefix@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp = @tmp@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ upx = @upx@ NO_EXE_SAMPLES = darrc_sample sample1.txt README automatic_backup.txt JH-readme.txt JH_dar_archiver.options JH_darrc cluster_digital_readme.txt index.html PN_backup-root.options PN_backup-storage.options Patrick_Nagel_Note.txt EXE_SAMPLES = cdbackup.sh pause_every_n_slice.duc automatic_backup dar_backup dar_rqck.bash JH-dar-make_user_backup.sh cluster_digital_backups.sh dar_par_create.duc dar_par_test.duc MyBackup.sh.tar.gz PN_backup-root.sh PN_backup-storage.sh PN_ftpbackup.sh dar_backups.sh available_space.duc dist_noinst_DATA = $(NO_EXE_SAMPLES) $(EXE_SAMPLES) dar_par.dcf etc_darrc all: all-am .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ 
echo ' cd $(top_srcdir) && $(AUTOMAKE) --gnu doc/samples/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --gnu doc/samples/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs tags: TAGS TAGS: ctags: CTAGS CTAGS: distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(DATA) installdirs: install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ `test -z '$(STRIP)' || \ echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-am clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-am -rm -f Makefile distclean-am: clean-am distclean-generic dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: @$(NORMAL_INSTALL) $(MAKE) $(AM_MAKEFLAGS) install-data-hook install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-local .MAKE: install-am install-data-am install-strip .PHONY: all all-am check check-am clean clean-generic clean-libtool \ distclean distclean-generic distclean-libtool distdir dvi \ dvi-am html html-am info info-am install install-am \ install-data install-data-am install-data-hook install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am install-man \ install-pdf install-pdf-am install-ps install-ps-am \ install-strip installcheck installcheck-am installdirs \ maintainer-clean maintainer-clean-generic mostlyclean \ mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \ uninstall uninstall-am uninstall-local install-data-hook: $(INSTALL) -d $(DESTDIR)$(pkgdatadir)/samples sed -e "s%SOMEPATH%$(pkgdatadir)/samples%g" dar_par.dcf > $(DESTDIR)$(pkgdatadir)/samples/dar_par.dcf chmod 0644 $(DESTDIR)$(pkgdatadir)/samples/dar_par.dcf $(INSTALL) -m 0644 $(NO_EXE_SAMPLES) $(DESTDIR)$(pkgdatadir)/samples $(INSTALL) -m 0755 $(EXE_SAMPLES) $(DESTDIR)$(pkgdatadir)/samples $(INSTALL) -d $(DESTDIR)$(sysconfdir) sed -e "s%SOMEPATH%$(pkgdatadir)/samples%g" etc_darrc > $(DESTDIR)$(sysconfdir)/darrc uninstall-local: rm -rf $(DESTDIR)$(pkgdatadir)/samples # $(sysconfdir)/darrc not removed as it may contain system admin specific configuration # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: dar-2.4.8/doc/samples/darrc_sample0000644000175000017430000000146512003275573014020 00000000000000###### # this is an example of what could be a batch file # (given to -B option), a /etc/darrc and a $HOME/.darrc file # reminds that it is a simple example... # all: # make terminal bell when user action is requested -b create: # a list of file to not try to compress -X "*_all_*.*.dar" -X "*_diff_*.*.dar" -X "*_inc_*.*.dar" -Z "*.mpg" -Z "*.MPG" -Z "*.jpg" -Z "*.JPG" -Z "*.gz" -Z "*.tgz" -Z "*.bz2" -Z "*.tbz" -Z "*.mp3" -Z "*.mpeg" -Z "*.zip" -Z "*.dar" # create empty dir for excluded directories -D -R / # we don't save these directories -P tmp -P var/tmp -P mnt -P proc -P dev/pts # here we say we don't want to save dar files -X "*.*.dar" # we pause before starting a new slices -p # and we use gzip compression -z default: # if no action is given then show the version # in place of the usage help -V dar-2.4.8/doc/samples/etc_darrc0000644000175000017430000000415012010477201013272 00000000000000############################################################# # This is the default system wide configuration file for dar # # It is used automatically by dar unless you define a .darrc in # your home directory or use -N option on command-line. 
# You can continue using this default file even if you use your # own .darrc file, by including the following in it: # # -B /etc/darrc # # To not force anyone to use a given option, each option set # is defined under a specific user target. Each user target can # be used on command-line to activate the set of option it defines. # For example, using the word "par2" on command-line will activate # all options following the "par2:" keyword below. # we are using short options here because long options may not be # available everywhere. par2: -B "SOMEPATH/dar_par.dcf" compress-exclusion: # here we define some files that have not to be compressed. # First setting case insentive mode on: -an # Then telling dar that the following masks are glob expression # which is the default, right, but if sooner on command-line the # user swapped to regex, the following mask would not work as expected # any more, so we force back to glob expression in any case: -ag # Now follows all the file specification to never try to compress: # compressed video format -Z "*.mpg" -Z "*.avi" -Z "*.flac" -Z "*.cr2" -Z "*.vob" # compressed picture format -Z "*.jpg" -Z "*.jpeg" -Z "*.mpeg" -Z "*.png" # compressed audio format -Z "*.mp3" -Z "*.ogg" # compressed package -Z "*.deb" -Z "*.tgz" -Z "*.tbz2" -Z "*.rpm" -Z "*.xpi" -Z "*.run" -Z "*.sis" # other compressed data -Z "*.gz" -Z "*.Z" -Z "*.bz2" -Z "*.zip" -Z "*.jar" -Z "*.rar" # dar archives (may be compressed) -Z "*.dar" # Now we swap back to case sensitive mode for masks which is the default # mode: -acase verbose: -v -vs no-emacs-backup: -ag -X "*~" -X ".*~" samba: -H 1 # samba filesystem need this to properly report date # and not lead dar to resave all files when changing # from summer to winter time and viceversa. dry-run: -e # well, the name of the option is not intuitive, I admit, # but letter d was already used ... bell: -b dar-2.4.8/doc/samples/PN_ftpbackup.sh0000644000175000017430000000744212003275573014352 00000000000000#!/bin/bash # ftpbackup.sh - Version 1.1 - 2006-01-09 - Patrick Nagel # Carry out backups automatically and put the resulting # archive onto a backup FTP server. Mail the result to # root. # # Dependencies: ncftp # Change this to your needs ########################### PASSWORDFILE="/root/ftpbackup.credentials" # $PASSWORDFILE must look like this (and should # of course only be readable for the user who # executes the script): # ----------------------------- # |USER="username" | # |PASS="password" | # |SERVER="hostname.of.server"| # ----------------------------- LOGFILE="/root/ftpbackup.log" # The logfile will be gzipped and be available # as $LOGFILE.gz after the script exits. NUMBEROFBACKUPS=2 # How many different backups should this script # carry out? BACKUPCOMMAND[1]="/root/backup-root.sh" # Backup command which carries out 1st backup. # Each backup command must create exactly ONE # archive file. BACKUPCOMMAND[2]="/root/backup-storage.sh" # Backup command which carries out 2nd backup. BACKUPCOMMAND[n]="" # Backup command which carries out nth backup. LOCALBACKUPDIR="/mnt/storage/backup" # This is where the backup archive (must be ONE # FILE!) will be stored by the $BACKUPCOMMAND[x] # program. MOUNTPOINT="/mnt/storage" # The mountpoint of the partition where the # backup archives will be stored on. # For free space statistics. BACKUPFTPQUOTA=42949672960 # Backup FTP server quota or total storage amount # (in bytes). 
####################################################### # Initial variables and checks which ncftp &>/dev/null || { echo "Missing ncftp, which is a dependency of this script."; exit 1; } STARTTIME="$(date +%T)" # Functions function backup_to_ftp_start() { ncftpbatch -D return } function backup_to_ftp_queue() { # Puts newest file in ${LOCALBACKUPDIR} to the backup FTP server. source ${PASSWORDFILE} BACKUPFILE="${LOCALBACKUPDIR}/$(ls -t -1 ${LOCALBACKUPDIR} | head -n 1)" ncftpput -bb -u ${USER} -p ${PASS} ${SERVER} / ${BACKUPFILE} return } function backup_local_used() { du -bs ${LOCALBACKUPDIR} | awk '{printf($1)}' return } function backup_local_free() { df -B 1 --sync ${MOUNTPOINT} | tail -n 1 | awk '{printf($4)}' return } function backup_ftp_used() { source ${PASSWORDFILE} ncftpls -l -u ${USER} -p ${PASS} ftp://${SERVER} | grep -- '^-' | echo -n $(($(awk '{printf("%i+", $5)}'; echo "0"))) return } function backup_ftp_free() { echo -n $((${BACKUPFTPQUOTA} - $(backup_ftp_used))) return } function backup_success() { { echo -en "Backup succeeded.\n\nBackup started at ${STARTTIME} and ended at $(date +%T).\n\n" echo -en "Statistics after backup (all numbers in bytes):\n" echo -en "Used on Backup-FTP: $(backup_ftp_used)\n" echo -en "Free on Backup-FTP: $(backup_ftp_free)\n" echo -en "Used on local backup directory: $(backup_local_used)\n" echo -en "Free on local backup directory: $(backup_local_free)\n" } | mail -s "Backup succeeded" root return } function backup_failure_exit() { { echo -en "Backup failed!\n\nBackup started at ${STARTTIME} and ended at $(date +%T).\n\n" echo -en "Statistics after backup failure (all numbers in bytes):\n" echo -en "Used on Backup-FTP: $(backup_ftp_used)\n" echo -en "Free on Backup-FTP: $(backup_ftp_free)\n" echo -en "Used on local backup directory: $(backup_local_used)\n" echo -en "Free on local backup directory: $(backup_local_free)\n" } | mail -s "Backup FAILED" root gzip -f ${LOGFILE} exit 1 } # Main rm -f ${LOGFILE} # In case the script has been aborted before { for ((i=1; i<=${NUMBEROFBACKUPS}; i+=1)); do ${BACKUPCOMMAND[$i]} >>${LOGFILE} 2>&1 && backup_to_ftp_queue >>${LOGFILE} 2>&1 done && \ backup_to_ftp_start >>${LOGFILE} 2>&1 && \ backup_success } || backup_failure_exit gzip -f ${LOGFILE} dar-2.4.8/doc/samples/Patrick_Nagel_Note.txt0000644000175000017430000000237612010477201015662 00000000000000Follows a copy from Patrick Nagel site at http://www.patrick-nagel.net/scripts/ftpbackup ----------------------------------------------- I wrote ftpbackup.sh to conveniently backup my root server. My root server provider offers a 40 GB FTP storage, where I can store backup archives. To put them on there by hand was a bit of a hassle, so I wrote this little script. It calls my backup scripts (namely backup-root.sh and backup-storage.sh) which both create a .dar file that contains the whole backup. This .dar file is then being sent to the provider's backup FTP server. After everything is done, a mail is sent to root which informs about successful completion or failure, and the used/free space on the FTP as well as on the local backup partition. Configuration is done in the script, everything is explained there. The two scripts backup-root.sh and backup-storage.sh are two examples how to create the backups. I'm using these scripts for quite some time, and also did two full recoveries without any problems. backup-root.sh includes backup-root.options and backup-storage.sh includes backup-storage.options through dar's "-B" option. 
All options in those .options files are documented, so it should be easy for anybody to understand what the script does, and how. dar-2.4.8/doc/samples/index.html0000644000175000017430000002564012010477201013425 00000000000000 Dar - Scripts and Examples
Dar Documentation


Scripts and Examples

On this page you can find several scripts and configuration files that have been sent by dar users. They should all work; if some do not, take them as illustrations or examples on which to base your own configuration scripts.

You will find here both DUC files (Dar User Commands), which can be launched from dar thanks to its -E or -F options, as well as scripts from which dar is launched (a minimal illustration of the -E hook is given after the table below):


Description
Author
Type
Download
 script that uses dar to make full or differential backups to CDR
stef at hardco.de
Script
cdbackup.sh
sample /etc/darrc or ~/.darrc file
"(me)"
DCF
darrc_sample
 for those who like to learn with examples (a rich one) ;-)
Henrik Ingo
DCF
sample1.txt
script to create PAR redundancy data for protection against media corruption
Denis Corbin
DUC
dar_par_create.duc
 script to test and repair slices with redundancy data
Denis Corbin
DUC
dar_par_test.duc
dar configuration file for dar_par_create.duc and dar_par_test.duc

DCF
dar_par.dcf
pause every N slices instead of after every slice when creating an archive [this is now obsolete, as the -p option can now receive an argument telling after how many slices to pause]
Denis Corbin
DUC
pause_every_n_slice.duc
automatic full/differential backup script, with automatic mounting and unmounting; see the comments inside automatic_backup.txt for more info, and also this documentation file (same author).
Manuel Iglesias
Script
automatic_backup
Perl script wrapping dar + parchive + growisofs
Matthew Caron
Script
dar_backup
 bash script for Linux users that gives a rough estimate of the amount of virtual memory dar requires to save the whole system.
Bob Barry
Script
dar_rqck.bash
To save your home directory without worry (skipping the trash directory, making full or differential backups); everything is explained by the author in this tiny document.
Jakub Holy
Script
DCF
DCF
JH-dar-make_user_backup.sh
JH_darrc
JH_dar_archiver.options
Local or remote backup script (using scp) to be launched from cron, which automatically decides whether the backup has to be full or incremental
Roi Rodriguez Mendez & Mauro Silvosa Rivera (Cluster Digital S.L.)
Script
cluster_digital_backups.sh
Shell script to back up to an FTP server
Patrick Nagel
Script

Note.txt
ftpbackup.sh
backup-root.sh
backup-root.options
backup-storage.sh
backup-storage.options

Enhanced version of the script by Roi and Mauro above (see cluster_digital_backups.sh)
Jason Lewis
Script
dar_backups.sh
A very complete script that:
  • can perform Logging
  • uses configuration files (see attached sample including usage comments)
  • can use Snapshots (if fs_root is on an LVM volume)
  • can do DVD formatting
  • can write a dar archive to DVD.
The design requires that each backup job fits on a single DVD, optionally writing a directory and contents to DVD. This allows:
  • copying system documentation to DVD for reference during system recovery
  • Writing dar_static to DVD for potential use during system recovery
  • Writing /etc/lvm and contents to DVD for potential use during system recovery
  • Options to restart failed DVD operations by skipping to DVD writing and to DVD verification
  • Extensive error trapping
The script itself contains very detailed user information.
Charles
Script
MyBackup.sh.tar.gz
A shell script to replace the -p option when one needs to pause before dar lacks space to add a new slice on the disk. This may be of some use when using media of different sizes to store a given archive. You then need to choose the slice size (-s option) as the biggest common divisor of all the media sizes to let dar handle this situation quite nicely (a small worked example follows the table).
Denis Corbin
DUC available_space.duc
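A small worked example of that slice-size choice (the media sizes below are made up for illustration): with one 650 MB and one 700 MB medium, the biggest common divisor is 50 MB, so 13 slices exactly fill the smaller medium and 14 the larger one:

   dar -c /backup/archive -R /home -s 50M
   # -s 50M : hypothetical slice size = biggest common divisor of 650 MB and 700 MB
   # available_space.duc can then be hooked with -E (see the script's own header
   # for the exact arguments it expects) so that dar only pauses when the current
   # medium is actually about to run out of room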


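As mentioned in the introduction above, a DUC file is simply an executable that dar runs between slices through its -E option (or -F for the archive of reference). A minimal sketch of the hook, where my_duc_script is a placeholder for any of the DUC files listed here and the archive path and slice size are arbitrary:

   dar -c /backup/monthly -R / -s 700M -E "my_duc_script %p %b %n %e %c"
   # after each completed slice dar substitutes %p (slice path), %b (base name),
   # %n (slice number), %e (extension) and %c (context) and runs the command;
   # the script must be executable and reachable through PATH or an explicit path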
dar-2.4.8/doc/samples/dar_rqck.bash0000644000175000017430000000111712003275573014060 00000000000000#!/bin/bash MT=$(sed '/^MemTotal/!d;s/.* //' /proc/meminfo) echo -e "\n\tyou have $MT total memory" ST=$(sed '/^SwapTotal/!d;s/.* //' /proc/meminfo) echo -e "\n\tyou have $ST total swap" P=$(mount | sed '/^none/d' | awk '{print $3}') for p in $P do fc=$(find $p -xdev \ -path '/tmp' -prune -o \ -path '/var/tmp' -prune -o \ -print | wc -l) echo -e "\n\tpartition \"$p\" contains $fc files" (( iioh = ($fc * 1300)/1024 )) echo -e "\tdar differential backup with infinint requires $iioh kB memory" done echo # /proc and /sys (and /dev if it's udev) are excluded by "-xdev" dar-2.4.8/doc/samples/pause_every_n_slice.duc0000755000175000017430000000200112010477201016135 00000000000000#!/bin/sh ############################################################################ # WARNING: this script is now obsolete, due to the -p option new feature # that provide the same behavior. You can still use this script or tune it # for your own need if you like, this is why it is kept present here as sample ############################################################################ # This script is to be launched on dar command line when creating an archive # with -s option (slicing), in place of -p option (pause every slice done) # # -E "pause_every_n_slice.duc %p %b %n %e %c N" # # will make dar pause when slice N will be done, when slice 2*N, 3*N etc. # will be done. # if [ "$1" = "" -a "$2" = "" -a "$3" = "" -a "$4" = "" -a "$6" = "" ]; then echo "usage: $0 " exit 1 fi toto=$(( $3%$6 )) if [ $toto -eq 0 ] ; then echo "Pausing after slice $3" echo "Press return to continue" read junk fi dar-2.4.8/doc/samples/automatic_backup0000644000175000017430000007710612003275573014704 00000000000000#Written by Manuel Iglesias. glesialo@tiscali.es #Notes: SystemDirectory=/sbin # This file should be copied (by CopySystemFiles) to its corresponding Directory (see above). # Exit codes at the end of this file. CommandName=`basename $0` ######################################################### # BACKUP SETUP. BEGIN. Read Dar Doc before modification. ######################################################### # Permissions. ################## # Allow use only in run level 1. CheckRunLevel=false # # Allow use only by root (Super user). CheckUser=true # ######################################################### # Paths and files. ################## # Directories. ######### # Backup files Directory: Absolute path (Should start with '/'!!). Don't end it with '/' unless it is '/'. DestinationDir=/store/.Store/Backup # # Origin of Backup/Restore Directory: Absolute path (Should start with '/'!!). # Don't end it with '/' unless it is '/'. OriginDir=/ # # Directories to backup. Relative to Origin of Backup Dir! Empty means: all dirs # (Except those in Directories to ignore. See below.). Separate with spaces. SubDirsToBackup="root home" # # Directories to ignore. Relative to Origin of Backup Dir! Separate with spaces. SubDirsToIgnore="home/manolo2 home/manolo/documents/Secret */.Trash* .Trash*\ */.mozilla/*/[Cc]ache */.opera/[Cc]ache* */.pan/*/[Cc]ache */.thumbnails" # # DestinationDir will be automatically included in SubDirsToIgnore if DestinationDir is a subdirectory # of OriginDir. If you want to include the base (IE.: Temp if DestinationDir: OriginDir/Temp/Backup) of # DestinationDir instead, set constant IgnoreBaseOfDestinationDir to true. Value (true | false). 
IgnoreBaseOfDestinationDir=true # # File systems that should be mounted for a correct backup. If any of them has to be mounted, # it will be umounted before this shellscript exits. Please mind mounting order!! # Absolute path (Should start with '/'!!). Separate with spaces. DirsToMount="/home /home/common /store" # ################## # Files. ######### # Files to backup. Empty: all files (Except those in Files to ignore. See below.). # No Path. Separate with spaces. FilesToBackup="" # # Files that should not be included in backup. No Path. Separate with spaces. FilesToIgnore="*~ .*~ cryptfile0.crypt cryptfile1.crypt" # # Files that should not to be compressed. No Path. Separate with spaces. FilesNotToCompress="*.dar *.crypt *.arj *.bz2 *.bz *.Z *.tgz *.taz *.cpio *.deb\ *.gtar *.gz *.lzh *.lhz *.rar *.rpm *.shar *.sv4cpi *.sv4crc *.tar *.ustar *.zoo\ *.zip *.jar *.jpg *.gif *.mpg *.mpeg *.avi *.ram *.rm" # ######################################################### # Parameters used to choose Differential Backup level. ################## BlockSize=1024 # # When Diffbackup > (MaxDiffPercentOfFullBackup% of FullBackup): New FullBackup recommended. MaxDiffPercentOfFullBackup=30 # # When Diffbackup < (MinDiffPercentOfFullBackup% of FullBackup): Rewrite first DiffBackup recommended. MinDiffPercentOfFullBackup=3 # # Max 99. If (Nr of DiffBackups) > MaxNrOfDiffBackups: Rewrite first DiffBackup recommended. MaxNrOfDiffBackups=20 # ######################################################### # Dar settings and options. ################## #Used dar suite program names. DarManagerName=dar_manager DarName=dar # # Directory where dar usually resides. Absolute path (Should start with '/'!!). Don't end it with '/'. DarDir=/usr/local/bin # # Create empty sub-directories in backup instead of those not saved. Value (true | false). BackupIgnoredDirsEmpty=true # # CompressWithBZip2=false -> no compression. Value (true | false). CompressWithBZip2=true # # Compress Files > 100Mb. Only valid if CompressWithBZip2=true. Value (true | false). CompressBigFiles=true # # Value (true | false). VerboseMode=false # # Value (true | false). MakeSlices=true # # StopAfterSlices: Only valid if MakeSlices=true. Value (true | false). StopAfterSlices=false # # SizeOfDarStatic: dar_static + DocFiles + Restore shell + etc (To calculate first slize size). SizeOfDarStatic=4 # SliceSize=650 # ######################################################### # BACKUP SETUP. END. Read Dar Doc before modification. ######################################################### ######################################################### # SUBROUTINES. BEGIN. ######################################################### echoE() { # echo to standard error. Remove leading/trailing blanks and double spaces. echo $* 1>&2 return 0 } Usage() { echoE "$CommandName creates (Using '$DarName'), in directory" echoE "'$DestinationDir'," echoE "a backup of all files and directories in" echoE "'$OriginDir'." echoE "It analyzes current backup files and recommends the most suitable new" echoE "backup level to the user. It also creates/updates a database with backup" echoE "information for future Backup management (Using '$DarManagerName')." echoE echoE "The backup will be split in files of $SliceSize Mb to fit in removable media." echoE echoE "Usage: $CommandName. (User can choose backup level)." echoE "or" echoE "Usage: $CommandName -auto. ($CommandName selects backup level automatically)." 
echoE return 0 } UmountDirs () { if [ "$DirsToUMount" != "" ] then echo "############" echo "$CommandName: Unmounting file systems:" for i in $DirsToUMount do mount | grep -w $i &> /dev/null if [ $? -eq 0 ] then if (umount $i &> /dev/null) then echo "$CommandName: $i unmounted." else echoE "$CommandName: $i could not be unmounted." fi else echo "$CommandName: $i was already unmounted." fi done fi echo "############" return 0 } TwoDigits () { #Add leftmost 0 if [ $1 -lt 10 ] then echo 0$1 else echo $1 fi return 0 } Stream() { # Output String(s) without letting the Shell interpret metacharacters. # Remove leading/trailing blanks and double spaces. # Enclose arguments in "" when calling. I.E.: Stream "$Var1 $Var2" TempStr=$@ Length=${#TempStr} if [ $Length -eq 0 ] then return else CharNum=0 while [ $CharNum -lt $Length ] do echo -n "${TempStr:$CharNum:1}" let CharNum++ done echo fi return } ######################################################### # SUBROUTINES. END. ######################################################### NoUserChoice=false if [ $# -ne 0 ] then if [ "$1" == "-auto" ] then NoUserChoice=true else Usage exit 1 fi fi if $CheckRunLevel then RunLevel=`runlevel | sed 's/.* //'` if [ $RunLevel != S ] then echoE "$CommandName: RunLevel: $RunLevel. Please change to RunLevel 1 (init 1) and try again." exit 1 fi fi if $CheckUser then CurrentUser=`whoami` if [ "$CurrentUser" != "root" ] then echoE "$CommandName: User: '$CurrentUser'. Please login as 'root' and try again." exit 1 fi fi echo "############" DirsToUMount="" if [ "$DirsToMount" != "" ] then echo "$CommandName: Mounting file systems:" for i in $DirsToMount do mount | grep -w $i &> /dev/null if [ $? -ne 0 ] then if (mount $i &> /dev/null) then echo "$CommandName: $i mounted." DirsToUMount=" $i"$DirsToUMount else echoE "$CommandName: $i could not be mounted. Aborting." UmountDirs exit 2 fi else echo "$CommandName: $i was already mounted." fi done echo "############" fi if [ "$OriginDir" != "/" ] then # if first character is not '/'. if [ "${OriginDir:0:1}" != "/" ] then echoE "$CommandName: 'Origin' directory:" echoE "$CommandName: $OriginDir." echoE "$CommandName: Must be an absolute path (Should start with '/'!)." echoE "$CommandName: Please edit '$CommandName' and try again." UmountDirs exit 3 else # if last character is '/'. if [ "${OriginDir:${#OriginDir}-1:1}" == "/" ] then echoE "$CommandName: 'Origin' directory:" echoE "$CommandName: $OriginDir." echoE "$CommandName: Should not end with '/'!." echoE "$CommandName: Please edit '$CommandName' and try again." UmountDirs exit 3 else if test ! -d $OriginDir then echoE "$CommandName: 'Origin' directory:" echoE "$CommandName: $OriginDir." echoE "$CommandName: Does not exist. Please edit '$CommandName' and try again." UmountDirs exit 3 fi fi fi fi if [ "$DestinationDir" != "/" ] then # if first character is not '/'. if [ "${DestinationDir:0:1}" != "/" ] then echoE "$CommandName: 'DestinationDir' directory:" echoE "$CommandName: $DestinationDir." echoE "$CommandName: Must be an absolute path (Should start with '/'!)." echoE "$CommandName: Please edit '$CommandName' and try again." UmountDirs exit 3 else # if last character is '/'. if [ "${DestinationDir:${#DestinationDir}-1:1}" == "/" ] then echoE "$CommandName: 'DestinationDir' directory:" echoE "$CommandName: $DestinationDir." echoE "$CommandName: Should not end with '/'!." echoE "$CommandName: Please edit '$CommandName' and try again." UmountDirs exit 3 else if test ! 
-d $DestinationDir then echoE "$CommandName: 'DestinationDir' directory:" echoE "$CommandName: $DestinationDir." echoE "$CommandName: Does not exist. Please edit '$CommandName' and try again." UmountDirs exit 3 fi fi fi fi if [ $OriginDir == $DestinationDir ] then echoE "$CommandName: 'DestinationDir' and 'OriginDir' can not be the same directory!" echoE "$CommandName: Please edit '$CommandName' and try again." UmountDirs exit 3 fi # Find dar & dar_manager if type >/dev/null 2>&1 $DarName then DarFound=true else DarFound=false fi if type >/dev/null 2>&1 $DarManagerName then DarManagerFound=true else DarManagerFound=false fi if ! ($DarFound && $DarManagerFound) then if [ "$DarDir" != "/" ] then # if first character is not '/'. if [ "${DarDir:0:1}" != "/" ] then echoE "$CommandName: 'DarDir' directory:" echoE "$CommandName: $DarDir." echoE "$CommandName: Must be an absolute path (Should start with '/'!)." echoE "$CommandName: Please edit '$CommandName' and try again." UmountDirs exit 3 else # if last character is '/'. if [ "${DarDir:${#DarDir}-1:1}" == "/" ] then echoE "$CommandName: 'DarDir' directory:" echoE "$CommandName: $DarDir." echoE "$CommandName: Should not end with '/'!." echoE "$CommandName: Please edit '$CommandName' and try again." UmountDirs exit 3 else if test ! -d $DarDir then echoE "$CommandName: 'DarDir' directory:" echoE "$CommandName: $DarDir." echoE "$CommandName: Does not exist. Please edit '$CommandName' and try again." UmountDirs exit 3 fi fi fi fi # Include directory, where dar usually resides, in PATH." # DarDir not in PATH? echo $PATH | grep $DarDir &> /dev/null if [ $? -ne 0 ] then PATH=$DarDir":"$PATH fi fi if ! type >/dev/null 2>&1 $DarName then echoE "$CommandName: $DarName neither in PATH nor in $DarDir. Aborting." UmountDirs exit 3 fi if ! type >/dev/null 2>&1 $DarManagerName then echoE "$CommandName: $DarManagerName neither in PATH nor in $DarDir. Aborting." UmountDirs exit 3 fi ######################################################### # VARIABLES INITIALIZATION. BEGIN. ######################################################### # Backup Paths. ############### #Backup base names & DataBase name. FullBackupBaseName=$CommandName"Full" DiffBackupBaseName=$CommandName"Diff" DataBaseName=$CommandName"DataBase" # FullBackupPath=$DestinationDir/$FullBackupBaseName DiffBackupPath=$DestinationDir/$DiffBackupBaseName DataBasePath=$DestinationDir/$DataBaseName # ######################################################### # Set dar options. ############### # Backup base name (Will be set later): -c PathBackUpBaseName BackupNameOption="-c " # # Reference backup (Will be set later) for differential backups: -A PathBackUpBaseName ReferenceBackupOption="-A " # # Origin of Backup: -R /. DarOptions="-R "$OriginDir # # Compress data inside the backup using bzip2: -y[CompressLevel]. # CompressLevel: 0 minimum; 9 maximun. Compress Files > 100Mb: -m 0. if $CompressWithBZip2 then DarOptions=$DarOptions" -y9" if $CompressBigFiles then DarOptions=$DarOptions" -m 0" fi fi # # Verbose mode: -v if $VerboseMode then DarOptions=$DarOptions" -v" fi # # Create empty sub-directories in backup instead of those not saved: -D if $BackupIgnoredDirsEmpty then DarOptions=$DarOptions" -D" fi # # Do not read ~/.darrc nor /etc/darrc configuration file: -N DarOptions=$DarOptions" -N" # ######################################################### #Set Slice options. 
############### if [ $SliceSize -gt $SizeOfDarStatic ] then let FirstSliceSize=$SliceSize-$SizeOfDarStatic else FirstSliceSize=$SliceSize fi # # All sizes in Mb; Stop after each slize. if $MakeSlices then FirstSliceSizeOption="-S "$FirstSliceSize"M" SliceSizeOption="-s "$SliceSize"M" # Pause between slices to change removable media. Ring bell: -p -b if $StopAfterSlices then DarOptions=$DarOptions" -p -b" fi else FirstSliceSizeOption="" SliceSizeOption="" fi # ######################################################### #Set Include/Exclude Files Options. ############### # Files you don't want to backup: -X "*~" -X ".*~" if [ "$FilesToIgnore" != "" ] then InclExclFilesOption='-X "'`Stream "$FilesToIgnore" | sed 's/ /" -X "/g'`'"' else InclExclFilesOption="" fi # # Files you want to backup without compression: -Z "*.zip" if $CompressWithBZip2 then if [ "$FilesNotToCompress" != "" ] then InclExclFilesOption=$InclExclFilesOption' -Z "'`Stream "$FilesNotToCompress" | sed 's/ /" -Z "/g'`'"' fi fi # # Files to include in backup: -I "*.html". if [ "$FilesToBackup" != "" ] then InclExclFilesOption=' -I "'`Stream "$FilesToBackup" | sed 's/ /" -I "/g'`'" '$InclExclFilesOption fi # ######################################################### #Set Include/Exclude directories Options. ############### # $OriginDir in $DestinationDir? echo $DestinationDir | grep $OriginDir &> /dev/null if [ $? -eq 0 ] then # TempDir= $DestinationDir-$OriginDir TempDir=`echo $DestinationDir | sed s%$OriginDir%%` if $IgnoreBaseOfDestinationDir then # Include BaseDir of DestinationDir (Without first '/') in SubDirsToIgnore. # if first character, in TempDir, is not '/'. if [ "${DestinationDir:0:1}" != "/" ] then # Add '/' in front. TempDir="/"$TempDir fi TempPath=$TempDir while [ $TempPath != `dirname $TempPath` ] do BasePath=$TempPath TempPath=`dirname $TempPath` done BasePath=`basename $BasePath` if [ "$SubDirsToIgnore" != "" ] then SubDirsToIgnore=$SubDirsToIgnore" $BasePath" else SubDirsToIgnore=$BasePath fi else # Include DestinationDir (Without first '/') in SubDirsToIgnore. # if first character, in TempDir, is '/'. if [ "${TempDir:0:1}" == "/" ] then # Remove first '/'. TempDir=${TempDir:1:${#TempDir}-1} fi if [ "$SubDirsToIgnore" != "" ] then SubDirsToIgnore=$SubDirsToIgnore" $TempDir" else SubDirsToIgnore=$TempDir fi fi fi # # Sub-trees you must not save: -P dev/pts -P proc. Path must be relative to -R option # Enclose each directory in "" just in case there are metacharacters in the name. if [ "$SubDirsToIgnore" != "" ] then IncludeExclDirsOption='-P "'`Stream "$SubDirsToIgnore" | sed 's/ /" -P "/g'`'"' else IncludeExclDirsOption="" fi # # Sub-trees you must save: Add without any option in front. # Enclose each directory in "" just in case there are metacharacters in the name. if [ "$SubDirsToBackup" != "" ] then IncludeExclDirsOption='-g"'`Stream "$SubDirsToBackup" | sed 's/ /" -g "/g'`'" '$IncludeExclDirsOption fi # ######################################################### # Set dar_manager options. ############### # Create DataBase: -C PathBaseName CreateDataBaseOption="-C "$DataBasePath # # DataBase used as reference: -B PathBaseName DataBaseNameOption="-B "$DataBasePath # # Add Archive to DataBase (Will be set later): -A PathArchiveName AddToDataBaseOption="-A " # ######################################################### # VARIABLES INITIALIZATION. END. 
######################################################### FullDiffBackupSize=`ls -1 -s --block-size=$BlockSize $FullBackupPath.* 2> /dev/null | awk '{s = s + $1} END {print s}'` if [ "$FullDiffBackupSize" == "" ] then FullDiffBackupSize=0 fi TotalDiffBackupSize=`ls -1 -s --block-size=$BlockSize $DiffBackupPath??.* 2> /dev/null | awk '{s = s + $1} END {print s}'` if [ "$TotalDiffBackupSize" == "" ] then TotalDiffBackupSize=0 fi echo "$CommandName: ### `date --rfc-822` ###" echo "$CommandName: Current backup information (Size in $BlockSize bytes blocks.):" if [ $FullDiffBackupSize -eq 0 ] then echo "$CommandName: No $FullBackupBaseName files found!" echo "############" echo "$CommandName: Preparing to Create $FullBackupBaseName." DiffBackupNr=0 LastDiffBackup=$DiffBackupNr else echo "$CommandName: ..$FullBackupBaseName: $FullDiffBackupSize." if [ $TotalDiffBackupSize -eq 0 ] then DiffBackupNr=1 LastDiffBackup=0 BaseName=$DiffBackupBaseName`TwoDigits $DiffBackupNr` echo "############" echo "$CommandName: Preparing to Create $BaseName." else echo "$CommandName: ..$DiffBackupBaseName: $TotalDiffBackupSize:" DiffBackupNr=0 LastDiffBackup=$DiffBackupNr BestChoiceDiffLevel="" RemainingDiffSize=$TotalDiffBackupSize CurrentSize=1 while [ $CurrentSize -ne 0 ] do let DiffBackupNr++ BaseName=$DiffBackupPath`TwoDigits $DiffBackupNr` CurrentSize=`ls -1 -s --block-size=$BlockSize $BaseName.* 2> /dev/null | awk '{s = s + $1} END {print s}'` if [ "$CurrentSize" == "" ] then CurrentSize=0 fi if [ $CurrentSize -ne 0 ] then LastDiffBackup=$DiffBackupNr let RemainingDiffSize=$RemainingDiffSize-$CurrentSize if [ "$BestChoiceDiffLevel" == "" ] && [ $CurrentSize -lt $RemainingDiffSize ] then BestChoiceDiffLevel=$DiffBackupNr fi BaseName=$DiffBackupBaseName`TwoDigits $DiffBackupNr` echo "$CommandName: ....$BaseName: $CurrentSize." fi done echo "############" let NextDiffBackup=$LastDiffBackup+1 if [ "$BestChoiceDiffLevel" == "" ] then BestChoiceDiffLevel=$NextDiffBackup fi Choice[4]="Exit $CommandName." let MinDiffBackupSize=$FullDiffBackupSize*$MinDiffPercentOfFullBackup/100 if [ $TotalDiffBackupSize -lt $MinDiffBackupSize ] then BestChoiceDiffLevel=1 Choice[1]=" ($DiffBackupBaseName<$MinDiffPercentOfFullBackup%$FullBackupBaseName)." fi if [ $LastDiffBackup -gt $MaxNrOfDiffBackups ] then BestChoiceDiffLevel=1 Choice[1]=${Choice[1]}" (NrOfDiffBackups>$MaxNrOfDiffBackups)." fi BaseName=$DiffBackupBaseName`TwoDigits $BestChoiceDiffLevel` Choice[1]=" $BaseName."${Choice[1]} BaseName=$DiffBackupBaseName`TwoDigits $NextDiffBackup` Choice[2]="Create $BaseName. Faster." Choice[3]="Rewrite $FullBackupBaseName ($DiffBackupBaseName>$MaxDiffPercentOfFullBackup%$FullBackupBaseName). Recommended!" let MaxDiffBackupSize=$FullDiffBackupSize*$MaxDiffPercentOfFullBackup/100 if [ $NextDiffBackup -eq $BestChoiceDiffLevel ] then if [ $TotalDiffBackupSize -gt $MaxDiffBackupSize ] then Choices="1 3" CreateRewriteMode="Create" Choice[1]=${Choice[1]}" Faster." else Choices="" fi else CreateRewriteMode="Rewrite" if [ $TotalDiffBackupSize -gt $MaxDiffBackupSize ] then Choices="1 2 3" else Choices="1 2" Choice[1]=${Choice[1]}" Recommended!" fi fi Choice[1]=$CreateRewriteMode${Choice[1]} if [ "$Choices" == "" ] then DiffBackupNr=$BestChoiceDiffLevel BaseName=$DiffBackupBaseName`TwoDigits $DiffBackupNr` echo "$CommandName: Preparing to Create $BaseName." 
else Choices=$Choices" 4" echo "$CommandName: Options:" ChoiceNr=1 for i in $Choices do echo "$CommandName: $ChoiceNr.${Choice[$i]}" let ChoiceNr++ done echo "############" if $NoUserChoice then echo $Choices | grep "3" &> /dev/null if [ $? -eq 0 ] then Choice=3 else Choice=1 fi else let ChoiceNr-- ValidNumber=false until $ValidNumber do read -p "$CommandName: Please choose a number: " UserChoice case $UserChoice in [a-zA-Z-_.,]* | *[a-zA-Z-_.,] | *[a-zA-Z-_.,]*) echoE "$CommandName: No alpha characters allowed. Please try again.";; "") ;; *) ValidNumber=true;; esac if $ValidNumber then if [ $UserChoice -lt 1 ] || [ $UserChoice -gt $ChoiceNr ] then echoE "$CommandName: Allowed number range: 1..$ChoiceNr. Please try again." ValidNumber=false fi fi done ChoiceNr=0 for i in $Choices do let ChoiceNr++ if [ $ChoiceNr -eq $UserChoice ] then Choice=$i fi done echo "############" fi case $Choice in 1) DiffBackupNr=$BestChoiceDiffLevel BaseName=$DiffBackupBaseName`TwoDigits $DiffBackupNr` echo "$CommandName: Preparing to $CreateRewriteMode $BaseName.";; 2) DiffBackupNr=$NextDiffBackup BaseName=$DiffBackupBaseName`TwoDigits $DiffBackupNr` echo "$CommandName: Preparing to Create $BaseName.";; 3) echo "$CommandName: Preparing to Rewrite $FullBackupBaseName." DiffBackupNr=0;; 4) echoE "$CommandName: Program exits at user request." UmountDirs exit 4;; *) echoE "$CommandName:Warning: Incorrect choice: $Choice. Aborting." UmountDirs exit 5;; esac fi fi fi if [ $DiffBackupNr -eq 0 ] then FullBackup=true else FullBackup=false fi if $FullBackup then BackupNameOption=$BackupNameOption$FullBackupPath if [ $FullDiffBackupSize -ne 0 ] then echo "############" echo "$CommandName: Removing previous $FullBackupBaseName files." for i in $FullBackupPath.* do if test -f $i then if (rm $i) then echo "$CommandName: $i removed." else echoE "$CommandName:Warning: Failure to remove $i." fi fi done fi if [ $TotalDiffBackupSize -ne 0 ] then echo "############" echo "$CommandName: Removing previous $DiffBackupBaseName files." for i in $DiffBackupPath??.* do if test -f $i then if (rm $i) then echo "$CommandName: $i removed." else echoE "$CommandName:Warning: Failure to remove $i." fi fi done fi if test -f $DataBasePath then echo "############" echo "$CommandName: Removing previous $DataBaseName file." if (rm $DataBasePath) then echo "$CommandName: $DataBasePath removed." else echoE "$CommandName:Warning: Failure to remove $DataBasePath." fi fi echo "############" echo "$CommandName: creating $FullBackupBaseName. Please wait." echo "###" sh <&1 | awk 'END {print $1}'` case $LastArchiveInDB in [a-zA-Z-_.,]* | *[a-zA-Z-_.,] | *[a-zA-Z-_.,]*) # If DataBase empty last line produced by 'dar_manager -l' is full of '--' echoE "$CommandName: Warning: $DataBaseName is empty. Aborting." UmountDirs exit 6;; *) if [ $LastArchiveInDB -gt $DataBaseLastValidArchive ] then echo "############" echo "$CommandName: Erasing previous Differential backups from $DataBaseName." while [ $LastArchiveInDB -gt $DataBaseLastValidArchive ] do let ArchiveBaseName=$LastArchiveInDB-1 BaseName=`TwoDigits $ArchiveBaseName` ArchiveBaseName=$DiffBackupBaseName$BaseName if ($DarManagerName $DataBaseNameOption -D $LastArchiveInDB) then echo "$CommandName: Archive $LastArchiveInDB ($ArchiveBaseName) erased from $DataBaseName." else echoE "$CommandName: Warning: Erasing of Archive $LastArchiveInDB ($ArchiveBaseName)\ from $DataBaseName failed." fi let LastArchiveInDB-- done fi;; esac else echoE "$CommandName: Warning! $DataBaseName does not exist. Aborting." 
UmountDirs exit 7 fi echo "############" echo "$CommandName: creating $BackupName. Please wait." echo "###" sh <" #Reference archive name is the filename of the first slice without .number.dar #Dar will also search/ask for the last reference archive slice. #A plain catalogue file can also be used as an incremental reference. #backups everything starting from / (see DAR_PARAMS) to iso/rr cdrs #Archive slices are stored temporarily in ./ (see TDIR) and get deleted #if written successfully to cdr. #The first cdr will also contain the dar_static executable. #If anything goes wrong while trying to write to cdr, you can try again #or keep the current archive slice as a file in ./ (see TDIR). #For backing up to files only, simply accept the cdr write error and #answer with 'keep file' (or even better: use dar directly). #Slice size is for 700MB cdr blanks, see (and maybe change) DAR_PARAMS below. #For (slow!) compression, add a -y or -z parameter to DAR_PARAMS. #The archive slice file names are: #- for full backups: YYYY-MM-DD..dar #- for incrementals: YYYY-MM-DD_YYYY-MM-DD..dar # The second date is the name of the reference archive, so you can end # up with names like YYYY-MM-DD_YYYY-MM-DD_YYYY-MM-DD_YYYY-MM-DD.1.dar # for a four level stacked incremental backup. #Files which don't get backed up: (see DAR_PARAMS below) #- the slice files of the current archive #- the slice files of the reference archive #- files called "darswap" (for manually adding more swap space for incrementals) #- directory contents of /mnt, /cdrom, /proc, /dev/pts #hints: #- You need at least 700MB of free disk space in ./ (or in TDIR, if changed). #- For incrementals, you need about 1KB of memory per tested file. # Create a large file "darswap" and add this as additional swap space. #- If you are doing more than one backup per day, the filenames may interfere. #- Carefully read the dar man page as well as the excellent TUTORIAL and NOTES. #uncompressed, for 700MB cdr blanks: DAR_PARAMS="-s 699M -S 691M -R / -P dev/pts -P proc -P mnt -P cdrom -D" #temporary or target directory: TDIR="." #I'm using a USB CDR drive, so i don't know which 'scsi'-bus it is on. 
#Cdrecord -scanbus is grepped for the following string: DRIVENAME="PLEXTOR" #Also because of USB i have to limit drive speed: DRIVESPEED=4 #used external programs: DAR_EXEC="/root/app/dar-1.3.0/dar" #tested: dar-1.3.0 DAR_STATIC="/root/app/dar-1.3.0/dar_static" #copied to the first cdr MKISOFS="/root/app/cdrtools-2.0/bin/mkisofs" #tested: cdrtools-2.0 CDRECORD="/root/app/cdrtools-2.0/bin/cdrecord" #tested: cdrtools-2.0 GREP="/usr/bin/grep" #tested: gnu grep 2.2 BASENAME="/usr/bin/basename" DATECMD="/bin/date" MKDIR="/bin/mkdir" MV="/bin/mv" CP="/bin/cp" RM="/bin/rm" #initial call of this script (just executes dar with the proper parameters): DATE=`$DATECMD -I` START=`$DATECMD` if [ "$1" != "" ] && [ "$2" == "" ] ; then if [ "$1" == "full" ] ; then echo "starting full backup" $DAR_EXEC -c "$TDIR/$DATE" \ -X "$DATE.*.dar" -X "darswap" \ -N $DAR_PARAMS -E "$0 %p %b %N" else echo "starting incremental backup based on $1" LDATE=`$BASENAME $1` $DAR_EXEC -c "$TDIR/${DATE}_$LDATE" -A $1 \ -X "${DATE}_$LDATE.*.dar" -X "$LDATE.*.dar" -X "darswap" \ -N $DAR_PARAMS -E "$0 %p %b %N" fi echo "backup done" echo "start: $START" echo "end: `$DATECMD`" #called by dar's -E parameter after each slice: elif [ -r "$1/$2.$3.dar" ] ; then echo -n "creating cdr $3 volume dir containing $2.$3.dar" $MKDIR "$1/$2.$3.cdr" $MV "$1/$2.$3.dar" "$1/$2.$3.cdr" if [ "$3" == "1" ] ; then echo -n " and dar_static" $CP $DAR_STATIC "$1/$2.$3.cdr" fi echo SCANBUS=`$CDRECORD -scanbus 2>/dev/null | $GREP $DRIVENAME` DEV=${SCANBUS:1:5} CDBLOCKS=`$MKISOFS -R -print-size -quiet $1/$2.$3.cdr` echo "writing cdr $3 (${CDBLOCKS}s)..." KEEPFILE="n" until $MKISOFS -R "$1/$2.$3.cdr" | \ $CDRECORD -eject -s dev=$DEV speed=$DRIVESPEED tsize=${CDBLOCKS}s - do read -p "write error, try [A]gain or [k]eep $2.$3.dar? " ERR if [ "$ERR" == "k" ] ; then KEEPFILE="y" break fi done if [ "$KEEPFILE" == "y" ] ; then echo "cdr not written, keeping $2.$3.dar as file" $MV "$1/$2.$3.cdr/$2.$3.dar" "$1/$2.$3.dar" fi echo "removing volume dir" $RM -rf "$1/$2.$3.cdr" echo "backup continues" else echo "usage: $0 " fi exit 0 dar-2.4.8/doc/samples/automatic_backup.txt0000644000175000017430000000651112003275573015512 00000000000000Let describe this automatic tool by its author, Manuel Iglesias: (extracted from email exchanges): ------------------------------------------------------------------------------ To make it easier to use I have written a shell script with the following features: -It assumes all backup files are in an accessible directory. From there the user can copy them to removable media. -Easy to configure different backups: Make a copy of the script and edit 'BACKUP SETUP.' in the new file. Configuration file 'darrc' is not necessary. -Mounts, makes a backup and then un-mounts filesystems: A mounted file system could be an external H.D. where the backup files could be written. -Decides/recommends which backup mode is the most suitable: Create FullBackup, Rewrite FullBacup, Create DiffNN, Rewrite DiffNN, ....... -Fully automatic: Use '-auto' option to use with cron. I have studied my system (I am new to Linux :-(.) and cron only sends mail if files in /etc/cron.{hourly,daily,weekly,monthly} exit with code != 0. I have written some shell scripts to handle cron jobs. -Different backups can use the same 'Destination' directory: Backups are created with base names made up of the shell script name: Shell script 'LoveLettersBackup' creates: -LoveLettersBackupFull.1.dar -....... -LoveLettersBackupFull.N.dar -LoveLettersBackupDiff01.1.dar -....... 
-LoveLettersBackupDiff01.N.dar -....... -LoveLettersBackupDiffNN.N.dar -LoveLettersBackupDataBase -Creates and keeps updated a Data Base file for later use by dar_manager. The backup mode algorithm is the following: -If there are not FullBackup files then create FullBackup. -If there are not DiffBackup01 files then create DiffBackup01. -If the sum of all DiffBackup files is less than %OfFullBackup (% set in 'BACKUP SETUP.') then rewrite DiffBackup01. -If the sum of all DiffBackup files is greater than %OfFullBackup (% set in 'BACKUP SETUP.') then rewrite FullBackup. -If DiffBackupXX is less than sum(DiffBackup(XX+1)..DiffBackupNN) then rewrite DiffBackupXX. ------------------------------------------------------------------------------ In my last E-mail I forgot to mention another condition which the backup mode algorithm takes in account: NrOfDiffBackups. In the copies of the script I sent you: -If NrOfDiffBackups is greater than MaxNrOfDiffBackups (set in 'BACKUP SETUP.') then rewrite FullBackup. In the meanwhile I have decided it is better to rewrite DiffBackup01 in that situation and I have modified the script accordingly. The backup mode algorithm is now the following: -If there are not FullBackup files then create FullBackup. -If there are not DiffBackup01 files then create DiffBackup01. -If the sum of all DiffBackup files is less than %OfFullBackup (% set in 'BACKUP SETUP.') then rewrite DiffBackup01. -If NrOfDiffBackups is greater than MaxNrOfDiffBackups (set in 'BACKUP SETUP.') then rewrite DiffBackup01. -If the sum of all DiffBackup files is greater than %OfFullBackup (% set in 'BACKUP SETUP.') then rewrite FullBackup. -If DiffBackupXX is less than sum(DiffBackup(XX+1)..DiffBackupNN) then rewrite DiffBackupXX. ------------------------------------------------------------------------------ dar-2.4.8/doc/samples/cluster_digital_backups.sh0000644000175000017430000000577412003275573016672 00000000000000#!/bin/bash # Script Name: dar_backups.sh # Author: Roi Rodriguez Mendez & Mauro Silvosa Rivera (Cluster Digital S.L.) # Description: dar_backups.sh is a script to be runned from cron which # backups data and stores it locally and optionally remote using scp. # It decides between doing a master or an incremental backup based # on the existance or not of a master one for the actual month. The # remote copy feature needs a ssh authentication method which # doesn't prompt for a password, in order to make it non-interactive # (useful for cron, if you plan to run it by hand, this is not # necessary). # Version: 1.0 # Revision History: # 22.08.2005 - Creation # Base directory where backups are stored BASE_BAK_DIR=/var/BACKUP/data # Directory where backups for the actual month are stored (path relative to # $BASE_BAK_DIR) MONTHLY_BAK_DIR=`date -I | awk -F "-" '{ print $1"-"$2 }'` # Variable de comprobacion de fecha CURRENT_MONTH=$MONTHLY_BAK_DIR # Name and path for the backup file. SLICE_NAME=${BASE_BAK_DIR}/${MONTHLY_BAK_DIR}/backup_`date -I` # Max backup file size SLICE_SIZE=100M # Remote backup settings REMOTE_BAK="true" REMOTE_HOST="example.com" REMOTE_USR="bakusr" REMOTE_BASE_DIR="/var/BACKUP/example.com/data" REMOTE_MONTHLY_DIR=$MONTHLY_BAK_DIR REMOTE_DIR=${REMOTE_BASE_DIR}/${REMOTE_MONTHLY_DIR} ## FUNCTIONS' DEFINITION # Function which creates a master backup. It gets "true" as a parameter # if the monthly directory has to be created. 
function master_bak () { if [ "$1" == "true" ] then mkdir -p ${BASE_BAK_DIR}/${MONTHLY_BAK_DIR} fi /usr/bin/dar -m 256 -s $SLICE_SIZE -y -R / \ -g ./DATA -g ./home -g ./root -c ${SLICE_NAME}_master > /dev/null if [ "$REMOTE_BAK" == "true" ] then /usr/bin/ssh ${REMOTE_USR}@${REMOTE_HOST} "if [ ! -d ${REMOTE_DIR} ]; then mkdir -p $REMOTE_DIR; fi" for i in `ls ${SLICE_NAME}_master*.dar` do /usr/bin/scp -C -p $i ${REMOTE_USR}@${REMOTE_HOST}:${REMOTE_DIR}/`basename $i` > /dev/null done fi } # Makes the incremental backups function diff_bak () { MASTER=$1 /usr/bin/dar -m 256 -s $SLICE_SIZE -y -R / \ -g ./DATA -g ./home -g ./root -c ${SLICE_NAME}_diff \ -A $MASTER > /dev/null if [ "$REMOTE_BAK" == "true" ] then for i in `ls ${SLICE_NAME}_diff*.dar` do /usr/bin/scp -C -p $i ${REMOTE_USR}@${REMOTE_HOST}:${REMOTE_DIR}/`basename $i` > /dev/null done fi } ## MAIN FLUX # Set appropriate umask value umask 027 # Check for existing monthly backups directory if [ ! -d ${BASE_BAK_DIR}/${MONTHLY_BAK_DIR} ] then # If not, tell master_bak() to mkdir it master_bak "true" else # Else: # MASTER not void if a master backup exists MASTER=`ls ${BASE_BAK_DIR}/${MONTHLY_BAK_DIR}/*_master*.dar | tail -n 1 | awk -F "." '{ print $1 }'` # Check if a master backup already exists. if [ "${MASTER}" != "" ] then # If it exists, it's needed to make a differential one diff_bak $MASTER else # Else, do the master backup master_bak "false" fi fi dar-2.4.8/doc/samples/MyBackup.sh.tar.gz0000644000175000017430000047340512003275573014723 00000000000000‹¦°/Jìî®¬Ö ì³[oÜQãÕ‡“³ƒóO|»O÷?Þ×k˜ÛÃjs¼[·°\°À€€•€õX£zIrìTÂ!x÷uðÚa˜Ö H½¯ÒRé¾Áúu/J†vat%ïê`YиCÒïWq8T®DdyBk¢ñùsÆ•,‚ëà‡ƒ«QâgèîF$¾7þøô­iJ¬R»ÿÎ ‚ WÜl¤Ž»&ïÜwퟟ¥®q'aÜÿ ’ÖÊîªU¤¶#…ä{ðé“‚ »»`­Á?>‡l B¦í1ÊfÑeTé1<¿v_S¾J€õªÊ •ûZí?mVÿ5׫èýoŽã3þ¿Óy²Éþs§ó¤Õy‚þ¿ÕÞÞþÓÿÿ×¼òƒ"Ð1¢·B·O §¬Z 'K1>€Íf«¶T[‚‹÷!¿Šc³Ÿò8|C.öÂ1¶…}°=„ 6,öðûa”·©{]†?åQ&\Èì^ vèBÛŽÀÛDèÎ ng`ã %ØCÄvbgQ’`‡q6†À'GïìÌV¯ö€ad>:‰ÍRÕ‹k1&$/»/\óbÛOR æÈÕìD¡ùaªÈ]?C i="t‚(E`~)rÑð$Kr ,Ř(ÄÅýp®Ó|¸ÇöP¬ÃÅ8Æ¿Çb$OF! aàÇ€²9ÉÐOI$%àר^—£ÅS˜^aÔ·_}ë°…žßÏ‘ üx9ùP„?2¶$ê'öp¦‚°è^Dô ÙÙO¢<~¹Îj%nmÂ\ë’mñŸB}P%x)Ò!* }¬ÕAq¥ ð7ÝohÁŽc挼·Ã1RY«QÏ®YUçVs.SÔÌ7¹ïŠfìz&a&žu'gO«–’K÷)…¨¤nQë ò©¹"˜.:=‰»73Ê.0NŠce{ûIëéÓÎö‚(ŸHâ{ºÕEqº¡@¯¤;wsksÝð“í' "ÞÖsMYºMÇë&ÔkM§½ó´óìY«õýO»ÌÿS®ÿMGIã·Âñ™õŸ2Àbýï´:¸þ·w¶Ú®ÿ¿ÇÕÞêlmn>ÛÙzò§]ý¼ŽBÔÿ øMq†_> ÏN;ÄÄ(òÄûóý·8fïÕÑ»£‹Ö¿>º8>{,+d¥+2ÛR=÷(Þé \™z%Âþ Rgc?/A†b¦’R+Ì|Nõ¦0ÊÖAVܲè¡ly|)ßu8 æ:l?ƒ ÁYÈi€)XpžˆÍÍÖ:¼ŠÒŒº¾ßƒV}‘ÕÞlí¬Ãå9©ük ÜCŠsA¹ÎZœô©Ð·kbRaR—Ó<‰1ìBmÉPR¦¬|cæxm÷)ù{î;לýº~‚ò鉓±¡éŠª%ÒÿBGå{>¥µˆâ2űŒ€”ÓÓÊ*xyèpˆÝA4" 'y(ùf ”¢âÃÛX8YJ(<Ñb:›a¾Ãö”‡D ¨Bk'ÖD/Ùìÿ›ÖÉ“»µs².Â4f§8ôCLõSDšô¬Ù‡·Õ,eòqF¢ö8²2û¨ȧÃðÆO¢Ò9¢ò­$@[s™Ý˜”“ "¿ò^f9R'BÅ‘uƧ¢p£Å8$™ŠòNÙø(­efÞñv¥A·V€æõÙRë¿Ök5Cíb¶aÌC³²ƒI›Ùiz%È3]eÑ-)~˜ ÀeØ `#Í¥·LxFäEKS ÒJ½†Ær7å.»Ö=|‹ˆÑ²'Pš]ЮÑo‡ÖŒbw´v°DŠÖð3^P˜Jw+¢ä€ zS{D)¬`ÇU½å£¢ÑѤϵ󔼒 gÖqÝœ 8Y‘ÀÌh’–ä3žÚ¢ŒìqŠ! 
ë÷â¬õ‡Z}¸ö“{w|q{ëÂ¥¯Ì²­õF|}bø„Å_]lûÖš‹ó{_\a{=í@øõfŒ<÷Éç=WñÉæw¾}rÊ›))•‹fohujÒ}¹iÛß=Òfh‰´©ÎK+#½ue3÷Íg–Œ=øN·ôv¿:þNÃàÞû¿¹³ÍÛÍêÕ{Íc¡OŽ/7nX4oè–6Z4yöJç²S~›~ðƒÆFKÿ0,lù§Ï&4òéÚ?l8Y?nSÂô ÓZTþÙ¨bhóW4úîýIwmnñWq«’9Éë?¨³u£÷‘Í ÿúÍ÷Ñ€ŽßÙòä7ç»­<óëÞö¥¿ýVtaçeS÷¢èCûkÞ¼"¥ùÎY³Xîø0o­íÞ&{>[rêÏ{6Ô)Û]?ï»ù¯ÎütàÊ×Þ3dr½ Þ’¿˜3ÿó´ù¹GZüþÖçÛfÖm–ßrÉɰï[Ùmö¨÷kæî¿gãoS¿íÛô‡-{òG6~¤ù­wšB+Îök:ç¾±9Ù¥zÇWÕÆ'§&)o“ý£–AGëw]zvÍ-ŸË›v±FÎÅ7ÿãî+ Ðli¿Û¶mÛ¶mÛ¶mÛ¶mÛ¶mÛÞ{¾‹ÿü3É9“Ìm¥®VWºŸ•Õ…‚¾Ï>QØK%à¶„z^Èo¶Ô§YL,™Š×Fšte4…82&曼"’÷ûõ’—ÄIÑþ¨æ×P¶ÿ|âùùtW¶çËñ“•;àç÷ûtõðs&ÅSÔ]vÇ'ëç‘•äZvç÷í«ôôcm+º7÷YZëÎsðâý·õ¦×§»§×Ǫ¤ƒS½˜%Ùw;$Ãwkæ¦Ä–{gqï­´D·r#³õ.~ןEE•x?«£­»`|†¼+|Ð"xð€‰ê5Ȩ…µ éfÆò{k§YпFN-;«àd¨wTL:i[¸ Ú†…· Ÿö´ÇÍü]ÍÓw÷HŒé E9wüøol~ÉBK´Û}3vVó›Røê’àúᬠ:sE©çþC‹JSÕ=Úøƒù/B.zi7ÊÚanôcÿe/NÓß0† O®ÒóÉË`à \ét€K,¥Àôq2ijø«Ç”ÕÄ´ûÎÁîîö¶0º¡\º;ûy¼èù\™™¤ºÎÀÇž(;n×”Uäjô¦GÂJ‚IãÛ{®Fs˜~|‘z!{ïO³­Èmi=Æq4¾·ZqˆØY¤H[ÑÂy!!ÏQ+zFÆûÜf"õE¤‡¹gÍ•“3Þ‹ßž¿ãæó­ây’·®÷®±¯·(ZèÑíéð_0#<øBHžµì‹$,ϰ?P¾›gè¯JÇð¦ÈØÖ¤8/ÏHÿŃ[h½þœ×É}¦D­¹>ú ØïÛ8êÁF!Éû(!3;(è£Ut\ưaq"\cÏ€Pî–*¦à‘*åUF;iÀ @Ó/\&,‚³ RþƒyaJx\õX>K@/ønaÁxbý”ÿæËpÓðš‰wοÊ‘?coáLŠ ¸Ñ°™ðk&üâ>&!¹…·á þ"…]d¤`+\묾ÅH­~(ˆ—D<ÁöE&³Uš0tîõ† žŠW\*G@R9¢CøÖiöp¢â´aKtúñ[áÄÐ%Ò›²f þ¡‹½\™À6Ü=EÒÊÜel<ÀSðdQ¡;ÌÊ+pí€/«Ð…C&ÚêÆd°£ºšH›»Û»Aó€”-œ[k›„q‚ý1bŽëOì)Q ˆk9Õµ æÑéVLG°îž>2'ß ¿ÿ^¹_?z4  Á#2¼L¯#}] Ëö5[PRFhøÿí£Aï«KR#zð]) s—ÔO›.€“Ùc˜V;ü¨Ó)ø•׿¼^B€\f Üv€”c¡¥^hÉŠELóHŽàFʺ4¯4Pì•J Ñ -V‹«>%ø•c’l­ëÂòº£ô0dyïf¤"!혣´,ÿ‹P´^-[[)TÓ…±æ * àV…ˆ… ,«jÜIZαŽD]ITy½ÕF÷ŸÀÞW%¸¸Nç~@o¶gÜMÝ­ZZü(â=¨,“@:ÏžÀíîX"¾m‡æ6EŒŸ"@U©>µå4$eh’Æl$›c`ÆCP‚’<7þtÉ DcåA”¹BJÌt•`ÖA("Í^Èç”И±Ižf4‘?5̯ՀsSìònžvÿI¸_·9Ÿ ᶯ4µj¿2AŒlàé¸Ü‡È­Èw¥“ÚJkµjê()ñ‹ÌØqø©)ÍS6ý\ICè.ª刷fQõRj «@k8$\¤A‹ªø€Ubº'!ïº<¬5F14FÑS‘_ wb Üí˜?&³~{nø½õ*~­®ÿ«œüÛ—ƒBÜ]`b¸Ðý> ºÝaÈk-­·Y„Á@*Á¸¢ëöÆöÅï¼µÂ@1iá(³i¾#·j¡€ºáW3Bg< ´ŠŸˆ‘ìŠh°±UϪºPUâ&ŸB³šëŠªž‡. “°c?ÃÂXf‰Ôõ¯×F±>%Bâ¦!6”Ôô­„Âõ~þ²ž\jqAþHÙ‰z²¢º5Ü1–x(e‡|Ü£æ—Nò‚jþ›®²3 Ì@ãbËײv (&¹M'ï’ÝtRòËKаê?A%íAotÎÈ ˜ñj)v½dÏ É9ºÏõDfÿêfÓ% N©&M€1hEê¡3¢µÀ=ÛÒaé?ÓZ•Õ©{l,X $³Ñ½†T`ÌÞ”n#I^{áè—¦†¢=ž XºØ¶u9É+çÝK kIOqÜË&'I$|=纖”\çSÃLÎ `”´É˜„ç&z4[±*‚AÜ̈chQÁ7NÚ‚/ÇÿžØ#26Í8íj7¢ûúJ0¶ºíš`¶ ;1çà4©®Ùã@…Úšíî¬Zdq( SyÝÕ™V+3”à*¸k'ë*ÝYá~±¹'/d+¶x#´B «[7$‚t[£Ë–e fB€Ï,4g»€w Ð`^¯-õä-'Èrmeªóà)ÜæV Ýñ‚³?ε£í0Oœ&Éýw†‹p?Í µÜqîØWJQl»Û¿ÁÙP¥ËŸa@îœ ñioa;X Ðp2èÝ,`Ó°~ÉÛ_æÀ¦X±´+ýà¤Õ µÚè'™6Y{BzÜm¾\‡Få{U,™¤Fà -\_7+œNeÙ²ê[².„-®à2 à.`¥ÈO±—,‚¾S%¼§AgoöUŒ>Ì«CnߢK~Ÿfº7qåÔ¾#åizñ|ÏHüx~à´SÔóþ'‚ý¯ä“‘•…ãWÙÿ;Árü¿dHšRQ×»suç.úœ”<ÔX™ mfž ;!Ô‚®ü¨Û2¿¾—e$LoÙÀ°dÐG­/Ô+j*•”:wi~:Ë"YZÛ+Ú¦Pk'Wzt~ß;'yŠa~}¿?'‡¥WÞ»º7'Gti{|/ Ï'Gv~7¿œ;ÌÛz/24nòl?¨»/0÷(þ›ßGptwxt}ס ; -#±l%wìÍ}™XÒÌÛÒprpAp'‡çâ´¡`+2ÒÓs1’l×?—'Až£å,ÔÛJV•Gþ¤*©Y6ŒÖæ?´Ñä9Ɔ(¥XÕJëgß.‰!‚Úå Ì'HÔ  ’=OÝW8¶÷²GšžJ“ö´xp%L©X3TJ â3%+Ô)ê”=0T?Ž›ÇadÅܵªNfø’÷ZFbb/U Ø¶(Šé¡¾Ö×5»Æ²ßZÌ2/åi#5yâur¤Ú_A`™lÌ©%Vè,ð–»CÙ´˜Îz¬€Ãð¨4„ÁÇc|£ðšQ‘ŠBèãÆ²8j,ÂÌžïÉù‡lƒ‘ñÞ˜Ú·¢ƒ¬;ˆg uÜÇØíð«!Åðÿüæêÿãd »ã[Aë…Q)©ÏF@/(µÇc§ÿi§÷Ÿ&>šW«"ß±w}ÃÚ„-ÞˆFcƒºþCäK¼gb xê–;½f¹/œ|AU}ƒž.Ìõ<)ÁîLÓØC%anæŒ÷\P™¤ÒÑY^ ÆØ¬ \ÖtsÃèeñ¿mœòŽ„šÖbe¶¬Jþ˜¨_]2Aù‡É_q‰VT9ˆH„VHÂê½=dJÃVËZq/£.؈q}Mèão2O–é+µÒù¾B6eóô¦xGbUk`ÈÏÌ{Ýá¹U÷/2©­3CœÚVÞ$–W§ ’™›0Ÿ:—pë óÒ/X£ÎlTà}éygl¼ifœÖ¬`¡};¾]©Œ¶¤Ó Áz@w¼¸­$ò;iæI¥"—6þ¾:5nO«X”‘¨Â‰ ÀAsZ¦@2i2n'\?XaÆÙ,ÇÞþH½üµ>ûy¦:Â} W©û‡7:wÌׯIõl´¥ Æœ†Iôð€ïX^®›¸îK B–…;ßà4lû»k |S[ɤT~÷/áUmoŽ¥˜\ˆpäík¸ c8EäúBQ6Ñw!‘,³PQE¤>¤`·°jÌ»Aõxƒ~¸\B—±{äà¿uÊ2Ò“¿Q:¢‡1LD…ÅbÊfô0éÙHt‡©¨51@E ¢*Ñö$†›1Nˆ 2q6æ]´¢÷t¾Möï¼.r¦$Óùï¯àd Í¥Ýr 3ÇyŠ‹¨²³]~GªP]?{&3™” ÌØüÖx°e´æ9'í$á9ÇM|Té1qj¯?£‹Iíb|üŠÇZ £fæpZgþû:œã¿ùSÛ=ÅmbšàÍ}­ÑxwÉç4ôP1U²Ô_#Ó1UïÔ[•Õu¾£Ížb/a¨ªÇð§æM|da?º°fÙÙÒ!e ½Ó'õóõ,R„”g2òIÐ¥âÁtð°6¼« ;Ù.â1¨JXÀ¤¹õê¥} Æduã‹Q¸‘ð‡•‹›òÕafذlE©sf†eÖ]^9ðéÌ$•0œ­O4ïåe,"Õõ'#ÀÙ/“¡ÊÒ ãë‡êË÷× ¦›n\6÷\_ºv[5q²BêfÓ׆¼GJœJÍ{Ói-¤ô\ÂE:—Bë¿T§¿¢)ý—u—ÂÇøŠi1Ÿf$÷Sa‘ÍÅ'שèuuŸ~5$>Wt«fQL‘öȬ|ÙW>ÇMk&ψqrn !¹r…Q¶kMçC"Qz.Áâ–¬Šõ¸&Åh?©ú掜 4òAEç>HÝC Uµžål;ð÷Üí¶ Ð²ƒíôþVí‰Åä-bþg£îУݱµS þQu‹¨>€Oø‹-4¶Ð÷ÑsÓ¥£B±S¹gd 
=ÎߊzÙ–O,`HÖò#¿Ñ’Ä4R $¹±%êgümô•¡™r³†æ>—»ì^‹œdžbaÆÆ„0 r¨ß$¬iæFâéˆØ ’V˜°$¿ÅZÈ'œƒ4i8“†‘-F5n3G‰çɹ@u¢`&\á9©V>3ÉÔ—•®ˆ¹“gëOZª§_°Âäß4? À3æ™.#žnÅœZR¥t‰Ϩ ”ŸÊ…ûn`ðBD`sÓ  ü%96è° ”¶™ÞñºT&/§x)øÕ¦m‰Ÿ…‹f*.éB$C+‚.¤IGS6=Vj>\ñ?šÀú½.Òášcd–Óo*ÕÝx~/«Š”>.pÕ¾—[–DÞ.d_ŽÎ¶¤ƒuGÐ|æ8ýÝXl S¾~7‘vPy Ø{VHд㠽ª[µDÎü0fsSSþV½9ã’ÙmꤕwôiŒE7v~¨ÔO.=QÍ“‚œÃ¨˜v¨ë¹7Ðd¯8ÛËnykHo7á YEÇìcÈÐÃÎÜïûòJ°)»§‡> Kžæ-Äÿ1A»®3_è{O¹ fuiب 5Z9g³¬ÍWR*máT}%Ï/‰Áø8µ¤DxF”TðÛ¼(ƒñöu*kiÓw±Ë—?î›ÃÐT멲YúŠPZ©¨œØä±/ÈE/ï¾å÷Bc66¢\JÚ÷7%tÙ#$|,éSÒWF}°<9=7_t&íîåh’?Ò•¶Î‡CtiÿÖ?òçÿ'âû¯ÿAŒÌ, ÿ•‘á¿#ãÿË6OBƒå0=tèwÈ© ¶Dëp4­PPØbÍš`˜ ÝÅ•ZCâUË38ykñß’]pè~Õ0pr^wÃ<#P¶Ü½;žfÒÝ÷ªtÉCÎ3P2Z»zyØã'2CY*áÜ£-ÜLóß oú-A—JxÁè/Y¢ha¿Ëa5D$1¾*]²fì9¢˜£„ÿ&lEËUL<—„BiéÅbhf¦â–DN§†eÈ7Š=1E3î-Ê®¢€äçhÇz7˜¸¦È¥I'¶Ã®"†F© Ù3îH‘ÙâBÓº ZW²½•ŽcºïÔWfÁC'µÜ±.ª+uÜø•5Êl›kP‰³ùwŽ0lEwÕ>øéQpôÿ <ãÅ”ŒìÿÇbëž™íÿ"üÿ×À€ÏJÏÄÊþÃr·‡7öH ®ÊN“×ÜnÑK3S¢(ª*”F#†ú¾L#B*Z¿uqÿ:uÃz:5¢ub=p{ƒµu<pˆ•8øû!qð?ˆ `@Élí°x}‹ÝŠÌ›ÇÝ;ŸÝ>¾–ÎÖÚÛÚZN—Ù·lNµhmÞÅ"…a«ÐÝíŸ:xÁ:¢`š+Vž×ªjE1xÅÇù4/ÁÌ1ÅU÷‹T(^镸ø‹6pö¦ƒo–«;ž?;ÒàÜ`nËõ n3Ö ÏÉ00i: HWjƒñvf§´;í¶3eT°ÚÑó~ÖþWpÚ­¦Oó´¢ ‚|ût>&3&gn=Åï ǹÓ)BXt}†(74u|»¿šEbË!rbT cÌ‘:!}ÀD_HœáPü˜åŸ`~MFV"NºòíŠÎ¥Í¿ Ë{€s}¸¢u™Æ´îSL©Ô˜šî9Þ»ö¢Èî¹Þq ‰¥‡cœêúN°#™¸'¾ïwá5XûÁ½Q<¿hßwǦÎU¬Ç²Cé<¡nÿ¢§ žößi=‚¾c»2¶òä·ŒO“2è&· Ë­yŸ‡RÎ!ÎCúôÁš èFÓŽþBSã|²ÚáÕŽ7ÛK´ùÜ°ÜØÞ”nûÞ~%~uFG0G’*Y˜ô2><:º¼‘ µ!n" )óEóó ü3i^akEÖŠnÎeóÓºæiŸ‰½ß¢ÎUž©žq?‘Eÿà7º™ð»°q´ãñ…øEëÖîÍ Xî‡'.JnJuÄoCíÓ{Dy{t¸l«Ú"džµ›µ»¹?yk(ÀŽÜDŽ%×UV‚T¶\®ÉIˬɾÉýقδ›©Îls=e𥦥µñ‘Ç0Î/j¤¼ÁðŽ!ñGÌå«ù»ýe¶¼Ø¥ëƒÈw‡ ñÃP ·¨rãÑåÁŒ1²«î§ØWX=€²·*xõ"ì÷…ÞŒ±láìÍõ?PûÂÆê¼=„½Ë½Û‡§ÛóÏ i(ÒÙ=M:Âb£%ÆØC' 4Bˆž3<2sðïVkK9YµÜúÞ‚mKmgmóy –½Sï ÒÑÛïíûÍöÍø€“s"c!ÃQè*z&ƒœ0žŒLÚQºQñ)æÊS¨ôT¶TZ©ØUN^‚ÛWåõ»Ýý!NMQîõ§QY›>òNC%`)nG¦ ÄQL(Þ*ÞÊÞ¢EBÝ3ÈÞÞl‹l“œÜ9æ;ÙéÊê³hó/Lëøßò›|ðpÈþŽo=(>2Ä>éÍ%Æhs)0ö~JíÎî•¿ƒB•k<Àûê=Àp$¾ Gî†iøPuÀÙ~Ò9"tà¼<€Œ ž¿ÝQ± ùÍ‘´hÞÊü< þÅxXaêx§g‹I¾Kì·/­Ç•ÞÓc¾ácðÅí !uŒÝ†-`’nƒÞÁ£ Û9j.Ëm… ÇzM04F<Ò« £·Óͯ¤qäçlôa<£¤¸%¡ß7Ãî“ö ¿ˆ ¿ôññ> M6 ØQÙ‚qz¥K8ÉÃÂįÉ3MÇÀíŸ µå{>cÏïÛ‹õFÔÒľô¿yùû!2ЉjTÙ^·Ú€xà³ôC© iñýþ1íî¼ÏSRLg ;äêvÝý›eÿ~se_-÷Ú)Ù®{'ÑŒÎXœâ?é9 ˜þ°sïÃjáÿR- û@nÖœå†vó¶2Hh™À6,æH Â¨Ûi)ô'èißÁËyóUþëcÓòfá¿"h¹‰¹ŒëÌ06ö”¸f¾ÜÁÍ\w¬Š9®U?;ʹZƒo×ËUoŒ[Ž~=O6æèÕ¥ åˆÜù´b¢2ßõ5ÓÁä«=²G´3ܦSg3)sÛw,Ø–ôåJèj-{‹m¶æ¢ìŽžì çi4G EÎäà õµWK®ïò Õ-M‚¹Ëç«fñ½¶éK¶³ØÀÛ([˜¬Ím0êΟöZ—p k|æÇçèÝÔ½@¸/ÈWéäjÎe>q ªÄAæ|z>ô n°¿½-®È©ø­sç1àáø6¾•î dL‹§à—å©ÛËÚü‹C+×u"ñjJm±‚'°c±Æ+ºÔRÐŽÇáwŠz¤rIùƒì e 5#ZêŠØ\B¼·"ôŒ­àæ*3óûŨÝÊ›'zLä(óuàF)êù×AD\bë/Vÿ[H)Ï·ÃX¦Eá7Ö~—ƒêËà›Š0§ 6 ­_9ˆ5Vkë‹8<Ì(ð“çí8É¢TN.oD,9úwM®|™¿¿X*çhp?Pr½ôÏ.“¯È­á|ÙfÅÓNÖ—ƒïLꃎE®/'ôIa±Çùäõf“Nç Ø´ÅÇËLür›·ÝgFØÃÙFQp ÎÞG{ïµ v`㠜ϭˬû)Ö÷=¦C<+Ÿé-øÞÆì.áÁFJõžÿ¦ÛÆbO7Pâk?ãÇkоæÎø;#QSÊB´#ÞŽžê‘¥êŽŸˆy²ö±ÚÿdMFxr¹É½& ?…Ñ 5šC"Iã§(»ÐÇbŸTÕ/SÒ5°Ç_¹;_ÞÉ¿Àëþ›ûA_ÅÃNñéX싽XeaØ7ZÛ²äP¤]> I<ˆnàŒ€Â{•qÇ1Õ¾»Ä¬¤EP“@@‡Ö6“^Ÿ.NV|ûB# ÷ß0ÄÛš´¬b RÉý)¹Rnrß‚ö´i-rݽöA+orÇXÕ&!/–/í/Öœ]÷]hJmz|„˜T‚¢µ,wšF9k2ÆÌ¹®ÀúŒ{WOŸëÏh‘;w¬ªï؇í{ÎúփܠdöãÈʨ¿Nõ@%:#÷«>øG±r Ý.¦&6aAü¤.–ûµp¢V¼–ýÙv£×x=Ý´4×i+ªèTÆôFFÆ4ö¬}²ªf1Æj–433:~5ÆK *†FÁ‚ T \ Ò{eYÕjÄıÆXÒ±3"THÀA±Ï˜|²”ßµÁ‚2Ÿ³¾U½Ikr#Rªâ„bà¥KÇ+M#[¶½çˆ8w¦½èûuþiåÌQ8Y²Øq¨çÄUTšmˆŸ =|æÐÚ{ x0ï `Ô|¢ðÄÕÃý?LÜ"ópÄ §ÎÇO¯ú zpþÚá"B†õ¸Ÿƒ¥Ð+ÂF°b½¸þÂ̘• wEîÍw½¶Tñ4x'¥] Áǯð’D¤ZÔÆ˜Ub—Åì3’H'H±¯%Çe'ñ|”ZpR¶ª 6‹ž©g_æ,âßVx=ÿ"޾,D¤Þ9náÆ'~X#Jd#%õ&Ë•* Ô߬ò5£ïHàZd[åÚ •ò©oF„$PezÓ¸ÕÓ¤<_É5˜ÔÊ_ò=Åg*Z8%È0‡ȸ[ÅqT¡1V3¥%Eáá%msO·¹{WÛDjb%-5©),ˆE Zü¶Äªø›|že¸¥ê4©ÈI€Í£2BáÁÃð‡%€glQ`!¦›î`°+§<Ž ca0A1‹×õ­ÉYq :Gù›šñëùÛWVG|P!i_Å \ Û*¾/=i•äöa7*E§FzÇÊW,\:ñ÷ë¡LˆÀ•¬…ÜÉ|žÜ»ñŨiÝÖ /Zb5&ÈTa5@ª€#$® ª9Ñ},ªÒSÿ ÃÃbxꣻÂ.Ãñ§¤ýòºþõHŽY1zÉØÒ!N㜉HàkK.'k~}èRˆ°r¢&"š#›³bª Ù· 86¶Í";Žfð•±él-0e?²ÒöÛüÔLLuJQut•ùµ8õ®acúZüؼàbgøÚ7· (䬈òèý‹Á•ߎÀ&öS(Xäñ‰O¡–ïT\_VTDzA:{SÐýý©¸X”»Ê6ÿéÙs.̽ Næ<ê´bízaî<à[Xıò©‹Ø9¤Q)¿ï5!qbk‘òYx‹ ð¶àuÞãBˆ)@XÄš X8óá7#t87·HØÃ‹Y1?ïÅÏÄìRþÙ+ôì’lÚ ®ðÍsÂ4ùSÉâ.Òœ³(÷,i‹äåFõGþV¬É¢’µÒš^0sÙ6ƒ”ã9ì…-~˜DSE>Šl,ÏP¹^³œwŽaУKÿŒ mê‹è(ÀxŒÏaÁn-¡Ë»¥È -ª{izœÈ-A—l(Zª.%¦93 
™BÑE,[)™­7h[s.B«PöVI€¶„èzÁ­{¬äÉ¿OJ¡dÇ0jO€_Œ_êÍSÏŽräp é%òhúDÝÔ8×ñP÷sÈÕÆ·ñ5î˜Dìi³ž9«§ªùrcWG‡ztÜä÷óñà%š•Õñ³§¹*(´ôp±·zæ÷yø…‹ußÁÄßžézý~…ÉéŸ}2gŒW‚¥¡¢â:Æ„Q¸|UA\Ð/)ã·)…ÿ¦Ñèi…GTñx÷Êô©Øýo[¦ R# Þy5)¹DP#¿ì“ÂÀ²RPNb’,DMÖÈœ¡*D1|FTHR¼¢²;Jé”j:ÍH>9#¨$ZàâEV,E`­_+ úÒ?’{.–ƒ,g,;iôýbu0æät1Ðõ–¥ÚîÜÅpcã Û–c,»+¿P2ᆰÓÒ~°.:ÄT&³XÑëyel^Ýí yô¹]-ìEè“á‡ö‡¡G°o­á­ 9Z~ ÓÚ@¾¸å„ØmÈ]‰`ÜŸ1Þëf$ Ø¿Îåt®¹Õt{Ôÿ<6«AÃá Íf*Ëû ¾ËƒŒ=– šÊ2€ĨJˆÅøàú<´ÀÄÚ™”µ14_|ŒŠ|qRSDÈ)1†–8¶^`e^ $àAon~3HÜ+cÑ,L·xÅ‚F¼{8‹\AöP{}ó8+`4EbIS’³ê(™E’ zBk⸿à '€’»2xp@?Q"LN1XÀÁ˜Àg¶÷öÖ²°¦&1^'&fÒ¤‘è„–¤€¤«ZAãâ̇Ôë"S†ÝXŸØ?Y:ºüÁ¾XQOOßAɉDöŒÒ|ÐãÈm8xoSÅHüF§@z…ô#èé™¶Çš·$X>FפÅ|ŒY]ép?µçFs¢t•Ð-B×Îc°0›:"+ʈðå!Ÿ86¢kبPp‹3ºŒœ he°!Ã;Ó>’|c{h¥UYoDgD0èe 3‹dAø&›£¥…Æ©÷²Ò*ËÂnΙ²o ‹Í0ëPÖ³ ¤ÕuVÚ^SS÷‘±rÄêVB‚VÉœ¼©X­ äaï­ÒC[DÑ Ã^…q¿ÒØ\õ"<¹r`_>˜B½ðùY&yoÛ¬%Á$þ}ƒ Ùëã’=9p6ÛœG N¬bº1ØLG7‹6>…LväA3íQ Y*YJÅ'—·îRp„t„æö©›'569Sô?-ÍC- tDàŽÔa¸kDZÌeš›”7²OÌ5,w”o”Oágø¥á³üMnÑšR"Í$ääµÿfÑü\]Ü$õnJ,Ø,ßۘVi6 :R€b«xuN,8fÊû.«þ‚,6\$ïíUW+BÔ1©¯ªwþhädzp’<̘9ç@€ÚCŸ”Lj†¸ ˜b’¤*É+ÑYt`‰¾4=Š\„9¿=ÉÅ2 ³£v|‹Í÷Š(úmç3¨s»O rFÇotÎNËKÕ ¯ë¯áñ§­ÁALŽ«Åò½k•÷»‡n“ɹ€™ód¿hê@í$Nš•°eéœqXB‰&OIc¶ßÌøk$Æ=È]Ã5ÊÏÿ+ó½öáW˜]¸Ú…Ò)‡£VÀz|CŒ–TÔ)[Qù8[–61>+(3‡¤áœòTó mÍŽ_­Hæu ¥ãÆ<ËÝéÖ×Yïöë|a»öóás-§.2é¥Ï¥—·6ÏñWâ;éJüÇì\oÖï˜ê^ÐÌÌ•äÖvN÷zûÚSÄ›ã-õ]õè!(AYH¦^Ò}{ Sº n¾’=4Ø (Wþy3ØFÜ™àqâ±’›-­åDØŒ–ÊÔuóÖ¹K[¶P¡ aòÄÐÁÅEJU‰H¢l!›ñ=^; Žœt˜tJn¥æÃBÁ3yxÔ¼$þÚ>ÒÚ¶¢ß¬?òt±æRZÑ!ÇÞ0DxT1½ =”™û3¡Wc Ì^$U€<¢ˆ“25I1ª~Ü[j6‡VÄ#‘2Çüúý_>¨€Ð0»¹™ó“]ÇÏ6ÒÅḬ̀ÓÔlVƒîHÞa‘;sáõ½¨gÓRR ÁØóÞ•œu'­=ü¹˜À—§Ýþ½d¥i,ÔŽÔ—.îãÙ¸ oèö‰‘W3 ôZ©oÜw±hý¹fÌ„â°æ8û*Ûvy»†3fˆÈb‹DW%„Öà­y#›zdîG*L&Ìf”6Zûšk1kU›y[{­£ÞÁFöféÎé'égé)ï[ÏÄÙÂÁ Ød¼h‘,©RK‹f5©¯µËÆ·jråQÔêu›ðJâÔÍ슥2Ê·ÚÑK·PYtqBìÈ9®©²“ö£È£ÖÿêQ{:iü(›0I AO÷ãªÃ÷”š¼f¢ þwEÎïåÃ8¸~ð%à¿ZY… ! “>Æç;V>2¦O‘tPm•€™²j?I”¶7«+ѽ?}Oæ>êÃçÏÇâ+¯«[ŸóÍ +Â*)7Ÿ£ê´\­=¯‘û_<?‹±m^¸3¾§í¬9¾zêExÌÚKz„¡9]—!ҔʗÐÜ*\7§™¨;ÍIªrÑ–zî k͸Ù}ÔYÂ%Õ XÁ*i±‰[BËœXê¨ú„H|\ú¸ê"væ»|ü?®RÂ³Š“Y1–¹ËS)JÔªÅé|*‹}Ôsa¾<ñHO ¸•)^<˜ÔRƒ>GÿÒçoU„¨jeca‚Cµ–bwÿž%0¦Jrš|e%†{ÓGøüýÈДìlï8Ê»@Qi•´λWîj‹~T yLí§èÔ&bšµ‚«Ç¶ü-—6´Þ¨" s›–]‘H‘IØŠ"S…¦íΙ²ULÄJð®(¨¨˜›¨9õ‚.y·¦SÒÎ%űXDsIÐeó âų°‹„^îèÒâ¸+I”•Žòs¯áP}ù9 êÍâ³Â.‹†vÍmrG ‡*ñá:P`é>ö5Â6ø÷ë‚góÌâ¨y;dõ,SƘ¥ „d¶ó—ÁÜÍÞòÐ4?\½ðArÁ=sêq|FLŒ¨{ÇcõàhQ{áü¹$×bA×ô݀ĜMÌŸ\ë Y,©ÄJ5–J@Ôภû:ȪͳV?“ÕBöÄë NÞäÒŠpæcÔ€YÉ¿‡‹" ÁeúrMçÈL)”-„;/ü _N*E,ÒýïCl;qkKsBÉÍ*RIþaåbãWžqCh]m¤Ì"r…¤NüC³H]ëê€|Ã’¥èÍÌò‰ˆ_2+« áa"tzt\\ /-÷ü¼ÍQrûº :Hn@óÑ9t©˜ÄÀ·¸×K gáÕ ÉÂb*ŽƒgEK·+IMôÉQö˜x+‚UþêŠ?K¿Ÿ\D’*"‡,¬Ì̪7š®e7ÌàÛù–Ê6æÍj×ör"øO}¨¶ìF/,¿ÃVyµê‹˜‰Ö̉8{/Nݨ5KÄç74BhË,G_ŠQg4všÎK&¦k0hmk3[ÝÜÝ7™™ÊN¥½ñº¼©àÙݧ+XgëÊ¡©¥^g>@pÍíŠA% U­LAÎËT·Â1ÛQ+yÎL>Ë#‚3ÚN&1!!ò€'`0àÞÃÖÄÌd0 úBÔHý¦AŒWšñkD'ˆ`'è–ù,Œ(E¨RßpâÓã€Tˆõýb-²nijºÀ&g|ù䔎~ñ40Ó»«óá…´=½†Å@Õ Xé"`õ¥­‘,MËØl À\“ãSÒjÁA÷r%&à9GþÈäQŠý÷03<› ;é¿K@§`4pÀ®©¹«–Rµ‰^-õ›Ýœöõ Ý ÷ø Þ‡yñ 3/£jíjÒxIq2J§}êË5§ª ,óÄ-©œ2D›T2—Ô(ÅáÈ3©üiÈÊ®¼bê” ºŸòï]ùêæºðîO‰¯}-e£ÀÞü`KÅË2ªAZwL1'ˆ¶çO?ƒŠOŠU‹&ß$Áu‡çÖAâgíÁ ²XT‚x:UcŸVÌêE*†ž<ÝœãÔÂ<,À L˱²O–Ä…"ñïhM0&Œ%¥•6æ«Á1ùBp0ؘìãó&SÀH¤¦Q+”퀶}TI÷c†,š…VA~̨–á&[‘¿¡äQΪàRYÁÈô‚°HX£ÀRâê xD¤5øá›¢ÁÙHïJ å~:eK”RÚ…ªiKq— <¼Õ|…dBŸ–(öœnÅö`’íÈÐfòìY"L³¨Ð(=Ü™SñÓúJ€v¾œïçjèéè¸/²éˆ¥³ˆ¢å¥`×0ù ZDåJÈ<íçdK%üøØýhÐhXf.¼™üÈj5’Ð(jƒZ-A”*|Ïsããô,˜Ñþªâð÷Äûü`ñÿ–FAÍùàÛ’é³y1Àél) ;Ipb¦I”H• )Å_Ó3Ó³I6|'צû…ÉõÃUþµM^æC‡V†!+§¯¬HØhVïRt]Æu=_ê#ïyÌšØäy7¶’¢¹ÎäXÄy2ù9›xš2ætÍ'o¿j2¡"»å¢®pGéÚ=!HfpÜMfꯖÄ5ì\NA™Á¾±E°c³^e4u0¹Í¾¥ …'ÿq,ð€¡…J’½Œc‡Ò^¶´0/R^4¯Eª¢%¶§4ÀW ÷‡0wu’ÏÔÞ‰“­¹Óæ6ß‹–x¹v(uK¯Ï×)”—¨wn‡×C*ˆÎs¦äLš†TÐѽKCÖa#à ÞÎS`ý(ëñ6äÍzÒœüÊX¶8e¸L—[š$Sª¬g$²^Î5†ÍpCXޤŒ¢Ò1‹Ši aõ«ú¿­7ðv0/«ûÓJt>«Ü`[¶PA@…¨ ²2ùIAÎI´Ì^ÈÖHêX÷²»txFyH]’¾UÆ­ü-£;:}>þ„HAusƒ™­hs4HBô]¥bZÖ²ªÛ;LÇ"H1Î>`,¥Â£Ø$I1¯®4)×`¯Ü‚¶<"E¨S ß‹+Ò±ðÅ‚2°l©L“Fîë3éP®®¤Nv²ä¶Ì—ÌW 4=Û¤0)ïw'ÓTB_ŠTÆЍô7‚?f‚B¬]0Z—”Cæ èe 9·$DßÀH‹«¦FòËe³LžâÄ”¡Â"/ƒ“¯°V„“¬ãäô=¼:…ÙayVèÞQ»Ì‰×ަ¯Ð£1N¥Vçy½v8êÑbJÙÙh8]ÈØÉò´¯ØJÎy{ëîâEè‡ÕÚò´µò‘a¶Ír70,o»­‚€ù2x§Š³iåץůœ“ IlD—Ä&±8:)µ¡ªYÝËÎØÂ,lW÷½[nªMÃÖӢqÌ 
F+¨ÚNonêR-ÕÒá‘ôr(Ž¡U2§š1Ó)]ËÐ3ë·–Èà¶q––4äGÐ*Î[‘rÒ\^¸(åHâ0xH)´‘ß—@V‡3Œ¶P®©øvÏÆý LÄ€íÙ¶žË_ TPÃ+tISå`ý+…ëñwob¤“¥UY“a`Ñ»º×b„TÔ*«ä5¹PÝ—B^#'T¬Ê<»úQ{à3\0Øì›°å”.õîѲ)–p™,R§ÖÓ+ù JyÇw^ïÿÇÛ †aX]›NÊL¡©N¡¡& €Šdóó™²Ò5Yé¯aHÏ—Œ7C%óA=Ñ6¨ì9sŸù ÿ®¼VÐ6xWÌ{ÌíƒþË^ÒÔ\ ª¨'¡—¶à³]î³Ìs‰ñb+u7Ž.½.Üœ-Z÷l©ÄWãÁRüê›5ruߘ»ü“ùÕ'¹îCTÝD ó ¬féàëâëdèt‰ztT°ó××óÜ9§‹ï‹Ý¯o‹ÄÞÖŸÑo»¡Mõ©Cçþ}ëe á«ëܳŸfEj?ž$ÀJñýNFžTS#îZùU…DñÙÆÑØl4q…Mc¼÷K2BÆžUÅx °o1=œAjtÙ¡“áY¥Äe”þI=ÊN‘•°î„ïpÕÐ\æ…ºù&ƒm6 ¸„L“„¯"©¯ì5…ž˜‡[†É¢¥ÆA©ŒN³ùelÃŽ ‰åÌ$F˜1ê‹‘‡›Ø@æ?*/ÑdþjBSàŸøƒò—éNˆñÏè4 É’.¸­â’®¶><ÒænÑ»àSú}ú[$¬ p1µÓõ0ǘ¬MéÍß`š÷÷)Úã®®ƒ®™©ïO<“ÿæÅ§‘k2ÔNŽÝ—¾ý¼lTÃIÛ1…ÙA bj1`I¤¦õØôÊfS4RñXFö^K3†ÿú%Yý˜2¤bUŸ&„!ü<.·!U±°å‘ZêDƒþ±ñ?'E 1TÏYº5£‹Rô=.O?Æ¡ÿz>lvaFÎ_­®÷œÚ3Œÿ?SÚz )gN35 ²Ðé‘ûkg$l (Ñ:Î6§duŸ5+:íŽçÅü pиšõÓS·‹ç 5æçã™@‘¹›Ü 6Ú[I¶¤Œ«^§ß¬¹¾£Ù&)œ³–4Uh¿% ÆÈþ!hp‡? L  …ƒ^ÒSbóMÒ¥ÃôS7—§“"F%NáRŠð|åo XåäHA†ÔQ\ D”BšE³b Úü’ Bž*þC¥ a;¹¦³’†á¬Ífj{Ñýè! !~ðRÂ%’HrÓ×÷K$EÌÒRÞŸdÉK%Ê!brè—|†éÚk Kv•¾º~U»(ê`¶QÈ{ùOq'é©~@xÆ÷üšÏ˜úÂJ²ˆõeö¨ ‡x°¥¦ª6‹Ëb~:þC`¡ó g¥#Š`Ý€s2ÀF½$ÅŠÎà,›D«¨MÒËRãÈôÂ"#™o÷%ã¥RÚŠá™ ¡fqÛÁDMé÷¤l Öµb€Y·s$ß*á‡o`³d¦Cê%Õ{´3ŠUÿu5@Q¶,)h«Ñfwí(™M‚Ù,MÝ)í´Eð}ü,\i¸’…gõÌè2I¨‹‰çæ<\ëâSËèÈÊØi¢”³ÀЉ@ØlyÏ•š÷Ö¶u‚5Ñî…º!SQ*–m“g#ÓNäå_‹ö‰ô ü 9*ä ~PT*dj>X}ÇéóM¡­M¡ÜªE¬dsÓ-S/š´ôtjni=ËKޢ㤙€¶^«á4‘\ekº; ßÚxµñÂvè¦Yö¨¹nwƒ‡wüìÖ+¬4g³ï¯v¡µbÛM‘0âüÏÛ»,–€To”È®˜ÐøÒ"G¢ ’ÇçúÎjÏâÉq™ ,›á­…8æhŠøjÇ×cíÝó@œ3ý÷f<(x¸«rQöìȵHýbؤ’*ÃW߸²1™„åçŽáŒãYk…þ‹=¨[ÑñŽç(£‹š× Bú×V RPÕ LS³››W5 RDFT¦XÖe åõé –ÇÕ„Mµ\µêï5VxÝ7iVz¿A8%¡TßÚíÉæ¹jÚòøp>ÁhyK­O';y³ÜNs`|¿cÓb^·LÈ´>Ïû'ûR¦½ ئÏëŽç žÝçÿSã®W˜vÄmÒ6™q6euÜ`þBAÉe :èÕÝ‹ì‹ïáj Qì³L­Î/§‘€ÎëLwázð>dNåXðÄž¡V" ¼Ç–ÚýIÇÊàÎìm©®Ï‚äw;öÈÈš@ö€mý4é‰çŽ`ÆC™ûLtê˜),Ò„‹!hÿžÙË7Ü¡'+*l©{¨då{ýZ,©ôo!ªS ÜÕ ù!Íç|T[{ÍITÕrawÅš‚Ü’cqú뎘Ù)±í B£ú…”–gF›’^ÄZHu5 A†ÄãÕ( GÍ•¡€RN–¬ö€+òi[Q­X0Lli`ѽÓS®‹K0MÇîÿÏ/z"Úr¡ŸÂæ2+qd£Œ%lV%F²X =åfõvNŸÕbátZ&Ù\5 ¤muo`Tôì÷ÆsÅw¿Q[<Ì‚¬Áñ¶ÜmºÎD©íþÈï0­Eëý½w§Ó•¨6SºÀét¶wM Ù=YÍuY<„há• ««ñ®©3¾‘«åzw²j/¨€QLzT i…Se¥D%kI{#G¬„]áÄ®1¨ªF&ìn£tZIà ëFû઴¢¯ÀA¬âªÕ—²î ijì#¤‘#JÛÎ/“’Nú™¼sÐ({×£Ct S›3[jè)õ§-s°((¥Û´nŒ3¶äB¢µ$îJKå°UíT—jŸa­åxIÏ<^æZ¾«âÐq`ÃSÃS Z‘C¿áÞ½ˆ”šHÍV`}Åä0FŽNLà2ÝD„y.iØÙÓ})ÁÁŸÈÀ57ýG¸$ y„ ©8Àp®Žu“¯®à&aé…[€oó…¥à`{·M.‚ž`èÇ"$ãK4}EÄn)DÒ“C‡±_âçâmTªRR¸X€NA…ðłÆ¤¬æ˜™š ‹…¤kŸ²ƒØiŸˆ¬¯ØúùíM+Eâ¶Ú“^£L‰x@!¢æäiU† {¾Í7h9CNó€÷ PÀæ°ªËþd+n¥æ’ÔtÂ|pEîr–1a–í¾Š#®NÐr®š‘ì{N[°!CÍeÜŒ¦NÍ)D:šXÅRñ¿÷¢< X"ΖªuUCRù(«lp`bnÊ `d+ íÌdX)H ú‡sµÞn5ýãó{q¬éñeÿ ‹÷DÎGãtЬ×P 9VÐ[e2—>¬t4ÞlMƒ¶õ%Žªƒé¦‰YÌ4UƒÉðK†\m_}¿U<(Vçõ$³Yíä§aC°Æ·#W®9%·IütP“}jÜíÀúÛþÝÊSn¦áT§±HÈÚ–ñX X¬UaÃk§› Ó,´±]Îפ;5œ²Öãí^X¡˜·ûA|‹ýWZÎI0Û€-xúýËꀸfôÊ÷ÿ°•¸e¸ÑùÂò@QZµlë¼ðEpf“Í¢×Ìí÷ÍwçŽÛô$(æôŒXß4G©‡5¬ Æ Õ[5'Qw›pèS#.KÑ;ª—ZŸE(0ÃÉWž6(Õ”õ¡Ì¡¢®ÚÔD~þÑÇÎñ÷T£kTØóMÊsÇKCµÐ#Iª|PˆK¤äÇE¨ÛD+]„S˜ç•øë«’´Xƒ à‹rˆ#)NÇ¡/¡Ì}k¬}k–3ìyŸ.³G×@®‘\¦™gì¬!ðÜ@ˆ3´–o¨/”š£üùKµ}››~®xý™5Ŷòoq ïsN™u}·fTéöù}€xÝyÔXM’V­ QÐfht^]SÁýŒééfft“a´éÌcF‹B%L™®NÛð> òåþîúé6 J¯ yèw>MŠy„Ê"ó[,꜠,TOïg*Ã6]F>Q;‹$Î ´†±;['ôO¿25$$ _bž9‚1xU—Pñv*Ãqèõ ¢ÁWU3‚6ˆ I–°Ÿ:ñÛj°WaÞ* ’f´ú`z±ºHŒ9<«ÞÅÃMõ à=€8ߣç5@€!Í žþÒ1èùE(³’˜0A¨†ùd;f(fÌ(ש´2@tòD¼ñ·)ö¡ž¾Ýqø7âŸÍÑS˜ñ1eP@›ÖÔ©XŸ‰5u5%i¤dãÌ &»,o&M!O-K%½U*C)«O²ZžY¥YõMð©vQUeNãJ!J*ÆOZû9ŠE5«sÊuò{U8·4#Ý$çÊãÈ÷(VEUÖ0ødØå˜½`ÛèöÙéþ³]z àºjá‡,SEãïÌϵ1k\Ã)ŠÝ¥ÝGw‚#‹Ð%ÂÍ|ÊÊÓSn5ÛlS%œ[~ÿ·ÓìןP~nÍÓ1.o"nÚ^$¡‹%¼Aâ~Xl…R‘¥^ׯî±QÜ=I:Ë€¿’á*‘‚˜~Ä\å¦ý‡^cên¿.âlÃË gk¯¦šÝ;½ÀlfdˆÊj°Ìg —ʆɪ߲¾×Í/Œ‰Ê’µ‰ư‹åÈEwu£ä&ˤ‰ õ ¦¼šG/Eµ)·ÒŒt™¥)§ ºK=!ØÖÛ…=2Ôø˜å§z@œÎsœ ·Ûê,dD”1âÈà_•ËEBV”£‚NñëŠvC»:6e, Šî5dÇC¡c·{»øh”;<$iT¶Üø» Š’áœÛJ9’¯‘ÚxÚ}eéj§ºÖ¬Û\”à‹ÇëÈü¸1£eNl½¥tÌØÊ:ðå¤ì –¥®ŒLM®ð.Z- ±'RPD0_&ºT4{2<"ñÀ2JVQ<:µòtÍÄŸ›S•ˆ³ƒ$ïáa¶nµ|$ä[uCËá³8 ˜{°†ÖŸMÍrÇCó ’8C\^oÕ—â„ÁòJÓåY‹·ÖœJ–óhçmGë,§RÿMLF=D©î¥î®ãÍt* ZSc&êQU-V¯Á(Ûþ=€wv.q@WãÓ¨ê/•Å_4·{'ç!ï ó­‘¶D™Yq]›VÃàd¢*+Tu’*gZº@hakd†h™l6«/ ×ë"^£³Ï»¼ë¥~Jé^ÖŸg¤¯Ž¬h,ꦜ”W¹ÒV1w–½‚e(T¨³ºÙ£þ—k÷|èÆ•1ê`— 
nµ~q•¾qµ>qM½*imbK²ΪnÕÊOLtFO¾Ëç}±½8;;}ä1É>8s¾ŒžfŒÒâeמwß•ÈWT,ÄšíŠ,ÞîéhÔKLÕKŒZ赡ô1¶ëÂÉ)=Œu®NzÓYe͉cÿtÊ'¡MóŒüÒŸáÙõÙwÎË­„D~£qc¯Ã?qp£\µ`›UnËg%'©'„Ί¯ãVµ ‹È¡ƒ~‡ùõRþl€Y¼š÷«þ’"t~ÀÀ%À¤DËÅå2ýà¿§Wy—:G_W0§òõÆá]»žTxp³,c‚EOŒ>-zvÝÜ7Ò÷|¹&gÏpŸg¤‡Vk'juDX±Q¨Âse®_°Rímá½’jºYºŽÓ­áªÝ‹ÀÂunOÉ‘ç"™ñ5Íñf+îfÂ5H„åH1U‡+ž“ýÅk#òÂmuPÉ&a‰+ qÛ-GcØs캣u–°Ÿ» ä4~ˇ ™°c_3ï°3¬K\¸*üµtÓüÍLþz }—¤Žÿè”çZG±»’Pu® õí¹y¥‡t}ãñÇÞt'ü*Ó «÷*È>“Z.$kp&nÞov›Ôn¾Ðn!î¡æ46Þ_2D¸³‰wúvß„ŽO/⬵ÆÇr—é÷ÊTÙ1ëçD%†±VD“š&H ð\¥Ö ’Ë“WðÅׇô×8HŽùíiŠǼE‚è†n|?$K-oVF«h²pÔ"ÕçŽÜ 5¶ËÖÿ©SÆB{ M–Ÿ­bZwp|:žŸ0·Ñ«g†˜Ò— dŠ33â½Ä½czÃtpÛw4W˜:sõª|š~J|oœÃ-ÔëPìßï!{áƒÿâz0wsý~Þlë-ž4²/ÉP;¸Íê¬e[ÌêÀ÷’WÕ¨ Õ¡má*ÕƒŠâÓÏ¢CsâÚÑ')c׬ƒ6ñ~#'„W²>€3pZ8©Vª±”–SÀÉœQØúé°®ŒùðÒ1`Ä µ„Üzno©'l7´‡v;ثſAÁ¨ƒêüÚ­ÉË39uf¾eʰºÄ Í¢7ç lb6ò)k;ý뚯¦è‰|H8r6ˆ¿%WuE0®ê`¼ú{6¡€‰6NJ>jß\žÐaHMÔ’VÝù±EA^PG¨@êœÅƒ^,°Æ]û _.~$Gž•xñˆ6MÓ-×c$'$²vZ@6RO÷D\—t¸Œ´¦BI{…*ÉÖ\¬üüRx;1¦§šüKÃW0à¿kÿ]Nõ Íð2‹ô˜ØGë^yU"t¥û¡Hàëó1sË>Ü!›­÷d¡ëé~=u9¶ |˜.¥ûË¿z«ë¶%ätš¡ññtdóý­áªQ¡34u?€¼%N¼1oÍ#ð5[ o³V°EM=-׈†E¦IqÆv’P¥ÒX[çЪrjE¥IjV5ÛúfU­‹1Xg ÕÝBehÚ+ÜøB\i‚Sv¨ŠZÕ).‰ÓH6n#Û¸™e± bc^äçôÔ#úJI=ä¯bEtͱ Ñk”T{¸¤+=®T M£œ‹·¬ê+¬­.R!a¼š ¿JW<˜\u£Î—×Ììsξ2N;™b¥ÝêŒÖjÕæè“.v®LM.$$j\æ0ÐÇ÷OÏ-ökÏ \üãÑøy€l”m|Gi}úFf·Zr¡ ±¼h<¡l°íÍ€jžFVL<¸ ¦’bmmÐÆšj¡I,×i2l?·*Ž@eS­G’úŽÑ±±¾Ö^€ Ñf†j$xàèê4WðÛ»èkü¢º–ã¿Î‚kiU$€Ó7Ùžïi¾YÖø~#‡2«ý^ðm;Gi¨x\ƒ»/ÊçÜþCcƒìË3tœßûl(NEôuÌ^W[J¿_\]¡–|?scø¢ä¿õ)0„ÛŒ1Ð+§9¸ æfk¦^]j]î³Îå\X'JCt`Q9¥Ó#M\´r¡$©“Ÿ¹VÝ_~åʱÀ†=%æ&ŽY:1ÐN–‰‰fÜN¶Š9Š)NHåðTmA£|~~_ÈmÀ ô Æ]ª¾Ãð?ç òùA2>YrÆÚÖõcÚvYTõÊ,(J)/é,‰ËÒÜðƒsÒ¯õ´ÈY¾—ó]~!"F†¹ Òµf^A'*ó’Æ¿d¡éùžþÇ×.§,ÓÃpÊWà˜»Éæ×¨î’©7’æºtæpç)å)äûÑò uw­ËŽíŒÞ c6ݬ~œ£¹‹¤¥ÛmŒ@ðz:›Ñ)B’C˜®¹-¯èª ’ß²;㺫æ ·–UØr¥fw{«»ê}3]z’HË•ñãþ%/JK;ObÖ™q'ÂnÍ 6yi¬Wé0ÚÛÀ 2—mòÞý¥8pIûƒ|~,GÉŠÏ·‹¿—èBRvÎS„? ßn®ÿÈ… ¡eYpùEYB]s¨V©Æ¡™V^G¯gð‘z»5ú5#w‘ËØ[é¾ò¡ô€sÆuˆp‘÷Í—ãÊõú@2vëØžÍ«Pâp)œS·Ÿy·yºMP!ŒF Û&u§ˆÉŽ >“zPÓm˜Y°0ÎêCÅr'gï!χ|‹CÛ½Ô²%¢=çà1|PDZËÐGŠÿUSUƒU£UÓíXeLõSÃÈ–hl¡|š‘xåñ-7L”j²eÙ˜`MÂ5u óÈû¥€¤¶Ïá}FÁ—XDƒ"—€ ÚZû›.õlâÂÝ%¤·Ü]@àæýH¾¥oVA_pyæ–seEÛ¾kgm¸(î÷hø0j6 Î@P¢‚iꇺ/^šàåøA䓊û9ù™³~†çA·örÎwvØ’V{îl42†àMú{úVç÷U.- HÖnúfüfÚ#á“V™Ú Ãb(†vDê¢m!G.þ}ƒw`±ÏHЕ„Mh|§${ìBŒ+ûv0–]/Rnnì£=a5zî°Œ½E“Ü5B¢f²&Qí&W­º°ÍÓåsò(Ö‹ËlKl]ƒŒ£~‚áØ}h€£<‰M+Ìå¼bIddð ÿ•üꧦ½…vìű8m(‰]ÐoóÄ"­®ßÐ’ík‹‹ùúGô¶0¥*w(KÞêÍÿUïÅ73Ã4¢âx\€ãcŠ˜jË7,¤È!&”êv oñÄvy–‹þB \ Øæ×«Âšã|6/È8§„áX¾q0%€Š¯kbÙÌ­fw§—ˆ“Xûù ‡Hu¸èåÕ£K§ìªöŒ°eÑ‘TÏ:רÈÎ.DN„‰sháJ”箳¬cEðYDŽn7uë`TZÝ=‹W~§¬¦òÉÁm­aô&ɾ®Ó*/•×ëŸá ÐymÈBåm®ÏUÆÂµÖ\´é’8SÑÑ”êTW7þÁ(ÁT¢5ÄTbõˆ¨ zÓ0,<`ðúä ìÀøhª iÀ5ð™>ñÛéá1'0-»C8Aëè²K5VbÖWÏÛÑQ%§ŽŽŠÎU…S¥lÎc¡IóÖŠ•D6¦Ò‡š…TܤäR²ðc†}Õ&òB ü„Ö¸ÅÓœ&±+Š˜ …)Xwt˜É„'ü‚€_R}±OŸ%&”Tâ(£QTŠRÞ~‡ãǦš_ß-—Q•r}éq«·™¯~úÃ볨Ó] \Ž×Eª§SIe aÆHkÈe&j]žVî—§ÒÒïM\ª÷»–·Èa.K[5e2f˜V•<í9‡2ü j:Ê%sho‰Öæ­®Šê‰³¨©Å&`IÀ=ËôlÍÚKÕß§•YΟöÇ¥x©ýi,Š+WY0'I÷³Z@ZnT›D÷äÁG²„€? 
oa¥+!J0s|} c}£î³IG4…V.ÃÄú:6©Öì",_’’„‘GJ31wQÈBEû:QêiYím7 ÇÓÚ­úsÙY™pï•ë‡rB¸×‹ÓÎVŸºÒ”ÏSîYÞ-nî©=X ‰[¬nÓÑdÆIT«Ýº÷] \o‰Â2€ÃwSW_ûíi‹ VäÏN·ÓÇ_3ÈâǢA†kz„†CÌlÑÓΤ_„_ŽšõIêõñ]¯þWÁYX¯Þ½eÙ†âþS%ÖîEïÌÜðʤ?ˆP¥ÿ<4W`-ÿ¶93}mûNìòî¦()#S ‰ $TSÜxDÖcJˆkcçè°èœûEEš£Ð_¯Ñ W;õÛ“áÊ@«W˜<«º+¬ŸÑþçª ³YuÓawWŠÞž®‡ ¼¾(¢À}šlñ#;M¯A)±X¤hJ17œiCÏc¹È½VîROí/ö9›¡¡ÊœwV6-›p¢UT5ËìÖÆ`u´uhªFÈÖ±—ôeHÞ…±÷é–nÑyzRþ!sÌnF$©ÐuD_©G"‹ê“7³Éú°lf±…¢]í@îu@(å(ÿ4… \ô5–[÷­IÒngþ‰ÚŒbè ª©%4Ø•ŽIûïD‰d,©Çðåuñ¾éÞælŸýôá^n¼„Ì€é~»«ÁéwïnÓ9òͺÒíkr‘ͱ^{S¿yßîlÙ#ÔŽ©ñy`U` ¯EÊ‹€>K1زé#eZÁËT›ŠxZ+ˆFhÄFpôÌú³÷ab5¨5—/ƒP¦hÎ8úÇ-“ë÷‡½ìß8»ç™Ëë—ˆÖ(W¢dëÎá ÁôãU=€*”-¤«wÇù„Ò ½×"­-œ`::-;/g&BViRX1jY^xJß$6 ×¾ÛäÆk»l4/fä×±HQÆ6r)IȪî›0fH2Ñc𬣲†Í HjÚÆa{ºrMËÏ uz¿Bu€=„"¼–>¹1êeå @£ÉRÀ6¾|U‡ÀŠù6%PV­ˆGp®hs#Êg& 7iáÀ7ÆRó9þÆÊ-\¹‹Ì€mw+KmÊrÎ/'ÞD[²#—EÀ¡2# ìÀßWqR¼&êÓ«0VÂþf^eVÀ²W‚ÄfÔî ?U‡Pb>®ªlÒ¥1¬$^.>—ìÆtcýiÛ9ð@ЛÖøîC(+U{¥Ì@ºzG.‰ú,š³v>”)œ)´Ð*ê¨ãX G= ªyút¯'¾'ÛT"ø{«·¾ÛD$,}÷Û£û;ÿ^‚%à û¡|¨| Sú#ÏSYg !)Œ0;˜.2dåÙØ*R«Ô8ÓÊ\/ÞœÚ9Ý:Þ@î¡vÐp&:^/o_®c˜Àd²¸cõÍL[0_š3£F˜7D>í¼> ²Ÿ£É®ÇIkí!Ûü<@µáÊ ¾‡ÜžG#CZ¦hQJC!F°ËÃ$¨‹ÖPÈW±Èf-™P-‹B6o‚ßb”Uõ_8“¤- Ô-raÝ Ùß½Á×OHçºßEi&EœðC? —£ê;ã­‚?_Š +iO£¢Žº z(…¡Æ™ÁáŠáDÐk 4Dä:'AÅuÞ·éÔuì:6ÔvA-Í:\sP§¦‡‰¦©ŽÑÇŽƒŠkÑY.Ñ(P‹Š…Sûþå›zQ×ðFòÜsÄ•ƒ$G‰šBklºÃŽŠ©(G(ÆÓ'x úº ¦¨«1RHØgV—tm—‘É`´x¬úlЬèw érÉuÈ=™EpÐ4±ØJkhWñ$î/!t(1ˆxåfþ‹äŽ“ ý+àì‘S«+¶$.ÀÔÏ Ÿ\8+N æà•F-®Iˆ;OÈv† w2NˆÍËm1!C¸À¬,•£°¸(¢|Ÿ„_)^åýÀÓÀ‚v/f4÷ýH4›Ö‚£½G+—ëV†Ÿ:ܯ¾-‹Ø;ã>ö½ÉRÚÅ:øÞåÎ{Xâ²ÂeE4Àñnïg}àU#QHÎ/¸úö’6Hµ gƒ‡núä¢.  Z§›ý% £±úÖùʼnęÄéåêèò®”,elÔ!¨†´}´Uörclë²{ó ò‘2D‚4ò´*ž¹•GCGO%_§†NE ´KSáày¿å¸RøfplTE¨gýÅ1ôEƒ&„Åa2`ÌüÛßIÆËÔÌç¹ï¤Gj;Šƒ{˜6‡“ ,…ÿPÈ=Ð)(z—z׉L’cAbaœ)F›ò*‹¯æ>eæ© ›Ùè6 )¦M˜©ò¾û‘î³Gêè/4-¤6¢ÖÍß~ÑÜüñ‰äÍ ,¯¬ÌL†RNâ,~{'2t}Ô>ìþ˜¤©²íuÔÿÜU^Ç‘¢Ód|¾Þåbå‹ã9$ƒyÝeí½˜¯öÔÈ`›£ÊÝqºb!¡Þ,D8í»Bq+$.š•’L‘Ǩ@¬Üê ¦MvOLg£dˆ×a|±`H¼Q€"á£Aüˆ9Q°˜Z®„"ò, Ž—ÖH¶L®åòÉæÓ}»,N|>¾øú~¿Cœè#¢ÇJdGú@œ—ȉg)YÊ×ÐãœÍJ¼ŽJøzª»ˆ€*j(f§ã”1y?ˆêÆŸžæ ³†Þà¸Æþ\£¹¦ñ –M»¿U¸U¸Qº[º>yË|K3w'©3Ôôw5é¶<èû¼…ç=W‹i²d€½{ê;7û> "ËrŸ6‹DDmB´§¸Èuè~î¿EÔfœõfJçSãk+µv™ ÙWäm€/±û¶1‚i`šÞ”«Ñ•ïº%ä$YúA”U³ºc7­Éÿ©O»ª•S6@¥,¶YÅœ-ÚÅ7³›Çù­û¨v=—tçxg9çpÖŒ‰Ž7‰S‰s‰0îØfÙ†Y©­4\èùà Ú"vBDÆ-8‰?ŸŒ[˜?CùMo~ôÎÁÌ‘ÂÕçLËpü¦pº,Æã“Ìñt#ãÈQef,™ÉN’ÓóaÄïºø˜QLïpœ|j‘NöXK¼·ÚM1£\C‹ü÷°˜‘Ò=¶÷~)*Dhätö½’ÉT‚$vbÂxŽvcð¶k…2=ìåé”EÔ‘¢¤:b¬qºEculª óþ¥‘Ÿ‘C;%"”Y³i3šÅÜÙ¥@©ÑÓ“ÛµëT¥2–ÉzÙ•\=_8zéÀýò1Œ<¨–-.Û‹GAoœ°ï²PèTDØŽµŠƒšC}©Xª¶˜C†Qe¾c„ÓRY];<t–•&(ן—2;º(”Pöõ%ð¸Z,{¡Àˆ|3hOþ*ÊŒš·vg’|N€¹¼vôïN#.PÔ²eáeÝ aµ(—šy"y"¬¼af¼’v×!*“Š:DåD]Sdnê€|ãËÖ÷+ òÍû!6ϺûÌ­¹×žõÇfiXŽúé,…­zÓeëõœRÄlxþýåîÎ’«ã/‚+¯ýóß+Î ”cÙéµvëm[Óâ^÷ßD¸Þ;‘o“è>‰·üШ0 -ÈîøŒÞ¿ç™»º}>Á auBuŒãé3ß¡*ø»7´_­àèzt_ mç.-W£éð˜”. ëöÑdõ2º¦Ý¹îm}åUùñÁ¤–cùøj’0õB)Vmþgç0T&Óôxeý†`Á£÷«BŠWäUKæô‡öT©«+š©iÓ€¹{¹Gc™@k˜‡RÀy<_:éÓ+Ê`TDwfÆâjb-útˆƒ#ìÄÕ‹…ŠøæÕ}‡Õ(²’6ùƒðoO8÷߬&IÅ&Ken“L ÐUÔ*5‡Va¿U#í£‡ÄþSš˜ ‚RPšV"kšµmÞ |±qgÔ|£~c©| éÙéšAú×ÛéPq8÷Z—á—­@®B9!çþ³¤žMÀØI8p.°ê7ÄûYÂTÑ#¶ŒÝ1>Ÿ^8 ÿÜ_µRè*ý'”Â)} ‚Øû IòÄÁõjY² ÍánÌàaöße ±ºï¿erÍHÑÈ"ÊæÒo³jÕ½uG0Ÿ*ûænO³ÿ^‹ ïÍNQµX„Ÿq T™`Ï\ ûÉE½oT¸éOÑ›óL†ŽH»aïŠÞUókÝacì§Ö ½ 3¶y“I3ƒÎYÿÍç ˜Db½3|2\Ý••‰üR¯VXe´ô¶üʸ#5M´MÔÉ‚]Æ |¡è½r—ºÖÏÙÁ¹¿ë‹ ÃQš¡ƒ ïÃÆ2‰T’%œÓ¯jHÇk#y‚’w;Òv®`Fæ³ £Y‘®c¿ò/ÊÉ &¢Ï}i}ùÎú%G}…¼‘@Ð~ƒûò›Û•">M4ÍÛÏË!¿¨|Vôò*0K<"ëQ97¾F ë¤WµQ¸’i¦ÿlL”óð8 ¥À¿%‡}aWÖ§|eRT6²Òú!lRó< ZôÛ5S¡Vv:ÜrMAYUÎå̰¡–±ivÕRCÔ,ff•QªzÌN=Ø [á Ä-÷éÍ¢=>ÒfµM´ v­Ô²RýÂ~Þ}&QñeNÕØz¿†ÿ KÊš#Ö"§¶²wà#Ö÷ƒÇh“]ê³~ºÚ¢ïÄή4ÍU^™ÍPæ…`¦¥ÎVšÁ—m‹Ä¼œ@³mnh†æE˜¹v^ñ\ÝS/u“g–<â™ ¼ñ\Ÿ¡úÕNòÍ1ÌêR¨Vœ–PV6K±9.IJ©U/ÏPU:ñѸû³>Õã¾M^3+ ‰-ݧ·„^ËF”£9Ö¨sÄú¹à¿Ï³ôs«*u”Ÿ’”‘eÊmZ‘bZB °=x&¼^[øÚü¾¹¸”}Àe‘eúɹSkxØ¥ýÈî’WTfñš)¼@‹˜þz@Òê§,YYZ¡›8(!'Â3ˆA9¤£žN›£ GUby‹gœz‹ëb ¿ú»`ÃáðËæam­_9t|€? 
ùŒ£ÁCYšÂ†,¡~Å=Ø(²„CÂbN¤hK¡œÕó`'…ÕÓá®dwŽH¥lÙl—ÉU-VW³¶N*j ‡ a#<îO‚Ó˜ÉÓ++?¢*ëŽjX+óAmŒÏàù%Í:¨V…—§f‡K(20*ßÉ@ÂÔíþØßž…ýÆc}Å9ìÑÕCòñª7±¢Íü\ÇuÌîýÀô¹ß²tkí±÷Y= Nõ9¾yT™Öíi4K-=Œ°¹A.a®£9jÉꪑ#ïÅ EóDé}Ы=´µ9µõï¬'ÝVìÖýÂT=À1ø2º½líiËóÝT­Ý¶¿Fµ;M5v8yUÇÕ]k謵Þ¯|mšé°·Ñ°ÁÝ©^‹râÏê¦z êlKf ZùoG:½âH30ag~»ðbz×,Öð-ÆhÚ€Gë/ÿëÎãÓùÅûx:µl#›N`p‘º[û~à U$>¬ÒR:~‡è™”]þÅr~¦|o´ÈCQ‰iY<á8ü–dMqfßöìñFƒç1””„t5À¯ÓFó^Ç@¦<åð=Ú¿'?SWx4pL‚]é Â"­xD 4i?¶~tæóëÌÐ $KPüJ©ó„]Ôò–}«#°"~¤,&¡€Â•T›öøM¢;dé^·{†·Í>cÓ— =j´r/%—jMÓsÅ’ãÖÐÊ¢®¬Áù8W©’ XckÍ¡øáòp]Z™ØÌ¶÷ò*3 2]©¯¤ctóå øf8Ÿ„*[ó÷uùÇSŒ6eþ#ù05_g¨MÆV½^Û«Û$ã—c°Þ)Ó.ôãQ¥Z´›£_Z…Â`Fª\ÿ ¦YÔÃÑàN¿*¬°QHŠsCŸþˆD"+ "â†ëÌ? Q®v~V"C·ÊË埒jH<á!ød!}¾‡ŒÔBg‰QCåáš¿”è™a‘R KjýÑ”¨ióÑÍ[sC(W°Y÷ˆ`ÇŒ ƒx•0'ŒÌY€èüû²nJU!Mböôh8;YI)å¢Ô2|ÏÀ¿qحꩱ]ôUšÄ'åí2v›gÝ~àgÛõÅ×÷Ö‚–î8U}@be«£´—øÉfŒ12çü}z¿Ž·÷{ܱwÊçFãt\J¥éçµó>µÓÒ ÞÑ+rG:ýÛ˜öuø=‚-Å:À1ïhgaÒöðY‰Ú,}1 ¦ÙÞNî詌Ôp¦'š¦u€{òwï·Ÿwoþ>¿^ƒé£fRwNga”UpÛÔhEã ‹á£Rº‚'PFþôµ#DÈ^§b¾Þ«zGùÌ,^èpÆÐžHŽj.íˆÛ·£‡‹ KÚüiÒù Ý×£·«¿+x»Çø.xþ ZÒµ­+'ŸR¦ƒË÷‚yKñ©$¢»^*ï[ås‹…Ù`ŒfZO_º}dO‘u!¼Œ‰c(>!Õ-@µh ižXDjÁe¨b.ç–,ÌÀG—\Ò²·¼2s·DÁ>SH// ü×aæ)cÆ)JnÌŒŒê žÑÍ­¤?¡Œo]^®6=“ÿ Y;êAZ¦ˆñM‡‰a¤å™rÁˆý‹¢6­².[·<7¯°æêýjW“Ù^Pº­™AUÚHUV:^;-Y4H~AA µ‰æLÛÊ7–=‘a«^-l(kÖ Ø—ªå Q†ë(ùHeèU»²óQuè+–NÞV“¥fœ½õoÛ-³ bZêñ6V4hÕ€cÜ ”Èßàqí5Û÷ï§³ù$ñËS2G3F¬f!8^N?îU=Å´“˺•ãrH¿„  øÉòñ°¯¹6áw¦Š{Eß’zª¹*G­CÍ$ˆRȸԺEµrÛ@F‹*U°lq›%³ºuT°¶¸ÒN™ÍüÔüŒó¬s[ç2lμ÷Òw¾‰î™ÝSú§þôª¶® ï©;ßWÖIÜkÜcÝeÙgÚ‰û¹ó‰èFlÖé6Ñ»°m­¦*ß¶QÙñb"‡OÏшa&Œdͺ´–¼³¥t>&¼úø•™·¬}¤›«¸BAÍœ*[cî-y,È®S¥ÍÂísÝr ÌÈ%Q*;Ð[bþ=DkÄ0f‹„]§6ZT+ÁœÇ–­‡R¼H•Âà|)+¤*p›g¾‹wa±p¤»U¾KÎKü ªF°Kçˆ`‰Žr©è’è2¯RãÅ5Æj\¦¶>ôѪQÙÜ8@ªR¢»:‘=ì%EÜI{È|ô!*ìB¢  Ükܸ<ñ-Oƒnñei¶M‡iâ‹dægvIÜ´Æ@°ŒÎm² RCä³ÌÆc¾ÆñWÒÔl‚¤IBZ%o ’žx….¨Šb9êO6V£I³PÙj¦ÄjH”)ä‰ 3Ïñ¾Ã-üÖ@ØÈô;š-µ¾ç7T¨JeßXD`=‹+u§a è”ñuW\ÜÌQr”~Æl9Ý9ÜìuŽëOLU!å”ÅrßîûÖ‹K’YääÝóá3šå$à±PÝîã¥a'<÷®—‚±§çÛálèVŒìl'¬h̰Î\oágQ_€F½Ô”}Ah¸|/‚Ï®}äQVŒÅA3ëæ½r}åd­ÐÊ­‰ÁÓ¦ºéÔÛÈÚê!æD˜ÓS˜¥°wÂ;ÉÄmÀöëgÛçÒkæM£×ÐøÓùúâÑá6þSgÇïj¥Ë«º€ÑÜ…<ª,…â´ä³(~ðò?‹u*¸ gsœéŸ=P9Xæ†!÷̪8 —#kigªÕ³XéP„ ÞSà"xôií=Ÿ˜æ6ÏTs[ŽhêŠ_e¦Ž°'æŸ<öi•hf±éÉüŽmìf™h"³a<æƒ&Žw:©‡<§ÒŽN.ñæ¼Ðΰ=ÎÀh²¸õž¦”à˜â€ûŠF ZÅ!ÂCœCåbïaÓYï2§Åg¢êa ÌõÐûsÞq禀©&ì«ùÿvŸªÅ>¸¤QÃîÂD]ÔÏ©bïðS?‚ÊŰçЦ‹²½r=°.NZLr­ïj}S³C‰4Ô¤\}Ü0Ûú²ùªyÊÑ•w«Û5&&°<¿9ž ûåý£ƒgb’?Lj[áü(ÜÆR§ó žïèìëܯœ‹—!‚Û‡ä-nÐx¸¡Å Ô¡k Y·Ò·BÀ‚ÇBˆqéY~Õm­ë4´o̘à° 7ªxN1h4p  ºyùÜg¸`˜ª1ÇlM³=4¤·ødÒaIñ$-*êRí7{VO£°P‰ÜSo#:•f„Ãc)A¥M¯õé¸â,£&{ù‘ÀÆvа¹ß\ï)š ¼Rú¯¦x÷^çû\m*·sÇ™ âw„…Ûűyÿ–ýkðY€[ ;îµFÚÆ1 Œ8ÊY(€¢…›°›øìþ™~×3ô¯«ðËÊèˆ Ë +Ø­×®šàµy¶®KÌ¡â‘×UçÛ:dvž…ØèQŸš^+Œ!œ1¼ýîQŽÕ–mÏQÁRÄ•#Ë‘ãªðÖz¾–ë< ¬à.áN½òˆ¤Õì0ŒÃX‰F¶¼ªÄ–ê‡5J•´K¶ˆú¤¼baf B³œ\SŽ0–‚«Ú/Y?É) (*;¸l£¶QÉ˃›^“@9’=ÉÏoî¼rY©aˆ#ðˆ:†sê é0ÑÃÕu ŽNòü/Îý1\Ö¦YE‡msضmÛ¶mkÛ¶mÛ¶mÛÚï÷õZ§Wï^}ö>§ªòªˆ;#2#ŸÊˆÊüq»ùïòbãøÊé ÈÎ[«Q€6K¢y¶n¢H’ì£R^Ãì‘é3bFCü•Ÿæwý"4B¿öº2»ÊúÎŽ’ibYÙœYC ´É®že5<´>œò Þóé—iåG2o¼´f‚q4óýÈMò{3câË(š,¥>—ûD Å¿džR%Ñ$OßT¹¬ZÐr7-Ä2DR3›A е·}?Þž³U©IɦO}­FÙÓL ˜>6ü‡ƒv¤Ë”QdUZjŸE]zuJå 5’(Tßb´Ýï°§­…AUøO†²Ázó©ßl¸”w2ç”ðX›1]–¡)e°ey™@>8«* šÄ\~€4F¨ ¢a½$?5Á§Ë5^×$žqÊu£7‹“QÿpÏ\ŸHýøE³L’‚ð¥5׊K0¡“ôt-d_¹ûÞZenž3À£Oð¼¹Òzü±J­² ÄcÉ86ù¬P]¿º¡ ¦Å±e±²!»”¡º‹÷r“½zŠ›¦©åƒíÐí‘í“ìùlT»³û/ΗKÇh ÔeQçß븛kÆäVRM­áâ ïlÍ&Úœê±%æ3¬wÑGn?!go…õÆ…fÆe¦¢üÍþ[úÛº[{o‰æØ]3ß%ê 6Jé®qkJ¸;@§<ö|öÍÓS%™Ÿ˜Á±×;fîhc+¬R¥y¨Ò! 
E0¿‰ÓÉE–õ®65N51É?ÛïëEÒz±œÙ´ü4õ’Õ²µtnHª«¨ÕeUäµQê®›‹¦Ö5 G¦«Ÿ-_k¹1O¡nÁéw7ôpÊ•[ºŽ‚„ÿ¨î€4ww#©Q¢Tü•*¦¬ŠBQV}zQ§Vª¡#§[s´|ŠyP*ÎL‰ál®êT±x0´Ðü“ýg€¸{]úì E-PJòË7W•/æ”wÕjôEÙ‚öêO™è¬ZDÍwMb mŒ;ÂÇ–KT¹à±x[ŒJ‰á¡ºÀÙԃєy¤e x`í‚ütðD†8+YëX÷ß3ÁñÅ]a¬ú7Íô2J”t=R%“4H9®]ð'™‹7”jó#¶Äó Þ¶ø8ÉéU,+7¬÷kº^é¬ÔeºÊ>… Ú!-¥ã€wªhgô™(‹ñi?®ªqÎ1H€^!×kaqC–UL}ÙJ'$=w‚sŒK”#ˆì HUÔ€1–̰~™!âf¦0éõ¿ú¨û2H¡e…4\»èåñRÑPÿ2å¦yYÛ`4ŠÇhþ᥆GÕ; |,©Â”®7'¶6 å@cÔZû†5®¤Ñ*n19L´ŠÒ°!ó%dðb%Ìè_k0êKÅ=ºÔ§V¢µú8Ú=nåܓҕO!õKjÒ'pb¦¼<’‘åß9+àñ %k¬€<=ìŸ%4Èîö”GQKFK©Yóµ|´®)2Aµë6“•¸y”s¾¾·£ér«ãÕ kŒ‚wqmi·év ýÀŸŽ†Om  +|?N­eä2’6È/èk½x¿±Ä âi'»ÿ>ç¥E6²Jc‰I½®©Ÿq¶¤WtÈ’°£.Ð]N³LãQßdŸ:ëð/JÓž ë¡`‚Ež~²DŒlb¢Û&µJ¦­.8%3ÉöǹYoi¥5¤ ¯ÖPf±:¼ÙJÎß–Q *áWRÒå´Ë39ldvÞiÚn™æh÷Ùâä=<Üb/Ý8íËoª€¬feYW‡?¥×”Yµ—’±s “¦Ääf¸¬k$)›£SÜÃFi“¨—í'gô<ÆAÆcÞGS¢K"4WزÔIZö‡Ôâ=x÷ÌÌ%ç¤D™D‰ÒìaÊ¡o"½À˜ ~ ð±òä2š£zGCf5¡/dÁ52ýCt–nÈ©ô™§^N < ±i¦{ÓT83ÁÆ[\læ°Ž«—(?„jÖJÍ!Bð„ujî‹¢¡CôØïoòl¡wbÛsö¦ž«$# þîFd'îê4™0l”!+0šÊ§@qB© W{áÜz£v—¿¿嘣Ð HbÛ¢hŸÜ:³‚ýÄO{¦ \ °‡´§{–ù1ÁüùiÝj®_ÒFÚ×ÈÍʰH* æ{‚ž«ø Uµøëx*íztŽßHûúºø+ÏÐôSÔÌ8Àöy¡°Q>ûo•Ù @"\1Ê2­„r)oðJª®;øy´eë¶û>*ñ*Ÿàx Ÿw ·ÔMü×½ßQ/Y¹Tû ZòúóŽ£þº¶n¬Øžô\®Eé—´v³‰ßøQóÜÇ;ßR7çÅ. 𥟴2Ovã7ÖçT]%‹V50-*ŽW±djéÙÔRSÔ8¨Õh³_9.//%Õ˜O~sÅèéQ¯HÓEû¾R-œ¦ž¢1²¹а¾ñ>šž¨~Ó2v뉾QÙgiNÈ#ßùN’ð^3gSƒ%]ø7¶)?”a‘“ƒÅÅæØ~9Žæ„Ù¯‹±]uÿ9ÓmãYÅÖèÚ|{ÑÛ£ôÎ/5ŸùlÿÔõ8LßÄ÷‰s–”Ì«ROÛ‹X‚F¡öÞ‚I¯*/š1¥ Ô8þkóÌÆ—¹e º°d[/.âJ]”¶ôÛ‰?Ñý²ŠýBÙ¶]fN§Ù´³Õ²êÄ7±Ÿ3¶õzW¢¦»ë÷Ií71}HÝ ŠL\ŽÌX_&ÙjÄ[•L«jD-LOÊfÇqÎÁ-ñ=çÔÕgÄnâ9ì;r+¤Û‹þ´vkrvQ•ò}&ϤËg¥èuÍ‹r6H”Ö¯nc¸”/ÁÖëy°ìœDW*÷{ï“•òóvÍ¥ç›.4†‹5°úÁP’·`Úð`Û¸yxÏé#9Q+LìKÒ¨y –@qCBlGHéÑùÐä»N *©wR –‡4›–qrû3²‡µHé+ÒoùÛ\í]êGÅ7pͼc»1~Ûtõ°‹öqNõ|Rû›s·ëŠÿ»öDôÞƒÿþ»ýDú›sŽûƒû;ZÒ{ÿÉû‚n%É- ànTF|ôøÙU¸RXï…‹×™¡à±Ô÷æa½ú.ÃÝ>5ŽÍøz‹AÆM ÉÀäKù ¸;!*&|þd¹3´qåêˆÜì†ó«³”F÷X§ÀË”Í?Ít)pfÀgd Œsä8h|@¬¸á1ðwNÄ,RBXœàÏsccúÄõo0Çú¸_»ª ôo’T;»Ðkâaeªt[›¶îo)rJŒ~îÛärþù¥•jÛ„ÙÁnð}æú‰ ò­nzôk×7ïÏvt=¿&jxZVºW[åì¶ßfrŸ}¹¾4+}ò¯€èC2ýÂöSÔLr˜ν]ÕâSÙ™…ö+ðvf‡›sVŽ~É $š©ykhIAšRc:.uéáó®EoŠö“®æ“ñ'êôUµ£rR’ti‰¯Réƒd¦º5·ìí¾ûwt÷üA}‡$ƒSô#–šR(•ëÖF ÜèýU…Tj‚fæ:£ fˆ  9·­QÙ›x>%U”˜bú7µiÍP¥ŠÕwå¡L:౿%¯üR*f?ÑD¦ Û¨X„(ãgDO@o”©îK÷ W:¢­ÂòuT£ú°{ñò_¨€ˆRnq¤ß{.‚šƒc @fðJë¥Áå ‘ù [y¤#g.–^†kìN¿ÌçÁÆÜç9â¤VÛ3‘Æ uÜǦnŽŒ…SãQŒ-Æ™êJ¯ue4ÐT+ÂV† u8Pê@T2Ù%JÍ@çòE;óáFLE‡%Ñ1+‰ÉDO´}X{§¥0L;tá¨MÅœëµ7ª;ÆÄ¯ ¿¨Z;},lnþ²u4¹°Ç NlsPu×LLÝ—b›éï¬|TTø§ÊV[g§ñ$£K Ú`JĶ Ķû¶*¢;«ßò¥‹ËÀÁÔÁ*Z®úMIº³yz)éŒlˆëË•ð5…·$¤Ëc^U9—„j­mØXG„ø«èO°:³ë¸¨„ ‹]£j‚dìš1§ë—Ø &% Leòò°“iH I£­®¢n½¹»Bøî.=CP=ª)7b/M±š4íß¶î|é?ûlNžËØÞ$$?{x5ú7YψoŠã«'Á“X…$âѳ֯׆?¡ Æ®/+Æ]fR&®)©ÃŒR×Þ—-gÐ0´±ã6Ð%<$"\âœs:[¨[nñßòÔ̲҅N!y¨Î ôå Éy¤¯3Q¢Ë’D–`øFFõj1õ–¨ˆ Kä.Uª… ”¨ÆÔg²³5‹ÑÛ °÷u5J1yKÔÍpXGŒ,ƒ&ø¿xŒ?õq™&è§¢âՕ‡*$ 2ŽG¿ØMs“?˜H=*Òo|]*0å@Ö?ʲÿŒÛ§|IÉ!åëè`f5#5qÜ'f¼ vÖ é¥¾ Ô´'Të¾U[rMh¬äî–."Í1! 
2i4HGÝ[2âÉš8[6hŠÚ‚–ez")9[$%ëV/—5mD1V6`d¯Téö>øk*Þ@8tïEÜÈ—KÓ-Uí«Zá-“Nu¨£¯ŽV;¸ (¡Ä&«R4ÞDLyã,ÿÙšu<¶\¶|ƺ¶ü¶|½˜zX 5é¬ÐZ=¿y²¦Ü°Q±U±YÖ-íð úäœu½uÿ@»‘}¾ÞÙ\h5qMÞ©öäw‹}Æ;¾ùäyéMêa{Lû€œFî ÛŠõÀ{åÝ÷böáwá÷QçºáM{÷=1zíxì‰ É>‘6;§…e›^¼¾…}J²Ç­€s) Õ)ÒÀQGÐK:YqW-®ª“© }ÖÄ,÷×_=ïzqŽgC$W1qñÈÇÅo®½7fÖ•5K±\1R¨tV/¡6îïn‰™RÓͼ‰áŽÕ«º÷\ÆŠ^¹|x®“¾Pu Å’#ãvˆÄS /ÌQoqÎ ãòÍ¿õ+E¡{ŠÅl–œ»t&„~iëÖí­,.¸Á£sw_ämæL’@7fåh½!'"`›[ãTØ-âŒz(dYM·‚—ú2îœÎu|@6+oX:ÚLtnÞj~ìÔsžþý“·Ãk‚¾§Çóh^ERЧâ]-|4¶Z̶(ôÄßWèŽs†ÃW £Ø`ü×8'oƆЃˆ:MpBÝè©î°tŽmH¬¹Û1¥— ½“%çÓáfy`ëɧëüÈì¸É°êÓF»®V5u2CÿmÀ•„þa˜×ËlŽk>ânú4¦Þæ´wÂæMŒ)ÜXpß/Sžë³–2R­© 阋hÊ‹ÍÇ¡™!¡ßZX ‹ó#úDKKº¿BÇò ²çG¯ç–à=V¼m`l6/44õ°<°½Ý\…wé(ók×ÐÍ«%p=u.ÄIm@ir'ä¤Ü{I¯V=\ðn  sÀ{ŠàId%lc€>½=P£äwA mx y àè.ø†;5Ñ™`ÈþTÐÓÀ$¥($ÔÁŒÿþÈ(¸A.†Îß À§hâ6 6”ûKáþº¼×dË/?½ÂJž*±{ì¶&øŽ"÷LômÇ÷æ‘èÎ"Õß íøÌƒ`—'n„ |èœ<1ÐùÄÀ½+¤Ä¨Ø¡øMïh#`¢+òª1ïÜˉëå5ï@H%ß³üDcÛ:Èëàw»ªÁuÝ_¶‘Ç|÷¸m²yû›dv{Ç©gºG[_úMf Ë6ò‹ ƒ¯…áè,úÀ„äC™õùÛYéütâ$õ¶Â¼úImE¹º¢£“džùSqzºÙ…i'Í£V·|dËç¹7kxÖ"çqi‹ìDäÔeÆË{çý ÚCÃ\óí¬À º õư)å •¸Z©h^roÖ׸!ߢâýJ³œ®_/fƒ:ú´TŽ:ÚdHÓ4 …2Áîý^ˆ±Š_Ö[94™<ø7¦r.ó­ ¦R!é=Æj®z œ±J¥Ê¸6Ñ÷BíW9ôÙžiŽÙu.*”hÖW ¨}îULÝ CŸÏx¡ÏÐѯåÀ*ˆfQU·æÛ„zåú&о!ý â Ú¶iž¦{gò$q}·më~ñ+ iù¶6`roPO/Ò„ã(™¤EWî&àñ±? {—<ô÷Ã'ymHM13Œ!#Ÿ(µ÷Í'÷ªÕ´,žÙ4Ø9Í|›Ù{tñÛÃzGÒ<ª‡[ÄAöL×ÇÉD^ÚäHÎÏ+á (rþšâ¿ ±»Ã¿™|7té¯ n¾Ãu¼¤˜›@Vÿ<à&‘óApudz,ðI Ù}byxäÖy¯ øy@0-Äö3ù<''%¢H–›ÌüEðŒ®¯Jô,fÙ–þ4ïxVX„×iý$W~¬Ú”SB-}rsÐFü~VÚL6××ZºlÄûüåÔÓ\ÒÒ¶iá»yÌz‘ÏÂ`Ѳàüi!Á:f²&ú¸Æ*á÷Ѵдa‘aü<ç‡öÇ(1øoñ X¨‹™…'i}ÃKDq@~W(܋à /ñ4ÉÜo:ŠØ¤ØÓNª2G0òH®S¼tÈlFUß;¿{ˆŠ±óhøœÇßÛIˆhsŰA6¨Š_Žm Ûj¼B:M¸;ð¤Ù¦·ë´#¬Š“ZæÙ²™›uß$Øel3§Ä*.Uj²} Ôk Ëߌy¢;%f*•…ŠÀXÛsB yL(k|t2€ÁŽÙºS÷æ\7zCi-†½'ªQJ_e%ª³Wôðm÷¥[±.J]2íVQ²OÁ ½açà-Ê€¤ªšÅJŒ«KöÙ„.,êÀ)H ÞÿÈ—…”ÐDsBò½ÈÍÄ—§ýîú:`Û„sËv½%Sé'Q‡)ðÁOYçÙ@lAô?M®½Í‚æšè#_˜ñN9í·ЊU<Êp™ì¼Á$¾ÞfBiâå MýnP{ÕWw©¦|¨â}Â.þdü’RܯP(÷+SgÎÆ2ç°Ê¥jž›ÇyªÆfyRM‹€g+ã‰~¼c½[;Û&ßÿzFtw¥K}ÌðU¤sË´Èä8Œ«_d—zXÛÄyM–.«ÕjÓ*á¦ê/|¥}½n˜H/ȉý̨ӵN-µ?.2ܳL“ä^¡æ~9¦r‹‚–µ«J†é)jžsÏ Ü/”úe.ý@yÄ$$íË5…,ɦ.#‘Zs\²l’ÅûOƒ¶ÓÜ¿Pì’´f¯41ãK5úý¢ðæ»3:ÿ&ùzÅÓ¾Ÿ²,ËÔ‹®š°z¾¿;ú¿ôF* àêÀ¬C@yþ]a—èujàVì)y¹™fLIºÐÃRñqümû+Cw-ô[%Œþ)LóI¦Eˇ.™´Ý—´ Î$ƒm³œ3¹‘÷ÛðÑ“ÆH¡GW|¼qÜ<‡‹–0 ü+yyº˜8bÆ«ËIw²øiÌ%ÙL’¬/Ÿ3ÉgƒsªUõÜбs×;q=¬ùs„¾aüaÓýÅÌ"†û Íá$Ç :À·L®^hQ¤ŒOá¯%GÒ‡«*_•QÅy‡é®eGi܆åþ׫JHLFâÛmõ{w6êÝí|ÒÒΑW›wkµ>¿¹ZÈ][}gúüú„ÎÀÕÎOÏÎïR_Í.vnu;é¤ ®å>`˜Á@ÃîáË“@ׂ5“°’šdÉÍ Zª“ÕBŠ Œó£Õµ‰îvc‹@{¾^F}£³½ÙÌbt$ÿfåç Ü’Ö‹fìÖJÕõ >.6¥”“°§¨²º×-ÏmPÿ áN)h^í.´Ç\³ÅøÝÄ9åµµÅÍÚöz—øÉö­ô僋ÕÆõÉ­ Z‡g€D§^ÚÜ¥`àDð0–"¿¹Tçì ïlm²_1‘BöÙöÊîú'çb™pêÜn{e¹1~è:p„1¶åÅÙ Fï£ÌÓ+"$YÇ&¨û{à`åç%&G lå¡á8[8=áxåyw@¿‡†ÍùªÚÇ·ÚOÂþ ÁP4Œ‘öæÚ" éÙÙûÌêúòŒµ½ÅŵÙJ'h e0¿Û(ÏÊHNÌK ,DöC“o®ÞâaއºêÖür{‹ðéÅ9îI\ÄúòúÖõU ,tcƒ ŒÆe± Òæ· œ€%‘|c¤&gÅ6_‡òÛ¿–Rj fšæ¹ÉíIOšaL^ÍO Ûæïó`ê*$sŸñ{ŒPŒ'Á<Ðó+œÍU°+"g öÕæÙÆ`ÈA5”¼žÝ8¥òüÊÎMu +̳Е¯bžLÑÉÃ5¨Ç[ZÐì´%®Ç É¹Ž žg^î$fX†úF^GØáÜ@øúÒüî*û’䧤’›Õ¬>z“SªòôQXÈΫ¾=FFKucs/ê eŽ ðÀa¹æ¦Ö§xDP÷€¨bLÌågG§D@g~lÍíÅ N(<ü³‹PžNPöÀÈŽîYz\êÎô`Ü¥ŠÎ5î\œ°"ŸÆîf§…¿©¹ÓåÈÊe§''X‘îü. 
‘>žg1ÞÆÎ&+1j<ˆí5ÞYqÿF'ØG‡VÉìK5îwNQUùLO†³çf‹Mù´£HIÞe8›[)üK“í¸ž9&± øw}nÔŠÇ;èÖʼŒçŸhG‘ÇPØÆËT6f]Bæd4¶©¯á†vÁï¯b§SÚt¯aÇ •¼b#3Ç>„¶Ä sÎ2†³©\Fc‹ò޼1y­¢OØW¸ø4›n- ʸj ð &\ƒjA6eUx»íµÝyJj’ªóìê9õîKp­üÐügÚq˜~ãMý=(\&pI<, `åÉaoøÛ«8N ‹õq÷9ˆ o9‹±nÎRg}%zIÒJCFƾꕕÂEMp0N°ÒYH*}áqÌ™T  b#Á³àê~=1-tçx¹mMbo¢èÍ‘˜__N2ä[_ÍV”Tí”ò›šiG¹Òý±6Œ${Æs¥F7l‚³üçmàì#®¸åíÌ|)ÁŸüòPÐòÌ çó0sfë6uvF(Àuعپ<‹Ë³Ûx©9ºÓL0¼³+ÜÀþÝúfhR”p^ o$8Íà‘rn4‰¶qFÁ–=à ú º õʬKØÜêéã`Ö>»K®È~7Mp+qÔÉ Åp¹Ê2YˆåâÅ1W?ÑBzn¦âbTT}µT5èe•äõ‰*OxÑm>ܨ# ³Ø!ý/ŠL‰5 b8#à8¼U ÈZí‡Û‡H¿ë…ÜO‚OêO›z±µÂ+¥\›*ï–ú‡õ‘ 3lª³ÆèäYVUÞMõwk£ãÿ³ò›ŠM•Z嫬eñ&çõº9yà¬Ý@ûGa'7Ç”íz_)‡ƒOm‚æàĉ©oF÷40èWRmŽAÈ>VùC¶ ß/ZÄY{I¨Ì—ó4žûû…7'‰M =0î«ô Wd’ÇÌÏKÿf‹ÑgË8¢Ãè^xñÁpêšæ“õSë„"hï~%m[h÷ët­JƒQžE%9©[¥Öäùfð‡ìí‰ë’Ÿ[ŵd/í›ÍxtŽü» &HøeªÐäSרèÖoèáA²àÛ~JŸ6¤wO E¾~vÜ.‡KŸ95ÿð…6xÞ¡NΟòî¹!¶Wß5È &LZ¼¯¨Ÿ ÿEâEbìtP8 ÷Š9M—¾³ßÞœãäÍîë'† ”ÀWx’ý­þmþ [¿KЧËÝkÐ'‹;é ïû;Õ^Â+üÄ|_#îÖ§ÉëäÞ-à+ï?]©G§ÕWÑlä…W}i|}«ŸûݤÀ¡ gºþ7!©Ì kißÜ›2½OÔm&{¶)Úe5×Fýô6…×à85 e ×òÏ›ù'-BÓ6b8] ø'¢Ò^DÁíˆ[¶ÁšãuÀwÝ€Ži€ÖxQ¡!žøþžøyjÒ0üo€r_v`^ði C¶AžÒü5®õ«1ÃÀsã¬xc&.£k°òË:÷ý'ÕZ>E¥8ÿ´‰®³Ä×á>ÙŒFç¼¹âØ£ ÚT°5ØœÑÃPt"Hƒ¦qè³·ØTÑã²Oè°VÆ›Ã\é»ÖqµáBÑ2kéOÊ'b ¤6Õ‘”¡Ñ4·yf—È1Œ?§cF—Y,{®ã^Ù¦¨Ù®ÔG[CW„µ+³ѯz›È¼ãm—ÙöÞ´ÝËÖëyh{z(zˆ–Ç5Y0S¬R|&MðL(‡ni W->Ÿ‹:S×";cf—Y-PQrkúAwæà˜d‚/‚›ÂcÀ]´~¾`ÑÓÌaÑÈ£®ÊoÇ®ú[ÌÑÈ?®ò{5€r§dA¤÷­‚ôÞ‰è'²É诜'îR’íÆÝ™aÓ3¸[è9zﯙA€˜rcîÈA¸rÀsõ£õ1”ïJÐÏYõÍôNŽÝ®(,¼š™§}qÅ™ÌDV×Bvån1½שk©é‘†ö5¨SÓ질KuÔ°maŸG¿òN±„bEß„µwØëÑê¾Ò§Â•»Ð"À Ý÷ nÝ ÏRû:Ü[E¶§ wGºQ›VŽOÔ—DuäC8äb;µOVÈ¡aˆc^¤ñÒÏúï¯5ßi ׿„î*M)Y×›“7ƒÆmÃ!‡ W\zˆÂûôùã¾|/íˆ\sZ¢GŒ„„Æ¢a骛·¹ä‚ ‡QLûß»Ê/-cí`c]†{ð ë[/©"a÷Š]S•éP£ÀâƒÃúAbýà0{³˜¯!1OA$¸ëbt‚•øop~–uÇm(( Ó{Óé«£Ò+ƒÃ<ƒu¡®ÎS[~–_²þìåYÄ@ZÄìÃÏáˆCÆ.“ÂÀõЫjÁËÚV¹ò²#5{õ¿ª_óœÞ¦ÉZ¼Ö(xbóFb³?|¡¯3ÙòËzmEïñZ:€lÙÅ,ûÆÿ‚ ù!äôÆZ"ýX•…l†a—ýª»ÃG£Ö†ÕúšT¹cô|P–è}?¨ßSÜu›U¹£³»Å¢º5zQ 2º_«Û(äíD±†Û!†× ƒÆ9²µ=9FHTqº¿¨·¡daŒ?²GeœhŠãC-w‘K3] ^`Õ‡›îø .©Ä†ð—“ËʂþhzE¡àøü•1òþ}æN²#áþc‹âÁ|[GTÔ1 Xu8’ß+Ý7 PƒúÍ©!k²¡Šê}½˜¶\…Ù+q,*‹7Í’èø;äWUÀ˜À­ˆ–†;jå…!AVÆx¸†¡Ô¿FVO&šcˆ•E/~nÛùT·9fÓªãŠqU'õê“ ±±?ù2wë³ðúùþ™Sôj­óÔ.•úr4Ú^1¤P3yFÄá9%0yÖÓ•ÙIñ›zÕJYg_ÙÕõÕÜAíWÜŸó¥p#†æ?ãiÎÆF³Kïz¤Q-@‹¥ÏNØ[ÓC§üjœª{{5Víâ@®F02ÓóeÜU*äau§CôÉ‘OR%5ÐEOíAHbÀ@S2Ôx—† â )M|`¯¾›…'e„ÿªã…Ö-íiÓ f#ân×ö–¶–£QËÅ#«ŠtØ‹h^gàóK°"NÂ5æ×‹áŽâ™ÖÆíš5º£'bµâm2uì úÕ–í°–ømÖ“†UW®)º¤ržS?ÉÉc®ké Uaš¦K4zâ6Òa‘F–ʹQÈo뜅Þâ4©âÇRïHŠuœñœÐ7Ʊ 7rѦpçn<” 7N–ég ¶™d™§4Á·—s­ÞOÍúó1ª6¼\½WöAØht+ÒlU5§¯šÃ%1Q¥`Zl#BF“k,`Þ» nâunI.ÅÕÜÊ-I:&á¨f½æÖ~ä ø¦È6Sô¥ðh¬ÜÖïx¬@ÜüI²¯ÏJŠâÀ9®Üé§ãþPàÊ€ÀGdg~€_`oÈdøB¿ {Øüà<õÞÔv­2SgoÅDÃPÙTjЉ:uÏnDZ:1UϦÆÑŠ@lL9ä$ûýØVÛ(c³þz€½À?`0CPõ‚[±¦Õ—ˆr¥ôÞÔ±K3‹Z(VHŠ][4r«áè*ÇEöàuK}½”žHï ´ [ôPÝx¾Sb^”_à<¢\ÜP&¶ÆNÎŽ&6ÿ’ì -¡ ˜Ùè þ%1±330üO˜ý?`nn:e{:Q;[ga'#G {g;Ç«²6&t‚ÿzQ)[ؘ8Éš¸)ÚÙØÊ+É(CщZ˜90CýÛVPÐÎ]‹†…•€†‰ž•€ƒƒƒ€žžU‡NÂÙÀÚÂHÀÖÌÚ„€ŠNÀÉÈÄÖ™€ƒŠî_óýK¡ad`…¢2°7±03wþ·#’³‰*;ýÿ˜@ÔÂÚ„‘€™õŸ°yyÿç:8þç:¤MlÍœÍ X˜Ùèþ±v6qüWˆÎ&Â&FvÆ&ÿøüÇ“qÏÒYš¶-¤GÝã‘ëoh,ë[Ó¤œ#2aàËpÆó÷OüÃâ¡qt=zŠ7Å© ÞÖósåßxmà†ñAòÉ{=OwÞŽß±¾;ò¹õñtD÷‡!ÑÙ£sücW;dlpuï õõLŸ5ºL9îjÖjâ[$wiËýN'ñâsóµŸút¾ú‚Qûjññ¤ÆGm‘³ä²ñîtöØœÞâ–{^v*„Ž÷:yÈó®v÷“æMéi#n1:³[RòìĤíÝ0ñÈ©sýêrvÓk¦ÒOXêô9?•¿¼ÀM÷Ài<¯:¦êD1ã©Mßû'u ­}áÊuŒîª¥¶«Ög¥Šm ûö!åÙrÍ"…íˆmgõÇk¥7ØÚq%ogÎŽeÑNÁ ù;ã¼vá©*ýgpÑnŸy>¶9í,lzNÏ‚çû/ýGðsQÏPæí5ļtNo€à‰«J¡@ÏêÌ-MªŸ5ÜwŠêI«:ü³–ç¼U½SØ/\ºwQºw¦÷Hô´x× [°WŸ&åÈIã÷ewA¯&þ¼u ûû­ÚæÉˆVªwØÓæéRýCä²ߪþ‰«+,åŸ)çèÖÀ'¿k›È¾ žŸÔÎn>ƒvæéÊÜž §hoø<;ÎFs¸>@ñÄüòŸçcÛµ½ýêìb|ç=ÎwÍsÏjŸˆ¢õˆxÕ²ÈübšºgØžææíðõ|øÞzO˜e1ŠýzaL±aÿ›b˜Ý,í–ørA—;îÜÆîž>^ÙU\(/—W(fqºTôlcâᄲl¨£\ŸÙ`Ÿi-×!> ®ÍÕ›¤É­ßíIøÏwšöìxAO=CPp"&|¹ͶÀVîzþÚAXL"Jr­¡Z]âtZö åÐåÇÐ’¯1½£ä0•:?Ã鹊ª|‰õ”ö^Kï6Åý’%ÜÚtZTAÕÅùá=xëiÙM=¬Ø%þ‚›÷èçù™·!Ë.%^€ƒ¹ñÖ„yà N|ˇîâ³ø+&˜©ËþIG•LÖL,Z>‡M\7l^Ÿÿ%¿A†pË3ß¶ær$Bᣠ…/­Aº(%=t8áá·E º³>¥Åb; Œàœ§XŠ´{å×ÀûAõĉG›#DSß ž˜Ï6§ò¯Žø•ì>3xà')£â y2çÊà€7c˜«ÀèŽ6sqíæ&ÊÁý¥âéF³û œLwk/0"«‹RZq~Âç—8Ã¹Ìøju—íB׬¬6ÑD¹âç²;05Ås¢öEž[a¶›Í_uHÒ¦bÌCõÿ–;ÓuÆøÊîÁ7ˆjf‡½í³;TBbö0¼öVèâÙ‰µœWl‚¹ö×D‡Û£4…í õ‰ð±{è•ËïÌ¿mÕ 
­á1Ç÷„ïŽõÕgf¬\+•gÌ[ÉsÝ;Éæ_<æ† 0Ž˜!˜e›/¥ïuÎ  º·K£Oì7”åá`IF×CØÅó£øÁ{($Ø›b¡?%1g‘ï†Õ‰Œ)HͿβgGzçëJ¸Û{¢{¨kÄ~v.²gʪKn>6¨O¤Ú?ñÔPžÇ2,>`ìÿÓêÚqžÞ»O•Ž·}kòŒoR!Q/$Ûãua§š ŽÃ´R¤©ùG,Á1_o÷¹r9Á?xÀ·q3b¹•ˆª,¢TT'É©É)(è¨G§Ç'Æb ŒLçùMú–÷x­“éWu›Uhãæs—MXÍ’åq“EH1&ôeϫͅåÕ…Ú~ûÒ ¯­›U+ x'QÕ(ѹ‘˜ S°§¢ Sg̤+÷ÇeÜLAµÏ…v+oï]¦È}­Ko™ïóPtùw6(+WÏ‚À=++*ƒ±ª=®M¶R«3+Xg¶½’×µ1LLs¾ÅPü¨ü…ª¿}¾ÖÕ}(úUÍÅŸ˜Ä« …p~2×eÛzBä&ÀÍ6Ù…“)lË+Ä^ÝÓ阅mƒ»¼_bÍçô…  ˜¸¡·¤oߘ£n;Ü5B×Õ.¥½6:‹ä›+Ò#sa[ —Çf6Lg[òÛ΃WÆ®fÕÕ™dëÙ\v{b¯´v:—$NLO€6éü:—3œ¦ú†ß’x¢F5V¶XëýqÜééÉô½i½)IM¶b¨?ï?5Ù™šÌ1Ù…æÃ¬·iƒˆ<8|A8Í_3#¤½nMŒë»[ª‡[v‚\jÖ\ú±/í,队.žõŸ‰MòV¦À ]P Î@*ô¦5¤Éè€B0޹ÜóŠÆá(Iª0®¯ÈÁ˜ß˜ÌY6 ‘ÎñT”[o–’|•‘æÞß³ý„òÂgOSšÂIüoìþú Ë•¨À^ ß™³sgäÈ* «Â2‚bQ™÷0M²/^ãÏÝÇ|Î1îÙh9Y€¬Ý>#ÞbŸŽEݨïy,åp‡ÄÛ?’/JZÆYNUŠOÇO;?ªò™?xæÑÇN(•þ6qFÖ·T:£hõ”r’.­Ö>­Òé-­ÞX£zhí²“‘-¡ÿsOŸ+«ï—1þ+ajAC~Üaaœ`¡ëø–¨X|ƒ“:.@eyr)êR Ò¦qþüÓ'zxO'A•1ÅŒ<ŽC;;:=>K(ר®\Òã0=“=Ááâ¢àzSõøèÂ¥åÖ=êÖôé‘íwj`Ï­è·ÔBJ̉Ç×@åÑtsbš&ŒN jc2÷õíë}øð­à}8Á"æbÓU_o£q±ò>ý©”WsNÏššÂ¸XL<šˆS1šwn´ÇPq4¨ì:§ƒ6ç=Ê.Ú$C>Xf”…8áÜÛ¾W „·¦gRf̳۫ ž¯<€è/"Yâo¦‘â@n\t¥uõJe½_Üát,E:™zÑC©WI-–|œxL|dìõ¬ëð&æ—Ûë+³ Ä'Úý†=Ëô>ó~?ï½H`¦ )v\eËNå¥hgp-Ú…ŽH[]ÜÂxNºv©ÜÊfòÔ ¥R­Ä{m‰¿N%«”¯­Lƒ• ÿ¥Xß_?ÇJÐN¤©hu¸Þ´•;²] ;Ñ ”Dçf’M³jGªdŲ‚Ò’ö­µ=KÛ4Ô5û£¸r1íRù²f¶”Áë\W#G6ÞQÃ$fF*Šaxp?mû4ÿƒ€SþŽ5뙫\Åvœe+ƒt…vf…üØ1Dç3,gUã#[n Ž¡!Èý9©3rò1Â9é)¦äÖªÏÍvä³0:õ[Tã“bÆI`XÀü´vÜGÛ°Ažu!Ã)¹lBŠG%$'Ô]õ*Ñ iSzÍu¹jÑÂdGìàœkKWpÊ6áÈø"3Yq¹±ô°c£ŽN¡ç1Îãìñ«œ#±â)Ôà7‚ANžLn˜œÁ¶c°MöGgåR¯©¨ö[ÖÍ+^!rA(ÌË&Æ?ë=qº"ädˉ Ê’¹¼Ób›ùþ´} +À€¹«©ªYRP, .  í=[ †¼³xnÂÑ´tdÐÅŘ0žÅ‚ã\²›õ>YìT"º¼§-WÝëÒ-ÝÆ›0«îðr&Æ“‘wgöSndzõšõ&”ÙXqEÙ}¿ø%yñbs1¹Ø·²uºi,%]%Û•gij‘ÂÀÉ&uð¢س|Ÿ—Ë|™=L¾Í¡ÒFb!3~þ9HµµµaÚìtµƒ \³!W·jR›~g‰d¤–ÎÊ=5׳)P¯.Ë´²¶²¢Ôêð ÿŒîß-é,v>Ð-¾²úÿ ÍŠÊ5-å²¼Ó+`Ë ÌV‰ÚÿÃîíºÊ=Χñ4;l‡¡—ù+vØ™âFåË÷Ët¸þõ›ö}¯ýÃw‚­,Z.¢˜ŸvËAsqôÞéívŒ¿\¡Š½Šâ†nGÙÙõVÑaGéµÄlª¸£é:6H&ô³”uÖõà&“IÌT)%ïkÛ’;I¹DZ®AbøüÒàñÈ[™|ضG0i¶_åàí¶”®yö\À¸^däÕݹ;ÝëÂýÓ·Ž‹«Ùô2aîävռ̨šQñTA»n€=й¢@âM?ŸFrjvªAßN#EJF† QOcÜ}!õÙS(¼_³ ÌY_"xzÅ“0Í€0KDØy˜e¸îRBI‰wñãûÙÖ/Îò~%=jô(ß^cW$r®éV¦m@ö'=eŒžŠ9u² éß@ËÅtBp…h «@ñê ñâjç_&sñøpáýs9&Ëþ÷%d›Çb°ýûOcÅÆqKBxA½9ââ•~þN)][_z’ä Z/ò¤iÓ%-L‹ûl[ MÕTŸo&›K¿ç ªÍcÓè~0I³ÖÛK?ãÛT¥Ú-÷ùr®\ÓAÚ¹ÝÍ-¸Ènnum$Zr9'ËhÖßdhºïÕ´Ù¤#{¾c×µiœ«‰I«Ñ?Þ¾j³®µnCrN&uÈÆy«zdF2 DéÂâïRÇ›’ã)E—‰7’žÔç&Ë”å*1&bi¥ˆaà…¦£ãG¼ULSžŽÈÖ kËù_öÊ·´RñÉQE–µ:&ˆ®£…%Q0írÏ=QYœd·8öxN’Y‡SbàŒ‹¹:ו)úÞ}`Ù!¸©i¿ŽÛG óøÀËyW\æ DåŠ(xEÒª^ ˜yÐCÔM{Áç«™n‘ ª“ 3d+~9ä@ôiµ ¨zÜ Ë“"¤Ñ3%û¿=ï3¥¯q <ÂÖ\Û‰½ºK9£VWžÕ#ƧÈ„!¿â g¬{‡Ãu7‹ò„¶±´~‚/OéYUÐî’Gªchƹ¤W9nè·×È6­<‡îâ®Þ‹¾K‚boVgŽÓ’×b¼zJáN"ª²5¼rÄ7RŸg¸8ðÇ@/o!„…íì1«óé(a XåªÇÈqFÍ ¬î¢-ÄÓOMT è&æL`Œë0ó›9§Ÿüg.â¡`<ìP”u.‡íŠ[û;w>ã^1on-j·.É[nf^$%|zŸÏÎ-(ȚыñQWx5žÙ»ŒŒŽØëê”iÒÿ6I#ôg#§ò¢¡uýx÷[6GV>c$ ÿ<êA¼¼µƒy¤hkB¬˜—AÉËø{¢´cðÉFûAÛÞ£‰‹pœhV¾;CêÆ‘ß¯›) «ˆ³ØxÞT{å°?êžGÃr¹6Ýž¢rV2–˜ÀGÃÆÁî7øi>mk{źZwþöüLæ._ØVmË4õ“V¸ñ­1xD¤yÑyê­GDv¨Ö’Y‘»ƒ’Þ  Ñ̹>ƽk2Uª”e:sæÆKeпô÷ØõHÕ¾®Ç Ö§­^:ÃvqªšµÒíòŽÞ—gÈfS‚;q’}»æŠ²3EB~'²^LQÿTr­ïïó¾w°Õó9 †ŸCFM¹ÞÕá*°p΄×;á~BÀš¹àŽ!¨ftƒfÝgú™´è¹b6i€>^†¼£6ÌvÍÞ³ÌÚ¯Eq-lÞÉ¥w¿!š¢àë'ÅJâ'â&á$tSV_5 Ä]VÖ=¤a,rfŽ7¿?~rrï]š×™f *Pzs lªœ¶süþÝ;A1â=zrÜÛ<{î0w–¬£~Ydµ&‚‹‰àdm bŠáãâ÷Æ©èÍk>fÒ•5)Žáäì€ô‘‡¿§ú½ýÂÉ&Òø“–@â ï:–Kt¥ æÀƒ#ºðÖ.Ôµaä¨5äqÖ‘ÓñÌ„u}½;Ö3Ú¯Q5Ó|ÍVÝ9dÔêUÒCðá0ÞLtYñ‡§‘'kziÿÛøárÚÒMúÇgdPÃ˃´×åáñQ¢ÐGÿü¢¡¸Þ#¹Ž)côPO„ À}ø+¬ï³Ÿ)‡ig(Ì„â$¯8TII{¸d„Í‚m½˜m}9åþM¯PHT©ñÞð±8¶¿©|°†‰K°#Žpo­|³°sÓ5õPá±ÐÿRŽå¬ÙU‰Q´Š{ü¦É’eo½l.ÚÓN¼„³O"û×ðùCEÂäá¤Ï;ó´-ìžQzg©LU¡ ÕJ‰§¼µŸc ×á2 ø²Ù80£žät 16­&¼Ñ³_® ¸EJŽ-NÀa]eÀ€úÓ´¼n|‘Û-öþS^€7Ðæ‹¤ÁIŸj{4÷ý|^¯çSøó’jг›å宲VÎ'Z¥¶wÌ2K˜²÷޽jÚÕÛsv°–Iãyÿ=Õð%y•Áã<â¥_âš'¤)]ùªÝXE} kÔ žb¹Äƒ’a 1ø,ëêhp é׈XÙ>ÿå¡è2hµTß“úº~Ná¹ß9d$¯ ¥,r±=Žo—tÛ·YB¤ oº4rÒ“”F;C{ÇO é+ìôŒâÝqúZ‘u¥/ÞãZ D°Ü5êð—[|”7bõ*Îö*^’_^.zÙý79²Fþìe ºÉÞf=Êz0‰»­à+*Žž ЪA"ŸLV]ÂÜÓÉÍS†u @®åËFÁØÙªXý²Q„¡”°AÿkPóâOgs¦[ÚÒ¡CqŠ }¨``å-Ì¡ÅzeJ¡‰΂éݬ³ÚLl9½ù×` ù çøVž7ƒ‹ ¥^ž…3+££‰Øß­Ý帕Ç5Öô…ŠIߣ„Ýkjl¬œA>dÈì¿~cu1˜g± ¹”ûO‚/Šÿ,œÌ}ÊèœíÎX>omr×\ñÅAcˆÆ»ôlî)Ðb½´uŸ€ì©ùß"»Þ¬•÷3F$“°ècñTÔO)p|tÂ¥g 
û¶0Ä1G²fr0÷’2fc÷pûQ. Á}Æ9 ¯HIµú˜ÀIS/ÇIy £Ð™µ×²(mXK…ÃjÉ[($S: Öh[EûK‘l”ˆëLŸ Ññ¡7äÚmP¿VÏaÙëEö¯#í²^Ýz¸Ûší,˜—K±o1;ߌ¸jºSV1¨§ -]oÒ"<¸ý¸Ÿ.ø €Dt#ûmÛŽ›ýºŸË5³§Œ­ –ë^™ƒü.„’"Ù„õÜI}ôÞLŽ¥*^“¶Ï¹á¼€àCráÄkÅ×ñ0 boToX¯%×qãj†7“;¿ý úȵ¦èîi%Ëáõr‰ÇÿIÉâ~O6ÕN~25á«®ÅLqY¹˜·æsƒwÖíGŠZá5N_}h?ž^¦OÚ…ºÙ8.šzAZè«Kƒ(kÞðçR’ã5òe$éäN›¬-íȬú±oÀê˜2ªeØ‹ç ßfiž˜ÚèPF`ñÚE&Ío›ÃÖ'³'½‹â:Å- B\Ãâ†SšÖIíp7-ýÒ^ UÕ@úr÷rv‘D—üI€—Rc iÉdq6Óÿ…Üp&˜|é¤Éé!sOÈ&¥V$/Ô’¨6ŒB¶Ö²¸œ™”v ¢ŽDúp=ϧÉTž V¦ñÞ`¡Kì“å/çË%À¯·‡IÊ ¿« ¶èâ‰üÉ[=|½„kçAŠ'.6Tí×h“ý’„3Ô=¥]8.Ž+fJz2èÂa0h³`äO,_™g¸G¯ñÞÉ ºd—Ët äÃŽãí ™ <Ÿ©¶~J6/j5À˜:ékûp¶ R ¬å4hQÄ•|ë%(IÖ™-„f̾GPf ûþw ¢oEÊø˜Cxøµ½¨„;B½º VäD LùVUÛC±~ \¡’·ö N¥„qOâBö|¼¹¶ç›sdò?£(M˜1y ÉÃER5Ìñâr¬Š…+:åŒÄj‚ÄDñ9êæ©*Lå0òñÃE­ªiâÆÄ£Ê}ìˆ,yÊ*²B—='ÜW UØK ‘%¯ÄSøƒŽ•{¹žÍ¬Ù塞µŸûwºñãüs9xŸIVð¹bž•ôzÞùr|ï˜JXÓ•Ôæ3ë&úÓç'pŠ[ˆ!ò œ8t‡›Ì9Æ„=ËM1ôâyÓŠ?ØÎŽ?^Q{¡{rëŸˆŠ·fLëÔÆlMor‹ÑÈž_¯S·oœß©Mi¦±ue1š$xú„Tzþnž>¶ùVAŠ4™˜ša’›*U*†v½Éʆr›ZT¨Q:Šé‚IlkªYuWî·_µ§NmüŒ;\†ïܦ¨åd#Ìd9³|˜ÉsO”!¯*Y\\.­J¬ž†^¦l9“V‘R ©cÛZY6‡U¸†¬ϋ̖RØ/ïKoI>æ¾ •é ir8®Ð8´jdÊÆŽÅQšh„1Ã2c©Å ]*àt_tßtMdÖ”úÕ Ç~@“î îÞÌê#!üfµ³žbïÞ(óZ/ĦA›t±çÏH#GznbÍá-n*ÈÞpFjgb÷6Ž}¤ÑRwDÖl°é¦7gûÀvŠÎT¸G ¢Å÷â—ZrbÕæÑò®q¹Íé™Ìø¼+Þ•ç?Ô;€Úv¤6ô>]Kt¿ty4Ô1ê¿ÊT:Ûë•;¥/ïìÊqßyíL· §Ûÿ8/‰Ý€iÝS7bEÓú&³ôÑ|ÏçCR¿Ó¶¶Òù”ŠI÷¸mÞ5ï¦þxÿ÷Öd׫‡ø!{J;ˆwµl ñó`Åüg´hé+Ås¨q®Wvçíc{ÿ ñç×>?GZ²Ì­|uÞ¸†G{}BÿÜa¥°#¾KŠT©bQ«©V=‹a“¦Œhî`ZfŽ ¤WPÿ_> éûÏ…o^#[ÚÛ×ô/ßÜý=DuÓ Ä}í2ú3†„N0Pʼn„D¸°éñ$ Ô…¤†§¬{vÈöÔötÀWÌ×Ô#06¿t¢pÙ)0b3b=É"$/ÝÄ+Ô/[öÍV럮Ä*)té¾Í‚v뤔"¯€Êá– °y qkqÞšè?H€ÓiÉ6t¿:ËáU7ÞÖéÚj´t¥·µ]ìûÙdM‡Á¦?ø³*Tt‹L.íÛ×si4øJLÕQ—P|Τ5sõ¤ž§%脌«FLçÒPŸ jÖå3Œâ}¾‘l8ÁXïeï ÓîÉArE”0ÿàIÈ,š_õbGÇb¦#¿³o'áAŸ–¢&)ƒ„P>²¥³¾«`G­ÙQ33( }÷}¾ç‚óÎþrÞ¹ôt1ËgôR˜(õó.òö2÷ÝŠEeÙ:$üüO啨í#Œwu'`ðKj¡ ^j9ËÙ;zøø™å›”.–LÃÕIËÇÅE·›š®GÐE©H"2ô*<”À + ÂRØòY†ûL’…Y^ä-@ÞD‹U æÄ/ ¥gGI>öšFûe_ñDd$!q0ÚÃXº¨B-C39F5955¦¯lÆ“j jñg5:—ê\Q§ßK@’øë“—«—%wšÌ¡æ‘ Ø8 J”l?UàZ%†Š8êË@ᛀ0Mvc—¶á±ØÓÐ"Mi%V·dAm`ƒ£†g€]T.çm-sŒÐ5’¢WMœ&“§©$2ÝEæ=,R]ºDÁô/j¬¶ýöãv[]¹ž?­ÔBr*mHÔ=8Å{‰ƒxó‡7Q;N`Â2L¤þH,{Ô×ýì¨ôC_ü›ž‚ïW¥Å( ¬ž? 
ú¤p•ÄÚãûÐ&ÌARè}“H`ߌ–¯MÀ½ïO1¦Þ ô¢J!¡j»%ß’èõcjQ“õ^—‰åé”ÍÛoSp+êžþ—G‡AÞ=BUYó^7[œ¡“rõœF3´ëX3TEÝÅ_íjÕ1M6 I]KÙ Ÿ›¼Ô£}÷@¥‹·$º[hÃÈ•!¯Iyy‹æäþœšdª gÍòñÉ w<ÂʲŒ?E‹‰ö¢¤©¾§z†2slÐ-”Þ\ú5­¯<èÞáŸÝeHbã×x|§–³úGE þñ‹^q–H;"j^¢ 5tN_NìèÞaîá‡/mžK:%¨ªX©hÊf`A€-å±`Iƒ‰^ÀQ)5ÐÛ½˜íû³f…ø€É¥¸G#ÊÊÉ¡×Fé Lóî'´:g[›ªÌ9ÑxP¡™i¡›°òÖŸ§W…ÆYÝuÄX•Lœof5žÛGò˜øNLO–ÊFŒn”}{l^†jD û<пÅ>ôâa޶é>ãù’äUGiIøII¾BZçp¤´Hú9Tý5šÀ>…ӄм‹!¤ú®v†äþÖÁð„ê¡pHšx «dIé·õ[C×A=ÛÛý­Ã²‰C‰¬õ”`B”¼\¢*ñèÔùn‚0*Z0Ž 2Î0D4U()Q æè;G4€T´j‰¨C!±¨A5ÀÔ,o× >+ã €ê{ï„ ªg•ø3@©\Æ"ó[Á(שÌr…u-'Ìi‚¹,I/ÖVÃ*INMˆÄpAA]|¬ 5iîArŒ’#fï%ýü†‘±9œ~ll‰n€€hÑê_z6šA\üùí¿Dl cc^꺜€ð4ƒDY;ÿñ´^ý—ÓKú?"7¼‚ºÄÉ1âFض;fÈrQV«"{ø€/Ôå˜öDH4¶»Á iÄ"¿ÓµMMÓ—"Ô¦Åf5Šl ; m„zÊT©üW)y–ì~!Ê 1UP©w¢Ò‡ˆ:Æ%.PeÈ<9õ” K9ÀðØR('Áñü!–zHjžï¸úÐË ©v’› ž¤gž?KS×õoCé³ ·&$ ¹1–ÄšeTjäÞ861ÿ{S”æІ5V¨E%êÜ.B°@è¬ä ÄQ‡flŠ`G²¡Mð^žÕЄTo¡ ŵ‡Ü1:9aÒ=9¬ç9˜'x¾)ÄØÍî·ön÷Ô0š}* ÝX­ûé’‡OÒmpevÜmqcÊÄXÛï)Ò~~Ò]1¶9v 3ÜC;nSðO<ʳ@8ÿ2ÂÔÁ"(æ«P­ñ`ž5¶&Ä{u’é‚Ïìà¢Ðgm>bøKxa«B;’ÔSGr ;õƒ³h-Ûœât:|’ñßÇìéßjC» ”ÞÄ…¦ä™Ð/Æ”3e²aÛBhÎvϪu"„îh….aZ<¨Ô=9/™9Yú²[€7F‡P`gVÜ>ìœ &?Æ ‹˜¥â¢zÔ½àǤYNÖù³ eë4’^ߎCú:ñuʈU‰ó˰´³„:ÊÐsõÝ™{߉d@¬Ù;㸗-9ã¿ ¼ÒUŒ‚N½e¥ža·lƒé²}þªÝò«bŸâghÙ4˜í(Ub Dr+ÔxK¦óí/@`ý·!ºDðÀ²Ø×ÉG¡sž AÈób§é,Û'¿›õ-Š2ˆöF¢XúÑÔ@:Z š"û`ä$lX.z“ãt+j‚Ò΀굚^¶Š™Å%€5ž·•;ÃèêÞÂûiaä©XjNm]êw0Ï]"YÝ‚- am µe¢$L»LRh0óm@y$çˆÎTè†ÿùÙ+»Ã.„£nÀ4[â_2W` §Ìuä{Cžq!©’ç®oO¤T¡_ÈEWù[Å× –Gl5wh–X+nƒ”#vGj}t¿— §žT®7G.Q®iæ];J@*6EÆŸÂyü2ƒU8HÖ²¡›â;Ki.Ñ:^DÏ<¯Ûy„½°fêË¾Ï ß„šJmÐÇã·²…ë˜m’ 'Öm-a¹|ã¦<Ð?ËT΂¶¦ÚŒÙ®Š²Ð®BôåÀÌÞÁ1ׂŸTÓvï~”‚;tçáfÖ¦Ò^lÌc–3êã˜Ò¾j„¬Í¹®ÝþÆÐN‰åYµ¸&âæ˜õù¨ÀþýˆŸ[xt9û4e.Ä<Û¢KÂÙ§?÷cJ5ß _Ÿº" Z²MéßóøÊ9OŸªÍ½Í>ЭÙØ®n¼êƒ÷-à3IhîI™Â’zrËǵ…dÂ(aÕÓZu4´9½÷üEkfÒ›5Gwj¤|ã¬{%öT}É}CLÿû–1¼Œà7.”ÌñÐùá ±þ7+N5@ŽDßTÈîûH;ø N5¤ À[Iüö×Ëkû÷}&­,2k´ûü»Îa“ßZHK,Ê/Ëî*â^êãó#,ÿoí >ξ5öÍÍí!¸'gÃ[FË~Ë/¨b°óHíÕ-&!‰ÐW¯yª’bB6¤‚/šÞòâ`íÙÎ ÷Pí_Øa°Ö!ÎÊO1°N£ ¯…µÔúh"› ð)3•dÛpX¸xÇFɆ÷&’mÜ o¡oìP¡F{Úœ?hBç ÉRXÃ[ÈÞ‡úZfU­6wŠ=~ƒ¯²ÁøQ6WxX1°#œï7¼=D¬ì[¢ ?ñ=g–äV„Sm~‡ú›ü«-!ßZ›`\ïkwörÖu¤¬KÝõð­°÷6ÜÛœk°¸I«Øàï"©Æw­@úQ·8Ö›£­v‡•œà›o{G¢ ’æ}µQÏCœþÁpxkbûï” ^nbZð½ùŸŒöy^m…‘·E‡fwÝÁs?=é#+û<»+«ãðÏMÍ‹´¬sïϯ.C`¡ù*Bsƒö|ø—  0£¦È;|ŒÜôŒÒ˜Q€¡aŒoû-®}­øî’ž¸p.Ï. ‡œ_Ü¢žF>B)ä;BÃÀ%‰â«µZMŸ#ª&/În£a__„=ÃÆùê"žàªoÖæÑ73D,}õ+«ëñÈ£çI¥°±Ð—i*j,µ—ê’ìÊé4•u:MhôšõEl®6Š£±ŠE××–y„KECôçBúãY °wÿèrvÛÎmÅçcÄϵü .4úA/'Ä÷Èî!žƒ»/s sZzupúøï´8УMdæk¨m¦`ìîÊ*GØ{Á1W ÄzWasbqu‚ ìñl¾Á}w39 „~²O¨öeY¬Bzr­mó2Âè§ ?5’¢àH"€v0MGœl k´6=G55¿¬ Õï¯W7¾©øðÔçrzpB§%¶‰ó[l¼å¨‹½‚ÍÍ@›Î£†v~².´Ødgo ÓñË•óŒ %µÒïܽ ?;¿ºÂCgHÜÿí…¬AM@{\Àݸ=Ç»øsȯÔéµÙrÏÏ÷×RÛ` &ìNø½Þ|pêhã£å>SÌPÈ£Cìs&ÂYé(¿æ'>GVU«ÃH«N›Ý'Ý’o÷ôPß.éÏ{ÆëÑÉ&|ºÅÿMù=6ÙÈRš…ٽﭾ zÜ:É ÁÜü¼ÊÇ⥚moÇ©ÉÑê›j3žóS£“:Ï lKóAÇþúÓAÛÓ9Ý:xð6—äwóÛ"ã“7òæðò¶VÑuÆN\ÌÌ—”ÐTD +3Šâ‘¬‚øÂŽåœˆÀÞ„wpmÉ &HPÜiñ­ûBŽb3 … >ŽHÎD"–w‘¤ÜpÑ8[?Sì®~EÑ®§Ì±L``gæFH÷äžVƒ¾¸W"ä™™¶3pƒEõh^‰7zý÷#mØË^)¯, È ‰Ä Ž?¼tb†@™ç??>€ ¡º¯üïxä»ÀÇ^æïx2ògØHHà#Mhõ þ”$›Sº0£ôð›Ä%•ë‘£Ž<£šë*sÏLÌ;“¬‚]™#—[ü‡×M®\!Ÿ[rû’PjôáD†ðÑÍĦjuT ^kÇW†´FV `c‰ÎK¤Ö@™»!&lâV€s†­q§é½a‹  å@­·Õ£É:-.ócîÅ=`çä™—ÅÇm!¼³JH. 
wàö‰AR [Àr,Ý+Ö(ÍCx ä‘E¨±CJ‘r Z€,Oo¸ÿZsdqè'Š÷U:žµã )ÁPˆ1¶àæÐð¯# ¤˜‡ø2æé€Í%×¥“ƒ~<0²!u¥y¦éŒÁ–g®³œ¼W¢L 4ÁRBp¥!µL€¸ƒ°åxBlrƒìcB8*S, «H“(³ÒÃRƒoàrÊëæH-v ºt+_-«Â6ê+Að€€&ñ;B”dŒyÐçq 8à H@”"„‚` è‹"b¿D»ëñÿÒœûÕÆ´¹@H_†¦ J`+Èdî“ ZXD0Ú\¢ ªÃ`¨ŸK^K^÷y÷߀JFY¼2ð1‚yï í±/o©€xÛ¤L×òÏlص%ĈÁ ÈD4b 6#_¼ ÞƒH‹…Ç£ãææ&X]è„ÍÙ¾Û‚š¥¹Û‚™¥ =O¿«§oìÉëÕ÷ê¿ ƒÊ¡`W%àšhê_œhM3L4Ÿ›ÄUá ºŒûEÎ-¸'ˆ.dnŽd6[µÐÕ)m·Ë>©qÉ(qÉ(¾dv¹dŽwÉpØÂkeé…·²…¶¢…³‚̬#¥«‚ aQñÜŠRÄ{‡Æš-u öavÝ_6Â*rÏùr‚â0ç†Ã{Ò½~ãšÙg˜mÞqà ^…hoT…Ê}6öbód NP&”¸ïé€7GõE·gÏ ô’iÑEKNk0No`Ö?Iõ0&m¨I$_v,Ö¦Màà s|ø,7Oóv[$^ŽžˆO+¬‹!d òŽNîw£-mÉ)×ãÀ_Å»å¾þ:S£ú>To÷ž.âÃźPð9ôÞ4WhóÕFÓcÐÄ©ÁW¡ÖÚlÂ?ÔÊ$d›ØbQ0ZdI,¸Lïe࢟i»I1f°±%µÉxqzKÑʈ7‘*®^ϧ+¯|O)S‰2ûÆÛeý}FZ=Û~¦Ê͵g‰=A"¸±¬Ÿ‰ÿõôN£0…²²‚c!¶°å¬ÚêGY­!8žÀe™frÿö18&…æööx·%Ø'Üç¤ÄýŠÓõýØÕÌòÙ½2ÃÁdG;3ÒÎŽM1#ІjßÕhol—zànbFÛçð Õ×z0u`jíûæÅûF¯µ»­çÃñž:_ý³¼2¯£Õa]ȵáñC_1rñ+ÃÙÜÜ]YdSïðUçʤy]»ÿJ×ó5ð1?)ñlè7íëìac´ø3YXŸQ?eÕIùv7O¹Å]ñ2‹¿ÌO ’þÏ»J³\òÏ/¦vUƒŽÊ‰ o ­êyžúmn¬uó6Ë31ß¹¹ù[B¾T¨¸äoh¨C¼Ò+} ¦rùùÏ  •´õ#-›ö4,  «e>¶ð_/3? ™‹Ë„ÿŽÊøLBf&F¦ÿ 3ý¿¤X‹þª¨‹£…‰£¬‰ÛåW³üW~5# +++=#ýÿ‘]ÍÄø_ØÕLôôÿ+»š‘þ¿gWÿ›ù_ÙÕ,ÌÿG’‰‰õÿ‰] cD úš50éŒ ,( .â§¼Œà%QÕ¸÷òìnNåè9² #‡f¢ÄbY·Ÿ™. ò”vrnR‡½Žz—6.°ær¼kxåŒÐ±ášN}QÊ ?àÖʦÕX7-!Ëæ5 FÔs1Êor¹r-tI‡! *ÒOüT3ëTª×(¨×lS´‰wÕàBâpY0ì;-!"dº IŸÝ!'¦ÐÿÐA $ÿ°«ÛUš EÇËuX°%RHÞƒct€åqÊÁñ‡,G)wÝkÆuŒÂ!0K!”ÿ¯ÔÑsTœÜêC:#øQÂÚk÷­áô=^;)þ× ÑD!Ù/̧Ӄ\ß?Â÷û…Ÿ¨îwý? È?rîŸD¿“_‘‹cx'nÕ¬ÏÙ¶4wŒCÑcº°\DY3ù;/¿§¢ú ¾Ý·âáÿn¿±üÿFEþ?í¯ÿ–‡Ìø?yȬÿâèþÿß þßÚþïüݧÎÿÂßý÷Nü_ù»¬ÿ;—íÿ‘¿ËÂÂÌü¿ðw½=¼µ†—ÏÒ½š5¬×¨×ª©ÆÐ=Z»FÂk¸ÙgLÁ¯'[¬#Ý#‰GÓbã‚2€l(>7P%p€$óâ€hBRÄzk¡›ðcXa¥2µÄ=@¥n§¶¬FáPÌ?¿}é:¦²™Ìf¦²8.£„qàd?_à Ëì ˜‰tüñã–~² öËíªœ ¾8ý?úÃóç{¹ë¯UýƒiŽ#…ˆƒè*d¯ÁÅgÀŽLÖn±ß΄uøie—ÿb„_+u¢m¶îGº ~BœÚ»޹÷Þ.wá¯×û`·„(ì#'op€¶6ñœ`?×¾µbnÖ¸#“ެö4að¯:lð!>ɧšá€ž0l˜!ˆÝåç ƒ¨4MßCKVKÌ] =KÜýîŠy§4ë& ß@ó±©ï‘¸Ú~»Ú„ö«ÜO;ÆóÒºØp®¦ Q)÷±íƒ®Á!×!ÂñwIé†.çñWP žd¿é£–÷¯ÀêÇôúó íÈ€·ê¾AKÐåáûP¶)„/$ÓÄÂ(¼C)ªØ;}õ Tß ´î%¿UÞcdë…©‰Ÿï½’«õ½ÛCjCé›*Š[Žï‰âцÞ0ÌÄÙcòBߊèìÄÇ 1ºãI§"δ&hMࣘˆhï®ÿêÆ“‹ýÅLˆ_¦‡1ªÃ®ÙÞ–¬Ýc÷æGR[ÔÆ|¥â•pˆ]À {[.c”̇,vaІ]ÝsóÎ… ÎUN M"q"Ú2Ù⽉ýjþøq‹`;˜¾?˜Í›Úba¿w$]º ]Úˆ.ëü¾£Ç %ÿBØ uÛçI:9âtŒÃ!õ;\h kË¥âUì)ðfóuû"šËoÒ‘1yS.QÄ:2¹á—᧨¼s{ãªOàtÄlϘŠÃ›>B{°rŒèŒð ÷Šî ù…Ãß÷+#ˆ€™¸Lª­d­ð©×2tÚ´wÍÚŒtm29Ó‚É—Ù qÌ Ý£8º¶»#œþÛ9M29FêÈОtE–ð9F×îÔÙæ`HæÍz{ôøf…Â:Ì;ìGé[Ü%ì¡3…{“ ÐŒM£9ÈxÔ kCÿ·R;pÖ…< øÄò’£i£u.c7 ÞˆÔÌ£5wºì™Ö‘ð>Þ !Ò;芨÷ßLVˆ“=›£ÞqOÿ7ú+XoPoxo"ßÐoR,ÅdvP.« Û[î]Åt…Õ2Õ²ÏÅ' _UÖçþ–UýPE úå_D/îÔ]ÌÜ?_¿¯ü·€7†“rî [ÖT9¼¤¼Ó¦~Í6„÷]8®9R³=©;ï›Qœ@±=¦¶ßàßÀÕ½Õ·,6…F?}/O|Þ¿‰I…ÝKqÕE±0 Ž^´d­ñ›ãÙÄ‚9FÇ JL ƒöÇæåeºÞÛQõžôüEº>ùèIÝ*ø@´>æ6·™Ü×ã\:µ¢—©ié»þb)³à2æÏ7gÈú É o×j³j;˜uðjk©Ïð?p‰ú Ñ'òó‹oEoËu\M„B}ÇÒˆå6¾ÇjŠ=%HíÄþú"V°÷jë¥Ô ÐstÙBëùû%¢0óXøüzFêG€µ&g»ÏKàÚ–~Õ¿ý›óðɼ_K<3}Ïæ8xvÄ‘0ã1&Ùc¸e'Ž/ 觘Z)À§˜¯“½‡w>ž}§t¼øUáú£ G&ûŠHgOS”] ’z¬‹ÿéÑ…©ÙæþEµ½ÅEž{7ùÖè]?PÊþ:8ˆß3”>®ö¦`¢öEW¯3AgO¥/ÿžâÉ!í4A¨; í 5Î¥M:±šæ™”¤q_ ÏwÄ*ÈúJëOKÊVàvt ËÉàˆãOø°@ÉÀÆbˆöÆÕ _hÊÅîp¢àŽVý­u-Zƒÿ8E¿‚c : î–Çò¥èoÁ-`ãkȺ&ó e¨TºÖfè1Øðѱ‘Aig"â¡a!á_ZGÀ7{=áÂyHCÆ5™‘nÀPš¹UNzDìS ›™§P™•C$¿t¡½L%¼Õ²é1Ã1ÜtýD8Ð 1ˆÐlI!#|fb< (‰H>›‡B"é€ fNN‹¤™-x4ŒVŠ@¢ÅêJèíÙ¨õ7ËØž+¨wHŸgÀ• A“fF0ø ª:^…+ÌLÓK˾#ˆÓ¦±q½–†60H73øé逡®F_ºÖ%¼cÞ<‘R¼´™”ñຠÇß»Õ]~ãÁ‘š´ÊímÂéï™v5Ñž(ŒuFD'áéD g6ûlûº³µ)ôB°³Ëgb8vpo­Î?]µ™5ñý.ZN¤b´dÙ–Nj*Ñ_5ßÓU¥øó¦ÓÓ+CÆ|°;Ñ^7Î;;±Oû곬Tn§(µ?"•ÿ¦(7ȵþ±™¸µ.NÛ‰ t*vr?)Ð];‘a•Ù4cb&º_%lTI;¿rÜ ŠV5k­%¹ÔÇš²ÎàM"„”òŠˆ„ÃR$¶À“–L þ6ìë%•Ío‘> N":Åj ü;GG@¦#»%NãónePÒ‹6~·(c¯l °Œð—˜ùuã‚"'&–\¬”þ‘7o#›ì· å0‘2uß'€%ôsàǃýà^ º\§,S!~ìj¥†’vóøç(‹B=ÿM_N$}óZ¹rUDälPÔxv„¶œ†?×mHQÎ-ø²˜gôÄK:Ób6ç='d¿[–yWlmºe¾ ýGе[+ì’ˆø[©ã[ÑO+¬Ñæ0…6ߥD-ÅFë4!eI¯sŽLÑÈ ÝîØøoãC:Ã%ƒ3‰( í—Ü0µAÅJ# ¢ªL÷©ø­mõö¸Èáí‚B'´)“â­Æ†nKQwp÷&¦j[{Ë Ó‰O%ýÕÔT•#T¥©§™÷ü[o©¾ý N?“®±iA³›náüÛüeU¼ŠÞÕˆ L¢g“*œ*žs‚+Ê8 ®VG Ʊ—ŒÙ*µXÛÉNò6¸S}íå_FêVÿbš€–¼è¼âYG¬ž?@WÝP¡ú×?䀡‹S‡Obfúì_(‹Y³ ßT÷ý‹)XXøûcà”.IØ(ÑyÊÞ}AªÕoþü¡¢Å_þ‡…G[Bz~Ë Y–iVêfîµù>5€Xˆ¬ÞÁµŒ^<Ã!tÒKwñ}øŸâÊŠò8B`¶{k¾L,ÝEµ­P/Ÿ],\Ž­&Z‹à6 ¾÷egQÃ[×ðB!Ådz~þð [ÎN´˜–˜›¥¨Øw,%%á„ñLÇ'\Õi11IéŒgÁŠ 
s0(èh¼'_(òZô%ëG'$7n±«.¯öºË–ƒâöjIÓö©@-ÛìùEûUqݹoì2fq¾ïŒι7ÔïÇ:‘y**^Jb׬$¹'¸bæ¬U4úï3à$î›ð{r»©ªyl˨«ª*¯bÔgŸºrZ]Y&CÍÚtI;9ªì,]d\L@9q’(ò^»‘q;k]½ßî~?A£nk}Ȥ‡ ›ž³×¶ø¨ˆFxím€{ú"r5Vþ~>íþ>[x†2Iµó´´Ó´Þ®¬UÚ ù~_Å–³.8Å­¦e§Šá€®Líj¤¼²$Ü…@hüìFŸ¦bN‹<}±Q6Åô¢ÙUÿŠYP‡»¸Rc›þ C7gBá½’ñ™¿«š$§³™ð|5Gl¢jÇL‹æ-ÿ¸ú `6!ÓÊðß#y•çœéÎÇŸ(–ÿåħ Óo„|aè3®n…ma0ʵOBî¹o…žc¨Ò&û»s¬+@]4¤M GÍ ˆ¨±®‹ñ %ɇ¸ƒxÅôÀÆØnX=|ÈеÚ8Hä3‹{{Å «F%ŵ>º &¬t\´Ø{›ûä-¼Ó›Qƒº$äéÉW)W\0ôËefpô\¾òHì:IåD>þ†·ÍÁ¨xûº%óJ¶z¾˜ØLÕå Ê JeàZ,øˆ¹³\‹6@ÍâÝ ¿eË,[&[Æo¡ £o!Û¡ø>2%3ÚM«'¹¢†uŬеfœ]ÏIײT¡ZFZz|¹Ü£x!«r®Xä½Û°ÂSN+L­»´ýBô¹ÛR6;>]xÞƒ #K`ˆ!³’¿ª/AIÄnÊÄ]©^ç³òÓòªÔ3sW°ó6Ÿ%Ã;(? {bRÞ˜‡¼Y6 @°rZ¤¿|~7‹Ö†-™ÏÙîêMe•÷ ×°û Âg·g/+óg¥óä£×Û6üÛò7àÒöÏû'¾îò"~çÙfè[¬"w¤¢9Ak„šùÓEûRÅÃòq6,z±\&­Ä®Ýï×vR]{‘Óº¤wf9rKÚ›¥9½ÃËW §­P¹ÛIíìnÊoϹòǧËE“ ,ìH5WmK ïvU3 ƶÇÊ1Ir2keœi3ž 0ž‹ÇŽ©µ86µg¥ƒG¾ =Vd·wÒ¡ˆÕ hR¢B“±{tAù„©¶Ý–i×¼{­‚E”£.Cý‚ ª”x¸TÇr„*·–žH¥øÌs¢2º‡)ûc·‰ê¼gnr«)µ\]F´©¶2×3LóA(XR+±Í—þ§5jvy´u 2n%|ɪ›…ß­qì>q!wÌ>ÄhÁ4v îà§‹b¸Xx(lÊH¨è†:„´ºâ´Kè)ì¬^¶±yµŸü÷žfÁ°ñürúÜm¼z+Œf u‚‘K"Þ@ešc¥ÕåÉ•Œ ,ÙÐ(êå‹® ¶„s5{–¯FøÓs P\ŽK.KÊLj§Ðp¿]µ?hÙtEæ2!§œÃ–j?!˜¤Ÿ"àB2ç_1íßjœJJÖD':“["²b×íøC;éê;®dåX­¤%GO%aVÜÅ5—†»Ý̵_ÛN½Ú‘?(³áÙ“¿4ã¨EBÖ›Ò5r]\÷ùCdOœJØU’š&œ¥ö¹ÌéÌÙí|¿O‰BfÍHY¸Xû<$Ü9…÷Ây[»þP@.0M›œ—51&6¢: *öN­Ãµ-Éôib×¢ãB–=BfI´H[iš–,gW.ï—I'h¤ÛM3£à¿b]IÊLñW<¨“dS6›WjÖ’þÑÝ![z--O¾(üb„™Ì«’5cFÇ«,›@qRĦ .15̸ö!KTg¢dšNOÞ ×ó»ã6Ã[¥õXT´dç mUŸ­Ç5'Š&+7ÈŠ‰!+÷™cÇÇB/¥·¤~[¾ \­¬$†M”?O¶ô’qyîB,«ŒR¾í²F<ú¬.üì褋ܾ/xi`97µó¬âß\Lâ2k\ß©G¼›+)a9&â£9­å©7»ûÀ­¹Xh4¬o>qÖŠ˜,.ÛžŽu.ÒpZòpqº bëë5xß-q(dùAcè#œ˜ALZM5Sáäö †'3&Žé™mpý”0Ï¢¢«Ì§–ÑüÎ3Ó2Øo0 K™gCã\¤ÅñíqĨº L+Ÿ!>uÚ’¥òS«]òÑ'=1!å ”´„·X^."jv‡šº3ÙK Àg¬Ë–8¹£7'å6€'ƒ(BäFÂ%¹ ­EG®R•·\ çÇ;}ìHϺ‰£I'{çV0#ÐøKdšPÌ3qœ¸P$¯‚}ŸuÁ±sÇËÃÏŠ±cUô!%:‚<:º<ÒÊ‘ÿÁ ô\7ú—NB̃öçk-Q#Ýòô$Õ-ã*†Mç¶Ålål\ôø.ìõ½ýa‰=§ØQ²Ä}K'c>=7Í‹ð¤Ê2jeWiÀԤؼ­O’Y³@v â” V±Íblbâ~A…‰±›BR0« :M+qgFŽŽ©Uëi(G"à8T=R&7©d•lÝÉéâʪùÇÝ´F¶Ôí˜ì–ƒÙ¤òÕYŸÒKbz}IxÛÌ#ð°_ÚQ 5Åãy)ó¡RýØU'g¥.•¯ä7üG‡'–,Ñ$Ldzq§8Ú±%hvlæYÇáÏ£nàøñ¡Äì6„°¢•i|<˜¹<·|%H:/P™pãŸ:ùáÅ¿é”^@­?ŸÝl5ØŸ†Ì//Ë­;RY`NõãJôu-—«:uf+Õ]5>{[µWšW^\('J%¼*×?Ÿà,^Ã:Zav=H×O_o™[y†tGˈò#zÈnÄ«$‘ã.»Ê=ƒ>iCv-3}§¦«ƒ Þ q4ö÷sCÇüŠŒ äË–c3õ SUW8‘ŒḂŒŒ(P?¤m±D”…pK–Zž`œ.¢4|DÛ}Õ'6Üè§|ú]¤š,XNF‡–œë;f¼¨¿Éε9%Õ–6æMš•¬L6‹ŠÏI8t¦ä¬¤ÑïB¢Š“Ú•È`GÏϯۈŸEÚ+ë÷ÙuQî—²·/ÖÂeÖçøÍ‘ ÁdSÁcW“-K³¬-ÑÌÈ ÁHfºnn’:_Ì01ASà>Wdš]Dš¾[-G{DêÙ^l·®Zª’”Í6äGTXn°äžÙW³R™\£!iÛt0£h§ ´ž:Ä>dÁ3ƒSÞ º`2RU:€Hºº°íL0Ã_s KK¿yºpÂ^OìýÈð!’RÛ¯T^’&P:ÇsŠGÆsÙâ{GžV{mÿøÅØEœ;L¿I,Ýb§õNì10´"4Zš/¡Œ£eS´óÊ¥×:‘îÎ<ÅW‘mþº¥…RÐz&W¢r‚^ÂTpTÊðÕg¸lpRrÞ‚ùòÿëî»ÆáÁ[þC6p9Ð Ô«Õ†8»8 bŸ[²ããt"=O½¤µC©:|s·cÆaºOš‚OBc ¿ïsAq»pØ“Ã1Å0*×à5wŸ¸Ð6Œo ±s^ò"K$×â’ÎÄ|¾„;Ç&yW3HË©l¶…=©3±ÿ‚jÕ0I§÷9‡zªm.GÃÚ™Ò©ÜÿÓ¼Z%”¼ .!D‘cý³±©¦³ hjWÖ³/up·ŠNRtÜ2ÉZ;æÆüuU@¹«"É[‹2 àHîyÿs€þÞЖÊ&Ð10°ÉW­”\)뜠hVèÔí”é´å‘þvŠ;3šmQ ’½PãLé4y•Ó°µr¥:j:jþâoDïf¶Qt8:+Í×Ú{õ%R3M>ˆ,àÑRQU¬Q¼ÁÍP¸s‰ý¬[8™ž‘rélÈí±¡¬Óºië¨z¯ÆCÆÅ`Þ€Ü ½rE]:~Ôĵp寴l!9¯(RC5}ÌŒ…ŒÓgffbÑ ‰µ°‡³×”¹àˆZ~ÈÌ0PZùšQð51š•Ê1S¹Jxˆ£à;l‡Çà殮 ',ÔÍ–žT0}GwêI¿-OfmÝgEËó> .\Eƒ $eÛù¶òå_<‚P®j˜¦Ì6‡ *áéÙ—Ô”M<½PÐníÛ 1kVqu4_Sk‘-sÝÑÞêrRh$kõW‡Séÿ¢ìƒ†iº5Mø¾lÛ¶mß—mÛ¶mÛ¶mÛ¶mÛžç}¿îow÷Þ3{&"«*r­Ì¬¨ŠQY?Îó°¸g=38ìpt•º Þ “ŽŸž¸ÆÛiá%ÚMÊ>v0ƈ´ÊÿŽe[Q Bœ9>ð´Q¬ŠÄÉð„rTÑù’&;©RPÉA½•?cq(”™*[0˜_†Ñ¡ñ>§Þ¡¬•©*2N^­˜^iØ®m®=WŠ.y.±.™NŽXÄ~Þ¹¨`á°L±¦mï¸@u0ÝÀ)±,$®àÉú{•SV ƒg#ð­& G%^Vð`5k?«Ð åtD«dâpܶ9 K1s.fpMúRv:MMJnáZ¤¼œØZù°0Å´-zOUèjj»_8¨™BYrž«5q¼¸(ð /kÜcY‹»eÅÄŸx Ý‹â¼XÞ°8j<À­µ‘”MXsäѱNbŽÔøq;ŸÖcFç0dî1m)œpKêĨ`8%Ug8Õˆ¯LËž¼¡K3Xg!-ºô ›-\_Ë«nó5'õ¾ä™9t4'+WÏΚ-s›y¢æpoзØþZW”¨ 9õT³Y®;¢Ôæ5K« $&ß}4#ˆ$ìÁ¾³<êuJf“,Åá@XD†¶'ç4„3*&>SÄÁÂ>Ý"æŒpxèán.·¾Ž!»ýˆÍ¿p¾áÍædX7bSUŸ­î¤~™™HÐW¨íÚôßÁ¶®ØY°Älkª*«y]XÕtq~3i8/˜N6ª¤âá[LJ*p™r®ß!Û\I,=ÿî:tgÄqGyœ2|V>纬Tà1°Hå´p%·Kpr¨ô–ôñQð$”*+øÒ60)àäcà¨Ç_O1”¯×tæ~ÁtM´~$‡@ò2HªÊ9öL,zW볈1«žÈ÷Ñíš¼¤«njÄþp†´Äˆ>ßiÈÂrlËŠÀv”Éw¹ †Z•Miræ`Ö= óðš_x®â53I¸y9t•E—Á”¡)s»otüì9§(íXÈjáNë~)«Ÿ¹¢_‘¿³sÈÆ˜¥{àAz|/Hì¸Gzüøb ì’Së­ÊÓS÷ž†[ˆú0RÔîn4¶œŒ´9˜ÎØ©ª¥£Vk©Ê«ãEͰÔÉ6cBvM 
dar-2.4.8/doc/samples/dar_backup0000644000175000017430000001064612010477201013446 00000000000000#!/usr/bin/perl -w use strict; use diagnostics; # Device that is the DVD drive my $DVD=("/dev/hdc"); # Size of each slice - DVD max is 4482M # MC - for testing # my $SLICE_SIZE=("10M"); # my $SLICE_SIZE=("4400M"); # doesn't work # BUG - Linux isofs limited to single files of 2^32=4096MB # my $SLICE_SIZE=("4000M"); # value used by Daromizer is bigger than mine, use it # my $SLICE_SIZE=("4189500K"); # need more space for parity data my $SLICE_SIZE=("4000M"); # directory that all paths must be relative to # NOTE - all backup paths are relative to this my $ROOT_DIR=("/mnt/backup"); # where all created files will be stored my $STORAGEDIR=("/mnt/backup/backups/"); # list of dirs to be backed up # NOTE 1 - these are paths relative for $ROOT_DIR, above # NOTE 2 - this is used for naming; everything after the last / is used # for the base name. DO NOT have two things be the same (like /usr/bin and # /usr/local bin). Otherwise, one will be overwritten # MC for testing # my @BACKUPDIRS=("test"); my @BACKUPDIRS=("local","home","pub"); # this the path to the slice as expressed in things that dar will # substitute the right values for (it's just used in 2 places) my $SLICE_PATH=("%p/%b.%N.%e"); my $SLICE_NAME=("%b.%N"); my $PARITY_PATH=("%p/%b.%N.par2"); # par2 creates a bunch of "vol" files, we need those too my $PARITY_FILES=("%p/%b.%N.*.par2"); # list of stuff to be compressed.
This must be in the form of # -Z \"*.mask\" # with -Z repeated for each one my $NO_COMPRESS_LIST=("-Z \"*.gz\" -Z \"*.GZ\" -Z \"*.bz2\" -Z \"*.BZ2\" -Z \"*.zip\" -Z \"*.ZIP\" -Z \"*.ogg\" -Z \"*.OGG\" -Z \"*.mp3\" -Z \"*.MP3\" -Z \"*.mpg\" -Z \"*.MPG\" -Z \"*.mpeg\" -Z \"*.MPEG\" -Z \"*.wmv\" -Z \"*.WMV\" -Z \"*.avi\" -Z \"*.AVI\" -Z \"*.jpg\" -Z \"*.JPG\" -Z \"*.jpeg\" -Z \"*.JPEG\" -Z \"*.png\" -Z \"*.PNG\" -Z \"*.gif\" -Z \"*.GIF\""); my $PRE_PARITY_MESSAGE=("echo ; echo Calculating parity information; echo"); my $PARITY_COMMAND=("par2create -r10 $PARITY_PATH $SLICE_PATH"); my $PRE_BLANK_MESSAGE=("echo ; echo Done archive, erasing DVD; echo"); my $BLANK_COMMAND=("dvd+rw-format -force /dev/hdc"); my $PRE_REC_MESSAGE=("echo ; echo Done erasing, burning to DVD; echo"); # Command to record the DVD, with options # -dvd-compat = make the most compatible DVD by closing the session # -Z = create a new session # -r = generate sane rock ridge extensions # -J = generate Joliet extensions # -V = volume ID # %b = dar will substitute the base name # %N = dar will substitute the number of the slice # %p = dar will substitute slice path # FOR TESTING = -dry-run my $RECORD_COMMAND=("growisofs -dvd-compat -Z $DVD -r -J -V $SLICE_NAME $SLICE_PATH $PARITY_PATH $PARITY_FILES"); my $EJECT_COMMAND=("eject $DVD"); my $POST_REC_MESSAGE=("echo ; echo Done burning $SLICE_NAME ; echo"); # deletes files once done with them # note - use AFTER record command # MC - for testing # my $DELETE_COMMAND=("echo deleting $SLICE_PATH $PARITY_PATH $PARITY_FILES"); my $DELETE_COMMAND=("rm -f $SLICE_PATH $PARITY_PATH $PARITY_FILES"); # dar with basic options # -y = compress with bzip2 using default compression of 6 # -s = slice it up # -R = root dir that all things to be backed up live in # -D = store empty directories too # -p = pause and wait for user to change DVD before continuing # -c (used below) = create an archive called whatever # FOR TESTING = -e my $DAR=("dar -y -s $SLICE_SIZE -R $ROOT_DIR -D $NO_COMPRESS_LIST -p -E \"$PRE_PARITY_MESSAGE ; $PARITY_COMMAND ; $PRE_BLANK_MESSAGE ; $BLANK_COMMAND ; $PRE_REC_MESSAGE ; $RECORD_COMMAND ; $EJECT_COMMAND ; $DELETE_COMMAND ; $POST_REC_MESSAGE\""); &main; sub main{ my $backup_base; my $backupdir; my ($day, $month, $year) = (localtime)[3,4,5]; $year+=1900; # compensate for 1900 based year $month+=1; # compensate for base 0 months my $targetbase; my $pause; # garbage input... foreach $backupdir (@BACKUPDIRS){ # this gets rid of paths and such from $backupdir, just in case $backup_base=$backupdir; $backup_base =~ s/^\///; # remove leading / $backup_base =~ s/\w+\///g; # remove everything matching "someword/" $targetbase=$STORAGEDIR.$backup_base."_".$month."_".$day."_".$year; print("Working on $backup_base\n"); # MC for debugging # print("Command is: $DAR $backupdir -c $targetbase"); system("$DAR $backup_base -c $targetbase"); print "Work on $backup_base complete. Change the DVD and\n"; print "press any key to continue..."; $pause = <STDIN>; #Like a PAUSE statement in DOS .bat files } } dar-2.4.8/doc/samples/JH-dar-make_user_backup.sh0000644000175000017430000001154712003275573016336 00000000000000#!/bin/sh ################################# # # # DAR Archiver script # # # ################################# # Jakub Holy 25.4.2005 # This file: $HOME/bin/dar-make_user_backup.sh # IMPORTANT: This script depends upon /etc/darrc (options for what not to compress / archive)
# Additional options are read from dar_archiver.options (see # $DAR_OPTIONS_FILE below) USAGE="echo -e USAGE: \n$0 -full | -inc" # ----------------------------- OPTIONS TO MODIFY DIR_TO_ARCHIVE=$HOME DEST_DIR=/mnt/mandrake/debian-bkp/ DAR_OPTIONS_FILE="$HOME/bin/dar_archiver.options" ARCHIVE_NAME="`/bin/date -I`_$USER" # Ex: 2005-04-25_jholy DAR_INFO_DIR="$HOME/backup" DAR_MANAGER_DB=${DAR_INFO_DIR}/dar_manager_database.dmd LAST_FULL_BACKUP_ID="2005-04-25" # The last full backup - the unique part of its name LAST_FULL_BACKUP=${DAR_INFO_DIR}/${LAST_FULL_BACKUP_ID}_aja-full-katalog MSG="" LOG_FILE="${DAR_INFO_DIR}/zaloha-aja-dar.log" # PARSE COMMAND LINE --------------------------------------------- INC_BKP_OPT="" # dar options needed to create an incremental backup: empty => full bkp if [ $# -ne 1 ]; then echo "ERROR: Wrong number of parameters" $USAGE exit 1 elif [ "X$1" != "X-full" -a "X$1" != "X-inc" ]; then echo "Unknown parameter" $USAGE exit 1 else if [ "X$1" = "X-full" ]; then echo "DAR: Doing FULL backup."; ARCHIVE_NAME="${ARCHIVE_NAME}-full" fi if [ "X$1" = "X-inc" ]; then echo "DAR: Doing INCREMENTAL backup with respect to $LAST_FULL_BACKUP."; INC_BKP_OPT=" -A $LAST_FULL_BACKUP " ARCHIVE_NAME="${ARCHIVE_NAME}-inc-wrt${LAST_FULL_BACKUP_ID}" fi echo "See the log in $LOG_FILE" fi # ----------------------------- OPTIONS CONT'D ARCHIVE=${DEST_DIR}/${ARCHIVE_NAME} CATALOGUE=${DAR_INFO_DIR}/${ARCHIVE_NAME}-katalog echo "-----------------------" >> "$LOG_FILE" # -m N - soubory pod N [B] nejsou komprimovany # -Z pattern - soub. odpovidajici vzoru nejsou komprimovany # -P subdir - adresare kt. se nezalohuji; relativni w.r.t. -R # -X pattern - exclude files matching pattern; nesmi tam byt file path # -R /home/aja - adresar, ktery zalohujeme # -s 700M - na jak velke kusy se archiv rozseka # -y [level] - proved bzip2 kompresi # -c `date -I`_bkp - vystupni archiv (pribude pripona .dar) # -G - generuj zvlast katalog archivu # -D,--empty-dir - vtvor prazdne adresare pro ty excludovane (s -P) # -M - skip other filesystems (tj. namountovane FS). # -v - verbose output # --beep - pipni kdyz je pozadovana uzivatelova akce # -A basename - vytvor incremental backupwrt archive se zakladem jmena 'basename' # Misto archivu lze pouzit i catalog. # Soubory kt. nelze komprimovat (upper i lower case): # bz2 deb ear gif GIF gpg gz chm jar jpeg jpg obj pdf png rar rnd scm svgz swf # tar tbz2 tgz tif tiff vlt war wings xpi Z zargo zip trezor COMMAND="dar -c $ARCHIVE -R $DIR_TO_ARCHIVE -B $DAR_OPTIONS_FILE $INC_BKP_OPT" echo "Backup started at: `date`" >> "$LOG_FILE" echo "Making backup into $ARCHIVE; command: $COMMAND" >> "$LOG_FILE" echo "Making backup into $ARCHIVE; command: $COMMAND" ### ARCHIVACE ----------------------------------------------------------------- $COMMAND # Perform the archive command itself RESULT=$? # Get its return value ( 0 == ok) ### TEST THE OUTCOME if [ $RESULT -eq 0 ]; then ## Check the archive ........................................................ echo "Backup done at: `date`. Going to test the archive." >> "$LOG_FILE" echo "Backup done at: `date`. Going to test the archive." if dar -t $ARCHIVE # > /dev/null # to ignore stdout in cron uncomment this then MSG="Archive created & successfully tessted."; else MSG="Archive created but the test FAILED"; fi echo "Test of the archive done at: `date`." >> "$LOG_FILE" echo "Test of the archive done at: `date`." 
else MSG="The backup FAILED (error code $RESULT)" echo -e "$MSG" "\nEnded at: `date` \n">> "$LOG_FILE" echo "$MSG" exit 1 fi ### KATALOG - import into the manager ............................................ echo "Going to create a catalogue of the archive..." >> "$LOG_FILE" echo "Going to create a catalogue of the archive..." dar -C "$CATALOGUE" -A "$ARCHIVE" dar_manager -v -B "$DAR_MANAGER_DB" -A "$ARCHIVE" echo "The catalogue created in $CATALOGUE and imported into the base $DAR_MANAGER_DB" >> "$LOG_FILE" echo "The catalogue created in $CATALOGUE and imported into the base $DAR_MANAGER_DB" echo -e "$MSG" "\nEnded at: `date` \n">> "$LOG_FILE" echo "$MSG" ### Incremental backup # -A dar_archive - specifies a former backup as a base for this incremental backup # Ex: dar ... -A a_full_backup # there's no '.dar', only the archive's basename # Note: instead of the origina dar_archive we can use its calatogue ### Extract the catalogue from a backup # Ex: dar -A existing_dar_archive -C create_catalog_file_basename dar-2.4.8/doc/samples/dar_par_create.duc0000755000175000017430000000200512010477201015051 00000000000000#!/bin/sh ### # # this script is to be launched on dar command line when creating an archive with -s option (slicing) # you need to run this script from dar, adding the following argument on command-line # # -E "dar_par_create.duc %p %b %N %e %c 20" # # note that 20 means 20% of redundancy, tune it to your needs # ### # # if you prefer you can also add the line above in your the $HOME/.darrc file # under the create: conditional statement (see dar man page) # ### # # usage par_script slice.basename slice.number extension level # generates a Parchive redundancy file from the slice file # ### if [ "$1" = "" -a "$2" = "" -a "$3" = "" -a "$4" = "" -a "$6" = "" ]; then echo "usage: $0 " echo "$0 builds Parchive redundancy file for the given slice" exit 1 fi # change according to you need PAR=par2 echo "creating PAR file for file $1/$2.$3.dar ..." exec $PAR c -r$6 -n1 "$1/$2.$3.$4" # script returned code it those of par dar-2.4.8/doc/samples/JH-readme.txt0000644000175000017430000000056112003275573013732 00000000000000The script makes a backup of user's $HOME, either full of incremental, not compressing compressed files & compressed media and skipping some unimportant directories such as ~/Trash. The darrc is to be in /etc/darrc, dar_archiver.options is used by the script. The script has a part 'OPTIONS TO MODIFY' that shall be altered for customization. Regards, Jakub Holy dar-2.4.8/doc/samples/dar_par_test.duc0000755000175000017430000000236012010477201014571 00000000000000#!/bin/sh ### # # this script is to be launched on dar command line when testing an archive with -s option (slicing) # you need to run this script from dar, adding the following argument on command-line # # -E "dar_par_test.duc %p %b %n %e %c" # ### # # if you prefer you can also add the line above in your the $HOME/.darrc file # under the test: conditional statement (see dar man page) # ### # # usage par_script slice.basename slice.number extension # generates a Parchive redundancy file from the slice file # ### if [ "$1" = "" -a "$2" = "" -a "$3" = "" -a "$4" = "" -a "$5" = "" ]; then echo "usage: $0 " echo "$0 tests and if necessary repairs the given slice using Parchive redundancy files" exit 1 fi if [ "$3" = "0" ]; then exit 0 fi PAR=par2 SLICE="$1/$2.$3.$4" if [ ! 
-r $SLICE ]; then echo "`basename $0`: Cannot find or read the slice $SLICE, skipping, Dar will ask user for it" exit 0; fi echo "$PAR verification of slice $SLICE..." if ! $PAR v "$SLICE" ; then echo "trying to repair the slice..." if ! $PAR r "$SLICE" ; then echo "PAR repair failed. (read-only filesystem ?)" exit 1 fi echo "verifying after repair..." exec $PAR v "$SLICE" fi dar-2.4.8/doc/samples/cluster_digital_readme.txt0000644000175000017430000000057612003275573016673 00000000000000What follows is an extract from several email exchanges with Roi Rodriguez. Denis. ---- "[the] remote copy feature needs to use a ssh authentication method which doesn't prompt for a password (in order to make it non-interactive, so you can run it from cron. It's no needed if someone plan to run it by hand, of course). I've added this comment at the beginning of the script." ----dar-2.4.8/doc/samples/dar_par.dcf0000644000175000017430000000121312010477201013504 00000000000000# configuration file for dar to have Parchive integrated # with DAR # to be passed to dar as argument of -B option # either directly on command line or through $HOME/.darrc or /etc/darrc # files create: -E "SOMEPATH/dar_par_create.duc %p %b %N %e %c 2" # 2 stands for 2% of redundancy # adjust it to your needs test: -E "SOMEPATH/dar_par_test.duc %p %b %N %e %c" # note that you may need to set the path to dar_par_test.duc # and dar_par_create.duc # fix from Sergey Feo default: -E "echo Warning: dar_par.dcf will not be used in this operation. Please review command line options. -c or -t should be used before -B ...dar_par.dcf" dar-2.4.8/doc/samples/Makefile.am0000644000175000017430000000231712010477201013460 00000000000000NO_EXE_SAMPLES = darrc_sample sample1.txt README automatic_backup.txt JH-readme.txt JH_dar_archiver.options JH_darrc cluster_digital_readme.txt index.html PN_backup-root.options PN_backup-storage.options Patrick_Nagel_Note.txt EXE_SAMPLES = cdbackup.sh pause_every_n_slice.duc automatic_backup dar_backup dar_rqck.bash JH-dar-make_user_backup.sh cluster_digital_backups.sh dar_par_create.duc dar_par_test.duc MyBackup.sh.tar.gz PN_backup-root.sh PN_backup-storage.sh PN_ftpbackup.sh dar_backups.sh available_space.duc dist_noinst_DATA = $(NO_EXE_SAMPLES) $(EXE_SAMPLES) dar_par.dcf etc_darrc install-data-hook: $(INSTALL) -d $(DESTDIR)$(pkgdatadir)/samples sed -e "s%SOMEPATH%$(pkgdatadir)/samples%g" dar_par.dcf > $(DESTDIR)$(pkgdatadir)/samples/dar_par.dcf chmod 0644 $(DESTDIR)$(pkgdatadir)/samples/dar_par.dcf $(INSTALL) -m 0644 $(NO_EXE_SAMPLES) $(DESTDIR)$(pkgdatadir)/samples $(INSTALL) -m 0755 $(EXE_SAMPLES) $(DESTDIR)$(pkgdatadir)/samples $(INSTALL) -d $(DESTDIR)$(sysconfdir) sed -e "s%SOMEPATH%$(pkgdatadir)/samples%g" etc_darrc > $(DESTDIR)$(sysconfdir)/darrc uninstall-local: rm -rf $(DESTDIR)$(pkgdatadir)/samples # $(sysconfdir)/darrc not removed as it may contain system admin specific configuration dar-2.4.8/doc/samples/dar_backups.sh0000644000175000017430000001054412003275573014245 00000000000000#!/bin/bash # Script Name: dar_backups.sh # Author: Roi Rodriguez Mendez & Mauro Silvosa Rivera (Cluster Digital S.L.) # Fixes by: Jason Lewis - jason at NO dickson SPAM dot st # Description: dar_backups.sh is a script to be run from cron which # backs up data and stores it locally and optionally remotely using scp. # It decides between doing a master or an incremental backup based # on the existence or not of a master one for the actual month. 
# Revision History: # 23.06.2008 - modified to work with latest version of dar which requires -g before each path to backup - Jason Lewis # 24.10.2006 - changed script to do differential backups based on the last diff # 18.10.2006 - added BACKUP_PATHS variable to simplify adding new paths # Jason Lewis jason@NOdicksonSPAM.st # 22.08.2005 - Creation # Base directory where backups are to be stored BASE_BAK_DIR=/backup # base directory for files to backup. all paths for backing up are listed relative to this path ROOT_DIR=/ # Paths to backup # add paths here, in a space separated list between round brackets. # you can escape out spaces with \ or '' # Paths should be relative to ROOT_DIR #BACKUP_PATH=(my/first/path another\ path/with\ spaces 'yet another/path/with/spaces') BACKUP_PATHS=( home user/lib/cgi-bin var/www/cgi-bin var/lib/cvs var/lib/svn var/lib/accounting mysql_backup usr/local/bin etc ) # Directory where backups for the actual month are stored (path relative to # $BASE_BAK_DIR) MONTHLY_BAK_DIR=`date -I | awk -F "-" '{ print $1"-"$2 }'` # Date check variable CURRENT_MONTH=$MONTHLY_BAK_DIR # Name and path for the backup file. SLICE_NAME=${BASE_BAK_DIR}/${MONTHLY_BAK_DIR}/backup_`date -I` # Max backup file size SLICE_SIZE=200M # Remote backup settings REMOTE_BAK="false" REMOTE_HOST="example.com" REMOTE_USR="bakusr" REMOTE_BASE_DIR="/var/BACKUP/example.com/data" REMOTE_MONTHLY_DIR=$MONTHLY_BAK_DIR REMOTE_DIR=${REMOTE_BASE_DIR}/${REMOTE_MONTHLY_DIR} ######################################################## # you shouldn't need to edit anything below this line # # STR='a,b,c'; paths=(${STR//,/ }); TEST=`echo ${paths[@]//#/-g }`;echo $TEST # args=(); for x in "${paths[@]}"; do args+=(-g "$x"); done; program "${args[@]}" #BACKUP_PATHS_STRING=`echo ${BACKUP_PATHS[@]//#/-g }` args=() for x in "${BACKUP_PATHS[@]}"; do args+=(-g "$x"); done; BACKUP_PATHS_STRING="${args[@]}" echo backup path string is "$BACKUP_PATHS_STRING" ## FUNCTIONS' DEFINITION # Function which creates a master backup. It gets "true" as a parameter # if the monthly directory has to be created. function master_bak () { if [ "$1" == "true" ] then mkdir -p ${BASE_BAK_DIR}/${MONTHLY_BAK_DIR} fi /usr/bin/dar -m 256 -s $SLICE_SIZE -y -R $ROOT_DIR \ $BACKUP_PATHS_STRING -c ${SLICE_NAME}_master #> /dev/null if [ "$REMOTE_BAK" == "true" ] then /usr/bin/ssh ${REMOTE_USR}@${REMOTE_HOST} "if [ ! -d ${REMOTE_DIR} ]; then mkdir -p $REMOTE_DIR; fi" for i in `ls ${SLICE_NAME}_master*.dar` do /usr/bin/scp -C -p $i ${REMOTE_USR}@${REMOTE_HOST}:${REMOTE_DIR}/`basename $i` > /dev/null done fi } # Makes the incremental backups function diff_bak () { MASTER=$1 /usr/bin/dar -m 256 -s $SLICE_SIZE -y -R $ROOT_DIR \ $BACKUP_PATHS_STRING -c ${SLICE_NAME}_diff \ -A $MASTER #> /dev/null if [ "$REMOTE_BAK" == "true" ] then for i in `ls ${SLICE_NAME}_diff*.dar` do /usr/bin/scp -C -p $i ${REMOTE_USR}@${REMOTE_HOST}:${REMOTE_DIR}/`basename $i` > /dev/null done fi } ## MAIN FLUX # Set appropriate umask value umask 027 # Check for existing monthly backups directory if [ ! -d ${BASE_BAK_DIR}/${MONTHLY_BAK_DIR} ] then # If not, tell master_bak() to mkdir it master_bak "true" else # Else: # MASTER not void if a master backup exists # original line to get the master backup does not take into account the diffs # MASTER=`ls ${BASE_BAK_DIR}/${MONTHLY_BAK_DIR}/*_master*.dar | tail -n 1 | awk -F "." 
'{ print $1 }'` # new master line gets the latest dar backup and uses that to make the diff MASTER=`ls -t ${BASE_BAK_DIR}/${MONTHLY_BAK_DIR}/*.dar | head -n 1 | awk -F "." '{ print $1 }'` # Check if a master backup already exists. if [ "${MASTER}" != "" ] then # If it exists, it's needed to make a differential one diff_bak $MASTER else # Else, do the master backup master_bak "false" fi fi dar-2.4.8/doc/samples/JH_darrc0000644000175000017430000000271712003275573013041 00000000000000 ######################### # # # Shared options pro # # DAR archiver # # # ######################### # Jakub Holy, 25.4.2005 # This file: /etc/darrc # Note: If the user has her $HOME/.darrc, this file is ignored => it must be included with the option -B /etc/darrc to dar ### ### Files that shall not be compressed (because they're already) ### # archives (Note: .tar is archive, but not compressed => do compress it). -Z "*.bz2" -Z "*.deb" -Z "*.gz" -Z "*.Z" -Z "*.zip" -Z "*.rar" -Z "*.tbz2" -Z "*.tgz" -Z "*.jar" -Z "*.ear" -Z "*.war" -Z "*.BZ2" -Z "*.DEB" -Z "*.GZ" -Z "*.Z" -Z "*.ZIP" -Z "*.RAR" -Z "*.TBZ2" -Z "*.TGZ" -Z "*.JAR" -Z "*.EAR" -Z "*.WAR" # media - images -Z "*.gif" -Z "*.jpeg" -Z "*.jpg" -Z "*.png" -Z "*.GIF" -Z "*.PNG" -Z "*.JPEG" -Z "*.JPG" # media - audio -Z "*.ogg" -Z "*.mp3" -Z "*.OGG" -Z "*.MP3" # media - video -Z "*.avi" -Z "*.mov" -Z "*.mp4" -Z "*.mpg" -Z "*.AVI" -Z "*.MOV" -Z "*.MP4" -Z "*.MPG" # documents - compressed formats -Z "*.pdf" -Z "*.swf" -Z "*.sxw" -Z "*.PDF" -Z "*.SWF" -Z "*.SXW" # strange formats, binaries and other hard to compress (empirical) -Z "*.gpg" -Z "*.rnd" -Z "*.scm" -Z "*.svgz" -Z "*.vlt" -Z "*.zargo" -Z "*.wings" -Z "*.xpi" -Z "*.chm" -Z "*.GPG" -Z "*.RND" -Z "*.SCM" -Z "*.SVGZ" -Z "*.VLT" -Z "*.ZARGO" -Z "*.WINGS" -Z "*.XPI" -Z "*.CHM" -Z "*.obj" -Z "*.tif" -Z "*.tiff" -Z "*.OBJ" -Z "*.TIF" -Z "*.TIFF" ### ### Ignored files ### -X "*~" -X "*.o" # *~ are backups, *.o are compiled unlinked files dar-2.4.8/doc/samples/PN_backup-storage.sh0000644000175000017430000000015512003275573015274 00000000000000#!/bin/bash dar -c "/mnt/storage/backup/storage_$(date +%Y-%m-%d-%H%M%S)" -B "/root/backup-storage.options" dar-2.4.8/doc/samples/sample1.txt0000644000175000017430000000445512003275573013546 00000000000000#Preface #-------- # #Here follows a sample batch file submited by Henrik Ingo (Thanks Henrik ;-) ). #It is complete for backup but does not use conditional syntax. Over comments #(lines staring with #) all commands can also take place on the command-line. #Thus, this is a nice way to discover DAR's features. # # Denis Corbin ########################################################################### #Execution file for dar (Disc Archiver) #Sipmly use 'dar -B thisfile' to backup #This backs up my home machine #Where to place the backup (somewhere with lots of space) --create /mnt/win_d/darbackups/my_backup #General settings #size of an archive (one slice). 650M fits nicely on CD-R (and RW?) 
-s 650M #compress using bzip -y #verbose -v #Files not to compress -Z "*.mp3" -Z "*.avi" -Z "*.mpg" -Z "*.mpeg" -Z "*.divx" -Z "*.rm" -Z "*.wmv" -Z "*.wma" -Z "*.asf" -Z "*.ra" -Z "*.gif" -Z "*.jpg" -Z "*.jpeg" -Z "*.png" -Z "*.zip" -Z "*.tgz" -Z "*.gzip" -Z "*.bzip" -Z "*.bzip2" -Z "*.rar" -Z "*.Z" #Define directories to be backed up #First give a root --fs-root / #Then list directories to back up (relative to fs-root) #If none are given, everything under root is backed up #If something is specified, only those are backed up #just/give/path/like/this #Exclude directories/files with the --prune option #--prune not/this -g etc -g var/lib --prune var/lib/rpm --prune var/lib/urpmi var/local var/www var/ftp usr/local -g root --prune root/RPMS --prune root/tmp --prune root/kino --prune root/Desktop/Trash --prune root/Desktop/Roskakori --prune root/.Trash -g home/hingo --prune home/hingo/tmp --prune home/hingo/RPMS --prune home/hingo/kino --prune home/hingo/Desktop/Trash --prune home/hingo/.Trash --prune home/hingo/nobackup #Be sure to add quotes around tricky paths, or why not all paths... "mnt/win_d/My Documents/" -g mnt/win_d/text/ #End of file #Use something like this to restore everything: # dar -x /mnt/win_d/darbackups/SIMSON_backup -R / #something like this to restore something (etc-subtree): # dar -x /mnt/win_d/darbackups/SIMSON_backup -R / etc #And something like this to retrieve a single file to temp # dar -x /mnt/win_d/darbackups/SIMSON_backup -R /tmp/ etc/httpd/conf/httpd2.conf --flat #Really looking forward to having ark support for dar! dar-2.4.8/doc/samples/available_space.duc0000755000175000017430000000171712010477201015222 00000000000000#!/bin/sh if [ -z "$1" -o -z "$2" -o -z "$3" -o -z "$4" -o -z "$5" -o -z "$6" -o -z "$7" ]; then echo "This script is expected to be run from dar this way:" echo "dar ... -E \"$0 %p %b %n %e %c \" ..." echo "where %p %b ... %c are to be used verbatim, while is to be" echo "replaced by the path of the mounted filesystem to monitor" echo "and by the minimum space required to store a full slice" exit 1 fi SLICE_PATH="$1" SLICE_BASENAME="$2" SLICE_NUMBER="$3" SLICE_EXTENSION="$4" DAR_CONTEXT="$5" MOUNT_POINT="$6" SLICE_SIZE="$7" FREE=`df $MOUNT_POINT | grep '/' | sed -re 's/.*[ ]+([0-9]+)[ ]+[0-9]+%.*/\1/'` while [ $FREE -le $SLICE_SIZE ]; do FREE=`df $MOUNT_POINT | grep '/' | sed -re 's/.*[ ]+([0-9]+)[ ]+[0-9]+%.*/\1/'` echo Free space on $MOUNT_POINT is $FREE KB echo "Waiting for disk change... Press enter when ready to continue!" read i done echo "Continuing with slice $SLICE_NUMBER" dar-2.4.8/doc/samples/README0000644000175000017430000000011012003275573012303 00000000000000Please point your web browser to the index.html page of this directory. dar-2.4.8/doc/samples/JH_dar_archiver.options0000644000175000017430000000201212003275573016055 00000000000000################################# # # # DAR Archiver - options # # # ################################# # -m N - do not compress files smaller then N [B] # -Z pattern - matching files are not compressed # -P subdir - ignore (don't backup) directories matching the pattern; relative to -R # -X pattern - exclude files matching pattern; it may not include a file path, only the name # -R /home/aja - the directory to backup # -s 700M - cut the archive into 'slices' (parts) of max. size 700 MB # -y [level] - compress with bzip2 # -G - generuj zvlast katalog archivu # -D,--empty-dir - vtvor prazdne adresare pro ty excludovane (s -P) # -M - skip other filesystems (tj. namountovane FS). 
# -v - verbose output # --beep - pipni kdyz je pozadovana uzivatelova akce # !!! The option -c , has to be on the cmd line # !!! The option -R as well ## Obecne volby -s 700M -m 256 -y -M -v --empty-dir --beep ## Preskocene adresare -P .java/deployment -P .netbeans/var -P Trash dar-2.4.8/doc/samples/PN_backup-root.options0000644000175000017430000000117412003275573015676 00000000000000### Options that are appended to the dar command: # No warning when not run from a terminal -Q # Don't try to read darrc files -N # Be verbose (so everything can be logged) -v # No warn on overwrite (should not happen anyway) -w # Compression level -z1 # Keep empty directories as such, so they can be restored -D # Blowfish encryption -K bf:secretpassword # Directory to backup -R "/" # Excludes (must be specified as relative paths to the directory # that is to be backed up) -P "mnt/loop" -P "mnt/storage" -P "mnt/tmp" -P "mnt/backupftp" -P "dev/pts" -P "proc" -P "sys" -P "tmp" -P "var/tmp" -P "usr/tmp" -P "usr/portage/distfiles" dar-2.4.8/doc/from_sources.html0000644000175000017430000006701012010477201013355 00000000000000 Dar's Documentation - Compiling From Source
Dar Documentation


HOW TO COMPILE DAR AND LIBDAR





1 - Requirements


To compile dar from a source package (from GIT see below for additional steps) you need at least:
  1. a C++ compiler (tests were done with gcc-4.3.x, but other versions and compilers should work. Note: gcc-3.3.x has a bug that prevents it from compiling dar starting with dar-2.3.7; more precisely, the generated objects miss some symbols and cannot be linked together [SF 1890720]). To compile dar-2.3.8 and above you need at least gcc-3.4.x
  2. a linker like "ld" the GNU Linker
  3. make (tested with gnu make)

Optionally, you may also have the following tools and libraries installed:
  • libz library for gzip compression support
  • libbzip2 library for bzip2 compression support
  • liblzo2 library for lzo compression support
  • gnu Getopt support (Linux has it for all distro thanks to its glibc, this is not true for FreeBSD for example)
  • libgcrypt version 1.4.0 or greater for symmetric strong encryption (blowfish, aes, etc.) and hash (sha1, md5) support
  • doxygen for generation of source code documentation
  • upx to generate dar_suite upx compressed binaries
  • groff to generate html version of man pages

The consequences of building dar without these optional tools are the following:
  • If you lack libz library, dar will compile but will not be able to compress or uncompress an archive using the gzip algorithm
  • If you lack libbzip2 library dar will compile but will not be able to compress or uncompress an archive using the bzip2 algorithm
  • If you lack liblzo2 library dar will compile but will not be able to compress or uncompress an archive using the lzo algorithm
  • If you lack libgcrypt dar will still compile but you will not be able to use strong encryption nor hash file generation for each slice
  • If you lack Doxygen dar will still compile but you will not have the reference documentation for libdar after calling make
  • If you lack upx dar will still compile but the resulting binary will not be compressed after calling make install-strip
  • If you lack groff dar will not generate man pages in html format



2 - Compilation in brief


Once you have the minimum requirements, Dar has to be compiled from source code in the following way:

./configure [eventually with some options]
make
make install-strip

Important: due to a bug in the autoconf/libtool software used to build the configure script, you must not have spaces in the path where dar's sources are extracted. You can install the dar binary anywhere you want; the problem does not concern dar itself but the ./configure script used to build dar: to work properly it must not be run from a path which has a space in it.
Important too: by default the configure script sets optimization to -O2; depending on the compiler this may lead to problems in the resulting binary (or even in the compilation process). Before reporting a bug, first try to compile with less optimization:

CXXFLAGS=-O
export CXXFLAGS
make clean distclean
./configure [options...]
make
make install-strip

The configure script may receive several options, they are listed here.

Note for packagers that the DESTDIR variable may be set at installation time to install dar in another directory. This makes the creation of dar binary packages very easy. Here is an example

./configure --prefix=/usr [eventually with some options]
make
make DESTDIR=/some/where install-strip

This will install dar in /some/where/usr/{bin | lib | ...} directories. You can build a package from files under /some/where and install/remove the package at the root of your filesystem (thus here files will go in /usr/{bin | lib | ... }).



3 - Options for the configure script




Available options for the configure script

Optimization option:

 --enable-mode  --enable-mode=32 or --enable-mode=64
if set, replace "infinint" integers by 32 or 64 bits integers. This makes a faster executable and less fond of memory, but with several restrictions (about for example ability to handle large files, or high dates. See the limitations for more).

Deactivation options:

--disable-largefile Whatever your system is, dar will not be able to handle file of size larger than 4GB
--disable-ea-support
Whatever your system is, dar will not be able to save or restore Extended Attributes (see the Notes paragraphs I and V)

--disable-nodump-flag
Whatever your system is, dar will not be able to take care of the nodump-flag (thanks to the --nodump option)
--disable-dar-static
dar_static binary (statically linked version of dar) will not be built
--disable-special-alloc
dar uses a special allocation scheme by default (gathering the many small allocations into fewer big ones); this improves dar's execution speed
--disable-upx
If upx is found in the PATH, binaries are upx-compressed at the installation step. This can be disabled by this option, when upx is available and you don't want compressed binaries.
--disable-gnugetopt
 on non GNU systems (Solaris, etc.) configure looks for libgnugetopt to have the long options support thanks to the gnu getopt_long() call, this can be disabled.
--disable-thread-safe
 libdar may need POSIX mutexes to be thread-safe. If you don't want libdar relying on POSIX mutexes even if they are available, use this option. The resulting library may not be thread-safe. But it will always be thread-safe if you use --disable-special-alloc, and it will never be thread-safe if --enable-test-memory is used.
--disable-libdl-linking
Ignore any libdl library and avoid linking with it
--disable-libz-linking
Disable linking to libz, thus -zgzip:* option (gzip compression) will not be available
--disable-libbz2-linking
Disable linking to libbz2, thus -zbzip2:* option (libbz2 compression) will not be available
--disable-liblzo2-linking
Disable linking to liblzo2, thus -zlzo:* option (lzo compression) will not be available
--disable-libgcrypt-linking
Disable linking with the libgcrypt library. Neither strong encryption nor hashing of generated slices will be available.
--disable-build-html
Do not build API documentation reference with Doxygen (when it is available)
--disable-furtive-read
Do not try to detect whether the system does support furtive read mode. This will lead furtive read mode to stay disabled in any case.
 --disable-fast-dir
Disable optimization for large directories, doing so has a little positive impact on memory requirement but a huge drawback on execution time
--disable-execinfo
Disable reporting stack information on self diagnostic bugs, even when the execinfo library is available
Troubleshooting option:

--enable-os-bits
If set, dar uses the given argument (32 or 64) to determine which integer type to use. This must match your CPU register size. By default dar uses the system <stdint.h> file to determine the correct integer type to use


Debugging options:

--enable-examples
If set, example programs based on infinint will also be built
--enable-debug
If set, use debug compilation option, and if possible statically link binaries
--enable-pedantic
If set, transmits the -pedantic option to the compiler
--enable-build-usage
If set, rebuild usage files (requires libxml2)
--enable-profiling
Enable executable profiling
--enable-debug-memory
If set, logs all memory allocations and releases to /tmp/dar_debug_mem_allocation.txt  . The resulting executable is expected to be very slow
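
As an illustration, here is one possible way to combine a few of the options listed above with the DESTDIR mechanism shown earlier (the chosen mix of options and paths is only an example, not a recommendation):

./configure --prefix=/usr --enable-mode=64 --disable-upx --disable-dar-static
make
make DESTDIR=/tmp/dar-package install-strip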


4 - GIT


Presentation

To manage its source code versions DAR uses GIT (it used CVS up to Q1 2012).

Dar's repository Organization

GIT (more than CVS) eases the use of branches. In dar repository, there are thus a lot of them: the first and main one is called "master". It contains current development and most probably unstable code. There are other permanent branches that hold stable code. They are all named by "branch_A.B.x" where A and B are the numbers corresponding to a released versions family. For example, "branch_2.4.x" holds the stable code for releases 2.4.0, 2.4.1, 2.4.2 and so on. It also holds pending fixes for the next release on that branch you might be interested in.

The global organisation of the repository is thus the following:
 
 
 (HEAD of "master" branch)
    new feature 101
          |
          ^
          |
    new feature 100
          |
          ^
          |
    new feature 99
          |
          +--->-- fix 901 ->- fix 902 (release 2.4.1) ->- fix 903 ->- fix 904 (release 2.4.2) ->- fix 905 (HEAD of branch_2.4.x)
          |
    new feature 98
          |
          ^
          |
    new feature 97
          |
          +--->-- fix 801 ->- fix 802 (release 2.3.1) (also HEAD of branch_2.3.x as no pending fix is waiting for release)
          |
         ...
          |
          ^
          |
    initial version
         ____
                 

Usage

To get dar source code from GIT you have first to clone the repository hosted at sourceforge:

git clone git://dar.git.sourceforge.net/gitroot/dar/dar
cd dar

You will probably not want to use the current development code, so you have to switch from the master branch to the "branch_A.B.x" branch of your choice:

git checkout branch_2.4.x

That's all. You now have the most recent stable code (for branch_2.4.x in this example). To see what changes have been brought since the last release, use the following command:

git log
 
If you plan to keep the repository you've cloned, fetching the latest changes is as easy as running (no need to clone the repository from scratch again): 

git pull origin branch_2.4.x

Having the sources ready for compilation

Please read the file named "USING_SOURCE_FROM_GIT" located at the root of the directory tree you retrieved through GIT; it contains up to date information about the required tools and how to generate the configuration file. Then you can proceed to source compilation as done with a regular source package.



5 - EA Support & Compilation Problems

[this paragraph should no longer concern anyone today, it will be removed at the next major release (release 2.5.0). However, if you have found it of some use, please send a mail to the maintainer]

If you just want to compile DAR with EA support available, you just need the attr-x.x.x.src.tar.gz package to have the libattr library and header files installed. If you want to use EA, then you need to have EA support in your kernel.

[What follows in this chapter is becoming obsolete, you may skip it as today EA support is available in standard in at least Linux kernels]

I personally had some problems compiling dar with EA support, due to an EA package installation problem:

when installing the EA package, the /usr/include/attr directory is not created and the xattr.h file is not put in it. To solve this problem, create the directory manually, and copy into it the xattr.h file (and also attributes.h even if it is not required by dar), giving it proper permissions (world readable). These include files can be found in the "include" subdir of the xattr package: as root type the following, replacing <package> by the path where your package has been compiled:

cd /usr/include
mkdir attr
chmod 0755 attr
cp <package>/include/xattr.h <package>/include/attributes.h attr
cd attr
chmod 0644 *


The second problem is that, while linking, the static library version does not exist. You can build it using the following commands (after package compilation):

as previously as root type:

cd <package>/libattr
ar r libattr.a syscalls.o libattr.o
mv libattr.a /usr/lib
chmod 0644 /usr/lib/libattr.a


dar should now be able to compile with support for EA activated.

6 - Related Software




dar-2.4.8/doc/Makefile.am0000644000175000017430000000224612010477201012015 00000000000000SUBDIRS = samples mini-howto man dist_noinst_DATA = COMMAND_LINE Doxyfile dist_pkgdata_DATA = README Features.html Limitations.html Notes.html Tutorial.html Good_Backup_Practice.html FAQ.html api_tutorial.html dar_doc.jpg dar_s_doc.jpg index.html dar-catalog.dtd Known_Bugs.html authentification.html dar_key.txt from_sources.html api_4_4.html downloading.html presentation.html usage_notes.html INSTALL_HTML_MAN = $(INSTALL) -d $(DESTDIR)$(pkgdatadir)/man ; $(INSTALL) -m 0644 man/*.html $(DESTDIR)$(pkgdatadir)/man UNINSTALL_HTML_MAN = rm -rf $(DESTDIR)$(pkgdatadir)/man if USE_DOXYGEN DOXYGEN = @DOXYGEN_PROG@ all-local: Doxyfile.tmp Doxyfile.tmp: sed -e "s%##VERSION##%@PACKAGE_VERSION@%g" Doxyfile > Doxyfile.tmp cd .. ; $(DOXYGEN) doc/Doxyfile.tmp if [ -d html/search ]; then chmod u+x html/search ; fi clean-local: rm -rf html Doxyfile.tmp install-data-hook: cp -dR --preserve=mode html $(DESTDIR)$(pkgdatadir) $(INSTALL_HTML_MAN) uninstall-hook: rm -rf $(DESTDIR)$(pkgdatadir)/html $(UNINSTALL_HTML_MAN) rmdir $(DESTDIR)$(pkgdatadir) || true else all-local: clean-local: install-data-hook: $(INSTALL_HTML_MAN) uninstall-hook: $(UNINSTALL_HTML_MAN) endif dar-2.4.8/doc/dar_doc.jpg0000644000175000017430000002055012003275573012066 00000000000000
dar-2.4.8/doc/Good_Backup_Practice.html0000644000175000017430000004752412010477201014646 00000000000000 Good Backup Practice Short Guide
Dar Documentation


Good Backup Practice Short Guide





Presentation

This short guide gathers important (and somewhat obvious) techniques about computer backups. It also explains the risks you take by not following these principles. I thought this was obvious and well known to everyone, until recently when I started getting feedback from people complaining about their lost data because of bad media or other reasons. To the question "have you tested your archive?", I was surprised to get negative answers.

This guide is not especially tied to Disk ARchive (aka dar) any more than to any other tool, thus you can take advantage of reading this document if you are not sure of your backup procedure, whatever backup software you use.

Notions

In the following we will speak about backup and archive:
  • by backup, is meant a copy of some data that remains in place in an operational system
  • by archive, is meant a copy of data that is removed afterward from an operational system. It stays available but is no more used frequently.
With the previous meaning of an archive you can also make a backup of an archive (for example a clone copy of your archive).

Archives

1. The first thing to do just after making an archive is testing it on its definitive medium. There are several reasons that make this testing important:
  • any medium may have a surface error, which in some cases cannot be detected at writing time.
  • the software you use may have bugs (dar can too, yes. ;-) ... ).
  • you may have done a wrong operation or missed an error message (no space left to write the whole archive and so on), especially when using poorly written scripts.
Of course the archive testing must be done once the backup has been put in its definitive place (CD-R, floppy, tape, etc.); if you have to move it (copy it to another medium), then you need to test it again on the new medium. The testing operation must read/test all the data, not just list the archive contents (-t option instead of -l option for dar). And of course the archive must have a minimum mechanism to detect errors (dar has one without compression, and two when using compression).

2. As a replacement for testing, a better operation is to compare the files in the archive with the original files on the disk (-d option for dar). This does the same as testing archive readability and coherence, while also checking that the data is really identical, whatever the corruption detection
mechanisms used are. This operation is not suited to a set of data that changes (like an active system backup), but is probably what you need when creating an archive.

3. Increasing the degree of security, the next thing to try is to restore the archive in a temporary place or, better, on another computer. This lets you check that, from end to end, you have a good usable backup on which you can rely. Once you have restored, you will need to compare the result; the diff command can help you here. Moreover, diff is a program that has no link with dar, so it would be very improbable for a bug common to both dar and diff to let you think the original and restored data are identical while they are not!
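
To make points 1 to 3 above concrete, here is a minimal command-line sketch using the -t, -d and -x options mentioned there (archive name and paths are made up, and the diff line assumes the archive was taken from / and contains /home):

# 1. test the archive once it sits on its definitive medium
dar -t /mnt/cdrom/my_backup
# 2. or better, compare it against the original files it was made from
dar -d /mnt/cdrom/my_backup -R /
# 3. or restore it to a temporary place and compare with diff
dar -x /mnt/cdrom/my_backup -R /tmp/restore_test
diff -r /home /tmp/restore_test/home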

4. Unfortunately, many (all) media alter with time, and an archive that was properly written on a correct medium may become unreadable with time and/or bad environmental conditions. Thus, of course, take care not to store magnetic storage near magnetic sources (like HiFi speakers) or enclosed in metallic boxes, and avoid having the sun directly lighting your CD-R(W), DVD-R(W), etc. Also mentioned for many media is humidity: respect the acceptable humidity range for each medium (don't store your data in your bathroom, kitchen, cave, ...). Same thing about the temperature. More generally have a look at the safe environmental conditions described in the documentation, even just once for each media type.

The problem with archives is that you usually need them for a long time, while the media have a limited lifetime. A solution is to make one (or several) copy of the data (i.e.: a backup of the archive) when the original support has reached half of its expected life.

Another solution is to use Parchive. It works on the principle of RAID disk systems, creating beside each file a par file which can be used later to recover missing or corrupted parts of the original file. Of course, Parchive can work on dar's slices. But it requires more storage, thus you will have to choose a smaller slice size to have room to put Parchive data on your CD-R or DVD-R for example. The amount of data generated by Parchive depends on the redundancy level (Parchive's -r option). Check the notes for more information about using Parchive with dar. When using a read-only medium, you will need to copy the corrupted file to a read-write medium so that Parchive can repair it. Unfortunately the usual 'cp' command will stop when the first I/O error is met, making you unable to get the sane data *after* the corruption. In most cases you will not have enough sane data for Parchive to repair your file. For that reason the "dar_cp" tool has been created (it is included in dar's package). It is a cp-like command that skips over the corruptions (replacing them by a field of zeroed bytes, which can be repaired afterward by Parchive) and can copy sane data after the corrupted part.
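
A minimal sketch of that recovery path, assuming the Parchive binary is invoked as "par" with the same "r" (repair) sub-command used by the dar_par_test.duc sample above, and that the corresponding par recovery files have been placed beside the copied slice (all paths are made up):

# copy the damaged slice off the read-only medium, zeroing the unreadable parts
dar_cp /mnt/cdrom/backup.1.dar /tmp/backup.1.dar
# then let Parchive repair the zeroed areas of the copy
par r /tmp/backup.1.dar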

5. Another problem arrives when an archive is often read. Depending on the medium, the mere fact of reading degrades the medium little by little and makes its lifetime shorter. A possible solution is to have two copies: one for reading, and one to keep as a backup which should never be read except to make a new copy. Chances are that the often-read copy will "die" before the backup copy; you could then make a new backup copy from the original backup copy, which in turn could become the new "often read" medium.

6. Of course, if you want to have an often-read archive and also want to keep it forever, you could combine the two previous techniques, making two copies, one for storage and one for backup. Once a certain time has passed (the medium's half lifetime for example), you could make a new copy and keep it beside the original backup copy, just in case.

7. Another problem is the safety of your data. In some cases, the archive you have does not need to be kept a very long time nor does it need to be read often, but it is instead very "precious". In that case a solution could be to make several copies that you store in very different locations. This could prevent data loss in case of fire or other disasters.

8. Yet another aspect is the privacy of your data. An archive may have to be kept out of reach of anyone but you. Several directions are possible to answer this problem:

  • Physical restriction to the access of the archive (stored in a bank or locked place, for example)
  • Hide the archive (in your garden ;-) ) or hide the data among other data (Edgar Poe's hidden letter technique)
  • Encrypting your archive
  • And probably some other ways I am not aware of.
For encryption, dar provides strong encryption inside the archive (blowfish, aes, etc.); it preserves the direct access feature, which avoids having to decrypt the whole archive to restore just one file. But you can also use an external encryption mechanism, like GnuPG, to encrypt slice by slice for example; the drawback is that you will have to decrypt each slice as a whole to be able to recover a single file in it.
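
A minimal sketch of dar's internal encryption, reusing the -K syntax that appears in the PN_backup-root.options sample above (base names, cipher choice and pass phrase are made-up placeholders):

# create an encrypted, compressed backup of a directory
dar -c /backup/private_data -R /home/me -z -K bf:some_pass_phrase
# restore a single file from it, without decrypting the whole archive
dar -x /backup/private_data -R /tmp/restore documents/precious.txt -K bf:some_pass_phrase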

Backup

Backups act a bit like an archive, except that they are a copy of a changing set of data, which is moreover expected to stay on the original location (the system). But, as an archive, it is a good practice to at least test the resulting backups, and once a year if possible to test the overall backup process by doing a restoration of your system into a new virtual machine or a spare computer, checking that the recovered system is fully operational.

The fact that the data is changing introduces two problems:

  • A backup is almost never up to date, and you will probably lose some data if you have to rely on it
  • A backup becomes soon obsolete.
The backup also has the role of keeping a recent history of changes. For example, you may have deleted a precious piece of data from your system, and it is quite possible that you only notice this mistake long after the deletion. In that case, an old backup stays useful, in spite of many more recent backups.

In consequence, backups need to be done often to have a minimum delta in case of a disk crash. But having a new backup does not mean that older ones can be removed. A usual way of handling this is to have a set of media over which you rotate the backups: the new backup is done over the oldest backup of the set. This way you keep a certain history of your system changes. It is your choice to decide how many archives you want to keep, and how often you make a backup of your system.
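
A small sketch of such a rotation, in the spirit of the dar_backups.sh sample above (the paths and the seven-slot weekly cycle are made up for the example):

# one backup slot per day of the week; each slot gets overwritten after seven days
SLOT=`date +%u`
mkdir -p /backup/slot_${SLOT}
dar -c /backup/slot_${SLOT}/system_backup -R / -z -w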

Differential / incremental backup

A point that can increase the history while reducing the media space required by each backup is the differential backup. A differential backup only records what has changed since a previous backup (the "backup of reference"). The drawback is that it is not autonomous and cannot be used alone to restore a full system. Thus there is no problem in keeping the differential backup on the same medium as the one where the backup of reference is located.

Doing a lot of consecutive differential backups (taking the last backup as reference for the next differential backup, which some call "incremental" backups) will reduce your storage requirement, but will add an extra time cost at restoration in case of a computer accident. You will have to restore the full backup (of reference), then restore the many backups you have done up to the last one. This implies that you must keep all the differential backups you have done since the backup of reference, if you wish to restore the exact state of the filesystem at the time of the last differential backup.

It is thus up to you to decide how many differential backups you do, and how often you make a full backup. A common scheme is to make a full backup once a week and a differential backup each day of the week. The backups done in a week are kept together. You could then have ten sets of full+differential backups, and a new full backup would erase the oldest full backup as well as its associated differential backups; this way you keep a ten week history of backups with a backup every day, but this is just an example.

An interesting protection was suggested by George Foot on the dar-support mailing-list: once you make a new full backup, the idea is to make an additional differential backup based on the previous full backup (the one just older than the one we have just built), which would act "as a substitute for the actual full backup in case something does go wrong with it later on".
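
Here is a minimal sketch of such a weekly full + differential scheme, using the same -c/-A/-R options as the dar_backups.sh sample above (base names, week numbers and paths are made up):

# Sunday: full backup of the system
dar -c /backup/week42_full -R / -z
# Monday to Saturday: differential backups, each taking the full backup as reference
dar -c /backup/week42_diff_monday -R / -z -A /backup/week42_full
# George Foot's extra safety net: when the next full backup is made,
# also keep a differential one based on the previous full backup
dar -c /backup/week43_full -R / -z
dar -c /backup/week42_to_43_diff -R / -z -A /backup/week42_full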

Decremental Backup

Based on a feature request for dar made by "Yuraukar" on the dar-support mailing-list, the decremental backup provides an interesting approach where the disk requirement is optimized as with incremental backups, while the latest backup is always a full backup (whereas with the incremental approach it is the oldest that is full). The drawback here is some extra work at each new backup creation to transform the formerly most recent backup from a full backup into a so-called "decremental" backup.

The decremental backup only contains the difference between the current state of the system and the state the system had at an earlier date (the date of the full backup this decremental backup corresponds to).

In other words, the building of decremental backups is the following :
  • Each time (each day for example), a new full backup is made
  • The full backup is tested, parity control is eventually built, and so on.
  • From the previous full backup and the new full backup, a decremental backup is made
  • The decremental backup is tested, parity control is eventually built, an so on.
  • The oldest full backup can then be removed
This way you always have a full backup as the lastest backup, and decremental backups as the older ones.

You may still have several sets of backups (one for each week, for example, containing at the end of a week a full backup and 6 decremental backups), but you may also keep just one set (a full backup, and a lot of decremental backups). When you need more space, you just have to delete the oldest decremental backups, something you cannot do with the incremental approach, where deleting the oldest backup means deleting the full backup that all the following incremental backups are based upon.

Unlike the incremental backup approach, it is very easy to restore a whole system: just restore the latest backup (as opposed to restoring the most recent full backup, then the many incremental backups that follow). If you now need to recover a file that has been erased by mistake, just use the adequate decremental backup. And it is still possible to restore a whole system globally to a state it had long before the latest backup was done: for that you will restore the full backup (the latest backup), then in turn each decremental backup up to the one that corresponds to the epoch of your wish. The probability that you have to use all decremental backups is thin compared to the probability of having to use all the incremental backups: there is effectively much more probability of restoring a system to a recent state than of restoring it to a very old state.

There is however several drawbacks:
time
Doing a full backup each time is time consuming, and creating a decremental backup from two full backups is even more time consuming...
temporary disk space
Each time you create a new backup, you temporarily need more space than with the incremental approach: you need to keep two full backups during a short period, plus a decremental backup (usually much smaller than a full backup), even if at the end you remove the oldest full backup.

In conclusion, I would not say that decremental backup is a panacea, however it exists and may be of interest to some of you. More information about dar's implementation of decremental backup can be found here.




Any other trick/idea/improvement/correction/evidence is welcome!

Denis.





dar-2.4.8/doc/Notes.html0000644000175000017430000032555712023047351011757 00000000000000 DAR/LIBDAR Internals - Notes
Dar Documentation


Dar/Libdar Internals - Notes





Introduction

Here is gathered a collection of notes. They have been created after the implementation of a given feature, mainly for further reference but also for user information. The idea behind these notes is to record, on one side, some implementation choices and the arguments that led to them, and on the other side to let the user be informed about the choices made and be able to bring his remarks, without having to look deeply into the code to learn dar's internals.

Contents



EA & differential backup

Brief presentation of EA:

EA stands for Extended Attributes. In a Unix filesystem a regular file is composed of a set of bytes (the data) and an inode. The inode adds properties to the file, such as owner, group, permissions, dates (last modification date of the data [mtime], last access date to the data [atime], and last inode change date [ctime]), etc. Last, the name of the file is not contained in the inode, but in the directory(ies) it is linked to. When a file is linked more than once in the directory tree, we speak about "hard links". This way the same data and associated inode appear several times in the same or different directories. This is not the same as a symbolic link, which is a file that contains the path to another file (which may or may not exist). A symbolic link has its own inode. OK, now let's talk about EA:

Extended Attributes are a recent feature of Unix filesystems (at the time of this writing, year 2002). They extend the attributes provided by the inode and associated with the data. They are not part of the inode, nor part of the data, nor part of a given directory. They are stored beside the inode and are a set of key/value pairs. The owner of the file can add or define any key and optionally associate data to it. He can also list and remove a particular key. What are they used for? A way to associate information with a file.

One particularly interesting use of EA is ACL: Access Control Lists. ACL can be implemented using EA and add a finer grain in assigning access permissions to files. For more information on EA and ACL, see the site of Andreas Grunbacher:

EA & Differential Backup

To determine that an EA has changed, dar looks at the ctime value. If ctime has changed (due to an EA change, but also to a permission or owner change), dar saves the EA. ctime also changes if atime or mtime changes. So if you access a file or modify it, dar will consider that the EA have changed also. This is not really fair, I admit.

Something better would be to compare EA one by one, and record those that have changed or have been deleted. But to be able to compare all EA and their values, the reference EA must reside in memory. As EA can grow up to 64 KB per file, this can lead to a quick saturation of the virtual memory, which is already heavily solicited by the catalogue.

These two schemes imply a different pattern for saving EA in the archive. In the first case (no EA in memory except at the time of operating on them), to avoid skipping around in the archive (and asking the user to change disks too often), EA must be stored beside the data of the file (if present). Thus they must be distributed all along the archive (except at the end, which only contains the catalogue).

In the second case (EA are loaded in memory for comparison), EA must reside beside or within the catalogue, in any case at the end of the archive, so that the user does not need all the disks just to take an archive as reference.

As the catalogue already grows fast with the number of files to save (from a few bytes for a hard link to around 400 bytes per directory inode), the memory saving option has been adopted.

Thus, EA changes are detected based on ctime changes. Unfortunately, no system call permits restoring ctime. Thus, restoring a differential backup after its reference has been restored will present restored inodes as more recent than those in the differential archive, so the -r option would prevent any EA restoration. In consequence, -r has been disabled for EA, it only concerns data contents. If you don't want to restore any EA but just more recent data, you can use the following : -r -u "*"



Archive structure in brief


The Slice Level

A slice is composed of a header, data and trailer (the trailer appeared with archive format version 8)

+--------+-------------------------------------------+-------+
| header |  Data                                     |Trailer|
|        |                                           |       |
+--------+-------------------------------------------+-------+

the slice header is composed of
  • a magic number that tells this is a dar slice
  • an internal_name which is unique to a given archive and shared by all slices
  • a flag that tells whether the slice is the last of the archive or whether a trailer is present that contains this info.
  • an extension flag, that was used in older archives but which is now always set to 'T', telling that a TLV list follows
  • A TLV (Type Length Value) list of item, it contains the slice size, first slice size. The TLV list will receive any future new field related to slice header.
+-------+----------+------+-----------+-------+
| Magic | internal | flag | extension | TLV   |
| Num.  | name     | byte | byte      | list  |
+-------+----------+------+-----------+-------+

The header is the first thing to be written, and if the current slice is not the last slice (all data to write could not fit in it), the flag field is changed indicating that another slice follows. The header is also the first part to be read. Since archive format 8, the flag is set to a specific value indicating that the information telling whether the slice is the last is placed in a slice trailer.

The TLV list may contain several fields:
  • First slice size [type 1]
  • Other slice size / all slice size if no first slice size is present [type 2]
  • data_name [type 3] . This field is detailed below.
A TLV list is of course a list of TLV:

+-------+----------+------+-----------+- ...-----+-------+
| Number| TLV 1    | TLV 2| TLV 3     |          | TLV n |
| of TLV|          |      |           |          |       |
+-------+----------+------+-----------+--...-----+-------+

Each TLV item is, as commonly, defined as set of three fields:

+---------+-------------------------+-------------------------+
| Type    | Length                  | Value                   |
|(2 bytes)| (arbitrary large value) | (arbitrary large data)  |

+---------+-------------------------+-------------------------+

The 2-byte type is large enough for today's needs (65535 different types while only three are used); however, TLV type 65535 is reserved for future use and will signal a new format for the type field.

To know in which slice and at which position to find a particular piece of data, dar needs to know each slice's size. This is the reason why each slice contains the slice size information, in particular the last slice. In older versions, dar had to read the first slice first to get this slicing information; then it could read the archive contents at the end of the last slice. Today, reading the last slice, dar can fetch the slicing scheme from the slice header (what we just detailed) and fetch the archive contents at the end of this same last slice.

The trailer (which is one byte long) is new since archive format version 8 (released with 2.4.0). It contains the value that was located in the header flag field in older archive formats, telling whether the slice is the last of the archive or not. When writing down a single-sliced archive (no -s option provided), both the header and the trailer tell that the slice is the last of the archive (duplicated information). However, when doing a multi-sliced archive, it is not possible to know whether a slice is the last before reaching the requested amount of data per slice (which depends on the amount of bytes to save, compression ratio, encryption overhead, etc.). Thus the header flag contains a value telling that, to know whether the slice is the last or not, one must read the trailer.

In the older format, it was necessary to seek back and update the header with the correct information when a new slice had to be created. But, keeping this behavior, it would not have been possible to compute a digest "on the fly" (see --hash option). The addition of the trailer was required for that feature: to compute an md5 or sha1 hash of each slice. But this costs one byte per slice, yes.
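
As a user-visible illustration of this feature, here is a small sketch (the name of the hash file written beside each slice is assumed here to be the slice name plus a .md5 extension; check your dar version's documentation):

# create a sliced archive, asking for an on-the-fly md5 hash of each slice
dar -c /tmp/demo_archive -R /some/dir -s 100M --hash md5
# later, verify a slice against the hash computed at creation time
md5sum -c /tmp/demo_archive.1.dar.md5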

Data Name

As seen above in the header fields, we have among others the three following identifiers:
  • magic number
  • internal name
  • data name
As already said, the magic number is constant and lets dar be (almost) sure a given file is a dar slice file. Also briefly explained, the internal_name is an identifier that lets dar be almost sure that several slices are from the same archive (problems can arise if two archives of same basename have their slices mixed together: dar will see that and report it to the user).

The new and not yet described field is the "data_name". A new feature of release 2.4.0 is the ability to use an extracted catalogue as a backup of the internal catalogue of a given archive. However dar must be (almost) sure that the extracted catalogue was not extracted from another archive than the one the user is trying to use it with. For this reason, the data_name identifier is kept identical between an archive and any of its extracted catalogues. Dar_xform also does not modify data_name, thus one can reslice an extracted catalogue or a given archive and still be able to use these new archives or extracted catalogues with older archives or extracted catalogues. By contrast, the merging operation creates a new archive with a different data_name, to prevent one from using an isolated catalogue of the original archive as a backup of the internal catalogue of a merged archive.
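
A hedged command-line sketch of this catalogue rescue mechanism (paths are made up; the way -A designates the isolated catalogue at testing/restoration time should be checked against your dar version's man page):

# isolate the catalogue of an existing archive into a small separate archive
dar -C /backup/full_catalogue -A /backup/full_archive
# later, if the internal catalogue of the archive is damaged, use the isolated
# catalogue as a rescue while testing or restoring the archive
dar -t /backup/full_archive -A /backup/full_catalogue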

Archive Level

The archive level describes the structure of the slice's data field (removing header and trailer of each slice), when they are all sticked together from slice to slice:

+---------+----------------------------+-----------+--------+---------+--------+
| version |   Data                     | catalogue | term 1 | version | term 2 |
| header 
|                            |           |        | trailer |        |
+---------+----------------------------+-----------+--------+---------+--------+

The version header is a short version of the trailer version. It is used when reading an archive in sequential mode, to be able to prepare the proper compression layer and to know whether escape sequence marks are present in the archive.

The version trailer (which may still be called "version header" in some parts of the documentation because it was only located at the beginning of the archive in previous archive formats) is composed of:
  • edition version of the archive
  • compression algorithm used
  • command line used for creating the archive, now known as "user comment"
  • flag (telling whether the archive is encrypted, whether it has escape sequence marks, etc.)
  • initial offset (telling where starts the data in the archive, is only present in the trailer)
  • CRC (Cyclic Redundancy Check) computed on the whole version header or trailer
+---------+------+---------------+------+--------+--------+
| edition | algo | command line  | flag | initial|  CRC   |
|         |      |               |      | offset |        |
+---------+------+---------------+------+--------+--------+

The trailer is used when reading an archive in direct access mode, to build the proper compression layer and escape layer (the latter is needed if marks have been inserted in the archive, to un-escape data that could otherwise be taken as an escape sequence mark).

The data is a suite of file contents, with EA if present

  ....--+---------------------+----+------------+-----------+----+---....
        |  file data          | EA | file data  | file data | EA |
        | (may be compressed) |    | (no EA)    |           |    |
  ....--+---------------------+----+------------+-----------+----+---....

The catalogue contains all inode, directory structure and hard_links information. The directory structure is stored in a simple way: the inode of a directory comes first, then the inodes of the files it contains, then a special entry named "EOD" for End Of Directory. Considering the following tree:

 - toto
    | titi
    | tutu
    | tata
    |   | blup
    |   +--
    | boum
    | coucou
    +---

it would generate the following sequence for catalogue storage:

+-------+------+------+------+------+-----+------+--------+-----+
|  toto | titi | tutu | tata | blup | EOD | boum | coucou | EOD |
|       |      |      |      |      |     |      |        |     |
+-------+------+------+------+------+-----+------+--------+-----+

EOD takes one byte, and this way there is no need to store the full path of each file: just the filename is recorded.

The terminator stores the position of the beginning of the catalogue; it is the last thing to be written. Thus dar first reads the terminator, then the catalogue. Well, there are now two terminators, both meant to be read backward. The second terminator points to the beginning of the "trailer version", which is read first in direct access mode. The first terminator points to the start of the catalogue, which is read once the ad hoc compression layer has been built based on the information found in the "trailer version".

All  Together

Here is an example of how data can be structured in a four slice archive:

+--------+--------+------------------------+--+
| slice  | version|  file data + EA        |Tr|
| header | header |                        |  |
+--------+--------+------------------------+--+

the first slice (just above) has been defined smaller using the -S option

+--------+-----------------------------------------------------------------+--+
| slice  |           file data + EA                                        |Tr|
| header |                                                                 |  |
+--------+-----------------------------------------------------------------+--+

+--------+-----------------------------------------------------------------+--+
| slice  |           file data + EA                                        |Tr|
| header |                                                                 |  |
+--------+-----------------------------------------------------------------+--+

+--------+---------------------+-----------+------ +---------+--------+--+
| slice  |   file data + EA    | catalogue | term 1| version | term 2 |Tr|
| header |                     |           |       | trailer |        |  |
+--------+---------------------+-----------+-------+---------+--------+--+

the last slice is smaller because there was not enough data to make it full.

The archive is written sequentially this way.


Other Levels

Things get a bit more complicated if we consider compression and encryption. The way the problem is addressed in dar's code is a bit like the way networks are designed in computer science, using the notion of layers. Here, there is an additional constraint: a given layer may or may not be present (encryption, compression, slicing for example). So all layers must have the same interface for serving the layer above them. This interface is defined by the pure virtual class "generic_file", which provides generic methods for reading, writing, skipping, and getting the current offset when writing or reading data to a "generic_file". This way the compressor class acts like a file which compresses data written to it and writes the compressed data to another "generic_file" below it. The strong encryption and scramble classes act the same, but in place of compressing/uncompressing they encrypt/decrypt the data to/from another generic_file object. The slicing we have seen above follows the same principle: this is a "sar" object that transfers data written to it to several fichier [=file] objects. Class fichier [=file] also inherits from the generic_file class, and is a wrapper for the plain filesystem calls. Some new classes have been added with format 8, in particular the escape class, which inserts escape sequence marks at requested positions, and modifies data written to it so it never looks like an escape sequence mark. To reduce the number of context switches when reading the catalogue (which makes a ton of small reads), a cache class is also present: it gathers small writes made to it into larger writes, and pre-reads a large amount of data to answer the many small reads made when building the catalogue in memory from the archive.

Here are now all currently possible layers together:

              +----+--+----+-...........+---------+
archive       |file|EA|file|            |catalogue|
layout        |data|  |data|            |         |
              +----+--+----+-...........+---------+
                |   |    |      |              |
            +-----+ | +-------+ |              |
sparse      |spars| | |sparse | |              |
file        |file | | |file   | |              |
detection   |detec| | |detect.| |              |
layer       +-----+ | +-------+ |              |
(optional)      |   |    |      |              |
                V   V    V      V              V
              +-----------------------------------+
compression   |         (compressed)  data        |
              +-----------------------------------+
                    |                      |
                    |                      |
                    V                      V
              +-----------------------------------+
escape layer  |   escaped data / escape sequences |
(optional)    +-----------------------------------+
                    |                      |          / First Terminateur
                    |                      |          |
                    |                      |          V
elastic  +---+      |                      |       +----+---+
buffers  |EEE|      |                      |       | T1 |EEE|
         +---+      |                      |       +----+---+
           |        |                      |              |           Second
           V        V                      V              V         Terminator
         +--------------------------------------------------+              |
cipher   |        (encrypted) data / cache if no encryption |              |
         +--------------------------------------------------+              V
                    |                         |               +---------+----+
+-------+           |                         |               | trailer | T2 |
| header|           |                         |               +---------+----+
+-------+           |                         |                    |      |
    |               |                         |                    |      |
    V               V                         V                    V      v
+-----------------------------------------------------------------------------+
|                  data                                                       |
+-----------------------------------------------------------------------------+
        |         |  |         |   |        |   |        |  |    |  |        |
slice   |         |  |         |   |        |   |        |  |    |  |        |
headers |         |  |         |   |        |   |        |  |    |  |        |
 |  |   |         |  |         |   |        |   |        |  |    |  |        |
 |  +---|------\  |  |         |   |        |   |        |  |    |  |        |
 V      V      V  V  V         V   V        V   V        V  V    V  V        V
+---------+  +---------+  +---------+  +---------+  +-------+  +-------+  +----+
|HH| data |  |HH| data |  |HH| data |  |HH| data |  |HH|data|  |HH|data|  |HH| |
+---------+  +---------+  +---------+  +---------+  +-------+  +-------+  +----+
  slice 1      slice 2      slice 3      slice 4      slice 5


The elastic buffers are here to prevent a plain text attack, where one knows which data is expected at a given place and tries to guess the cipher by comparing the expected data with the encrypted one. As dar generates structured archives, there would be some possibility to use this attack to crack an archive's encryption. To overcome this problem, elastic buffers have been added at the beginning and at the end of the encrypted data. This way it is not possible to know where a given archive structure is located within the encrypted data. An elastic buffer is made of random data that contains, at a random place, a pattern that tells the overall size of the buffer (a size which is itself randomly chosen at archive creation time). The pattern is of the form ">###<" where the hash field (###) contains the elastic buffer size in binary. Small elastic buffers can be "><" for two bytes or "X" for one byte; as the buffer is encrypted beside the archive data, it is not possible to determine its size without holding the archive encryption key. Elastic buffers are usually several kilobytes long. Here follows an example of an elastic buffer:

972037219>20<8172839


For clarity, the size field between '>' and '<' has been written in decimal instead of binary, as well as the random data inside the elastic buffer. The location of the size field '>20<' is also randomly chosen at creation time.
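As an illustration only (this is not the libdar code, and the real size field is stored in binary, not decimal), an elastic buffer could be generated like this:

// Illustrative sketch of building an elastic buffer (not the exact libdar code).
// Assumes std::srand() has been seeded somewhere by the caller.
#include <cstdlib>
#include <string>
#include <vector>

std::vector<char> make_elastic_buffer(size_t size)
{
    std::vector<char> buf(size);
    for (size_t i = 0; i < size; ++i)          // fill with random bytes
        buf[i] = static_cast<char>(std::rand() % 256);

    if (size == 1) { buf[0] = 'X'; return buf; }               // one-byte buffer
    if (size == 2) { buf[0] = '>'; buf[1] = '<'; return buf; } // two-byte buffer

    // encode the total size in a ">...<" field (decimal here for clarity,
    // libdar stores it in binary), placed at a random offset inside the buffer
    std::string field = ">" + std::to_string(size) + "<";
    size_t pos = std::rand() % (size - field.size() + 1);
    for (size_t i = 0; i < field.size(); ++i)
        buf[pos + i] = field[i];
    return buf;
}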

A terminateur is a short structure that is intended to be read backward. It gives the absolute position of a given item within the archive: the second terminateur lets dar skip to the beginning of the archive trailer, while the first terminateur (possibly encrypted) lets dar skip to the beginning of the catalogue.





Scrambling


Before strong encryption was implemented, dar had only a very simple and weak encryption mechanism. It remains available in current releases under the "scram" algorithm name. Its main advantage is that it does not rely on any external library: it is completely part of libdar.

How does it work?

Consider the pass phrase as a string, thus a sequence of bytes, thus a sequence of integers, each one between 0 and 255 (both included). The data to "scramble" is also a sequence of bytes, usually much longer than the pass phrase. The principle is to add the pass phrase to the data, byte by byte, modulo 256. The pass phrase is repeated all along the archive. Let's take an example:

the pass phrase is "he\220lo" (where \220 is the character whose decimal value is 220), and the data is "example"

taken from ASCII standard:
h = 104
l = 108
o = 111
e = 101
x = 120
a = 97
m = 109
p = 112

        e       x       a       m       p       l       e
        101     120     97      109     112     108     101

+       h       e       \220    l       o       h       e
        104     101     220     108     111     104     101

---------------------------------------------------------------

        205     221     317     217     223     212     202

---------------------------------------------------------------
modulo
256 :   205     221     61      217     223     212     202
        \205    \221    =       \217    \223    \212    \202


thus the data "example" will be written in the archive as "\205\221=\217\223\212\202"

This method makes it possible to decode any portion of the archive without knowing the rest of the data. It does not consume much resources to compute, but it is terribly weak and easy to crack. Of course, the data is more difficult to retrieve without the key when the key is long. Today dar can also use strong encryption (blowfish and a few others) and, thanks to encryption blocks, can still avoid reading the whole archive to restore any single file.
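Here is a minimal sketch of the principle just described (illustration only, with hypothetical function names, not the libdar implementation):

// Sketch of the "scram" principle described above (assumes a non-empty pass phrase).
#include <string>

std::string scramble(const std::string &data, const std::string &pass)
{
    std::string out(data);
    for (std::string::size_type i = 0; i < data.size(); ++i)
        out[i] = static_cast<char>((static_cast<unsigned char>(data[i])
                                    + static_cast<unsigned char>(pass[i % pass.size()])) % 256);
    return out;
}

std::string unscramble(const std::string &data, const std::string &pass)
{
    std::string out(data);
    for (std::string::size_type i = 0; i < data.size(); ++i)
        out[i] = static_cast<char>((256 + static_cast<unsigned char>(data[i])
                                    - static_cast<unsigned char>(pass[i % pass.size()])) % 256);
    return out;
}

// With data "example" and the pass phrase bytes 104,101,220,108,111 ("he\220lo"),
// scramble() produces the bytes 205,221,61,217,223,212,202 shown in the table above.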



Overflow in arithmetic integer operations


Some code explanation about the detection of overflows in integer arithmetic operations. We speak here about *unsigned* integers, and we only use portable, standard ways to detect overflows when 32-bit or 64-bit integers are used in place of infinint.

Written in binary, a number is a finite sequence of digits (0 or 1). To obtain the original number from its binary representation, we must multiply each digit by a power of two. For example, the binary representation "101101" denotes the number N where:

N = 2^5 + 2^3 + 2^2 + 2^0

in that context we will say that 5 is the maximum power of N (the power of the highest non-null binary digit).

For the addition "+" operation, if an overflow occurs the result is less than one of the operands (in fact less than both), so overflow is not difficult to detect. To be convinced, note that when a + b overflows, the value actually stored is a + b minus 2^N, where N is the number of bits of the integer field; as b is always less than 2^N, the stored result is necessarily less than a (and, for the same reason, less than b). Conversely, without overflow the result is greater than or equal to both operands. Comparing the result with one of the operands is thus enough to detect an overflow.

For the subtraction "-" operation, if the second operand is greater than the first there is an overflow (the result must be unsigned, thus positive or null), otherwise there is no overflow. Detection is thus even simpler.

For the division "/" and modulo "%" operations, there is never an overflow (the only illicit operation is the division by zero).

For the multiplication "*" operation, a heuristic has been chosen to quickly detect overflows; the drawback is that it may report false overflows when numbers get close to the maximum possible integer value. Here is the heuristic used:

given A and B two integers, which max powers are m and n respectively, we have

A < 2^(m+1)
and
B < 2^(n+1)

thus we also have:

A.B < 2^(m+1).2^(n+1)

which is:

A.B < 2^(m+n+2)

As a consequence, we know that the maximum power of the product of A by B is at most m+n+1. As long as m+n+1 is less than or equal to the maximum power of the integer field, there cannot be any overflow; otherwise we consider that there is an overflow, even if this may not always be the case (this is a heuristic algorithm).
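The following sketch illustrates these checks for a 64-bit unsigned field; it is an illustration only, not the libdar code, which works on its own integer types.

// Sketch of the overflow checks described above, for a fixed-size unsigned type.
#include <cstdint>
#include <stdexcept>

static unsigned max_power(uint64_t x)      // position of the highest non-null bit
{
    unsigned p = 0;
    while (x >>= 1)
        ++p;
    return p;
}

uint64_t checked_add(uint64_t a, uint64_t b)
{
    uint64_t r = a + b;
    if (r < a || r < b)                    // overflow makes the result smaller than an operand
        throw std::overflow_error("addition overflow");
    return r;
}

uint64_t checked_sub(uint64_t a, uint64_t b)
{
    if (b > a)                             // unsigned result cannot be negative
        throw std::overflow_error("subtraction overflow");
    return a - b;
}

uint64_t checked_mul(uint64_t a, uint64_t b)
{
    // heuristic: the max power of a*b is at most max_power(a)+max_power(b)+1;
    // reject when that may exceed the 63 available bit positions (this may give
    // false positives near the maximum value, as explained above)
    if (a != 0 && b != 0 && max_power(a) + max_power(b) + 1 > 63)
        throw std::overflow_error("possible multiplication overflow");
    return a * b;
}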



Strong encryption


Several cyphers are available. Remember that "scrambling" is not a strong encryption cypher; all the others are.

To be able to use a strongly encrypted archive you need to know the three parameters used at creation time:
  • the cypher used (blowfish, ...)
  • the key or password used
  • the encryption block size used
No information about these parameters is stored in the generated archive. If you make an error on just one of them, you will not be able to use your archive. If you forget one of them, nobody can help you: you can just consider the data in this archive as lost. This is the drawback of strong encryption.

How is it implemented?

To avoid completely breaking direct file access, the archive is not encrypted as a whole (as an external program would do). The encryption is done block of data by block of data. Each block can be decrypted independently, and if you want to read some data somewhere you only need to decrypt the whole block(s) it belongs to.

As a consequence, the larger the block size, the stronger the encryption, but also the longer it takes to recover a given file, in particular when the file to restore is much smaller than the encryption block size used.

An encryption block size can range from 10 bytes to 4 GB.

If encryption is used as well as compression, compression is done first, then encryption is done on compressed data.

An "elastic buffer" is introduced at the beginning and at the end of the archive to protect against plain text attacks. The elastic buffer size varies randomly and is defined at execution time. It is composed of random (srand()) values. Two mark characters '>' and '<' delimit the size field, which indicates the byte size of the elastic buffer. The size field is randomly placed within the buffer. Last, the buffer is encrypted together with the rest of the data. Typical elastic buffer sizes range from 1 byte to 10 kB, for both the initial and the terminal elastic buffers.

Elastic buffers are also used inside encryption blocks. The underlying cypher may not be able to encrypt exactly at the requested block size boundary. If necessary, a small elastic buffer is appended to the data before encryption, to be able, at restoration time, to know the amount of real data and the amount of padding around it.

Let's take an example with blowfish. Blowfish encrypts by multiples of 8 bytes (cypher block chaining mode). An elastic buffer is always added to the data of an encryption block; its minimal size is 1 byte.

Thus, if you request an encryption block of 3 bytes, these 3 bytes will be padded with an elastic buffer of 5 bytes so that 8 bytes get encrypted. This gives a very poor ratio, as only 3 bytes out of 8 are significant.

If you request an encryption block of 8 bytes, as there is no room left for the minimal 1-byte elastic buffer, a second 8-byte block is used to hold the elastic buffer, so the real encryption block will be 16 bytes.

Ideally, an encryption block of 7 bytes will use 8 bytes, with 1 byte for the elastic buffer.

This overhead tends to disappear as the encryption block size grows, so it should not be a problem in normal conditions. An encryption block of 3 bytes is not a good idea for a strong encryption scheme anyway; for information, the default encryption block size is 10 kB.
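The resulting size can be summed up by the following sketch, assuming an 8-byte cypher block and a minimal 1-byte elastic buffer (illustration only, not the libdar code):

// How many bytes are really written for a requested encryption block size,
// assuming an 8-byte cipher block and a minimal 1-byte elastic buffer.
#include <cstddef>

size_t real_encrypted_size(size_t requested_block, size_t cipher_block = 8)
{
    size_t needed = requested_block + 1;                        // data + at least 1 byte of elastic buffer
    size_t blocks = (needed + cipher_block - 1) / cipher_block; // round up to the cipher block boundary
    return blocks * cipher_block;
}

// real_encrypted_size(3) == 8   (3 data bytes + 5 bytes of elastic buffer)
// real_encrypted_size(7) == 8   (7 data bytes + 1 byte of elastic buffer)
// real_encrypted_size(8) == 16  (no room left in the first 8 bytes for the elastic buffer)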



libdar and thread-safe requirement


This is for those who plan to use libdar in their own programs.

If you plan to have only one thread using libdar there is no problem; of course, you will still have to call one of the get_version() functions first, as usual. Things change if you intend to have several concurrent threads using the libdar library.

libdar is thread-safe under certain conditions:

Several 'configure' options have an impact on thread-safe support:

--enable-test-memory is a debug option that prevents libdar from being thread-safe, so don't use it.
--enable-special-alloc (set by default) makes a thread-safe library only if POSIX mutexes are available (pthread_mutex_t type).
--disable-thread-safe avoids looking for mutexes, so unless --disable-special-alloc is also used, the generated library will not be thread-safe.

You can check the thread-safe capability of a library thanks to the get_compile_time_feature(...) call of the API, or use the 'dar -V' command to quickly get the corresponding values, checking with 'ldd' which library has been dynamically linked to dar, if applicable.

IMPORTANT:
More than ever, it is mandatory to call get_version() before any other call; when that call returns, libdar is ready for thread-safe use. Note that even if its prototype has not changed, get_version() *may* now throw an exception, so use get_version_noexcept() if you don't want to manage exceptions.
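As a reminder of that constraint, here is a minimal sketch of such an initialization; the exact prototypes (the three-integer form of libdar::get_version() is assumed below) should be checked against the API reference of the libdar release you use.

// Minimal sketch: call get_version() once before any other libdar call.
// The exact prototype below is an assumption; check your release's API reference.
#include <dar/libdar.hpp>
#include <iostream>

int main()
{
    libdar::U_I major, medium, minor;

    try
    {
        libdar::get_version(major, medium, minor);   // mandatory initialization
    }
    catch(libdar::Egeneric & e)
    {
        std::cerr << "libdar initialization failed: " << e.get_message() << std::endl;
        return 1;
    }

    std::cout << "libdar version " << major << "." << medium << "." << minor << std::endl;
    // from this point on, other threads may use libdar (if it was compiled thread-safe)
    return 0;
}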

For more information about libdar and its API, check the doc/api_tutorial.html document and the API reference manual under doc/html/index.html


Dar_manager and delete files


This is for further reference and explanations.

In a dar archive, when a file has been deleted since the backup of reference (in the case of a differential archive), an entry of a special type (called "detruit") is put in the catalogue of the archive; it only contains the name of the missing file.

In a dar_manager database, each file found in one of the archives used to build the database is given a list of associations. These associations relate the mtime (date of last modification of the file) to the number of the archive where the file has been found in that state.

There is thus no way to record "detruit" entries in a dar_manager database, as no date is associated with this type of object. Indeed, in a dar archive, we can only notice that a file has been destroyed because it is not present in the filesystem while it is present in the catalogue of the archive of reference. We thus know the file has been destroyed between the date the archive of reference was made and the date the current archive is made. Unfortunately, no date is recorded in a dar archive telling at which time it was made.

Thus, from dar_manager, when inspecting a catalogue, there is no way to give a significant date to a "detruit" entry. In consequence, for a given file which has been removed, then recreated, then removed again along a series of differential backups, it is not possible to order the times at which this file has been removed within the series of dates at which it existed.

The ultimate consequence is that if the user asks dar_manager to restore a directory in the state it had just before a given date (-w option), it is not possible to know whether a given file existed at that time. We can effectively see that it was not present in a given archive, but as we don't know the date of that archive, we cannot determine whether that is before or after the date requested by the user. And as dar_manager is not able to restore the non-existence of a file at a given time, we must use dar directly with the archive that was made at the date we wish.

Note that having a date stored in each dar archive would not solve the problem without some more information. First, we should assume that the date is consistent from host to host and from time to time (what if the user changes the system time due to daylight saving or moves around the Earth, or if two users in two different places share a filesystem --- with rsync, NFS, or some other mean --- and do backups alternately?). Let's nevertheless assume the system time is significant, and imagine what would happen if the date of archive construction were stored in each archive.

Then, when a "detruit" object is met in an archive, it could be given the date the archive was built, and thus be ordered within the series of dates at which the corresponding file was found in other archives. So when the user asks for the restoration of a directory, a given file's state at that date can be known, and the restoration from the corresponding archive will do what we expect: either remove the file (if the selected backup contains a "detruit" object) or restore the file in the state it had.

Suppose now a dar_manager database built from a series of full backups. There will thus not be any "detruit" object, but a file may be present or missing in a given archive. The solution would then be that, once an archive has been integrated into the database, a last step scans the whole database for files that have no date associated with this last archive: we can assume these files were not present, and record the date of the archive creation together with the information that the file was removed at that time. Moreover, if the last archive adds a file that was not known in the archives already present in the database, we must consider this file as deleted in each of these previous archives; but then we must know the creation dates of all these previous archives to be able to put this information properly in the database.

In that case, however, we would not be able to make dar remove a file, as no "detruit" object exists (all archives are full backups), and dar_manager would have to remove the entry from the filesystem itself. Beside the fact that it is not the role of dar_manager to directly interact with the filesystem, dar_manager would have to record an additional piece of information: whether a file is marked deleted because a "detruit" object was found in an archive, or because no entry at all was found for it in a given archive. This is necessary to know whether to rely on dar to remove the file or to have dar_manager do it itself --- or maybe better, to never rely on dar to remove a file and always let dar_manager do it.

Assuming we accept to make dar_manager able to remove entries from the filesystem without relying on dar, we must store the date of archive creation in each archive, and store these dates for each archive in dar_manager databases. Then, instead of using the mtime of each file, we could do something much simpler in the database: for each file, record whether or not it was present in each archive used to build the database and, beside this, store only the creation date of each archive. This way, dar_manager would only have, for each file, to take the last state of the file (deleted or present) before the given date (or the last known state if no date is given), and either restore the file from the corresponding archive or remove it.
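Purely as an illustration of this hypothetical approach (this is neither the current dar_manager database format nor a planned one), such a record could look like this:

// Hypothetical sketch only: for each file we record, per archive, whether the
// file was present, and we keep one creation date per archive; restoring a
// "state" at a given date then means finding the last archive not newer than
// that date and looking at the recorded presence flag.
#include <ctime>
#include <map>
#include <string>

struct database
{
    std::map<unsigned int, time_t> archive_date;                    // archive number -> creation date
    std::map<std::string, std::map<unsigned int, bool>> presence;   // path -> (archive number -> present?)

    // returns the archive to restore "path" from at date "when",
    // or 0 if the file should be absent (or is unknown) at that date
    unsigned int archive_for(const std::string & path, time_t when) const
    {
        unsigned int best = 0;
        bool present = false;

        auto f = presence.find(path);
        if (f == presence.end())
            return 0;

        for (const auto & rec : f->second)       // archive numbers scanned in increasing order
        {
            auto d = archive_date.find(rec.first);
            if (d != archive_date.end() && d->second <= when)
            {
                best = rec.first;
                present = rec.second;
            }
        }
        return present ? best : 0;
    }
};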

But if a user has removed a file by accident and only notices this mistake after several backups, it would become painful to restore this file, as the user would have to find manually at which date it was present in order to feed dar_manager with the proper -w option; this is worse than looking for the last archive that holds the file we look for.

Here we are back to the difference between the restoration of a file and the restoration of a state. By state, I mean the state a directory tree had at a given time, like a photograph. In its original version, dar_manager was aimed at restoring files, whether or not they exist in the last archive added to a database: it simply finds the last archive where the file is present. Making dar_manager restore a state, and thus consider files that have been removed at a given date, is no more no less than restoring from a given archive directly with dar. So all this discussion about the fact that dar_manager is not able to handle removed files ends up showing that adding this feature to dar_manager would make it quite useless... sigh. But it was necessary.



Native Language Support / gettext / libintl


Native Language Support (NLS) is the ability of a given program to display its messages in different languages. For dar, this is implemented using the gettext tools, which must be installed on the system for dar to be able to display messages in a language other than English.

Things are the following:
- On a system without gettext dar will not use gettext at all. All messages will be in English (OK maybe better saying Frenglish) ;-)
- On a system with gettext dar will use the system's gettext, unless you use --disable-nls option with the configure script.

If NLS is available you just have to set the LANG environment variable to your locale settings to change the language in which dar displays its messages (see ABOUT-NLS for more about the LANG variable).

Just for information, gettext() is the name of the call that translates strings in the program. This call is implemented in the library called 'libintl' (intl for internationalization). Last point: gettext, by translating strings, makes Native Language Support (NLS) possible; in other words, it lets the messages of your preferred programs be displayed in your native language, for those who do not have English as their mother tongue.

This needed to be said, because the links between "gettext", "libintl" and "NLS" are easy to miss.
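For illustration, here is the classical way a program uses libintl; these are standard gettext calls, but the "dar" text domain and the locale directory below are only example values.

// Classical use of libintl/gettext (standard calls; the text domain "dar" and
// the /usr/share/locale path are example values only).
#include <libintl.h>
#include <clocale>
#include <cstdio>

#define _(msg) gettext(msg)

int main()
{
    std::setlocale(LC_ALL, "");                  // honour LANG / LC_* environment variables
    bindtextdomain("dar", "/usr/share/locale");  // where the compiled *.mo catalogues live
    textdomain("dar");                           // select the message catalogue

    std::printf("%s\n", _("All files have been saved"));  // translated if a catalogue exists
    return 0;
}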

READ the ABOUT-NLS file at the root of the source package to learn more about the way to display dar's messages in your own language. Note that not all languages are yet supported, this is up to you to send me a translation in your language and/or contact a translating team as explained in ABOUT-NLS.

To know which languages are supported by dar, read the po/LINGUAS file and check out for the presence of the corresponding *.po files in this directory.



Dar Release Process

Development Phase:
Dar receives new features during the development phase; at this stage, sources are modified and tested after each feature addition. The development sources are stored in a CVS repository at SourceForge, which you can access read-only.

Frozen API Phase:
No new feature that would change the API is added. The API shall be documented well enough to let API users give their feedback about the design and its implementation. During this time, development continues on whatever does not change the API: documentation of the whole project, problem fixes in libdar, new features in the command-line part of the sources, and so on.

Pre-release Phase:
Once the documentation and the API are stable comes the pre-release phase. This phase starts and ends with an email to the dar-news mailing-list. During this period, intensive testing is done on the pre-release sources; feedback and information about new pre-release packages are exchanged through the pre-release mailing-list, which lives only during the pre-release phases and is neither archived nor visible through a mail-to-news gateway. Of course, you are welcome to participate in the testing process and to report to the pre-release mailing-list any problem you may meet with a given pre-release package.

Release Phase:
Some little time after the pre-release has ended, a first package is released (its last version number is zero) and made available at SourceForge for download. This phase also begins with an email to the dar-news mailing-list. During this phase, users may report bugs or problems about the released software; depending on the number of bugs found and on their importance, a new release takes place that only fixes these bugs (no feature is added), the last number of the version is incremented by one, and a new mail is sent to dar-news with the list of problems fixed by the new release. The release phase ends when a new release phase begins; thus, during a release phase, a concurrent development phase takes place, then a frozen API phase, then a pre-release phase, but for a new major version (the first or the second number of the version changes).

Dar's Versions

package release version

Dar packages are released during the pre-release phase (see above). Each version is identified by three numbers separated by dots, for example version 2.3.0. The last number is incremented between releases that take place in the same release phase (only bugs have been fixed); the middle number is incremented at each pre-release phase. Last, the first number is incremented when a major change in the software structure takes place [version 2.0.0 saw the split of dar's code into a part related to the command-line and the rest put in a library called libdar, which can be accessed through a well defined API, even by external software (like kdar for example). Version 2.0.0 also saw the apparition of the configure script and the use of the GNU tools autoconf, automake, libtool and gettext, thus in particular the possibility to have internationalization].

Note that release versioning is completely different from what is done for the Linux kernel: for dar, all versioned packages are stable released software, and stability thus increases with the last number of the version.

Libdar version

Unfortunately, the release version does not give much information about the compatibility of different libdar versions from the point of view of an external application, which has not been released together with libdar and may face different libdar versions. So libdar has its own version. It is also a three-number version (for example, 3.1.2), but each number has a different meaning. The last number increases with a new version that only fixes bugs; the middle number increases when new features have been added while remaining compatible with the way older features were used in previous libdar versions. Last, the first number changes when the API has been changed in a way that breaks backward compatibility for some features.
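As an illustration of this numbering rule (a hypothetical helper, not an actual libdar API), an application built against a given libdar version could check compatibility this way:

// Illustration of the libdar version numbering rule described above:
// an application built against version X.Y.Z stays compatible with a library
// version X.Y'.Z' as long as the first number matches and the middle number
// is not older than the one it was built against.
struct lib_version { unsigned major, medium, minor; };

bool compatible_with(const lib_version &built_against, const lib_version &runtime)
{
    if (runtime.major != built_against.major)
        return false;                                   // API break: no backward compatibility
    return runtime.medium >= built_against.medium;      // newer medium numbers only add features
}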

Other versions


Beside the libdar library, you can find five command-line applications: dar, dar_xform, dar_slave, dar_manager and dar_cp. All of these except dar have their own version, here too made of three numbers. Their meaning is the same as for the package release version: the last number increases upon bug fixes, the middle upon new features, the first upon major architecture changes.

Archive format version

When new features come, it is sometimes necessary to change the structure of the archive. To be able to know the format used in an archive, a field present in each archive defines this format. Each dar binary can thus read all archive formats; of course, a particular version cannot guess the format of archives that have been defined *after* that dar binary was released. If you try to open a recent archive with an old dar binary, you will get a warning saying that dar is probably not able to read the archive, and dar will ask whether you want to proceed anyway. Of course you can try to read it, but this is at your own risk. In particular, depending on the features used (see the Changelog to know which features required an archive format upgrade), you may succeed in reading a recent archive with an old dar binary and get neither error nor warning, but this does not mean that dar did all that was necessary to restore the files properly. It is thus advised to avoid reading an archive with a version of dar that is too old to handle its format properly (and to reserve this possibility for cases of necessity).

Cross reference matrix

OK, you may now find that this is a bit complex, so a list of versions is given below. Just remember that there are two points of view: the command-line user and the external application developer.

In the table below, "dar" is the released package and dar version, "format" is the archive format; a "-----" entry means that the corresponding program or library did not exist at that time.

Date                | dar    | format | libdar | dar_xform | dar_slave | dar_manager | dar_cp
--------------------+--------+--------+--------+-----------+-----------+-------------+-------
April 2nd, 2002     | 1.0.0  | 01     | -----  | -----     | -----     | -----       | -----
April 24th, 2002    | 1.0.1  | 01     | -----  | -----     | -----     | -----       | -----
May 8th, 2002       | 1.0.2  | 01     | -----  | -----     | -----     | -----       | -----
May 27th, 2002      | 1.0.3  | 01     | -----  | -----     | -----     | -----       | -----
June 26th, 2002     | 1.1.0  | 02     | -----  | 1.0.0     | 1.0.0     | -----       | -----
Nov. 4th, 2002      | 1.2.0  | 03     | -----  | 1.1.0     | 1.1.0     | 1.0.0       | -----
Jan. 10th, 2003     | 1.2.1  | 03     | -----  | 1.1.0     | 1.1.0     | 1.0.0       | -----
May 19th, 2003      | 1.3.0  | 03     | -----  | 1.1.0     | 1.1.0     | 1.1.0       | -----
Nov. 2nd, 2003      | 2.0.0  | 03     | 1.0.0  | 1.1.0     | 1.1.0     | 1.2.0       | 1.0.0
Nov. 21th, 2003     | 2.0.1  | 03     | 1.0.1  | 1.1.0     | 1.1.0     | 1.2.0       | 1.0.0
Dec. 7th, 2003      | 2.0.2  | 03     | 1.0.2  | 1.1.0     | 1.1.0     | 1.2.0       | 1.0.0
Dec. 14th, 2003     | 2.0.3  | 03     | 1.0.2  | 1.1.0     | 1.1.0     | 1.2.1       | 1.0.0
Jan. 3rd, 2004      | 2.0.4  | 03     | 1.0.2  | 1.1.0     | 1.1.0     | 1.2.1       | 1.0.0
Feb. 8th, 2004      | 2.1.0  | 03     | 2.0.0  | 1.2.0     | 1.2.0     | 1.2.1       | 1.0.0
March 5th, 2004     | 2.1.1  | 03     | 2.0.1  | 1.2.1     | 1.2.1     | 1.2.2       | 1.0.0
March 12th, 2004    | 2.1.2  | 03     | 2.0.2  | 1.2.1     | 1.2.1     | 1.2.2       | 1.0.0
May 6th, 2004       | 2.1.3  | 03     | 2.0.3  | 1.2.1     | 1.2.1     | 1.2.2       | 1.0.1
July 13th, 2004     | 2.1.4  | 03     | 2.0.4  | 1.2.1     | 1.2.1     | 1.2.2       | 1.0.1
Sept. 12th, 2004    | 2.1.5  | 03     | 2.0.5  | 1.2.1     | 1.2.1     | 1.2.2       | 1.0.1
Jan. 29th, 2005     | 2.1.6  | 03     | 2.0.5  | 1.2.1     | 1.2.1     | 1.2.2       | 1.0.1
Jan. 30th, 2005     | 2.2.0  | 04     | 3.0.0  | 1.3.0     | 1.3.0     | 1.3.0       | 1.0.1
Feb. 20th, 2005     | 2.2.1  | 04     | 3.0.1  | 1.3.1     | 1.3.1     | 1.3.1       | 1.0.1
May 12th, 2005      | 2.2.2  | 04     | 3.0.2  | 1.3.1     | 1.3.1     | 1.3.1       | 1.0.2
Sept. 13th, 2005    | 2.2.3  | 04     | 3.1.0  | 1.3.1     | 1.3.1     | 1.3.1       | 1.0.2
Nov. 5th, 2005      | 2.2.4  | 04     | 3.1.1  | 1.3.1     | 1.3.1     | 1.3.1       | 1.0.2
Dec. 6th, 2005      | 2.2.5  | 04     | 3.1.2  | 1.3.1     | 1.3.1     | 1.3.1       | 1.0.2
Jan. 19th, 2006     | 2.2.6  | 04     | 3.1.3  | 1.3.1     | 1.3.1     | 1.3.1       | 1.0.3
Feb. 24th, 2006     | 2.2.7  | 04     | 3.1.4  | 1.3.1     | 1.3.1     | 1.3.1       | 1.0.3
Feb. 24th, 2006     | 2.3.0  | 05     | 4.0.0  | 1.4.0     | 1.3.2     | 1.4.0       | 1.1.0
June 26th, 2006     | 2.3.1  | 05     | 4.0.1  | 1.4.0     | 1.3.2     | 1.4.0       | 1.1.0
Oct. 30th, 2006     | 2.3.2  | 05     | 4.0.2  | 1.4.0     | 1.3.2     | 1.4.0       | 1.1.0
Feb. 24th, 2007     | 2.3.3  | 05     | 4.1.0  | 1.4.0     | 1.3.2     | 1.4.1       | 1.2.0
June 30th, 2007     | 2.3.4  | 06     | 4.3.0  | 1.4.0     | 1.3.2     | 1.4.1       | 1.2.0
Aug. 28th, 2007     | 2.3.5  | 06     | 4.4.0  | 1.4.1     | 1.3.3     | 1.4.2       | 1.2.1
Sept. 29th, 2007    | 2.3.6  | 06     | 4.4.1  | 1.4.1     | 1.3.3     | 1.4.2       | 1.2.1
Feb. 10th, 2008     | 2.3.7  | 06     | 4.4.2  | 1.4.2     | 1.3.4     | 1.4.3       | 1.2.2
June 20th, 2008     | 2.3.8  | 07     | 4.4.3  | 1.4.2     | 1.3.4     | 1.4.3       | 1.2.2
May 22nd, 2009      | 2.3.9  | 07     | 4.4.4  | 1.4.2     | 1.3.4     | 1.4.3       | 1.2.2
April 9th, 2010     | 2.3.10 | 07     | 4.4.5  | 1.4.2     | 1.3.4     | 1.4.3       | 1.2.2
March 13th, 2011    | 2.3.11 | 07     | 4.5.0  | 1.4.3     | 1.3.4     | 1.4.3       | 1.2.2
February 25th, 2012 | 2.3.12 | 07     | 4.5.1  | 1.4.3     | 1.3.4     | 1.4.3       | 1.2.2
June 2nd, 2011      | 2.4.0  | 08     | 5.0.0  | 1.5.0     | 1.4.0     | 1.5.0       | 1.2.3
July 21st, 2011     | 2.4.1  | 08     | 5.1.0  | 1.5.0     | 1.4.0     | 1.6.0       | 1.2.3
Sept. 5th, 2011     | 2.4.2  | 08     | 5.1.1  | 1.5.0     | 1.4.0     | 1.6.0       | 1.2.3
February 25th, 2012 | 2.4.3  | 08     | 5.2.0  | 1.5.0     | 1.4.0     | 1.7.0       | 1.2.3
March 17th, 2012    | 2.4.4  | 08     | 5.2.1  | 1.5.0     | 1.4.0     | 1.7.1       | 1.2.3
April 15th, 2012    | 2.4.5  | 08     | 5.2.2  | 1.5.1     | 1.4.1     | 1.7.2       | 1.2.4
June 24th, 2012     | 2.4.6  | 08     | 5.2.3  | 1.5.2     | 1.4.2     | 1.7.3       | 1.2.5
July 5th, 2012      | 2.4.7  | 08     | 5.2.4  | 1.5.2     | 1.4.3     | 1.7.3       | 1.2.5
September 9th, 2012 | 2.4.8  | 08     | 5.3.0  | 1.5.3     | 1.4.4     | 1.7.4       | 1.2.6




dar-2.4.8/doc/Known_Bugs.html
Dar Documentation


Known Bugs





Here follows the description and status of old bugs. For recent bugs please consult the bug tracker on sourceforge.


#1:
description: files pointed to by symbolic links do not get their times (mtime & atime) properly restored

problem: when restoring the time of a symbolic link, the utime() system call does not change the inode of the symbolic link itself but the one of the file pointed to by the symbolic link

solution: as the utime() call behaves the same on many Unix platforms (HP-UX, Solaris, Linux, etc.), times for symbolic links are no longer restored; thus files pointed to by symbolic links are not affected by the strange utime() behavior. Here dar does not behave the same as touch, tar, etc.

affects version: 1.0.0
fixed in version: 1.0.1


#2:
description: dar immediately exits with message "file mode is neither read nor write" while creating an archive on 64 bits platforms.

problem: missing mask with O_ACCMODE for flags returned by fcntl() system call

solution: O_ACCMODE mask added

affects versions: 1.0.0, 1.0.1
fixed in version: 1.0.2


#3:
description: making a differential backup with some directory implicitly or explicitly (-P) excluded sometimes leads to a full backup.

problem: when entering a directory that was ignored, the comparison in the catalogue of reference was not going back to the parent directory, so the comparison was no longer synchronized with the files being read.

solution: added a line to go back to the parent directory when treating an ignored directory

affects versions: 1.0.0, 1.0.1
fixed in version: 1.0.2


#4:
description: on Linux 64 bits, when an interaction from the user is required (escape or return key), the keys have to be pressed four times before dar can read a character.

problem: unknown; the same code works fine on other platforms, and if bad characters remained in the pipe, the message would be displayed several times, which is not the case here. It seems that the user has to press four keys for the first one to be accessible to dar.

solution: none actually

affects versions: 1.0.0, 1.0.1 , 1.0.2 and probably also 1.1.0
fixed in version: none

#5:
description: Dar stops one of the two following error:
--- exception nature = [alive]  exception type = [BUG] ----------
[source]
       file compressor.cpp line 244 : it seems to be a bug here
[most outside call]
 -----------------------------------

or

--- exception nature = [alive]  exception type = [BUG] ----------
[source]
      file compressor.cpp line 254 : it seems to be a bug here
[most outside call]
-----------------------------------

problem: the zlib deflate() calls returned an unexpected value (Z_STREAM_END) and inflate() in some rare cases was called without data to process, when reaching end of file, which made inflate() to return Z_BUF_ERROR code.

solution: adding code for taking care of the returned code Z_STREAM_END by deflate(), and code for taking care of Z_BUF_ERROR returned at end of file.

affects version: 1.0.0, 1.0.1, 1.0.2
fixed in version: 1.0.3


#6:
description: Dar does not properly display dates on Linux 64 bits systems.

problem: non portable cast from 'time_t' to 'unsigned long int'

solution: changed code not to have to use 'unsigned long int'

affects version: 1.0.x, 1.1.x
fixed in version: 1.2.0


#7:
description: while saving, dar stops when a given inode could not be read, even if the inode did not have to be saved

problem: dar reads all files in the directory to save, and then applies filters to their filenames. The error appears before the filters are applied, leading to a global saving error.

solution: read errors are now caught sooner: a message is displayed saying that the file or directory will be ignored, and the saving process continues. As this error occurs before testing the filename against filters, an error message is still displayed (which may seem strange when the file was about to be excluded by the filter mechanism).

Just remember that dar reads a directory's contents first, and then checks filenames against filters. At the time of the message, dar does not yet know that the file would not have been saved. Thus, don't worry if you see this kind of message; it does not mean that dar was about to save the given file, but rather exactly what the message says: that the file or directory could not be read. ;-)

affects version: 1.0.x, 1.1.x
fixed in version: 1.2.0


#8:
description: when saving hard linked file using filter that does not save the first hard link, dar stops with SEGFAULT.

problem: The first hard link is used as reference for the following occurrences. As it is not covered by the filter it is destroyed in filter module, but filesystem module keeps using it as reference for following hard links on that inode.

solution: added a call in filesystem module to "forget" a given reference. This way, if a file_etiquette is not covered, filter module can inform filesystem module to delete the corresponding entry on the hard link map. Subsequent hard link on that inode will generate first a new file_etiquette, which will be used as reference (unless it is not covered by filter too).

affects version: 1.1.x
fixed in version: 1.2.0


#9:
description: failed to read an archive under windows NT and 2000

problem: error concerning the integer macro definition (module integer.hpp)

solution: replaced the macro by a proper definition.

affects version: 1.2.0
fixed in version: 1.2.1

#10:
description: when using dar on Windows 95, if giving c:/ or another drive to the -R option, dar stops with the following error:

file filesystem.cpp line 186 : it seems to be a bug here

problem: dar first checks that c:/ is a directory. Cygwin says yes, so dar continues: from c:/ dar completes the path with a dot, thus dar tries to open the path c:/. which Cygwin does not report as an existing directory.

solution: dar no longer tries to open c:/. but opens c:/ instead. Moreover, additional code has been added to detect the incoherence if such a problem occurs under other conditions: dar will report the exact problem (path does not exist / path is not a directory)

affects version: 1.2.1, 1.2.0
fixed in version: 1.3.0

#11:
description: when giving to dar_manager an absolute path to -r option dar_manager stops with INTERNAL ERROR message.

problem: a check is missing to display error message in such condition

solution: added a error message, if an absolute path is given to -r

affects dar_manager version   : 1.0.0
fixed in dar_manager version  : 1.0.1

#12:
description: memory leakage when using -Z or -Y option. This bug has been found and explained by David Rose, who proposed a good patch, which I have just a little adapted.

problem: the dar code that call the zlib when changing the compression algorithm did properly released memory allocated by dar but forget to execute a call of the zlib library that releases memory allocate by the library itself.

solution: solution, add call to release memory used by zlib library when changing of compression algorithm

affects dar version   : 1.2.0, 1.2.1
fixed in dar version  : 1.3.0

#13:
description: archives created by dar under Windows NT SP6, XP or 2000 cannot be read or tested. All actions on the generated archive end with a CRC error message, an incoherent catalogue structure, or a badly formatted infinint.

problem: under Windows, dar uses Cygwin. As Unix and Windows do not define end of line the same way (\n for Unix and \r\n for Windows), Cygwin translates end of line on the fly when reading a file (Windows to Unix) and when writing (Unix to Windows). At writing time especially, each \n anywhere in the archive is written as \r\n, thus two bytes get written in place of one. On the other hand, to get the global position in the whole archive, dar counts the bytes it writes to the file. Thus a shift occurs between the position where dar thinks it writes the catalogue and the position where it actually does. When reading the archive, the recorded position is wrong and the archive cannot be opened.

solution: deactivate the translation when generating the archive. Cygwin adds two special flags to pass to open(2), O_TEXT and O_BINARY, that change the way Cygwin handles file read(2) and write(2).

affects all dar suite program version 1.0.x, 1.1.x, 1.2.x
fixed in dar version 1.3.0

#14:
description: Dar aborts with the following message

---- exception nature = [alive] exception type = [BUG] ----------
[source]
    file compressor.cpp line 320 : it seems to be a bug here
[most outside call]
-----------------------------------

It appears while doing a backup using compression and scrambling (it could not (yet) be reproduced a second time).

problem: this message is issued because the zlib library reported an inconsistency in the call order or in the arguments given to zlib calls. Actually I cannot figure out what is wrong, nor how this bug may happen.

solution: none actually at the time of the 2.0.0 release.

But if you are faced with this bug, please try to reproduce it by re-issuing the same command in the same conditions and, if possible, reduce as much as possible the environment conditions needed to make the bug happen. Then please contact me (either via SourceForge or via email).

affects version 1.2.1 (at least)
not yet fixed (check Sourceforge for fresh news), nor reproduced.

#15:
description: Dar aborts with the following message when giving -y0 as argument

---- exception nature = [alive]  exception type = [BUG] ----------
[source]
    file compressor.cpp line 96 : it seems to be a bug here
[most outside call]
-----------------------------------

problem: correct compression levels for zlib range from 0 to 9, 0 meaning no compression, while for libbz2 correct compression levels range from 1 to 9, thus zero is not a valid value. libbz2 returns an error code which triggers a libdar internal sanity check, which throws this BUG exception.

solution: forbid the use of -z0 or -y0. If you don't want compression, simply give neither -z nor -y.

affects version 1.3.0
fixed in version 2.0.0

#16:
description: dar does not ignore comments in files included with the -B option

problem: when a line containing only a carriage return is put in the file, the following line is considered a normal line (not a comment) whatever it is, in particular if it starts with a hash (#).

solution: make the algorithm consider a line as a normal line only once a character other than space, tab, hash or carriage return is met (the carriage return was added to the list).

affects version 1.2.0, 1.2.1, 1.3.0
fixed in version 2.0.0


#17:
description: dar warns that the archive is about to save itself while a -P option is used to prevent this.

problem: the test for this situation did not check whether a parent directory of the path where the archive is saved was excluded by a -P option.

solution: updated the test so that the warning only appears in real auto-saving conditions.

affects version 2.0.0
fixed in version 2.0.1

#17bis:
description: same as #17, but in the case where the archive is under the current directory and some -P options exclude it.

problem: the current directory "." is not substituted by its full path, and the testing algorithm treats "." as directly under the root "/" directory; no -P option could thus exclude it from the files to save.

solution: for testing and preventing self-backup, "." is replaced by its full path counterpart. affects versions 2.0.0, 2.0.1; fixed in version 2.0.2

#18:
description: dar does not save files, or just stores empty directories, when using the -B option or having a ~/.darrc or /etc/darrc file

problem: the parsing code that strips comments from files does not count empty lines ('\n') between two lines of commands (i.e. uncommented lines). This strips the last uncommented line before the next comment line, usually removing the ending '\n' of that uncommented line, and thus merging into one command two commands that were initially separated by a '\n' and one or several comment lines. This wrongly interpreted command most of the time appears as a member of [list of paths] and thus restricts the backup to a subdirectory that usually does not exist. If the -D option is used, all first-level directories are excluded from the backup and thus stored as empty directories.

solution: bug fixed in no_comment.cpp. A workaround is to remove comments or empty lines from configuration files.

affects versions 2.0.0, 2.0.1
fixed in versions 2.0.2

#19:
description: Dar hangs when using --nodump with named pipe to save

problem: to get the nodump flag value, dar needs to call the open() system call in read-only mode. For named pipes this  system call does return only when another process opens the same pipe in write mode.

solution: as proposed by Joshua Neal, the open will be set to non blocking mode to return immediately.

affects version 1.3.0, 2.0.0, 2.0.1
fixed in version 2.0.2

#20:
description: dar_manager did not completely remove references to an archive when it was removed from a database

problem: data_dir is a C++ class that inherits from data_tree. The latter has a virtual method remove_all_from() that was redefined in the data_dir class with a very small difference, making it not an override of the inherited method (the argument was "archive_num" in place of "const & archive_num"). This redefined method was never called, and the removal never recursed into subdirectories.

solution: make the two signatures identical in the parent and inherited classes.

affects version 1.3.0, 2.0.0, 2.0.1, 2.0.2
fixed in version 2.0.3

#21:
description: under FreeBSD using -B option leads to parse error

problem: under FreeBSD, getopt() call is not reset just by setting optind to zero like under Linux, but the variable optreset must be set to 1.

solution: configure script now check for the presence of optreset variable which allows to properly reset the getopt call according to the system nature.

affects version 2.1.0 and older
fixed in version 2.1.1

#22:
description: dar stops when fed with an empty file for -B option

problem: the comments stripping class lack a variable initialization

solution: initialize variable before use

affects version 2.1.0 and older
fixed in version 2.1.1


#23:
description: -z -y -H and -w do not work under FreeBSD

problem: the getopt() version does not support optional arguments

solution: document this problem, and make possible to reach any feature

in the meanwhile, replace
-z    by   -z 9
-y    by   -y 9
-H    by   -H 1
-w    by   -w d (added for the circumstances)

affects all versions
documented starting version 2.1.2 (see dar's man page)

#24:
description: under windows 2000 SP4 failed to properly save a text file with DOS carriage return. Testing the archive reports CRC error.

problem: The cygwin adaptation layer for dar to be able to run under windows, needs non standard flag mode to not automatically translate between windows and unix carriage return. This bug is related to bug #13 which fix was not properly applied in one single place.

solution: properly apply the fix of bug #13 in this single place too.

affects version 2.1.1 and older
fixed in version 2.1.2

#25:
description: under some hypothetical conditions (when the archive is larger than 10^16 terabytes, 1 followed by 16 zeros), dar is not able to read the archive.

problem: in this situation the terminateur is not properly formed and cannot fulfill its role: pointing to the start of the catalogue. A 0x00 byte is missing near the end of the archive, the one that indicates the end of the terminateur.

solution: properly write down the terminateur string of the archive. Note that such archives produced by earlier versions cannot be read by dar versions that have the fix, but don't worry: if you have such a big archive (more than 10^16 terabytes), tell me and I will create a little program that fixes the archive ;-). Note: "terminateur" is the French word for terminator; the word has been kept here because it is used in the source code.

affects version 2.1.2 and older
fixed in version 2.1.3

#26:
description: in configuration files using the conditional syntax (see man page), when two targets follow each other on two consecutive lines (the first condition is thus empty), the second condition is skipped. The same occurs if there are only comments between the two targets.

problem: when looking for the first target, an extra character is read (the first character of the second target); as there is no content in the first target, no repositioning (seek) in the file is done, and the search for the next target starts at that point (the second character of the second target), so the second target is not recognized because of the missing initial character.

solution: avoid reading this extra character, so that the next character to read is the first (not the second) character of the following target.

affects version 1.2.0 and older versions
fixed in version 2.1.3


#27:
description: when restoring only more recent files (-r) with hourshift activated (-H), the -H feature was randomly not activated.

problem: a test comparing dates was made on the objects' addresses rather than on the objects themselves. This led to the -H test being activated randomly, depending on the addresses in memory of the two objects to compare.

solution: make the test on the objects rather than on their addresses in memory.

affects version from 2.0.0 to 2.1.3
fixed in version 2.1.4

#28:
description: dar does not restore root Extended Attributes

problem: a naming collision occurred between the enumeration listing the available extended attribute domains and the variables carrying the user's choice to consider or not user or root extended attributes for the operation (backup, restoration, comparison, etc.)

solution: rename both the domain enumeration and the variables carrying the user's choice about extended attribute handling.

affects version from 2.1.0 to 2.1.3
fixed in version 2.1.4

#29:
description: dar segfaults during backup when hard links are involved

problem: when a plain file inode linked several times to the directory tree (a hard linked file) is not readable due to a lack of permission, the filesystem module generates a C++ object and stores its address for further reference and hard link handling. When the file is opened, an exception is thrown, leading to the destruction of this object, but the reference stays in the filesystem module, so the next link to this inode is referenced by a destroyed object.

solution: remove the reference when the exception is caught. This removal was properly done when such an inode was not to be saved (and the corresponding C++ object was destroyed); the case of the exception had been forgotten.

affects version up to 2.1.4
fixed in version 2.1.5

#30:
description: compilation fails with syntax errors reported in libstdc++ header files (reported on Solaris and OpenBSD).

problem: the configure script does not detect the presence of the mbstate_t type and redefines it. This conflicts with the system header files and leads to a syntax error.

workaround 1:
the configure script will be updated to check whether that fixes the problem. In the meantime there is a workaround: once configure has finished, edit the config.h file, adding the line
#define HAVE_MBSTATE_T 1
and commenting out the line
#define mbstate_t int
Thanks to Steve Evans for this workaround.

solution: undefine mbstate_t in dar's source code. This is implemented and seems to work. No action is required on your part.

may affects version up to 2.2.0
fixed in version 2.2.1


#31:
description: dar segfaults when restoring Extended Attributes

problem: to speed up the reading of the catalogue from an archive, an intermediate layer has been added in the form of a "cache" object. This object is temporary and stands in place of the object it caches. When the catalogue reading is finished, the reference given to inodes becomes invalid, because it points to the caching object instead of the real underlying archive object.

solution: keep doing the archive reading through the caching object, but give the real archive object to the inodes contained in the catalogue so that they have a valid reference when it is time to restore EA.

affects version 2.2.0
fixed in version 2.2.1

#32:
description: dar is unable to read blowfish encrypted archives under certain conditions

problem: this bug appears when using infinint (no --enable-mode given at configure time). The implicit cast from an unsigned integer to an infinint in an operation was not properly made by the compiler. Instead of converting the system integer to an infinint, which is possible thanks to the infinint constructor, the compiler down-cast the unsigned integer to unsigned char.

solution: use explicit cast conversions when doing operations that involve both infinint and unsigned integers.

affects version 2.2.0
fixed in version 2.2.1

#33:
description: dar hangs when the last line of a DCF file starts with a few spaces or tabs and directly follows a comment.

problem: the layer that removes comments was counting bytes from the beginning of the line, while the first byte retained as significant was the first non-space character of the line, leading it to give the upper layer wrong information about the number of bytes in the file.

solution: start counting bytes from the first significant byte of the line instead of the first byte of the line.

affects versions 2.1.x up to 2.2.0
fixed in version 2.2.1

#34:
description: when restoring files in a directory, this directory gets all permissions set for everybody. This creates a security hole on multi-user systems.

problem: when restoring in a directory as a non-privileged user, it is necessary to have write access to that directory, thus to temporarily change its permissions. The permission is changed, but the temporary change is too permissive and opens a security hole, allowing any local user to delete/create/replace files under the directory being restored.

solution: only add user write access (if necessary) to the directory when dar is run as a non-privileged user.

affects versions 1.x.x up to 2.2.0
fixed in version 2.2.1

#35:
description: when producing archive to stdout sometimes the message "Error while saving data: Bad file descriptor given" is displayed and dar aborts its execution

problem: health check done on a uninitialized object field in place of a constructor's argument that will be used to set this field.

solution: test the method's argument in place of the object field

affects version 2.2.x up to 2.2.3
fixed in version 2.2.4

#36:
description: when using -I or -X with -l option directory names are also filtered while they should not be, as described in the man page

solution: avoid having -X/-I filtering directories

affected versions 2.2.x up to 2.2.5
fixed in 2.2.6

#37:
description: when restoring a file whose EA have all been dropped since the archive of reference, dar reports a CRC error.

problem: to record that the EA have been dropped, dar saves an empty EA list but did not compute the CRC value

solution: set the CRC value when storing an empty list of EA. Note that this bug did not cause any problem other than the CRC error message.

affects versions up to 2.2.6
fixed in 2.2.7




dar-2.4.8/doc/dar-catalog.dtd0000644000175000017430000000572212010477201012636 00000000000000 dar-2.4.8/doc/Doxyfile0000644000175000017430000017501112010477201011470 00000000000000# Doxyfile 1.6.1 # This file describes the settings to be used by the documentation system # doxygen (www.doxygen.org) for a project # # All text after a hash (#) is considered a comment and will be ignored # The format is: # TAG = value [value, ...] # For lists items can also be appended using: # TAG += value [value, ...] # Values that contain spaces should be placed between quotes (" ") #--------------------------------------------------------------------------- # Project related configuration options #--------------------------------------------------------------------------- # This tag specifies the encoding used for all characters in the config file # that follow. The default is UTF-8 which is also the encoding used for all # text before the first occurrence of this tag. Doxygen uses libiconv (or the # iconv built into libc) for the transcoding. See # http://www.gnu.org/software/libiconv for the list of possible encodings. DOXYFILE_ENCODING = UTF-8 # The PROJECT_NAME tag is a single word (or a sequence of words surrounded # by quotes) that should identify the project. PROJECT_NAME = "Disk ARchive" # The PROJECT_NUMBER tag can be used to enter a project or revision number. # This could be handy for archiving the generated documentation or # if some version control system is used. PROJECT_NUMBER = "##VERSION##" # The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) # base path where the generated documentation will be put. # If a relative path is entered, it will be relative to the location # where doxygen was started. If left blank the current directory will be used. OUTPUT_DIRECTORY = ./doc # If the CREATE_SUBDIRS tag is set to YES, then doxygen will create # 4096 sub-directories (in 2 levels) under the output directory of each output # format and will distribute the generated files over these directories. # Enabling this option can be useful when feeding doxygen a huge amount of # source files, where putting all generated files in the same directory would # otherwise cause performance problems for the file system. CREATE_SUBDIRS = NO # The OUTPUT_LANGUAGE tag is used to specify the language in which all # documentation generated by doxygen is written. Doxygen will use this # information to generate all constant output in the proper language. # The default language is English, other supported languages are: # Afrikaans, Arabic, Brazilian, Catalan, Chinese, Chinese-Traditional, # Croatian, Czech, Danish, Dutch, Esperanto, Farsi, Finnish, French, German, # Greek, Hungarian, Italian, Japanese, Japanese-en (Japanese with English # messages), Korean, Korean-en, Lithuanian, Norwegian, Macedonian, Persian, # Polish, Portuguese, Romanian, Russian, Serbian, Serbian-Cyrilic, Slovak, # Slovene, Spanish, Swedish, Ukrainian, and Vietnamese. OUTPUT_LANGUAGE = English # If the BRIEF_MEMBER_DESC tag is set to YES (the default) Doxygen will # include brief member descriptions after the members that are listed in # the file and class documentation (similar to JavaDoc). # Set to NO to disable this. BRIEF_MEMBER_DESC = YES # If the REPEAT_BRIEF tag is set to YES (the default) Doxygen will prepend # the brief description of a member or function before the detailed description. # Note: if both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the # brief descriptions will be completely suppressed. 
REPEAT_BRIEF = YES # This tag implements a quasi-intelligent brief description abbreviator # that is used to form the text in various listings. Each string # in this list, if found as the leading text of the brief description, will be # stripped from the text and the result after processing the whole list, is # used as the annotated text. Otherwise, the brief description is used as-is. # If left blank, the following values are used ("$name" is automatically # replaced with the name of the entity): "The $name class" "The $name widget" # "The $name file" "is" "provides" "specifies" "contains" # "represents" "a" "an" "the" ABBREVIATE_BRIEF = "The $name class" \ "The $name widget" \ "The $name file" \ is \ provides \ specifies \ contains \ represents \ a \ an \ the # If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then # Doxygen will generate a detailed section even if there is only a brief # description. ALWAYS_DETAILED_SEC = NO # If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all # inherited members of a class in the documentation of that class as if those # members were ordinary class members. Constructors, destructors and assignment # operators of the base classes will not be shown. INLINE_INHERITED_MEMB = NO # If the FULL_PATH_NAMES tag is set to YES then Doxygen will prepend the full # path before files name in the file list and in the header files. If set # to NO the shortest path that makes the file name unique will be used. FULL_PATH_NAMES = NO # If the FULL_PATH_NAMES tag is set to YES then the STRIP_FROM_PATH tag # can be used to strip a user-defined part of the path. Stripping is # only done if one of the specified strings matches the left-hand part of # the path. The tag can be used to show relative paths in the file list. # If left blank the directory from which doxygen is run is used as the # path to strip. STRIP_FROM_PATH = # The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of # the path mentioned in the documentation of a class, which tells # the reader which header file to include in order to use a class. # If left blank only the name of the header file containing the class # definition is used. Otherwise one should specify the include paths that # are normally passed to the compiler using the -I flag. STRIP_FROM_INC_PATH = # If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter # (but less readable) file names. This can be useful is your file systems # doesn't support long names like on DOS, Mac, or CD-ROM. SHORT_NAMES = NO # If the JAVADOC_AUTOBRIEF tag is set to YES then Doxygen # will interpret the first line (until the first dot) of a JavaDoc-style # comment as the brief description. If set to NO, the JavaDoc # comments will behave just like regular Qt-style comments # (thus requiring an explicit @brief command for a brief description.) JAVADOC_AUTOBRIEF = NO # If the QT_AUTOBRIEF tag is set to YES then Doxygen will # interpret the first line (until the first dot) of a Qt-style # comment as the brief description. If set to NO, the comments # will behave just like regular Qt-style comments (thus requiring # an explicit \brief command for a brief description.) QT_AUTOBRIEF = NO # The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make Doxygen # treat a multi-line C++ special comment block (i.e. a block of //! or /// # comments) as a brief description. This used to be the default behaviour. # The new default is to treat a multi-line C++ comment block as a detailed # description. 
Set this tag to YES if you prefer the old behaviour instead. MULTILINE_CPP_IS_BRIEF = NO # If the INHERIT_DOCS tag is set to YES (the default) then an undocumented # member inherits the documentation from any documented member that it # re-implements. INHERIT_DOCS = YES # If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce # a new page for each member. If set to NO, the documentation of a member will # be part of the file/class/namespace that contains it. SEPARATE_MEMBER_PAGES = NO # The TAB_SIZE tag can be used to set the number of spaces in a tab. # Doxygen uses this value to replace tabs by spaces in code fragments. TAB_SIZE = 8 # This tag can be used to specify a number of aliases that acts # as commands in the documentation. An alias has the form "name=value". # For example adding "sideeffect=\par Side Effects:\n" will allow you to # put the command \sideeffect (or @sideeffect) in the documentation, which # will result in a user-defined paragraph with heading "Side Effects:". # You can put \n's in the value part of an alias to insert newlines. ALIASES = # Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C # sources only. Doxygen will then generate output that is more tailored for C. # For instance, some of the names that are used will be different. The list # of all members will be omitted, etc. OPTIMIZE_OUTPUT_FOR_C = NO # Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java # sources only. Doxygen will then generate output that is more tailored for # Java. For instance, namespaces will be presented as packages, qualified # scopes will look different, etc. OPTIMIZE_OUTPUT_JAVA = NO # Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran # sources only. Doxygen will then generate output that is more tailored for # Fortran. OPTIMIZE_FOR_FORTRAN = NO # Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL # sources. Doxygen will then generate output that is tailored for # VHDL. OPTIMIZE_OUTPUT_VHDL = NO # Doxygen selects the parser to use depending on the extension of the files it parses. # With this tag you can assign which parser to use for a given extension. # Doxygen has a built-in mapping, but you can override or extend it using this tag. # The format is ext=language, where ext is a file extension, and language is one of # the parsers supported by doxygen: IDL, Java, Javascript, C#, C, C++, D, PHP, # Objective-C, Python, Fortran, VHDL, C, C++. For instance to make doxygen treat # .inc files as Fortran files (default is PHP), and .f files as C (default is Fortran), # use: inc=Fortran f=C. Note that for custom extensions you also need to set FILE_PATTERNS otherwise the files are not read by doxygen. EXTENSION_MAPPING = # If you use STL classes (i.e. std::string, std::vector, etc.) but do not want # to include (a tag file for) the STL sources as input, then you should # set this tag to YES in order to let doxygen match functions declarations and # definitions whose arguments contain STL classes (e.g. func(std::string); v.s. # func(std::string) {}). This also make the inheritance and collaboration # diagrams that involve STL classes more complete and accurate. BUILTIN_STL_SUPPORT = YES # If you use Microsoft's C++/CLI language, you should set this option to YES to # enable parsing support. CPP_CLI_SUPPORT = NO # Set the SIP_SUPPORT tag to YES if your project consists of sip sources only. 
# Doxygen will parse them like normal C++ but will assume all classes use public # instead of private inheritance when no explicit protection keyword is present. SIP_SUPPORT = NO # For Microsoft's IDL there are propget and propput attributes to indicate getter # and setter methods for a property. Setting this option to YES (the default) # will make doxygen to replace the get and set methods by a property in the # documentation. This will only work if the methods are indeed getting or # setting a simple type. If this is not the case, or you want to show the # methods anyway, you should set this option to NO. IDL_PROPERTY_SUPPORT = YES # If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC # tag is set to YES, then doxygen will reuse the documentation of the first # member in the group (if any) for the other members of the group. By default # all members of a group must be documented explicitly. DISTRIBUTE_GROUP_DOC = NO # Set the SUBGROUPING tag to YES (the default) to allow class member groups of # the same type (for instance a group of public functions) to be put as a # subgroup of that type (e.g. under the Public Functions section). Set it to # NO to prevent subgrouping. Alternatively, this can be done per class using # the \nosubgrouping command. SUBGROUPING = YES # When TYPEDEF_HIDES_STRUCT is enabled, a typedef of a struct, union, or enum # is documented as struct, union, or enum with the name of the typedef. So # typedef struct TypeS {} TypeT, will appear in the documentation as a struct # with name TypeT. When disabled the typedef will appear as a member of a file, # namespace, or class. And the struct will be named TypeS. This can typically # be useful for C code in case the coding convention dictates that all compound # types are typedef'ed and only the typedef is referenced, never the tag name. TYPEDEF_HIDES_STRUCT = NO # The SYMBOL_CACHE_SIZE determines the size of the internal cache use to # determine which symbols to keep in memory and which to flush to disk. # When the cache is full, less often used symbols will be written to disk. # For small to medium size projects (<1000 input files) the default value is # probably good enough. For larger projects a too small cache size can cause # doxygen to be busy swapping symbols to and from disk most of the time # causing a significant performance penality. # If the system has enough physical memory increasing the cache will improve the # performance by keeping more symbols in memory. Note that the value works on # a logarithmic scale so increasing the size by one will rougly double the # memory usage. The cache size is given by this formula: # 2^(16+SYMBOL_CACHE_SIZE). The valid range is 0..9, the default is 0, # corresponding to a cache size of 2^16 = 65536 symbols SYMBOL_CACHE_SIZE = 0 #--------------------------------------------------------------------------- # Build related configuration options #--------------------------------------------------------------------------- # If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in # documentation are documented, even if no documentation was available. # Private class members and static file members will be hidden unless # the EXTRACT_PRIVATE and EXTRACT_STATIC tags are set to YES EXTRACT_ALL = NO # If the EXTRACT_PRIVATE tag is set to YES all private members of a class # will be included in the documentation. EXTRACT_PRIVATE = NO # If the EXTRACT_STATIC tag is set to YES all static members of a file # will be included in the documentation. 
EXTRACT_STATIC = NO # If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) # defined locally in source files will be included in the documentation. # If set to NO only classes defined in header files are included. EXTRACT_LOCAL_CLASSES = YES # This flag is only useful for Objective-C code. When set to YES local # methods, which are defined in the implementation section but not in # the interface are included in the documentation. # If set to NO (the default) only methods in the interface are included. EXTRACT_LOCAL_METHODS = NO # If this flag is set to YES, the members of anonymous namespaces will be # extracted and appear in the documentation as a namespace called # 'anonymous_namespace{file}', where file will be replaced with the base # name of the file that contains the anonymous namespace. By default # anonymous namespace are hidden. EXTRACT_ANON_NSPACES = NO # If the HIDE_UNDOC_MEMBERS tag is set to YES, Doxygen will hide all # undocumented members of documented classes, files or namespaces. # If set to NO (the default) these members will be included in the # various overviews, but no documentation section is generated. # This option has no effect if EXTRACT_ALL is enabled. HIDE_UNDOC_MEMBERS = NO # If the HIDE_UNDOC_CLASSES tag is set to YES, Doxygen will hide all # undocumented classes that are normally visible in the class hierarchy. # If set to NO (the default) these classes will be included in the various # overviews. This option has no effect if EXTRACT_ALL is enabled. HIDE_UNDOC_CLASSES = YES # If the HIDE_FRIEND_COMPOUNDS tag is set to YES, Doxygen will hide all # friend (class|struct|union) declarations. # If set to NO (the default) these declarations will be included in the # documentation. HIDE_FRIEND_COMPOUNDS = NO # If the HIDE_IN_BODY_DOCS tag is set to YES, Doxygen will hide any # documentation blocks found inside the body of a function. # If set to NO (the default) these blocks will be appended to the # function's detailed documentation block. HIDE_IN_BODY_DOCS = NO # The INTERNAL_DOCS tag determines if documentation # that is typed after a \internal command is included. If the tag is set # to NO (the default) then the documentation will be excluded. # Set it to YES to include the internal documentation. INTERNAL_DOCS = YES # If the CASE_SENSE_NAMES tag is set to NO then Doxygen will only generate # file names in lower-case letters. If set to YES upper-case letters are also # allowed. This is useful if you have classes or files whose names only differ # in case and if your file system supports case sensitive file names. Windows # and Mac users are advised to set this option to NO. CASE_SENSE_NAMES = YES # If the HIDE_SCOPE_NAMES tag is set to NO (the default) then Doxygen # will show members with their full class and namespace scopes in the # documentation. If set to YES the scope will be hidden. HIDE_SCOPE_NAMES = NO # If the SHOW_INCLUDE_FILES tag is set to YES (the default) then Doxygen # will put a list of the files that are included by a file in the documentation # of that file. SHOW_INCLUDE_FILES = YES # If the INLINE_INFO tag is set to YES (the default) then a tag [inline] # is inserted in the documentation for inline members. INLINE_INFO = YES # If the SORT_MEMBER_DOCS tag is set to YES (the default) then doxygen # will sort the (detailed) documentation of file and class members # alphabetically by member name. If set to NO the members will appear in # declaration order. 
SORT_MEMBER_DOCS = YES # If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the # brief documentation of file, namespace and class members alphabetically # by member name. If set to NO (the default) the members will appear in # declaration order. SORT_BRIEF_DOCS = NO # If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the (brief and detailed) documentation of class members so that constructors and destructors are listed first. If set to NO (the default) the constructors will appear in the respective orders defined by SORT_MEMBER_DOCS and SORT_BRIEF_DOCS. This tag will be ignored for brief docs if SORT_BRIEF_DOCS is set to NO and ignored for detailed docs if SORT_MEMBER_DOCS is set to NO. SORT_MEMBERS_CTORS_1ST = NO # If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the # hierarchy of group names into alphabetical order. If set to NO (the default) # the group names will appear in their defined order. SORT_GROUP_NAMES = NO # If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be # sorted by fully-qualified names, including namespaces. If set to # NO (the default), the class list will be sorted only by class name, # not including the namespace part. # Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. # Note: This option applies only to the class list, not to the # alphabetical list. SORT_BY_SCOPE_NAME = NO # The GENERATE_TODOLIST tag can be used to enable (YES) or # disable (NO) the todo list. This list is created by putting \todo # commands in the documentation. GENERATE_TODOLIST = YES # The GENERATE_TESTLIST tag can be used to enable (YES) or # disable (NO) the test list. This list is created by putting \test # commands in the documentation. GENERATE_TESTLIST = YES # The GENERATE_BUGLIST tag can be used to enable (YES) or # disable (NO) the bug list. This list is created by putting \bug # commands in the documentation. GENERATE_BUGLIST = YES # The GENERATE_DEPRECATEDLIST tag can be used to enable (YES) or # disable (NO) the deprecated list. This list is created by putting # \deprecated commands in the documentation. GENERATE_DEPRECATEDLIST= YES # The ENABLED_SECTIONS tag can be used to enable conditional # documentation sections, marked by \if sectionname ... \endif. ENABLED_SECTIONS = # The MAX_INITIALIZER_LINES tag determines the maximum number of lines # the initial value of a variable or define consists of for it to appear in # the documentation. If the initializer consists of more lines than specified # here it will be hidden. Use a value of 0 to hide initializers completely. # The appearance of the initializer of individual variables and defines in the # documentation can be controlled using \showinitializer or \hideinitializer # command in the documentation regardless of this setting. MAX_INITIALIZER_LINES = 30 # Set the SHOW_USED_FILES tag to NO to disable the list of files generated # at the bottom of the documentation of classes and structs. If set to YES the # list will mention the files that were used to generate the documentation. SHOW_USED_FILES = YES # If the sources in your project are distributed over multiple directories # then setting the SHOW_DIRECTORIES tag to YES will show the directory hierarchy # in the documentation. The default is NO. SHOW_DIRECTORIES = YES # Set the SHOW_FILES tag to NO to disable the generation of the Files page. # This will remove the Files entry from the Quick Index and from the # Folder Tree View (if specified). The default is YES. 
SHOW_FILES = YES # Set the SHOW_NAMESPACES tag to NO to disable the generation of the # Namespaces page. # This will remove the Namespaces entry from the Quick Index # and from the Folder Tree View (if specified). The default is YES. SHOW_NAMESPACES = YES # The FILE_VERSION_FILTER tag can be used to specify a program or script that # doxygen should invoke to get the current version for each file (typically from # the version control system). Doxygen will invoke the program by executing (via # popen()) the command , where is the value of # the FILE_VERSION_FILTER tag, and is the name of an input file # provided by doxygen. Whatever the program writes to standard output # is used as the file version. See the manual for examples. FILE_VERSION_FILTER = # The LAYOUT_FILE tag can be used to specify a layout file which will be parsed by # doxygen. The layout file controls the global structure of the generated output files # in an output format independent way. The create the layout file that represents # doxygen's defaults, run doxygen with the -l option. You can optionally specify a # file name after the option, if omitted DoxygenLayout.xml will be used as the name # of the layout file. LAYOUT_FILE = #--------------------------------------------------------------------------- # configuration options related to warning and progress messages #--------------------------------------------------------------------------- # The QUIET tag can be used to turn on/off the messages that are generated # by doxygen. Possible values are YES and NO. If left blank NO is used. QUIET = NO # The WARNINGS tag can be used to turn on/off the warning messages that are # generated by doxygen. Possible values are YES and NO. If left blank # NO is used. WARNINGS = YES # If WARN_IF_UNDOCUMENTED is set to YES, then doxygen will generate warnings # for undocumented members. If EXTRACT_ALL is set to YES then this flag will # automatically be disabled. WARN_IF_UNDOCUMENTED = NO # If WARN_IF_DOC_ERROR is set to YES, doxygen will generate warnings for # potential errors in the documentation, such as not documenting some # parameters in a documented function, or documenting parameters that # don't exist or using markup commands wrongly. WARN_IF_DOC_ERROR = YES # This WARN_NO_PARAMDOC option can be abled to get warnings for # functions that are documented, but have no documentation for their parameters # or return value. If set to NO (the default) doxygen will only warn about # wrong or incomplete parameter documentation, but not about the absence of # documentation. WARN_NO_PARAMDOC = NO # The WARN_FORMAT tag determines the format of the warning messages that # doxygen can produce. The string should contain the $file, $line, and $text # tags, which will be replaced by the file and line number from which the # warning originated and the warning text. Optionally the format may contain # $version, which will be replaced by the version of the file (if it could # be obtained via FILE_VERSION_FILTER) WARN_FORMAT = "$file:$line: $text" # The WARN_LOGFILE tag can be used to specify a file to which warning # and error messages should be written. If left blank the output is written # to stderr. WARN_LOGFILE = #--------------------------------------------------------------------------- # configuration options related to the input files #--------------------------------------------------------------------------- # The INPUT tag can be used to specify the files and/or directories that contain # documented source files. 
You may enter file names like "myfile.cpp" or # directories like "/usr/src/myproject". Separate the files or directories # with spaces. INPUT = # This tag can be used to specify the character encoding of the source files # that doxygen parses. Internally doxygen uses the UTF-8 encoding, which is # also the default input encoding. Doxygen uses libiconv (or the iconv built # into libc) for the transcoding. See http://www.gnu.org/software/libiconv for # the list of possible encodings. INPUT_ENCODING = UTF-8 # If the value of the INPUT tag contains directories, you can use the # FILE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp # and *.h) to filter out the source-files in the directories. If left # blank the following patterns are tested: # *.c *.cc *.cxx *.cpp *.c++ *.java *.ii *.ixx *.ipp *.i++ *.inl *.h *.hh *.hxx # *.hpp *.h++ *.idl *.odl *.cs *.php *.php3 *.inc *.m *.mm *.py *.f90 FILE_PATTERNS = *.h \ *.hpp # The RECURSIVE tag can be used to turn specify whether or not subdirectories # should be searched for input files as well. Possible values are YES and NO. # If left blank NO is used. RECURSIVE = YES # The EXCLUDE tag can be used to specify files and/or directories that should # excluded from the INPUT source files. This way you can easily exclude a # subdirectory from a directory tree whose root is specified with the INPUT tag. EXCLUDE = doc \ intl \ m4 \ man \ misc \ po \ src/testing \ config.h \ gettext.h \ my_config.h # The EXCLUDE_SYMLINKS tag can be used select whether or not files or # directories that are symbolic links (a Unix filesystem feature) are excluded # from the input. EXCLUDE_SYMLINKS = NO # If the value of the INPUT tag contains directories, you can use the # EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude # certain files from those directories. Note that the wildcards are matched # against the file with absolute path, so to exclude all test directories # for example use the pattern */test/* EXCLUDE_PATTERNS = # The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names # (namespaces, classes, functions, etc.) that should be excluded from the # output. The symbol name can be a fully qualified name, a word, or if the # wildcard * is used, a substring. Examples: ANamespace, AClass, # AClass::ANamespace, ANamespace::*Test EXCLUDE_SYMBOLS = # The EXAMPLE_PATH tag can be used to specify one or more files or # directories that contain example code fragments that are included (see # the \include command). EXAMPLE_PATH = # If the value of the EXAMPLE_PATH tag contains directories, you can use the # EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp # and *.h) to filter out the source-files in the directories. If left # blank all files are included. EXAMPLE_PATTERNS = * # If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be # searched for input files to be used with the \include or \dontinclude # commands irrespective of the value of the RECURSIVE tag. # Possible values are YES and NO. If left blank NO is used. EXAMPLE_RECURSIVE = NO # The IMAGE_PATH tag can be used to specify one or more files or # directories that contain image that are included in the documentation (see # the \image command). IMAGE_PATH = doc/dar_s_doc.jpg # The INPUT_FILTER tag can be used to specify a program that doxygen should # invoke to filter for each input file. 
Doxygen will invoke the filter program # by executing (via popen()) the command , where # is the value of the INPUT_FILTER tag, and is the name of an # input file. Doxygen will then use the output that the filter program writes # to standard output. # If FILTER_PATTERNS is specified, this tag will be # ignored. INPUT_FILTER = # The FILTER_PATTERNS tag can be used to specify filters on a per file pattern # basis. # Doxygen will compare the file name with each pattern and apply the # filter if there is a match. # The filters are a list of the form: # pattern=filter (like *.cpp=my_cpp_filter). See INPUT_FILTER for further # info on how filters are used. If FILTER_PATTERNS is empty, INPUT_FILTER # is applied to all files. FILTER_PATTERNS = # If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using # INPUT_FILTER) will be used to filter the input files when producing source # files to browse (i.e. when SOURCE_BROWSER is set to YES). FILTER_SOURCE_FILES = NO #--------------------------------------------------------------------------- # configuration options related to source browsing #--------------------------------------------------------------------------- # If the SOURCE_BROWSER tag is set to YES then a list of source files will # be generated. Documented entities will be cross-referenced with these sources. # Note: To get rid of all source code in the generated output, make sure also # VERBATIM_HEADERS is set to NO. SOURCE_BROWSER = YES # Setting the INLINE_SOURCES tag to YES will include the body # of functions and classes directly in the documentation. INLINE_SOURCES = NO # Setting the STRIP_CODE_COMMENTS tag to YES (the default) will instruct # doxygen to hide any special comment blocks from generated source code # fragments. Normal C and C++ comments will always remain visible. STRIP_CODE_COMMENTS = YES # If the REFERENCED_BY_RELATION tag is set to YES # then for each documented function all documented # functions referencing it will be listed. REFERENCED_BY_RELATION = YES # If the REFERENCES_RELATION tag is set to YES # then for each documented function all documented entities # called/used by that function will be listed. REFERENCES_RELATION = YES # If the REFERENCES_LINK_SOURCE tag is set to YES (the default) # and SOURCE_BROWSER tag is set to YES, then the hyperlinks from # functions in REFERENCES_RELATION and REFERENCED_BY_RELATION lists will # link to the source code. # Otherwise they will link to the documentation. REFERENCES_LINK_SOURCE = YES # If the USE_HTAGS tag is set to YES then the references to source code # will point to the HTML generated by the htags(1) tool instead of doxygen # built-in source browser. The htags tool is part of GNU's global source # tagging system (see http://www.gnu.org/software/global/global.html). You # will need version 4.8.6 or higher. USE_HTAGS = NO # If the VERBATIM_HEADERS tag is set to YES (the default) then Doxygen # will generate a verbatim copy of the header file for each class for # which an include is specified. Set to NO to disable this. VERBATIM_HEADERS = NO #--------------------------------------------------------------------------- # configuration options related to the alphabetical class index #--------------------------------------------------------------------------- # If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index # of all compounds will be generated. Enable this if the project # contains a lot of classes, structs, unions or interfaces. 
ALPHABETICAL_INDEX = YES # If the alphabetical index is enabled (see ALPHABETICAL_INDEX) then # the COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns # in which this list will be split (can be a number in the range [1..20]) COLS_IN_ALPHA_INDEX = 5 # In case all classes in a project start with a common prefix, all # classes will be put under the same header in the alphabetical index. # The IGNORE_PREFIX tag can be used to specify one or more prefixes that # should be ignored while generating the index headers. IGNORE_PREFIX = #--------------------------------------------------------------------------- # configuration options related to the HTML output #--------------------------------------------------------------------------- # If the GENERATE_HTML tag is set to YES (the default) Doxygen will # generate HTML output. GENERATE_HTML = YES # The HTML_OUTPUT tag is used to specify where the HTML docs will be put. # If a relative path is entered the value of OUTPUT_DIRECTORY will be # put in front of it. If left blank `html' will be used as the default path. HTML_OUTPUT = html # The HTML_FILE_EXTENSION tag can be used to specify the file extension for # each generated HTML page (for example: .htm,.php,.asp). If it is left blank # doxygen will generate files with .html extension. HTML_FILE_EXTENSION = .html # The HTML_HEADER tag can be used to specify a personal HTML header for # each generated HTML page. If it is left blank doxygen will generate a # standard header. HTML_HEADER = # The HTML_FOOTER tag can be used to specify a personal HTML footer for # each generated HTML page. If it is left blank doxygen will generate a # standard footer. HTML_FOOTER = # The HTML_STYLESHEET tag can be used to specify a user-defined cascading # style sheet that is used by each HTML page. It can be used to # fine-tune the look of the HTML output. If the tag is left blank doxygen # will generate a default style sheet. Note that doxygen will try to copy # the style sheet file to the HTML output directory, so don't put your own # stylesheet in the HTML output directory as well, or it will be erased! HTML_STYLESHEET = # If the HTML_ALIGN_MEMBERS tag is set to YES, the members of classes, # files or namespaces will be aligned in HTML using tables. If set to # NO a bullet list will be used. HTML_ALIGN_MEMBERS = YES # If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML # documentation will contain sections that can be hidden and shown after the # page has loaded. For this to work a browser that supports # JavaScript and DHTML is required (for instance Mozilla 1.0+, Firefox # Netscape 6.0+, Internet explorer 5.0+, Konqueror, or Safari). HTML_DYNAMIC_SECTIONS = NO # If the GENERATE_DOCSET tag is set to YES, additional index files # will be generated that can be used as input for Apple's Xcode 3 # integrated development environment, introduced with OSX 10.5 (Leopard). # To create a documentation set, doxygen will generate a Makefile in the # HTML output directory. Running make will produce the docset in that # directory and running "make install" will install the docset in # ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find # it at startup. # See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html for more information. GENERATE_DOCSET = NO # When GENERATE_DOCSET tag is set to YES, this tag determines the name of the # feed. 
A documentation feed provides an umbrella under which multiple # documentation sets from a single provider (such as a company or product suite) # can be grouped. DOCSET_FEEDNAME = "Doxygen generated docs" # When GENERATE_DOCSET tag is set to YES, this tag specifies a string that # should uniquely identify the documentation set bundle. This should be a # reverse domain-name style string, e.g. com.mycompany.MyDocSet. Doxygen # will append .docset to the name. DOCSET_BUNDLE_ID = org.doxygen.Project # If the GENERATE_HTMLHELP tag is set to YES, additional index files # will be generated that can be used as input for tools like the # Microsoft HTML help workshop to generate a compiled HTML help file (.chm) # of the generated HTML documentation. GENERATE_HTMLHELP = NO # If the GENERATE_HTMLHELP tag is set to YES, the CHM_FILE tag can # be used to specify the file name of the resulting .chm file. You # can add a path in front of the file if the result should not be # written to the html output directory. CHM_FILE = # If the GENERATE_HTMLHELP tag is set to YES, the HHC_LOCATION tag can # be used to specify the location (absolute path including file name) of # the HTML help compiler (hhc.exe). If non-empty doxygen will try to run # the HTML help compiler on the generated index.hhp. HHC_LOCATION = # If the GENERATE_HTMLHELP tag is set to YES, the GENERATE_CHI flag # controls if a separate .chi index file is generated (YES) or that # it should be included in the master .chm file (NO). GENERATE_CHI = NO # If the GENERATE_HTMLHELP tag is set to YES, the CHM_INDEX_ENCODING # is used to encode HtmlHelp index (hhk), content (hhc) and project file # content. CHM_INDEX_ENCODING = # If the GENERATE_HTMLHELP tag is set to YES, the BINARY_TOC flag # controls whether a binary table of contents is generated (YES) or a # normal table of contents (NO) in the .chm file. BINARY_TOC = NO # The TOC_EXPAND flag can be set to YES to add extra items for group members # to the contents of the HTML help documentation and to the tree view. TOC_EXPAND = NO # If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and QHP_VIRTUAL_FOLDER # are set, an additional index file will be generated that can be used as input for # Qt's qhelpgenerator to generate a Qt Compressed Help (.qch) of the generated # HTML documentation. GENERATE_QHP = NO # If the QHG_LOCATION tag is specified, the QCH_FILE tag can # be used to specify the file name of the resulting .qch file. # The path specified is relative to the HTML output folder. QCH_FILE = # The QHP_NAMESPACE tag specifies the namespace to use when generating # Qt Help Project output. For more information please see # http://doc.trolltech.com/qthelpproject.html#namespace QHP_NAMESPACE = # The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating # Qt Help Project output. For more information please see # http://doc.trolltech.com/qthelpproject.html#virtual-folders QHP_VIRTUAL_FOLDER = doc # If QHP_CUST_FILTER_NAME is set, it specifies the name of a custom filter to add. # For more information please see # http://doc.trolltech.com/qthelpproject.html#custom-filters QHP_CUST_FILTER_NAME = # The QHP_CUST_FILT_ATTRS tag specifies the list of the attributes of the custom filter to add.For more information please see # Qt Help Project / Custom Filters. QHP_CUST_FILTER_ATTRS = # The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this project's # filter section matches. # Qt Help Project / Filter Attributes. 
QHP_SECT_FILTER_ATTRS = # If the GENERATE_QHP tag is set to YES, the QHG_LOCATION tag can # be used to specify the location of Qt's qhelpgenerator. # If non-empty doxygen will try to run qhelpgenerator on the generated # .qhp file. QHG_LOCATION = # The DISABLE_INDEX tag can be used to turn on/off the condensed index at # top of each HTML page. The value NO (the default) enables the index and # the value YES disables it. DISABLE_INDEX = NO # This tag can be used to set the number of enum values (range [1..20]) # that doxygen will group on one line in the generated HTML documentation. ENUM_VALUES_PER_LINE = 4 # The GENERATE_TREEVIEW tag is used to specify whether a tree-like index # structure should be generated to display hierarchical information. # If the tag value is set to YES, a side panel will be generated # containing a tree-like index structure (just like the one that # is generated for HTML Help). For this to work a browser that supports # JavaScript, DHTML, CSS and frames is required (i.e. any modern browser). # Windows users are probably better off using the HTML help feature. GENERATE_TREEVIEW = NO # By enabling USE_INLINE_TREES, doxygen will generate the Groups, Directories, # and Class Hierarchy pages using a tree view instead of an ordered list. USE_INLINE_TREES = NO # If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be # used to set the initial width (in pixels) of the frame in which the tree # is shown. TREEVIEW_WIDTH = 250 # Use this tag to change the font size of Latex formulas included # as images in the HTML documentation. The default is 10. Note that # when you change the font size after a successful doxygen run you need # to manually remove any form_*.png images from the HTML output directory # to force them to be regenerated. FORMULA_FONTSIZE = 10 # When the SEARCHENGINE tag is enable doxygen will generate a search box for the HTML output. The underlying search engine uses javascript # and DHTML and should work on any modern browser. Note that when using HTML help (GENERATE_HTMLHELP) or Qt help (GENERATE_QHP) # there is already a search function so this one should typically # be disabled. SEARCHENGINE = YES #--------------------------------------------------------------------------- # configuration options related to the LaTeX output #--------------------------------------------------------------------------- # If the GENERATE_LATEX tag is set to YES (the default) Doxygen will # generate Latex output. GENERATE_LATEX = NO # The LATEX_OUTPUT tag is used to specify where the LaTeX docs will be put. # If a relative path is entered the value of OUTPUT_DIRECTORY will be # put in front of it. If left blank `latex' will be used as the default path. LATEX_OUTPUT = latex # The LATEX_CMD_NAME tag can be used to specify the LaTeX command name to be # invoked. If left blank `latex' will be used as the default command name. LATEX_CMD_NAME = latex # The MAKEINDEX_CMD_NAME tag can be used to specify the command name to # generate index for LaTeX. If left blank `makeindex' will be used as the # default command name. MAKEINDEX_CMD_NAME = makeindex # If the COMPACT_LATEX tag is set to YES Doxygen generates more compact # LaTeX documents. This may be useful for small projects and may help to # save some trees in general. COMPACT_LATEX = NO # The PAPER_TYPE tag can be used to set the paper type that is used # by the printer. Possible values are: a4, a4wide, letter, legal and # executive. If left blank a4wide will be used. 
PAPER_TYPE = a4wide # The EXTRA_PACKAGES tag can be to specify one or more names of LaTeX # packages that should be included in the LaTeX output. EXTRA_PACKAGES = # The LATEX_HEADER tag can be used to specify a personal LaTeX header for # the generated latex document. The header should contain everything until # the first chapter. If it is left blank doxygen will generate a # standard header. Notice: only use this tag if you know what you are doing! LATEX_HEADER = # If the PDF_HYPERLINKS tag is set to YES, the LaTeX that is generated # is prepared for conversion to pdf (using ps2pdf). The pdf file will # contain links (just like the HTML output) instead of page references # This makes the output suitable for online browsing using a pdf viewer. PDF_HYPERLINKS = NO # If the USE_PDFLATEX tag is set to YES, pdflatex will be used instead of # plain latex in the generated Makefile. Set this option to YES to get a # higher quality PDF documentation. USE_PDFLATEX = NO # If the LATEX_BATCHMODE tag is set to YES, doxygen will add the \\batchmode. # command to the generated LaTeX files. This will instruct LaTeX to keep # running if errors occur, instead of asking the user for help. # This option is also used when generating formulas in HTML. LATEX_BATCHMODE = NO # If LATEX_HIDE_INDICES is set to YES then doxygen will not # include the index chapters (such as File Index, Compound Index, etc.) # in the output. LATEX_HIDE_INDICES = NO # If LATEX_SOURCE_CODE is set to YES then doxygen will include source code with syntax highlighting in the LaTeX output. Note that which sources are shown also depends on other settings such as SOURCE_BROWSER. LATEX_SOURCE_CODE = NO #--------------------------------------------------------------------------- # configuration options related to the RTF output #--------------------------------------------------------------------------- # If the GENERATE_RTF tag is set to YES Doxygen will generate RTF output # The RTF output is optimized for Word 97 and may not look very pretty with # other RTF readers or editors. GENERATE_RTF = NO # The RTF_OUTPUT tag is used to specify where the RTF docs will be put. # If a relative path is entered the value of OUTPUT_DIRECTORY will be # put in front of it. If left blank `rtf' will be used as the default path. RTF_OUTPUT = rtf # If the COMPACT_RTF tag is set to YES Doxygen generates more compact # RTF documents. This may be useful for small projects and may help to # save some trees in general. COMPACT_RTF = NO # If the RTF_HYPERLINKS tag is set to YES, the RTF that is generated # will contain hyperlink fields. The RTF file will # contain links (just like the HTML output) instead of page references. # This makes the output suitable for online browsing using WORD or other # programs which support those fields. # Note: wordpad (write) and others do not support links. RTF_HYPERLINKS = NO # Load stylesheet definitions from file. Syntax is similar to doxygen's # config file, i.e. a series of assignments. You only have to provide # replacements, missing definitions are set to their default value. RTF_STYLESHEET_FILE = # Set optional variables used in the generation of an rtf document. # Syntax is similar to doxygen's config file. 
RTF_EXTENSIONS_FILE = #--------------------------------------------------------------------------- # configuration options related to the man page output #--------------------------------------------------------------------------- # If the GENERATE_MAN tag is set to YES (the default) Doxygen will # generate man pages GENERATE_MAN = NO # The MAN_OUTPUT tag is used to specify where the man pages will be put. # If a relative path is entered the value of OUTPUT_DIRECTORY will be # put in front of it. If left blank `man' will be used as the default path. MAN_OUTPUT = man # The MAN_EXTENSION tag determines the extension that is added to # the generated man pages (default is the subroutine's section .3) MAN_EXTENSION = .3 # If the MAN_LINKS tag is set to YES and Doxygen generates man output, # then it will generate one additional man file for each entity # documented in the real man page(s). These additional files # only source the real man page, but without them the man command # would be unable to find the correct page. The default is NO. MAN_LINKS = NO #--------------------------------------------------------------------------- # configuration options related to the XML output #--------------------------------------------------------------------------- # If the GENERATE_XML tag is set to YES Doxygen will # generate an XML file that captures the structure of # the code including all documentation. GENERATE_XML = NO # The XML_OUTPUT tag is used to specify where the XML pages will be put. # If a relative path is entered the value of OUTPUT_DIRECTORY will be # put in front of it. If left blank `xml' will be used as the default path. XML_OUTPUT = xml # The XML_SCHEMA tag can be used to specify an XML schema, # which can be used by a validating XML parser to check the # syntax of the XML files. XML_SCHEMA = # The XML_DTD tag can be used to specify an XML DTD, # which can be used by a validating XML parser to check the # syntax of the XML files. XML_DTD = # If the XML_PROGRAMLISTING tag is set to YES Doxygen will # dump the program listings (including syntax highlighting # and cross-referencing information) to the XML output. Note that # enabling this will significantly increase the size of the XML output. XML_PROGRAMLISTING = YES #--------------------------------------------------------------------------- # configuration options for the AutoGen Definitions output #--------------------------------------------------------------------------- # If the GENERATE_AUTOGEN_DEF tag is set to YES Doxygen will # generate an AutoGen Definitions (see autogen.sf.net) file # that captures the structure of the code including all # documentation. Note that this feature is still experimental # and incomplete at the moment. GENERATE_AUTOGEN_DEF = NO #--------------------------------------------------------------------------- # configuration options related to the Perl module output #--------------------------------------------------------------------------- # If the GENERATE_PERLMOD tag is set to YES Doxygen will # generate a Perl module file that captures the structure of # the code including all documentation. Note that this # feature is still experimental and incomplete at the # moment. GENERATE_PERLMOD = NO # If the PERLMOD_LATEX tag is set to YES Doxygen will generate # the necessary Makefile rules, Perl scripts and LaTeX code to be able # to generate PDF and DVI output from the Perl module output. 
PERLMOD_LATEX = NO # If the PERLMOD_PRETTY tag is set to YES the Perl module output will be # nicely formatted so it can be parsed by a human reader. # This is useful # if you want to understand what is going on. # On the other hand, if this # tag is set to NO the size of the Perl module output will be much smaller # and Perl will parse it just the same. PERLMOD_PRETTY = YES # The names of the make variables in the generated doxyrules.make file # are prefixed with the string contained in PERLMOD_MAKEVAR_PREFIX. # This is useful so different doxyrules.make files included by the same # Makefile don't overwrite each other's variables. PERLMOD_MAKEVAR_PREFIX = #--------------------------------------------------------------------------- # Configuration options related to the preprocessor #--------------------------------------------------------------------------- # If the ENABLE_PREPROCESSING tag is set to YES (the default) Doxygen will # evaluate all C-preprocessor directives found in the sources and include # files. ENABLE_PREPROCESSING = YES # If the MACRO_EXPANSION tag is set to YES Doxygen will expand all macro # names in the source code. If set to NO (the default) only conditional # compilation will be performed. Macro expansion can be done in a controlled # way by setting EXPAND_ONLY_PREDEF to YES. MACRO_EXPANSION = NO # If the EXPAND_ONLY_PREDEF and MACRO_EXPANSION tags are both set to YES # then the macro expansion is limited to the macros specified with the # PREDEFINED and EXPAND_AS_DEFINED tags. EXPAND_ONLY_PREDEF = NO # If the SEARCH_INCLUDES tag is set to YES (the default) the includes files # in the INCLUDE_PATH (see below) will be search if a #include is found. SEARCH_INCLUDES = YES # The INCLUDE_PATH tag can be used to specify one or more directories that # contain include files that are not input files but should be processed by # the preprocessor. INCLUDE_PATH = # You can use the INCLUDE_FILE_PATTERNS tag to specify one or more wildcard # patterns (like *.h and *.hpp) to filter out the header-files in the # directories. If left blank, the patterns specified with FILE_PATTERNS will # be used. INCLUDE_FILE_PATTERNS = # The PREDEFINED tag can be used to specify one or more macro names that # are defined before the preprocessor is started (similar to the -D option of # gcc). The argument of the tag is a list of macros of the form: name # or name=definition (no spaces). If the definition and the = are # omitted =1 is assumed. To prevent a macro definition from being # undefined via #undef or recursively expanded use the := operator # instead of the = operator. PREDEFINED = # If the MACRO_EXPANSION and EXPAND_ONLY_PREDEF tags are set to YES then # this tag can be used to specify a list of macro names that should be expanded. # The macro definition that is found in the sources will be used. # Use the PREDEFINED tag if you want to use a different macro definition. EXPAND_AS_DEFINED = # If the SKIP_FUNCTION_MACROS tag is set to YES (the default) then # doxygen's preprocessor will remove all function-like macros that are alone # on a line, have an all uppercase name, and do not end with a semicolon. Such # function macros are typically used for boiler-plate code, and will confuse # the parser if not removed. 
SKIP_FUNCTION_MACROS = YES #--------------------------------------------------------------------------- # Configuration::additions related to external references #--------------------------------------------------------------------------- # The TAGFILES option can be used to specify one or more tagfiles. # Optionally an initial location of the external documentation # can be added for each tagfile. The format of a tag file without # this location is as follows: # # TAGFILES = file1 file2 ... # Adding location for the tag files is done as follows: # # TAGFILES = file1=loc1 "file2 = loc2" ... # where "loc1" and "loc2" can be relative or absolute paths or # URLs. If a location is present for each tag, the installdox tool # does not have to be run to correct the links. # Note that each tag file must have a unique name # (where the name does NOT include the path) # If a tag file is not located in the directory in which doxygen # is run, you must also specify the path to the tagfile here. TAGFILES = # When a file name is specified after GENERATE_TAGFILE, doxygen will create # a tag file that is based on the input files it reads. GENERATE_TAGFILE = # If the ALLEXTERNALS tag is set to YES all external classes will be listed # in the class index. If set to NO only the inherited external classes # will be listed. ALLEXTERNALS = NO # If the EXTERNAL_GROUPS tag is set to YES all external groups will be listed # in the modules index. If set to NO, only the current project's groups will # be listed. EXTERNAL_GROUPS = YES # The PERL_PATH should be the absolute path and name of the perl script # interpreter (i.e. the result of `which perl'). PERL_PATH = /usr/bin/perl #--------------------------------------------------------------------------- # Configuration options related to the dot tool #--------------------------------------------------------------------------- # If the CLASS_DIAGRAMS tag is set to YES (the default) Doxygen will # generate a inheritance diagram (in HTML, RTF and LaTeX) for classes with base # or super classes. Setting the tag to NO turns the diagrams off. Note that # this option is superseded by the HAVE_DOT option below. This is only a # fallback. It is recommended to install and use dot, since it yields more # powerful graphs. CLASS_DIAGRAMS = NO # You can define message sequence charts within doxygen comments using the \msc # command. Doxygen will then run the mscgen tool (see # http://www.mcternan.me.uk/mscgen/) to produce the chart and insert it in the # documentation. The MSCGEN_PATH tag allows you to specify the directory where # the mscgen tool resides. If left empty the tool is assumed to be found in the # default search path. MSCGEN_PATH = # If set to YES, the inheritance and collaboration graphs will hide # inheritance and usage relations if the target is undocumented # or is not a class. HIDE_UNDOC_RELATIONS = YES # If you set the HAVE_DOT tag to YES then doxygen will assume the dot tool is # available from the path. This tool is part of Graphviz, a graph visualization # toolkit from AT&T and Lucent Bell Labs. The other options in this section # have no effect if this option is set to NO (the default) HAVE_DOT = NO # By default doxygen will write a font called FreeSans.ttf to the output # directory and reference it in all dot files that doxygen generates. This # font does not include all possible unicode characters however, so when you need # these (or just want a differently looking font) you can specify the font name # using DOT_FONTNAME. 
You need need to make sure dot is able to find the font, # which can be done by putting it in a standard location or by setting the # DOTFONTPATH environment variable or by setting DOT_FONTPATH to the directory # containing the font. DOT_FONTNAME = FreeSans # The DOT_FONTSIZE tag can be used to set the size of the font of dot graphs. # The default size is 10pt. DOT_FONTSIZE = 10 # By default doxygen will tell dot to use the output directory to look for the # FreeSans.ttf font (which doxygen will put there itself). If you specify a # different font using DOT_FONTNAME you can set the path where dot # can find it using this tag. DOT_FONTPATH = # If the CLASS_GRAPH and HAVE_DOT tags are set to YES then doxygen # will generate a graph for each documented class showing the direct and # indirect inheritance relations. Setting this tag to YES will force the # the CLASS_DIAGRAMS tag to NO. CLASS_GRAPH = YES # If the COLLABORATION_GRAPH and HAVE_DOT tags are set to YES then doxygen # will generate a graph for each documented class showing the direct and # indirect implementation dependencies (inheritance, containment, and # class references variables) of the class with other documented classes. COLLABORATION_GRAPH = YES # If the GROUP_GRAPHS and HAVE_DOT tags are set to YES then doxygen # will generate a graph for groups, showing the direct groups dependencies GROUP_GRAPHS = YES # If the UML_LOOK tag is set to YES doxygen will generate inheritance and # collaboration diagrams in a style similar to the OMG's Unified Modeling # Language. UML_LOOK = YES # If set to YES, the inheritance and collaboration graphs will show the # relations between templates and their instances. TEMPLATE_RELATIONS = YES # If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDE_GRAPH, and HAVE_DOT # tags are set to YES then doxygen will generate a graph for each documented # file showing the direct and indirect include dependencies of the file with # other documented files. INCLUDE_GRAPH = YES # If the ENABLE_PREPROCESSING, SEARCH_INCLUDES, INCLUDED_BY_GRAPH, and # HAVE_DOT tags are set to YES then doxygen will generate a graph for each # documented header file showing the documented files that directly or # indirectly include this file. INCLUDED_BY_GRAPH = YES # If the CALL_GRAPH and HAVE_DOT options are set to YES then # doxygen will generate a call dependency graph for every global function # or class method. Note that enabling this option will significantly increase # the time of a run. So in most cases it will be better to enable call graphs # for selected functions only using the \callgraph command. CALL_GRAPH = NO # If the CALLER_GRAPH and HAVE_DOT tags are set to YES then # doxygen will generate a caller dependency graph for every global function # or class method. Note that enabling this option will significantly increase # the time of a run. So in most cases it will be better to enable caller # graphs for selected functions only using the \callergraph command. CALLER_GRAPH = NO # If the GRAPHICAL_HIERARCHY and HAVE_DOT tags are set to YES then doxygen # will graphical hierarchy of all classes instead of a textual one. GRAPHICAL_HIERARCHY = YES # If the DIRECTORY_GRAPH, SHOW_DIRECTORIES and HAVE_DOT tags are set to YES # then doxygen will show the dependencies a directory has on other directories # in a graphical way. The dependency relations are determined by the #include # relations between the files in the directories. 
DIRECTORY_GRAPH = YES # The DOT_IMAGE_FORMAT tag can be used to set the image format of the images # generated by dot. Possible values are png, jpg, or gif # If left blank png will be used. DOT_IMAGE_FORMAT = png # The tag DOT_PATH can be used to specify the path where the dot tool can be # found. If left blank, it is assumed the dot tool can be found in the path. DOT_PATH = # The DOTFILE_DIRS tag can be used to specify one or more directories that # contain dot files that are included in the documentation (see the # \dotfile command). DOTFILE_DIRS = # The DOT_GRAPH_MAX_NODES tag can be used to set the maximum number of # nodes that will be shown in the graph. If the number of nodes in a graph # becomes larger than this value, doxygen will truncate the graph, which is # visualized by representing a node as a red box. Note that doxygen if the # number of direct children of the root node in a graph is already larger than # DOT_GRAPH_MAX_NODES then the graph will not be shown at all. Also note # that the size of a graph can be further restricted by MAX_DOT_GRAPH_DEPTH. DOT_GRAPH_MAX_NODES = 50 # The MAX_DOT_GRAPH_DEPTH tag can be used to set the maximum depth of the # graphs generated by dot. A depth value of 3 means that only nodes reachable # from the root by following a path via at most 3 edges will be shown. Nodes # that lay further from the root node will be omitted. Note that setting this # option to 1 or 2 may greatly reduce the computation time needed for large # code bases. Also note that the size of a graph can be further restricted by # DOT_GRAPH_MAX_NODES. Using a depth of 0 means no depth restriction. MAX_DOT_GRAPH_DEPTH = 1000 # Set the DOT_TRANSPARENT tag to YES to generate images with a transparent # background. This is disabled by default, because dot on Windows does not # seem to support this out of the box. Warning: Depending on the platform used, # enabling this option may lead to badly anti-aliased labels on the edges of # a graph (i.e. they become hard to read). DOT_TRANSPARENT = NO # Set the DOT_MULTI_TARGETS tag to YES allow dot to generate multiple output # files in one run (i.e. multiple -o and -T options on the command line). This # makes dot run faster, but since only newer versions of dot (>1.8.10) # support this, this feature is disabled by default. DOT_MULTI_TARGETS = NO # If the GENERATE_LEGEND tag is set to YES (the default) Doxygen will # generate a legend page explaining the meaning of the various boxes and # arrows in the dot generated graphs. GENERATE_LEGEND = YES # If the DOT_CLEANUP tag is set to YES (the default) Doxygen will # remove the intermediate dot files that are used to generate # the various graphs. DOT_CLEANUP = YES dar-2.4.8/doc/README0000644000175000017430000000026212003275573010647 00000000000000 Dar Documentation Main Directory All the documentation has been moved to HTML. To access it, please point your web browser to the index.html file found in this directory. dar-2.4.8/doc/Limitations.html0000644000175000017430000002303412010477201013141 00000000000000 DAR's Limitations
Dar Documentation


DAR's Limitations





Here follows a description of the known limitations you should consult before creating a bug report for dar:

Fixed Limits

    • The size of SLICES may be limited by the file system or kernel (the maximum file size is 2 GB with Linux kernel 2.2.x); other limits may exist depending on the filesystem used.
    • The number of SLICES is only limited by the length of the filenames: with a basename of 10 characters on a filesystem supporting at most 256 characters per filename, you could already get up to 10^241 slices (a 1 followed by 241 zeros). As soon as your filesystem supports bigger files or longer filenames, dar will follow without change.
    • dar_manager can gather up to 65534 different backups, not more. This limit should be high enough not to be a problem.
    • When using a listing file to define which files to operate on, each line of the listing file must not be longer than 20480 bytes, otherwise a new line is considered after the 20480th byte (see the sketch just after this list).
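As an illustration of such a listing file (a minimal sketch, assuming the --include-from-file (-[) option described in dar's man page; the file name files.lst and the paths are arbitrary), a file files.lst containing one path per line:

   usr/local/bin
   etc/fstab
   home/joe/projects

can then be passed to dar with:

   dar -c my_backup -R / -[ files.lst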

System variable limits

Memory

Dar uses virtual memory (= RAM + swap) to be able to add the list of saved files at the end of each archive. Dar uses its own integer type (called "infinint") which does not have a limit (unlike 32-bit or 64-bit integers). This already makes dar able to manage Zettabyte volumes and beyond, even if today's systems cannot yet manage such file sizes. Nevertheless, this comes with a memory and CPU overhead, in addition to the C++ overhead of the data structures. Altogether, dar needs an average of 650 bytes of virtual memory per saved file with dar-2.1.0 and around 850 with dar-2.4.x (that's the price to pay for new features). Thus, for example, if you have 110,000 files to save, whatever the total amount of data to save, dar will require around 90 MB of virtual memory.
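As a rough rule of thumb (only an estimate derived from the ~850 bytes per file figure above, not an exact formula), you can count the entries to be saved and multiply:

   find /home | wc -l     # number of files and directories that would be saved
   # multiply the result by ~850 bytes: e.g. 110,000 entries -> about 90 MB of virtual memory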

Now, when doing a catalogue extraction or a differential backup, dar holds two catalogues in memory, so the amount of memory needed is doubled (180 MB in the example). Why? Because for a differential backup, dar starts with the catalogue of the archive of reference, which is needed to know which files to save and which not to save, and on the other hand builds the catalogue of the new archive all along the process. As for catalogue extraction, the process is equivalent to making a differential backup just after a full backup.

As you can guess, merging two archives into a third one requires even more memory (memory to store the first archive to merge, the second archive to merge and the resulting archive to produce).

This memory issue is not a limit by itself, but you need enough virtual memory to be able to save your data (if necessary you can still add swap space, as a partition or as a plain file).

Integers

To overcome the previously explained memory issue, dar can be built in another mode. In this mode, "infinint" is replaced by 32-bit or 64-bit integers, as selected by the --enable-mode=32 or --enable-mode=64 options given to the configure script. The executables built this way (dar, dar_xform, dar_slave and dar_manager) run faster and use much less memory than the "full" versions using "infinint". But yes, there are drawbacks: slice size, file size, dates, number of files to backup, total archive size (sum of all slices), etc., are bounded by the maximum value of the integer used, which is 4,294,967,296 for 32-bit and 18,446,744,073,709,551,616 for 64-bit integers. Concretely, the 32-bit version cannot handle dates after year 2106 nor file sizes over 4 GB, while the 64-bit version cannot handle dates after around 500 billion years (which is longer than the estimated age of the Universe: 15 billion years) nor files larger than around 18 EB (18 exabytes).
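For example, building such a 64-bit variant from the source package follows the usual sequence (a sketch only; --enable-mode is the dar-specific part, the rest is the standard autotools procedure):

   ./configure --enable-mode=64
   make
   make install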

What is the behavior when such a limit is reached? For compatibility with the rest of the code, limited-length integers (32-bit or 64-bit for now) cannot be used as-is: they are enclosed in a C++ class which reports overflow in arithmetic operations. Archives generated by all the different versions of dar stay compatible with one another, but the 32-bit and 64-bit builds will not be able to read or produce all possible archives. In that case, the dar suite programs will abort with an error message asking you to use the "full" version of dar.

Command line

On several systems, command-line long options are not available. This is due to the fact that dar relies on GNU getopt. Systems like FreeBSD do not provide GNU getopt by default; the getopt function of their standard library supports neither long options nor optional arguments. On such systems you will have to use short options only, and to overcome the lack of optional arguments you need to give the argument explicitly. For example, in place of "-z" use "-z 9", and so on (see dar's man page, section "EXPLICIT OPTIONAL ARGUMENTS"). All of dar's features remain available with FreeBSD's getopt, just using short options and explicit arguments.
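For instance (a sketch reusing the options shown in the usage examples of this documentation), the following command works with both GNU getopt and FreeBSD's getopt, because the compression level is given explicitly instead of relying on an optional argument:

   dar -c my_backup -R / -g usr -z 9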

Alternatively, you can install GNU getopt as a separate library called libgnugetopt. If the include file <getopt.h> is also available, the configure script will detect it and use this library. This way you can have long options on FreeBSD, for example.

Another point concerns the command-line length limitation. All systems (correct me if I am wrong) limit the size of the command line. If you need to pass more options to dar than your system can afford, you can use the -B option and put all dar's arguments (or just some of them) in the file given to that -B option. -B can be used several times on the command line and is recursive (a file read through -B may itself use -B).
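As an illustration (a minimal sketch; the file name common.dcf is arbitrary and the leading # is assumed to introduce a comment as in dar's configuration files), a file common.dcf containing:

   # options shared by all my backups
   -R /
   -g usr
   -z 9

can be used as:

   dar -c my_backup -B common.dcf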

Dates

Unix files have three dates :
  • last modification date (mtime)
  • last access date (atime)
  • last inode change (ctime)
In dar, dates are stored as integers (the number of seconds elapsed since January 1st, 1970), as Unix systems do. As seen above, the limitation is not due to dar but to the integer type used, so if you use infinint you should be able to store any date as far in the future as you want. Of course dar cannot store dates before January 1st, 1970, but that should not be a very big problem. ;-)

There is no standard way under Unix to change the ctime. So Dar is not able to restore the ctime date of files.

Symlinks

Unix systems do not provide a way to modify the last modification date (aka mtime) of an existing symlink. Worse, if you try to modify the mtime of an existing symlink, you end up modifying the mtime of the file targeted by that symlink, leaving the mtime of the symlink itself untouched! For that reason, dar does not restore the mtime of symlinks; it does not even try, so as not to alter the mtime of the inodes a symlink may point to.




dar-2.4.8/doc/presentation.html0000644000175000017430000003505712010477201013370 00000000000000 Dar's Documentation - General Presentation
Dar Documentation


PRESENTATION



1- General Presentation

Dar is a command-line tool aimed at backing up and archiving large live filesystems. It is filesystem-independent and cross-platform. But Dar is not a boot loader, nor is it an operating system. It neither creates nor formats partitions, but it can restore a full filesystem into a larger or smaller partition, from one partition to several ones (or the opposite, from several to one), or from one filesystem type to another (ext2/3/4 to ReiserFS for example). It can save and restore hard-linked inodes (hard-linked plain files, sockets, char/block devices or even hard-linked symlinks (!)) as well as Solaris's Door files, and it takes care of POSIX Extended Attributes (Linux, MacOS, ...). It can also detect and restore sparse files, even when the underlying filesystem does not support them: it can restore a normal file with large portions of zeroed bytes as a sparse file on another filesystem that supports them, leading to a gain in space for the restoration but also for the archive size.

From a filesystem, dar creates an archive, which may be split into a set of files (called slices) whose size is user-defined. Dar archives are suitable for storage on floppies, CDs, DVDs, USB keys, hard disks and, since release 2.4.0, tapes too. But no, dar itself cannot burn a DVD. Instead, the user can give dar a command to execute each time a slice is completed. Dar can perform full backups1, incremental backups2, differential backups3 and decremental backups4. It also records files that have been removed since the last backup was made, so restoring a system brings it back to the exact state it had at the time of the differential/incremental/decremental backup (removing files that ought to be removed, adding files that ought to be added and modifying files as expected).
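For example (a sketch only: -s sets the slice size and -E runs a user command once each slice is completed; the echo command stands for whatever burning or copying script you would plug in, and the %p/%b/%n substitution macros are assumed to be those documented in dar's man page):

   dar -c my_backup -R / -g usr -s 700M \
       -E "echo slice %n of archive %b is ready in directory %p"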

Dar can be run on a live filesystem. It can detect when a file has changed while it was being read; in that case it is possible to ask dar to retry saving such a file several times. Files that could not be saved properly (because they changed during the backup and no retry was left) are flagged as "dirty" and are signaled at restoration time.

Dar provides several hooks letting the user perform certain actions when some events occur. A first hook lets the user launch a command when a slice is completed (for burning the slice to DVD for example). A second hook is available for a given set of files or directories under backup, letting the user perform some action before and after saving that file or directory (like suspending a database and resuming it afterward).

Dar's code has been split into two parts since release 2.0.0: a command-line part and a library part called libdar. Dar is not a Graphical User Interface, it is only a command-line tool, but thanks to the libdar library many external tools can manipulate dar archives at a more abstract level, through interfaces other than the shell command-line.

libdar may be linked against libz, libbz2 and liblzo to provide gzip, bzip2 and lzo compression. It can also be linked against libgcrypt to provide strong encryption (blowfish, twofish, aes256, serpent256, camellia256).

Dar can be used over ssh for remote backups. It can also make use of so-called "isolated catalogues", small files containing only the table of contents (or catalogue) of a given archive. These can be used as the base for a differential, incremental or decremental backup, and can also serve as a backup of the table of contents of the original archive should that one get corrupted.
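By way of illustration (a sketch assuming, as described in dar's man page, that -C isolates a catalogue and -A designates the archive of reference):

   dar -C my_catalogue -A my_backup                # isolate the catalogue of my_backup
   dar -c my_diff -R / -g usr -A my_catalogue      # differential backup based on that catalogue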

While dar/libdar provide a lot of features we will not mention here, you can use dar without knowing all of them. In its simplest form, dar can be used with only a few options; here follow some examples of use that should not need additional explanation:

archive creation (backing up all /usr directory) :
   dar -c my_backup -R / -g usr
  
archive restoration (restoring /usr in an alternate directory):
  
dar -x my_backup -R /some/where/else

archive testing:
  
dar -t my_backup

archive comparison with an existing filesystem:
   dar -d my_backup -R /

While a big effort has been put into the documentation, this must not lead you to think that you have to read it all to be able to use dar: most needs are covered by the tutorial or mini-howto, and common questions are answered directly in the FAQ; then, if you like or if you need to, you can also look at the detailed man pages for a particular feature. You may also find some help on the dar-support mailing-list, where a bit more than a hundred subscribed users can help you.

Dar's documentation is big because it also includes everything that may be useful to know how to use libdar, which is intended for developers of external applications relying on this library. For the even more curious there is also documentation about dar's internals, libdar's structure and the archive format, which can ease the understanding of the magic that makes all this work and give a better understanding of the dar/libdar code, which is written in C++. But no, you do not need to read all this just to use dar! ;-)

An abstracted list of features is available if you want to know more about dar/libdar.

2 - Known Projects that rely on dar or libdar


  • AVFS is a virtual file system layer for transparently accessing the content of archives and remote directories just like local files.
  • DarGUI by Malcolm Poole is a front-end to dar providing simple and graphical access to the main features of dar.
  • kdar is a KDE-3 Graphical User Interface to dar made by Johnathan Burchill
  • SaraB: Schedule And Rotate Automatic Backups - by Tristan Rhodes. SaraB works with DAR to schedule and rotate backups. Supports the Towers of Hanoi, Grandfather-Father-Son, or any custom backup rotation strategy.
  • Baras by Aaron D. Marasco is a rewrite in Perl of SaraB.
  • Lazy Backup by Daniel Johnson. Lazy Backup is intended to be so easy even lazy people will do their backups
  • A Dar plugin has been made by Guus Jansman for Midnight commander (mc)
  • HUbackup: Home User backup
  • Python Bindings by Wesley Leggette
  • Disk archive interface for Emacs by Stefan Reichör
  • backup.pl script, creates and verifies a backup using dump/restore or using dar
If a project you like is missing, you are welcome to contact me for it to be referred here (see the AUTHOR document in the source package).

3 - Limitations

Dar has some limitations; they are detailed here.

4 - Staying informed of dar/libdar's events

To stay informed about new releases and security issues, please subscribe to the Dar-news mailing-list. This is a read-only mailing-list with a very low volume of mail (usually fewer than ten messages a year).



1 Full backup: A full backup is a backup of a full filesystem or of a subset of files where, for each file, the archive contains all the inode information (ownership, permissions, dates, etc.), the file's data and possibly the file's Extended Attributes.
2 Differential backup: A differential backup is based on a full backup. It contains only the data and Extended Attributes of files that changed since the full backup was made. It also contains the list of files that have been removed since the full backup was made. For files that did not change, it contains only the inode information. The advantage is that the backup process is much faster and the space required is much lower. The drawback is that you need to restore the full backup first, then the differential backup, to get the last saved state of your system.
3 Incremental backup: An incremental backup is essentially the same thing as a differential backup. Some make a distinction, I do not. The only difference I see is that an incremental backup is not based on a full backup but on a differential backup or on another incremental one.
4 Decremental backup: A decremental backup is a backup method in which the most recent backup is a full backup, while older backups are stored as differences against that full backup. The advantage of this type of backup is that you can easily restore your system to its latest state using only the last backup. And if you want to restore it to a state it had some time before, you restore the last backup (the full one), then the previous archive (a decremental backup) and so on. As you most usually want to restore the system to its last available state, this makes restoration much easier compared to incremental backups. However, it suffers from an important drawback: when the time comes to make another backup, you need to transform the previous full backup into a decremental one, then remove the former full backup and replace it with its decremental version.

dar-2.4.8/THANKS0000644000175000017430000003116212023047015010126 00000000000000version 2.4.8 -------------- Thanks to Mario for help investigating the reason of strange messages reported by libdar whe virtual memory is exhausted. Thanks to "Fredquerty2003" (aka Richard) and Jeremy Faith for bugs report, and to Mario Metzler for bug report and help resolving the reported bug. Last, thanks again to Philippe Naudin this time for improvement suggestion. version 2.4.7 -------------- Thanks to Vladimir Mosgalin (once again) for bug report and proposed fix. Thanks to Gilles Hamel and David Palmer for bug report. version 2.4.6 -------------- Thanks to Per Grahn for bug report and problem analysis. Thanks to Brian May I forgot so many times to underline his constribution by porting dar/libdar to Debian for that many years. version 2.4.5 -------------- Thanks to Piotr Domanski and Paolo Sala for bug report. Thanks to Andreas Wolff for feedback and bug fix. A Big Thank to Neil Darlow who provided a great support in solving a bug in libdar that only expressed on ArchLinux and dynamic binary and infinint mode when used at the same time. version 2.4.4 -------------- Thanks again to Jurgen Nagler-Ihlein for bug report. version 2.4.3 -------------- Thanks to Jurgen Nagler-Ihlein for bug report. Thanks to Stefan J. Betz, Philippe Naudin, Vladimir Mosgalin and Andrea Vai for help in troubleshooting dar's memory usage. version 2.4.2 -------------- Thanks to Philippe Naudin for feedback and tests. Thanks to Jurgen Nagler-Ihlein for reports and compilation fix. Thanks to Hauke Laging for feedback. version 2.4.1 -------------- Thanks again to Peter Ladgren for Swedish translation. Thanks to Giuseppe Borzi for feedback. Thanks to Jurgen Nagler-Ihlein for feedback and help in troubleshooting. version 2.4.0 -------------- Thanks to Wiebe Cazemier for feedback and patch for documentation. Thanks to Dave Vasilevsky for bug fix and feedback. Thanks to Erik Wasser for patch. Thanks to Sony Electronics Inc. Thanks to T. Leske for his proposal of using libgcrypt and patch against 2.3.x versions. Thanks to Vladimir Mosgalin for feedback and testing on Solaris and several other Unix systems. Thanks again to Peter Landgren for Swedish translation. version 2.3.12 -------------- Thanks to Angela Kahealani for feedback. version 2.3.11 -------------- Thanks to Douglas for bug report and feedback. Thanks to Jean-Christophe Arnu and Jonathan (via Sourceforge) for bug reports. version 2.3.10 -------------- Thanks to Patrick G. for feedback and help solving the bug [SF 2922417], and to Jan-Pascal van Best for his patch. Thanks to TONG for feedback and bug report. version 2.3.9 -------------- Thanks to Jan-Pascal van Best for bug report and patch. version 2.3.8 -------------- Thanks to smartyxt for bug report. A big thank you to Szymon Juraszczyk for double bug report and detailed scenario to reproduce them. version 2.3.7 -------------- Thanks to Don and Andy Shellam for feedback. B0mber for bug report. Thanks to timkoder for bug report about documentation. version 2.3.6 -------------- Thanks to Marco Atzeri for feedback. Thanks to lucatrv for feedback. Thanks once again to Jo (aka Jo - ex-bart) for bug feedback. Thanks to Dorphell for bug report. version 2.3.5 -------------- Thanks to Sonni Norlov for optimization patch. Thanks again to Peter Landgren for Swedish translation and bug report. Thanks also to David Fries for patch. 
version 2.3.4 -------------- Thanks once again to Jo (aka Jo - ex-bart) for feedback and clean scenario to reproduce an unexpected behavior. Thanks to Norman J Davis for bug and patch feedback. Johnathan Burchill for feedback. Thanks to Dwayne C. Litzenberger for feedback and patch. version 2.3.3 -------------- Thanks to Mike P. for bug feedback. Thanks to "Jo - ex-bart" for feedback and very useful scenario to reproduce the reported bugs. Thanks to "Marius" for feedback. Thanks to Veysel Ozer for feedback and patch. Thanks to tianmiao for feedback and bug report. Thanks to Oliver Winker for feedback and investigations. Thanks to Andrea Palazzi for patch. version 2.3.2 -------------- Thanks to Thomas Jacob for feedback and patch about bug in Native Language Support and also for feedback and investigations about the --recent bug. Thanks to Dave Vasilevsky for bug report and design considerations. Thanks to Richard Fish for feedback, bug report and patch. Thanks to Stephan for feedback about Guus Jansman's Midnight Commander's plugin for Dar. Thanks also to Guus Jansman for his work. Thanks to Piotr Oh for feedback. version 2.3.1 -------------- Thanks to Adam Fuksa for bug feedback. Thanks to Nick Alcock and to Wiebe Cazemier for feedback and patch. Thanks to Nebojsa for feedback. Thanks to Brian May (@Debian) and Alexei Sheplyakov for feedback. Thanks also to Marko Lavikainen for feedback. version 2.3.0 -------------- Thanks to Wesley Leggette again for his feedback and for his many proposed patchs for new features. Thanks to Dave Vasilevsky for his patch opening support for EA and file forks under MacOS X. Thanks to "ex-bart" for feedback and very interesting feature request. Thanks to Uwe Freese for feedback. Thanks to Aaron D. Marasco for feedback. And as always, thanks to Johnathan Burchill (author of kdar) for appreciated feedback remarks and proposals. Thanks to Michael Roitzsch for bug fix. A special Thanks goes to Michael Roitzsch (again him ;-)) Rodd Zurcher, Helmut Jarausch and markus Kamp (again him ;-)) for helping make this release stable thanks to their feedback and participation to the pre-release phase. version 2.2.7 ------------- Thanks to Markus Kamp for German Translation version 2.2.6 ------------- Thanks to Gerald Britton and Halfgaar (Wiebe Cazemier) for reporting bug problem. version 2.2.5 -------------- Thanks to "Steffe" for feedback and suggestion, to Andrey Yasniy and to Nick Alcock for feedback and patches. version 2.2.4 -------------- Thanks to Martin Ehmsen for feedback and investigations. Thanks to Roi Rodriguez Mendez & Mauro Silvosa Rivera for sharing their backup script. version 2.2.3 -------------- Thanks to Andrew(ziem) for feedback. Thanks to Bob Barry for his script giving a raw estimation of the memory requirement for dar. Thanks again to Wesley Leggette for feedback. Thanks to Jakub Holy for feedback and providing some helper scripts for dar. Thanks also to Brian May (Debian maintainer for dar) for transmitting me feedback and patches. Thanks to Martin Jost for patch and feedback about the API tutorial. Thanks to Jeremy C. Reed and to HansS713 for feedback. version 2.2.2 -------------- Thanks to Ryan Vietri for feedback. Thanks to Ralph Slooten for feedback, and tutorial spelling fix. Thanks to Juergen Menden for feedback and patch and to Peter Landgren for feedback and Swedish translation, thanks to Olive for feedback and patience ;-). Last, a Very Big Thank to Wesley Leggette again for his many feedbacks, patches and constructive remarks. 
version 2.2.1 -------------- Thanks to Steve Evans for feedback and solution about bug #30. A big thank to P-O Yliniemi (alias Peo) for feedback and for providing me the necessary mean to investigate the problem of the "stack overflow" (compiler problem). Thanks to Karl W. Weigel for reporting bug #31 and for his useful feedback. A Big Thanks to Thomas Bettler for feedback about bug #32 which was difficult to reproduce and fix. Thanks to Matthias Keller for reporting bug #33 and to Joe Harvell for reporting bug #34. version 2.2.0 -------------- Thanks to "Gandalf" for his feedback and the "cache" implementation to decrease context switches when dar is run through ssh for differential backup. Thanks to Lars H. Rohwedder for a very interesting discussion feedback and suggestion for the -aSI/-abinary options, thanks also to him for the informations provided and for the implementation suggestions about strong encryption. Thanks to "nedkonz" for his idea of the "on-fly" isolation. Thanks to Holger Hoffstaette for feedback, Thanks to Uwe Freese for feedback about dar_manager, and a big Thanks to Tristan Rhodes who helps me answering support requests. A special thanks to Matthew Caron for his sens of humor and interesting script. Thanks also to Brian Daniels for feedback about a compilation problem under x86_64 and to Todd Vierling for giving feedback and sending patches to make dar possible to compile on Interix system. Thanks to David Gervasoni for having translated to Italian Grzegorz Adam Hankiewicz's mini-howto. Thanks also to Christian Neumann for API design suggestions and once again to Johnathan Burchill (the author of KDAR) for API suggestions, ideas and feedback. version 2.1.5 ------------- Thanks to Peter Landgren and Johnathan Burchill for reporting bug #29. Thanks to Brian May and to Travis (Tilley ?) for feedback. version 2.1.4 ------------- Thanks to Marius Tomaschewski, for reporting investigating and fix proposal for bug #28. version 2.1.3 ------------- Jean-Louis Liagre for feedback and help for porting dar to solaris 9. Thanks to Peter Kupec for feedback and for having found bug #26. Thanks to Wesley Leggette for having found bug #25. version 2.1.2 ------------- Many thanks to Omer Enbar for his patience, perseverance, and the patch he proposed that fixed the bug #24. Thanks again to Arcady Genkin for reporting bug #23 version 2.1.1 ------------- Bill P. for feedback. Thanks to Arcady Genkin for reporting bug #21, #22, #23 version 2.1.0 ------------- Thanks to Shanon Denize for its suggestion about -wa option, to Lars Schmidt-Thieme for his suggestion about -as option. And, thanks to "nedkonz" on the historical forum, for feedback, and Nick Alcock for his "tiny" patch and feedback about GNU Coding Standards. Thanks to Tami King for feedback and help. Thanks to Patrick Drechsler for typo error feedback version 2.0.4 -------------- Thanks to Matthew Valites and "nedkonz" for feedback about large file support. version 2.0.3 -------------- Thanks to "Gandalf" on the historical forum for having reported bug #20 version 2.0.2 -------------- Thanks to Ruslan Hristov for reporting bug #18 (Sourceforge bug 850279), and thanks to Joshua Neal for reporting bug #19 (Sourceforge bug 851320). version 2.0.1 -------------- Thanks to David Baldwin for reporting bug #17 (Sourceforge bug 837735). 
version 2.0.0 -------------- Thanks to Jules for its suggestion about altern memory allocation when a lot of small blocks are asked and are only released at the end of the program, this is the base of the "special allocation". Thanks to Adam Sherman for presenting me Parchive. Thanks to Johnathan Burchill (author of kdar) for his feedback about the libdar API, and help in fixing bugs in pre-release 1, and in release 1.3.0 bug #15. Thanks to Sergei Suyev for feedback. Thanks to Grzegorz Adam Hankiewicz for its mini-howto. Thanks to Peter Kupec for having found and reported bug #16, and for his attention to documentation coherence, and his intensive testing. version 1.3.0 -------------- Thanks to David Rose for having found, identified an proposed a patch for bug #12. Memory leakages in general are difficult to find, thus congratulations ! Thanks to Oliver Brendel for the great help and patience to my many asked tests to solve the bug #13, bug I could not reproduce. Thanks to John Reynolds for feedback. Thanks also to Rgbtxus for the --flat option need expression. Thanks to Sergio Borghese for reporting bug #11. Nicola Larosa for his suggestions about UPX (which will be considered for dar 2.x.x series), Dietrich Rothe for the --nodump feature request, and many others for feedback and encouragements. version 1.2.1 --------------- Thanks to Axel Kohlmeyer for his patch about RPMS, Dietrich Rothe for its patch on compression level, Brian May for his patches to comply with Debian distro, Ulrich Hagen D.A.M Revok, Jahn Wells, Juhana Sadeharju, for their feedback and John Little for compiling the FAQ and man pages on the web. version 1.2.0 --------------- Thanks to Chris Martin, Jerome Zago, Tim Doerzbacher, Sebastien Robart, Kurt Palmer, Bernd Storsberg, Egor Cheshkov, Richard Bratt for feedback and suggestions. I must add a special Thanks to Chris Martin who realized a simple and efficient tool that generates the usage C++ code from files written in XML syntax, making automatic word wrapping and indentation. This power tool named "dar-help" is licensed as part of DAR, but has been totally designed and written by Chris. version 1.1.0 --------------- Thanks to Moritz Franosch for intensive testing, for finding several bugs and proposing patches, to John Little for testing dar and for providing a forum area, to Ulrich Hagen, Francois Botha, Tobias Jahn for feedback, and to Andreas Gruenbacher for its Extended Attributes software, and its useful answers to my questions. Thanks to the many other people that just encouraged me by their remarks and compliments. dar-2.4.8/m4/0000755000175000017520000000000012023053205007606 500000000000000dar-2.4.8/m4/lib-link.m40000644000175000017520000010020212023053123011463 00000000000000# lib-link.m4 serial 21 (gettext-0.18) dnl Copyright (C) 2001-2010 Free Software Foundation, Inc. dnl This file is free software; the Free Software Foundation dnl gives unlimited permission to copy and/or distribute it, dnl with or without modifications, as long as this notice is preserved. dnl From Bruno Haible. AC_PREREQ([2.54]) dnl AC_LIB_LINKFLAGS(name [, dependencies]) searches for libname and dnl the libraries corresponding to explicit and implicit dependencies. dnl Sets and AC_SUBSTs the LIB${NAME} and LTLIB${NAME} variables and dnl augments the CPPFLAGS variable. dnl Sets and AC_SUBSTs the LIB${NAME}_PREFIX variable to nonempty if libname dnl was found in ${LIB${NAME}_PREFIX}/$acl_libdirstem. 
AC_DEFUN([AC_LIB_LINKFLAGS], [ AC_REQUIRE([AC_LIB_PREPARE_PREFIX]) AC_REQUIRE([AC_LIB_RPATH]) pushdef([Name],[translit([$1],[./-], [___])]) pushdef([NAME],[translit([$1],[abcdefghijklmnopqrstuvwxyz./-], [ABCDEFGHIJKLMNOPQRSTUVWXYZ___])]) AC_CACHE_CHECK([how to link with lib[]$1], [ac_cv_lib[]Name[]_libs], [ AC_LIB_LINKFLAGS_BODY([$1], [$2]) ac_cv_lib[]Name[]_libs="$LIB[]NAME" ac_cv_lib[]Name[]_ltlibs="$LTLIB[]NAME" ac_cv_lib[]Name[]_cppflags="$INC[]NAME" ac_cv_lib[]Name[]_prefix="$LIB[]NAME[]_PREFIX" ]) LIB[]NAME="$ac_cv_lib[]Name[]_libs" LTLIB[]NAME="$ac_cv_lib[]Name[]_ltlibs" INC[]NAME="$ac_cv_lib[]Name[]_cppflags" LIB[]NAME[]_PREFIX="$ac_cv_lib[]Name[]_prefix" AC_LIB_APPENDTOVAR([CPPFLAGS], [$INC]NAME) AC_SUBST([LIB]NAME) AC_SUBST([LTLIB]NAME) AC_SUBST([LIB]NAME[_PREFIX]) dnl Also set HAVE_LIB[]NAME so that AC_LIB_HAVE_LINKFLAGS can reuse the dnl results of this search when this library appears as a dependency. HAVE_LIB[]NAME=yes popdef([NAME]) popdef([Name]) ]) dnl AC_LIB_HAVE_LINKFLAGS(name, dependencies, includes, testcode, [missing-message]) dnl searches for libname and the libraries corresponding to explicit and dnl implicit dependencies, together with the specified include files and dnl the ability to compile and link the specified testcode. The missing-message dnl defaults to 'no' and may contain additional hints for the user. dnl If found, it sets and AC_SUBSTs HAVE_LIB${NAME}=yes and the LIB${NAME} dnl and LTLIB${NAME} variables and augments the CPPFLAGS variable, and dnl #defines HAVE_LIB${NAME} to 1. Otherwise, it sets and AC_SUBSTs dnl HAVE_LIB${NAME}=no and LIB${NAME} and LTLIB${NAME} to empty. dnl Sets and AC_SUBSTs the LIB${NAME}_PREFIX variable to nonempty if libname dnl was found in ${LIB${NAME}_PREFIX}/$acl_libdirstem. AC_DEFUN([AC_LIB_HAVE_LINKFLAGS], [ AC_REQUIRE([AC_LIB_PREPARE_PREFIX]) AC_REQUIRE([AC_LIB_RPATH]) pushdef([Name],[translit([$1],[./-], [___])]) pushdef([NAME],[translit([$1],[abcdefghijklmnopqrstuvwxyz./-], [ABCDEFGHIJKLMNOPQRSTUVWXYZ___])]) dnl Search for lib[]Name and define LIB[]NAME, LTLIB[]NAME and INC[]NAME dnl accordingly. AC_LIB_LINKFLAGS_BODY([$1], [$2]) dnl Add $INC[]NAME to CPPFLAGS before performing the following checks, dnl because if the user has installed lib[]Name and not disabled its use dnl via --without-lib[]Name-prefix, he wants to use it. ac_save_CPPFLAGS="$CPPFLAGS" AC_LIB_APPENDTOVAR([CPPFLAGS], [$INC]NAME) AC_CACHE_CHECK([for lib[]$1], [ac_cv_lib[]Name], [ ac_save_LIBS="$LIBS" dnl If $LIB[]NAME contains some -l options, add it to the end of LIBS, dnl because these -l options might require -L options that are present in dnl LIBS. -l options benefit only from the -L options listed before it. dnl Otherwise, add it to the front of LIBS, because it may be a static dnl library that depends on another static library that is present in LIBS. dnl Static libraries benefit only from the static libraries listed after dnl it. case " $LIB[]NAME" in *" -l"*) LIBS="$LIBS $LIB[]NAME" ;; *) LIBS="$LIB[]NAME $LIBS" ;; esac AC_TRY_LINK([$3], [$4], [ac_cv_lib[]Name=yes], [ac_cv_lib[]Name='m4_if([$5], [], [no], [[$5]])']) LIBS="$ac_save_LIBS" ]) if test "$ac_cv_lib[]Name" = yes; then HAVE_LIB[]NAME=yes AC_DEFINE([HAVE_LIB]NAME, 1, [Define if you have the lib][$1 library.]) AC_MSG_CHECKING([how to link with lib[]$1]) AC_MSG_RESULT([$LIB[]NAME]) else HAVE_LIB[]NAME=no dnl If $LIB[]NAME didn't lead to a usable library, we don't need dnl $INC[]NAME either. 
CPPFLAGS="$ac_save_CPPFLAGS" LIB[]NAME= LTLIB[]NAME= LIB[]NAME[]_PREFIX= fi AC_SUBST([HAVE_LIB]NAME) AC_SUBST([LIB]NAME) AC_SUBST([LTLIB]NAME) AC_SUBST([LIB]NAME[_PREFIX]) popdef([NAME]) popdef([Name]) ]) dnl Determine the platform dependent parameters needed to use rpath: dnl acl_libext, dnl acl_shlibext, dnl acl_hardcode_libdir_flag_spec, dnl acl_hardcode_libdir_separator, dnl acl_hardcode_direct, dnl acl_hardcode_minus_L. AC_DEFUN([AC_LIB_RPATH], [ dnl Tell automake >= 1.10 to complain if config.rpath is missing. m4_ifdef([AC_REQUIRE_AUX_FILE], [AC_REQUIRE_AUX_FILE([config.rpath])]) AC_REQUIRE([AC_PROG_CC]) dnl we use $CC, $GCC, $LDFLAGS AC_REQUIRE([AC_LIB_PROG_LD]) dnl we use $LD, $with_gnu_ld AC_REQUIRE([AC_CANONICAL_HOST]) dnl we use $host AC_REQUIRE([AC_CONFIG_AUX_DIR_DEFAULT]) dnl we use $ac_aux_dir AC_CACHE_CHECK([for shared library run path origin], [acl_cv_rpath], [ CC="$CC" GCC="$GCC" LDFLAGS="$LDFLAGS" LD="$LD" with_gnu_ld="$with_gnu_ld" \ ${CONFIG_SHELL-/bin/sh} "$ac_aux_dir/config.rpath" "$host" > conftest.sh . ./conftest.sh rm -f ./conftest.sh acl_cv_rpath=done ]) wl="$acl_cv_wl" acl_libext="$acl_cv_libext" acl_shlibext="$acl_cv_shlibext" acl_libname_spec="$acl_cv_libname_spec" acl_library_names_spec="$acl_cv_library_names_spec" acl_hardcode_libdir_flag_spec="$acl_cv_hardcode_libdir_flag_spec" acl_hardcode_libdir_separator="$acl_cv_hardcode_libdir_separator" acl_hardcode_direct="$acl_cv_hardcode_direct" acl_hardcode_minus_L="$acl_cv_hardcode_minus_L" dnl Determine whether the user wants rpath handling at all. AC_ARG_ENABLE([rpath], [ --disable-rpath do not hardcode runtime library paths], :, enable_rpath=yes) ]) dnl AC_LIB_FROMPACKAGE(name, package) dnl declares that libname comes from the given package. The configure file dnl will then not have a --with-libname-prefix option but a dnl --with-package-prefix option. Several libraries can come from the same dnl package. This declaration must occur before an AC_LIB_LINKFLAGS or similar dnl macro call that searches for libname. AC_DEFUN([AC_LIB_FROMPACKAGE], [ pushdef([NAME],[translit([$1],[abcdefghijklmnopqrstuvwxyz./-], [ABCDEFGHIJKLMNOPQRSTUVWXYZ___])]) define([acl_frompackage_]NAME, [$2]) popdef([NAME]) pushdef([PACK],[$2]) pushdef([PACKUP],[translit(PACK,[abcdefghijklmnopqrstuvwxyz./-], [ABCDEFGHIJKLMNOPQRSTUVWXYZ___])]) define([acl_libsinpackage_]PACKUP, m4_ifdef([acl_libsinpackage_]PACKUP, [acl_libsinpackage_]PACKUP[[, ]],)[lib$1]) popdef([PACKUP]) popdef([PACK]) ]) dnl AC_LIB_LINKFLAGS_BODY(name [, dependencies]) searches for libname and dnl the libraries corresponding to explicit and implicit dependencies. dnl Sets the LIB${NAME}, LTLIB${NAME} and INC${NAME} variables. dnl Also, sets the LIB${NAME}_PREFIX variable to nonempty if libname was found dnl in ${LIB${NAME}_PREFIX}/$acl_libdirstem. AC_DEFUN([AC_LIB_LINKFLAGS_BODY], [ AC_REQUIRE([AC_LIB_PREPARE_MULTILIB]) pushdef([NAME],[translit([$1],[abcdefghijklmnopqrstuvwxyz./-], [ABCDEFGHIJKLMNOPQRSTUVWXYZ___])]) pushdef([PACK],[m4_ifdef([acl_frompackage_]NAME, [acl_frompackage_]NAME, lib[$1])]) pushdef([PACKUP],[translit(PACK,[abcdefghijklmnopqrstuvwxyz./-], [ABCDEFGHIJKLMNOPQRSTUVWXYZ___])]) pushdef([PACKLIBS],[m4_ifdef([acl_frompackage_]NAME, [acl_libsinpackage_]PACKUP, lib[$1])]) dnl Autoconf >= 2.61 supports dots in --with options. pushdef([P_A_C_K],[m4_if(m4_version_compare(m4_defn([m4_PACKAGE_VERSION]),[2.61]),[-1],[translit(PACK,[.],[_])],PACK)]) dnl By default, look in $includedir and $libdir. 
use_additional=yes AC_LIB_WITH_FINAL_PREFIX([ eval additional_includedir=\"$includedir\" eval additional_libdir=\"$libdir\" ]) AC_ARG_WITH(P_A_C_K[-prefix], [[ --with-]]P_A_C_K[[-prefix[=DIR] search for ]PACKLIBS[ in DIR/include and DIR/lib --without-]]P_A_C_K[[-prefix don't search for ]PACKLIBS[ in includedir and libdir]], [ if test "X$withval" = "Xno"; then use_additional=no else if test "X$withval" = "X"; then AC_LIB_WITH_FINAL_PREFIX([ eval additional_includedir=\"$includedir\" eval additional_libdir=\"$libdir\" ]) else additional_includedir="$withval/include" additional_libdir="$withval/$acl_libdirstem" if test "$acl_libdirstem2" != "$acl_libdirstem" \ && ! test -d "$withval/$acl_libdirstem"; then additional_libdir="$withval/$acl_libdirstem2" fi fi fi ]) dnl Search the library and its dependencies in $additional_libdir and dnl $LDFLAGS. Using breadth-first-seach. LIB[]NAME= LTLIB[]NAME= INC[]NAME= LIB[]NAME[]_PREFIX= dnl HAVE_LIB${NAME} is an indicator that LIB${NAME}, LTLIB${NAME} have been dnl computed. So it has to be reset here. HAVE_LIB[]NAME= rpathdirs= ltrpathdirs= names_already_handled= names_next_round='$1 $2' while test -n "$names_next_round"; do names_this_round="$names_next_round" names_next_round= for name in $names_this_round; do already_handled= for n in $names_already_handled; do if test "$n" = "$name"; then already_handled=yes break fi done if test -z "$already_handled"; then names_already_handled="$names_already_handled $name" dnl See if it was already located by an earlier AC_LIB_LINKFLAGS dnl or AC_LIB_HAVE_LINKFLAGS call. uppername=`echo "$name" | sed -e 'y|abcdefghijklmnopqrstuvwxyz./-|ABCDEFGHIJKLMNOPQRSTUVWXYZ___|'` eval value=\"\$HAVE_LIB$uppername\" if test -n "$value"; then if test "$value" = yes; then eval value=\"\$LIB$uppername\" test -z "$value" || LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$value" eval value=\"\$LTLIB$uppername\" test -z "$value" || LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }$value" else dnl An earlier call to AC_LIB_HAVE_LINKFLAGS has determined dnl that this library doesn't exist. So just drop it. : fi else dnl Search the library lib$name in $additional_libdir and $LDFLAGS dnl and the already constructed $LIBNAME/$LTLIBNAME. found_dir= found_la= found_so= found_a= eval libname=\"$acl_libname_spec\" # typically: libname=lib$name if test -n "$acl_shlibext"; then shrext=".$acl_shlibext" # typically: shrext=.so else shrext= fi if test $use_additional = yes; then dir="$additional_libdir" dnl The same code as in the loop below: dnl First look for a shared library. if test -n "$acl_shlibext"; then if test -f "$dir/$libname$shrext"; then found_dir="$dir" found_so="$dir/$libname$shrext" else if test "$acl_library_names_spec" = '$libname$shrext$versuffix'; then ver=`(cd "$dir" && \ for f in "$libname$shrext".*; do echo "$f"; done \ | sed -e "s,^$libname$shrext\\\\.,," \ | sort -t '.' -n -r -k1,1 -k2,2 -k3,3 -k4,4 -k5,5 \ | sed 1q ) 2>/dev/null` if test -n "$ver" && test -f "$dir/$libname$shrext.$ver"; then found_dir="$dir" found_so="$dir/$libname$shrext.$ver" fi else eval library_names=\"$acl_library_names_spec\" for f in $library_names; do if test -f "$dir/$f"; then found_dir="$dir" found_so="$dir/$f" break fi done fi fi fi dnl Then look for a static library. 
if test "X$found_dir" = "X"; then if test -f "$dir/$libname.$acl_libext"; then found_dir="$dir" found_a="$dir/$libname.$acl_libext" fi fi if test "X$found_dir" != "X"; then if test -f "$dir/$libname.la"; then found_la="$dir/$libname.la" fi fi fi if test "X$found_dir" = "X"; then for x in $LDFLAGS $LTLIB[]NAME; do AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) case "$x" in -L*) dir=`echo "X$x" | sed -e 's/^X-L//'` dnl First look for a shared library. if test -n "$acl_shlibext"; then if test -f "$dir/$libname$shrext"; then found_dir="$dir" found_so="$dir/$libname$shrext" else if test "$acl_library_names_spec" = '$libname$shrext$versuffix'; then ver=`(cd "$dir" && \ for f in "$libname$shrext".*; do echo "$f"; done \ | sed -e "s,^$libname$shrext\\\\.,," \ | sort -t '.' -n -r -k1,1 -k2,2 -k3,3 -k4,4 -k5,5 \ | sed 1q ) 2>/dev/null` if test -n "$ver" && test -f "$dir/$libname$shrext.$ver"; then found_dir="$dir" found_so="$dir/$libname$shrext.$ver" fi else eval library_names=\"$acl_library_names_spec\" for f in $library_names; do if test -f "$dir/$f"; then found_dir="$dir" found_so="$dir/$f" break fi done fi fi fi dnl Then look for a static library. if test "X$found_dir" = "X"; then if test -f "$dir/$libname.$acl_libext"; then found_dir="$dir" found_a="$dir/$libname.$acl_libext" fi fi if test "X$found_dir" != "X"; then if test -f "$dir/$libname.la"; then found_la="$dir/$libname.la" fi fi ;; esac if test "X$found_dir" != "X"; then break fi done fi if test "X$found_dir" != "X"; then dnl Found the library. LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }-L$found_dir -l$name" if test "X$found_so" != "X"; then dnl Linking with a shared library. We attempt to hardcode its dnl directory into the executable's runpath, unless it's the dnl standard /usr/lib. if test "$enable_rpath" = no \ || test "X$found_dir" = "X/usr/$acl_libdirstem" \ || test "X$found_dir" = "X/usr/$acl_libdirstem2"; then dnl No hardcoding is needed. LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$found_so" else dnl Use an explicit option to hardcode DIR into the resulting dnl binary. dnl Potentially add DIR to ltrpathdirs. dnl The ltrpathdirs will be appended to $LTLIBNAME at the end. haveit= for x in $ltrpathdirs; do if test "X$x" = "X$found_dir"; then haveit=yes break fi done if test -z "$haveit"; then ltrpathdirs="$ltrpathdirs $found_dir" fi dnl The hardcoding into $LIBNAME is system dependent. if test "$acl_hardcode_direct" = yes; then dnl Using DIR/libNAME.so during linking hardcodes DIR into the dnl resulting binary. LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$found_so" else if test -n "$acl_hardcode_libdir_flag_spec" && test "$acl_hardcode_minus_L" = no; then dnl Use an explicit option to hardcode DIR into the resulting dnl binary. LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$found_so" dnl Potentially add DIR to rpathdirs. dnl The rpathdirs will be appended to $LIBNAME at the end. haveit= for x in $rpathdirs; do if test "X$x" = "X$found_dir"; then haveit=yes break fi done if test -z "$haveit"; then rpathdirs="$rpathdirs $found_dir" fi else dnl Rely on "-L$found_dir". dnl But don't add it if it's already contained in the LDFLAGS dnl or the already constructed $LIBNAME haveit= for x in $LDFLAGS $LIB[]NAME; do AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) if test "X$x" = "X-L$found_dir"; then haveit=yes break fi done if test -z "$haveit"; then LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }-L$found_dir" fi if test "$acl_hardcode_minus_L" != no; then dnl FIXME: Not sure whether we should use dnl "-L$found_dir -l$name" or "-L$found_dir $found_so" dnl here. 
LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$found_so" else dnl We cannot use $acl_hardcode_runpath_var and LD_RUN_PATH dnl here, because this doesn't fit in flags passed to the dnl compiler. So give up. No hardcoding. This affects only dnl very old systems. dnl FIXME: Not sure whether we should use dnl "-L$found_dir -l$name" or "-L$found_dir $found_so" dnl here. LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }-l$name" fi fi fi fi else if test "X$found_a" != "X"; then dnl Linking with a static library. LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$found_a" else dnl We shouldn't come here, but anyway it's good to have a dnl fallback. LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }-L$found_dir -l$name" fi fi dnl Assume the include files are nearby. additional_includedir= case "$found_dir" in */$acl_libdirstem | */$acl_libdirstem/) basedir=`echo "X$found_dir" | sed -e 's,^X,,' -e "s,/$acl_libdirstem/"'*$,,'` if test "$name" = '$1'; then LIB[]NAME[]_PREFIX="$basedir" fi additional_includedir="$basedir/include" ;; */$acl_libdirstem2 | */$acl_libdirstem2/) basedir=`echo "X$found_dir" | sed -e 's,^X,,' -e "s,/$acl_libdirstem2/"'*$,,'` if test "$name" = '$1'; then LIB[]NAME[]_PREFIX="$basedir" fi additional_includedir="$basedir/include" ;; esac if test "X$additional_includedir" != "X"; then dnl Potentially add $additional_includedir to $INCNAME. dnl But don't add it dnl 1. if it's the standard /usr/include, dnl 2. if it's /usr/local/include and we are using GCC on Linux, dnl 3. if it's already present in $CPPFLAGS or the already dnl constructed $INCNAME, dnl 4. if it doesn't exist as a directory. if test "X$additional_includedir" != "X/usr/include"; then haveit= if test "X$additional_includedir" = "X/usr/local/include"; then if test -n "$GCC"; then case $host_os in linux* | gnu* | k*bsd*-gnu) haveit=yes;; esac fi fi if test -z "$haveit"; then for x in $CPPFLAGS $INC[]NAME; do AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) if test "X$x" = "X-I$additional_includedir"; then haveit=yes break fi done if test -z "$haveit"; then if test -d "$additional_includedir"; then dnl Really add $additional_includedir to $INCNAME. INC[]NAME="${INC[]NAME}${INC[]NAME:+ }-I$additional_includedir" fi fi fi fi fi dnl Look for dependencies. if test -n "$found_la"; then dnl Read the .la file. It defines the variables dnl dlname, library_names, old_library, dependency_libs, current, dnl age, revision, installed, dlopen, dlpreopen, libdir. save_libdir="$libdir" case "$found_la" in */* | *\\*) . "$found_la" ;; *) . "./$found_la" ;; esac libdir="$save_libdir" dnl We use only dependency_libs. for dep in $dependency_libs; do case "$dep" in -L*) additional_libdir=`echo "X$dep" | sed -e 's/^X-L//'` dnl Potentially add $additional_libdir to $LIBNAME and $LTLIBNAME. dnl But don't add it dnl 1. if it's the standard /usr/lib, dnl 2. if it's /usr/local/lib and we are using GCC on Linux, dnl 3. if it's already present in $LDFLAGS or the already dnl constructed $LIBNAME, dnl 4. if it doesn't exist as a directory. 
if test "X$additional_libdir" != "X/usr/$acl_libdirstem" \ && test "X$additional_libdir" != "X/usr/$acl_libdirstem2"; then haveit= if test "X$additional_libdir" = "X/usr/local/$acl_libdirstem" \ || test "X$additional_libdir" = "X/usr/local/$acl_libdirstem2"; then if test -n "$GCC"; then case $host_os in linux* | gnu* | k*bsd*-gnu) haveit=yes;; esac fi fi if test -z "$haveit"; then haveit= for x in $LDFLAGS $LIB[]NAME; do AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) if test "X$x" = "X-L$additional_libdir"; then haveit=yes break fi done if test -z "$haveit"; then if test -d "$additional_libdir"; then dnl Really add $additional_libdir to $LIBNAME. LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }-L$additional_libdir" fi fi haveit= for x in $LDFLAGS $LTLIB[]NAME; do AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) if test "X$x" = "X-L$additional_libdir"; then haveit=yes break fi done if test -z "$haveit"; then if test -d "$additional_libdir"; then dnl Really add $additional_libdir to $LTLIBNAME. LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }-L$additional_libdir" fi fi fi fi ;; -R*) dir=`echo "X$dep" | sed -e 's/^X-R//'` if test "$enable_rpath" != no; then dnl Potentially add DIR to rpathdirs. dnl The rpathdirs will be appended to $LIBNAME at the end. haveit= for x in $rpathdirs; do if test "X$x" = "X$dir"; then haveit=yes break fi done if test -z "$haveit"; then rpathdirs="$rpathdirs $dir" fi dnl Potentially add DIR to ltrpathdirs. dnl The ltrpathdirs will be appended to $LTLIBNAME at the end. haveit= for x in $ltrpathdirs; do if test "X$x" = "X$dir"; then haveit=yes break fi done if test -z "$haveit"; then ltrpathdirs="$ltrpathdirs $dir" fi fi ;; -l*) dnl Handle this in the next round. names_next_round="$names_next_round "`echo "X$dep" | sed -e 's/^X-l//'` ;; *.la) dnl Handle this in the next round. Throw away the .la's dnl directory; it is already contained in a preceding -L dnl option. names_next_round="$names_next_round "`echo "X$dep" | sed -e 's,^X.*/,,' -e 's,^lib,,' -e 's,\.la$,,'` ;; *) dnl Most likely an immediate library name. LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$dep" LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }$dep" ;; esac done fi else dnl Didn't find the library; assume it is in the system directories dnl known to the linker and runtime loader. (All the system dnl directories known to the linker should also be known to the dnl runtime loader, otherwise the system is severely misconfigured.) LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }-l$name" LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }-l$name" fi fi fi done done if test "X$rpathdirs" != "X"; then if test -n "$acl_hardcode_libdir_separator"; then dnl Weird platform: only the last -rpath option counts, the user must dnl pass all path elements in one option. We can arrange that for a dnl single library, but not when more than one $LIBNAMEs are used. alldirs= for found_dir in $rpathdirs; do alldirs="${alldirs}${alldirs:+$acl_hardcode_libdir_separator}$found_dir" done dnl Note: acl_hardcode_libdir_flag_spec uses $libdir and $wl. acl_save_libdir="$libdir" libdir="$alldirs" eval flag=\"$acl_hardcode_libdir_flag_spec\" libdir="$acl_save_libdir" LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$flag" else dnl The -rpath options are cumulative. for found_dir in $rpathdirs; do acl_save_libdir="$libdir" libdir="$found_dir" eval flag=\"$acl_hardcode_libdir_flag_spec\" libdir="$acl_save_libdir" LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$flag" done fi fi if test "X$ltrpathdirs" != "X"; then dnl When using libtool, the option that works for both libraries and dnl executables is -R. 
The -R options are cumulative. for found_dir in $ltrpathdirs; do LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }-R$found_dir" done fi popdef([P_A_C_K]) popdef([PACKLIBS]) popdef([PACKUP]) popdef([PACK]) popdef([NAME]) ]) dnl AC_LIB_APPENDTOVAR(VAR, CONTENTS) appends the elements of CONTENTS to VAR, dnl unless already present in VAR. dnl Works only for CPPFLAGS, not for LIB* variables because that sometimes dnl contains two or three consecutive elements that belong together. AC_DEFUN([AC_LIB_APPENDTOVAR], [ for element in [$2]; do haveit= for x in $[$1]; do AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) if test "X$x" = "X$element"; then haveit=yes break fi done if test -z "$haveit"; then [$1]="${[$1]}${[$1]:+ }$element" fi done ]) dnl For those cases where a variable contains several -L and -l options dnl referring to unknown libraries and directories, this macro determines the dnl necessary additional linker options for the runtime path. dnl AC_LIB_LINKFLAGS_FROM_LIBS([LDADDVAR], [LIBSVALUE], [USE-LIBTOOL]) dnl sets LDADDVAR to linker options needed together with LIBSVALUE. dnl If USE-LIBTOOL evaluates to non-empty, linking with libtool is assumed, dnl otherwise linking without libtool is assumed. AC_DEFUN([AC_LIB_LINKFLAGS_FROM_LIBS], [ AC_REQUIRE([AC_LIB_RPATH]) AC_REQUIRE([AC_LIB_PREPARE_MULTILIB]) $1= if test "$enable_rpath" != no; then if test -n "$acl_hardcode_libdir_flag_spec" && test "$acl_hardcode_minus_L" = no; then dnl Use an explicit option to hardcode directories into the resulting dnl binary. rpathdirs= next= for opt in $2; do if test -n "$next"; then dir="$next" dnl No need to hardcode the standard /usr/lib. if test "X$dir" != "X/usr/$acl_libdirstem" \ && test "X$dir" != "X/usr/$acl_libdirstem2"; then rpathdirs="$rpathdirs $dir" fi next= else case $opt in -L) next=yes ;; -L*) dir=`echo "X$opt" | sed -e 's,^X-L,,'` dnl No need to hardcode the standard /usr/lib. if test "X$dir" != "X/usr/$acl_libdirstem" \ && test "X$dir" != "X/usr/$acl_libdirstem2"; then rpathdirs="$rpathdirs $dir" fi next= ;; *) next= ;; esac fi done if test "X$rpathdirs" != "X"; then if test -n ""$3""; then dnl libtool is used for linking. Use -R options. for dir in $rpathdirs; do $1="${$1}${$1:+ }-R$dir" done else dnl The linker is used for linking directly. if test -n "$acl_hardcode_libdir_separator"; then dnl Weird platform: only the last -rpath option counts, the user dnl must pass all path elements in one option. alldirs= for dir in $rpathdirs; do alldirs="${alldirs}${alldirs:+$acl_hardcode_libdir_separator}$dir" done acl_save_libdir="$libdir" libdir="$alldirs" eval flag=\"$acl_hardcode_libdir_flag_spec\" libdir="$acl_save_libdir" $1="$flag" else dnl The -rpath options are cumulative. for dir in $rpathdirs; do acl_save_libdir="$libdir" libdir="$dir" eval flag=\"$acl_hardcode_libdir_flag_spec\" libdir="$acl_save_libdir" $1="${$1}${$1:+ }$flag" done fi fi fi fi fi AC_SUBST([$1]) ]) dar-2.4.8/m4/iconv.m40000644000175000017520000001653712023053123011121 00000000000000# iconv.m4 serial 11 (gettext-0.18.1) dnl Copyright (C) 2000-2002, 2007-2010 Free Software Foundation, Inc. dnl This file is free software; the Free Software Foundation dnl gives unlimited permission to copy and/or distribute it, dnl with or without modifications, as long as this notice is preserved. dnl From Bruno Haible. AC_DEFUN([AM_ICONV_LINKFLAGS_BODY], [ dnl Prerequisites of AC_LIB_LINKFLAGS_BODY. AC_REQUIRE([AC_LIB_PREPARE_PREFIX]) AC_REQUIRE([AC_LIB_RPATH]) dnl Search for libiconv and define LIBICONV, LTLIBICONV and INCICONV dnl accordingly. 
AC_LIB_LINKFLAGS_BODY([iconv]) ]) AC_DEFUN([AM_ICONV_LINK], [ dnl Some systems have iconv in libc, some have it in libiconv (OSF/1 and dnl those with the standalone portable GNU libiconv installed). AC_REQUIRE([AC_CANONICAL_HOST]) dnl for cross-compiles dnl Search for libiconv and define LIBICONV, LTLIBICONV and INCICONV dnl accordingly. AC_REQUIRE([AM_ICONV_LINKFLAGS_BODY]) dnl Add $INCICONV to CPPFLAGS before performing the following checks, dnl because if the user has installed libiconv and not disabled its use dnl via --without-libiconv-prefix, he wants to use it. The first dnl AC_TRY_LINK will then fail, the second AC_TRY_LINK will succeed. am_save_CPPFLAGS="$CPPFLAGS" AC_LIB_APPENDTOVAR([CPPFLAGS], [$INCICONV]) AC_CACHE_CHECK([for iconv], [am_cv_func_iconv], [ am_cv_func_iconv="no, consider installing GNU libiconv" am_cv_lib_iconv=no AC_TRY_LINK([#include #include ], [iconv_t cd = iconv_open("",""); iconv(cd,NULL,NULL,NULL,NULL); iconv_close(cd);], [am_cv_func_iconv=yes]) if test "$am_cv_func_iconv" != yes; then am_save_LIBS="$LIBS" LIBS="$LIBS $LIBICONV" AC_TRY_LINK([#include #include ], [iconv_t cd = iconv_open("",""); iconv(cd,NULL,NULL,NULL,NULL); iconv_close(cd);], [am_cv_lib_iconv=yes] [am_cv_func_iconv=yes]) LIBS="$am_save_LIBS" fi ]) if test "$am_cv_func_iconv" = yes; then AC_CACHE_CHECK([for working iconv], [am_cv_func_iconv_works], [ dnl This tests against bugs in AIX 5.1, HP-UX 11.11, Solaris 10. am_save_LIBS="$LIBS" if test $am_cv_lib_iconv = yes; then LIBS="$LIBS $LIBICONV" fi AC_TRY_RUN([ #include #include int main () { /* Test against AIX 5.1 bug: Failures are not distinguishable from successful returns. */ { iconv_t cd_utf8_to_88591 = iconv_open ("ISO8859-1", "UTF-8"); if (cd_utf8_to_88591 != (iconv_t)(-1)) { static const char input[] = "\342\202\254"; /* EURO SIGN */ char buf[10]; const char *inptr = input; size_t inbytesleft = strlen (input); char *outptr = buf; size_t outbytesleft = sizeof (buf); size_t res = iconv (cd_utf8_to_88591, (char **) &inptr, &inbytesleft, &outptr, &outbytesleft); if (res == 0) return 1; } } /* Test against Solaris 10 bug: Failures are not distinguishable from successful returns. */ { iconv_t cd_ascii_to_88591 = iconv_open ("ISO8859-1", "646"); if (cd_ascii_to_88591 != (iconv_t)(-1)) { static const char input[] = "\263"; char buf[10]; const char *inptr = input; size_t inbytesleft = strlen (input); char *outptr = buf; size_t outbytesleft = sizeof (buf); size_t res = iconv (cd_ascii_to_88591, (char **) &inptr, &inbytesleft, &outptr, &outbytesleft); if (res == 0) return 1; } } #if 0 /* This bug could be worked around by the caller. */ /* Test against HP-UX 11.11 bug: Positive return value instead of 0. */ { iconv_t cd_88591_to_utf8 = iconv_open ("utf8", "iso88591"); if (cd_88591_to_utf8 != (iconv_t)(-1)) { static const char input[] = "\304rger mit b\366sen B\374bchen ohne Augenma\337"; char buf[50]; const char *inptr = input; size_t inbytesleft = strlen (input); char *outptr = buf; size_t outbytesleft = sizeof (buf); size_t res = iconv (cd_88591_to_utf8, (char **) &inptr, &inbytesleft, &outptr, &outbytesleft); if ((int)res > 0) return 1; } } #endif /* Test against HP-UX 11.11 bug: No converter from EUC-JP to UTF-8 is provided. */ if (/* Try standardized names. */ iconv_open ("UTF-8", "EUC-JP") == (iconv_t)(-1) /* Try IRIX, OSF/1 names. */ && iconv_open ("UTF-8", "eucJP") == (iconv_t)(-1) /* Try AIX names. */ && iconv_open ("UTF-8", "IBM-eucJP") == (iconv_t)(-1) /* Try HP-UX names. 
*/ && iconv_open ("utf8", "eucJP") == (iconv_t)(-1)) return 1; return 0; }], [am_cv_func_iconv_works=yes], [am_cv_func_iconv_works=no], [case "$host_os" in aix* | hpux*) am_cv_func_iconv_works="guessing no" ;; *) am_cv_func_iconv_works="guessing yes" ;; esac]) LIBS="$am_save_LIBS" ]) case "$am_cv_func_iconv_works" in *no) am_func_iconv=no am_cv_lib_iconv=no ;; *) am_func_iconv=yes ;; esac else am_func_iconv=no am_cv_lib_iconv=no fi if test "$am_func_iconv" = yes; then AC_DEFINE([HAVE_ICONV], [1], [Define if you have the iconv() function and it works.]) fi if test "$am_cv_lib_iconv" = yes; then AC_MSG_CHECKING([how to link with libiconv]) AC_MSG_RESULT([$LIBICONV]) else dnl If $LIBICONV didn't lead to a usable library, we don't need $INCICONV dnl either. CPPFLAGS="$am_save_CPPFLAGS" LIBICONV= LTLIBICONV= fi AC_SUBST([LIBICONV]) AC_SUBST([LTLIBICONV]) ]) dnl Define AM_ICONV using AC_DEFUN_ONCE for Autoconf >= 2.64, in order to dnl avoid warnings like dnl "warning: AC_REQUIRE: `AM_ICONV' was expanded before it was required". dnl This is tricky because of the way 'aclocal' is implemented: dnl - It requires defining an auxiliary macro whose name ends in AC_DEFUN. dnl Otherwise aclocal's initial scan pass would miss the macro definition. dnl - It requires a line break inside the AC_DEFUN_ONCE and AC_DEFUN expansions. dnl Otherwise aclocal would emit many "Use of uninitialized value $1" dnl warnings. m4_define([gl_iconv_AC_DEFUN], m4_version_prereq([2.64], [[AC_DEFUN_ONCE( [$1], [$2])]], [[AC_DEFUN( [$1], [$2])]])) gl_iconv_AC_DEFUN([AM_ICONV], [ AM_ICONV_LINK if test "$am_cv_func_iconv" = yes; then AC_MSG_CHECKING([for iconv declaration]) AC_CACHE_VAL([am_cv_proto_iconv], [ AC_TRY_COMPILE([ #include #include extern #ifdef __cplusplus "C" #endif #if defined(__STDC__) || defined(__cplusplus) size_t iconv (iconv_t cd, char * *inbuf, size_t *inbytesleft, char * *outbuf, size_t *outbytesleft); #else size_t iconv(); #endif ], [], [am_cv_proto_iconv_arg1=""], [am_cv_proto_iconv_arg1="const"]) am_cv_proto_iconv="extern size_t iconv (iconv_t cd, $am_cv_proto_iconv_arg1 char * *inbuf, size_t *inbytesleft, char * *outbuf, size_t *outbytesleft);"]) am_cv_proto_iconv=`echo "[$]am_cv_proto_iconv" | tr -s ' ' | sed -e 's/( /(/'` AC_MSG_RESULT([ $am_cv_proto_iconv]) AC_DEFINE_UNQUOTED([ICONV_CONST], [$am_cv_proto_iconv_arg1], [Define as const if the declaration of iconv() needs const.]) fi ]) dar-2.4.8/m4/gettext.m40000644000175000017520000003513212023053123011457 00000000000000# gettext.m4 serial 63 (gettext-0.18) dnl Copyright (C) 1995-2010 Free Software Foundation, Inc. dnl This file is free software; the Free Software Foundation dnl gives unlimited permission to copy and/or distribute it, dnl with or without modifications, as long as this notice is preserved. dnl dnl This file can can be used in projects which are not available under dnl the GNU General Public License or the GNU Library General Public dnl License but which still want to provide support for the GNU gettext dnl functionality. dnl Please note that the actual code of the GNU gettext library is covered dnl by the GNU Library General Public License, and the rest of the GNU dnl gettext package package is covered by the GNU General Public License. dnl They are *not* in the public domain. dnl Authors: dnl Ulrich Drepper , 1995-2000. dnl Bruno Haible , 2000-2006, 2008-2010. dnl Macro to add for using GNU gettext. dnl Usage: AM_GNU_GETTEXT([INTLSYMBOL], [NEEDSYMBOL], [INTLDIR]). dnl INTLSYMBOL can be one of 'external', 'no-libtool', 'use-libtool'. 
The dnl default (if it is not specified or empty) is 'no-libtool'. dnl INTLSYMBOL should be 'external' for packages with no intl directory, dnl and 'no-libtool' or 'use-libtool' for packages with an intl directory. dnl If INTLSYMBOL is 'use-libtool', then a libtool library dnl $(top_builddir)/intl/libintl.la will be created (shared and/or static, dnl depending on --{enable,disable}-{shared,static} and on the presence of dnl AM-DISABLE-SHARED). If INTLSYMBOL is 'no-libtool', a static library dnl $(top_builddir)/intl/libintl.a will be created. dnl If NEEDSYMBOL is specified and is 'need-ngettext', then GNU gettext dnl implementations (in libc or libintl) without the ngettext() function dnl will be ignored. If NEEDSYMBOL is specified and is dnl 'need-formatstring-macros', then GNU gettext implementations that don't dnl support the ISO C 99 formatstring macros will be ignored. dnl INTLDIR is used to find the intl libraries. If empty, dnl the value `$(top_builddir)/intl/' is used. dnl dnl The result of the configuration is one of three cases: dnl 1) GNU gettext, as included in the intl subdirectory, will be compiled dnl and used. dnl Catalog format: GNU --> install in $(datadir) dnl Catalog extension: .mo after installation, .gmo in source tree dnl 2) GNU gettext has been found in the system's C library. dnl Catalog format: GNU --> install in $(datadir) dnl Catalog extension: .mo after installation, .gmo in source tree dnl 3) No internationalization, always use English msgid. dnl Catalog format: none dnl Catalog extension: none dnl If INTLSYMBOL is 'external', only cases 2 and 3 can occur. dnl The use of .gmo is historical (it was needed to avoid overwriting the dnl GNU format catalogs when building on a platform with an X/Open gettext), dnl but we keep it in order not to force irrelevant filename changes on the dnl maintainers. dnl AC_DEFUN([AM_GNU_GETTEXT], [ dnl Argument checking. ifelse([$1], [], , [ifelse([$1], [external], , [ifelse([$1], [no-libtool], , [ifelse([$1], [use-libtool], , [errprint([ERROR: invalid first argument to AM_GNU_GETTEXT ])])])])]) ifelse(ifelse([$1], [], [old])[]ifelse([$1], [no-libtool], [old]), [old], [AC_DIAGNOSE([obsolete], [Use of AM_GNU_GETTEXT without [external] argument is deprecated.])]) ifelse([$2], [], , [ifelse([$2], [need-ngettext], , [ifelse([$2], [need-formatstring-macros], , [errprint([ERROR: invalid second argument to AM_GNU_GETTEXT ])])])]) define([gt_included_intl], ifelse([$1], [external], ifdef([AM_GNU_GETTEXT_][INTL_SUBDIR], [yes], [no]), [yes])) define([gt_libtool_suffix_prefix], ifelse([$1], [use-libtool], [l], [])) gt_NEEDS_INIT AM_GNU_GETTEXT_NEED([$2]) AC_REQUIRE([AM_PO_SUBDIRS])dnl ifelse(gt_included_intl, yes, [ AC_REQUIRE([AM_INTL_SUBDIR])dnl ]) dnl Prerequisites of AC_LIB_LINKFLAGS_BODY. AC_REQUIRE([AC_LIB_PREPARE_PREFIX]) AC_REQUIRE([AC_LIB_RPATH]) dnl Sometimes libintl requires libiconv, so first search for libiconv. dnl Ideally we would do this search only after the dnl if test "$USE_NLS" = "yes"; then dnl if { eval "gt_val=\$$gt_func_gnugettext_libc"; test "$gt_val" != "yes"; }; then dnl tests. But if configure.in invokes AM_ICONV after AM_GNU_GETTEXT dnl the configure script would need to contain the same shell code dnl again, outside any 'if'. There are two solutions: dnl - Invoke AM_ICONV_LINKFLAGS_BODY here, outside any 'if'. dnl - Control the expansions in more detail using AC_PROVIDE_IFELSE. dnl Since AC_PROVIDE_IFELSE is only in autoconf >= 2.52 and not dnl documented, we avoid it. 
ifelse(gt_included_intl, yes, , [ AC_REQUIRE([AM_ICONV_LINKFLAGS_BODY]) ]) dnl Sometimes, on MacOS X, libintl requires linking with CoreFoundation. gt_INTL_MACOSX dnl Set USE_NLS. AC_REQUIRE([AM_NLS]) ifelse(gt_included_intl, yes, [ BUILD_INCLUDED_LIBINTL=no USE_INCLUDED_LIBINTL=no ]) LIBINTL= LTLIBINTL= POSUB= dnl Add a version number to the cache macros. case " $gt_needs " in *" need-formatstring-macros "*) gt_api_version=3 ;; *" need-ngettext "*) gt_api_version=2 ;; *) gt_api_version=1 ;; esac gt_func_gnugettext_libc="gt_cv_func_gnugettext${gt_api_version}_libc" gt_func_gnugettext_libintl="gt_cv_func_gnugettext${gt_api_version}_libintl" dnl If we use NLS figure out what method if test "$USE_NLS" = "yes"; then gt_use_preinstalled_gnugettext=no ifelse(gt_included_intl, yes, [ AC_MSG_CHECKING([whether included gettext is requested]) AC_ARG_WITH([included-gettext], [ --with-included-gettext use the GNU gettext library included here], nls_cv_force_use_gnu_gettext=$withval, nls_cv_force_use_gnu_gettext=no) AC_MSG_RESULT([$nls_cv_force_use_gnu_gettext]) nls_cv_use_gnu_gettext="$nls_cv_force_use_gnu_gettext" if test "$nls_cv_force_use_gnu_gettext" != "yes"; then ]) dnl User does not insist on using GNU NLS library. Figure out what dnl to use. If GNU gettext is available we use this. Else we have dnl to fall back to GNU NLS library. if test $gt_api_version -ge 3; then gt_revision_test_code=' #ifndef __GNU_GETTEXT_SUPPORTED_REVISION #define __GNU_GETTEXT_SUPPORTED_REVISION(major) ((major) == 0 ? 0 : -1) #endif changequote(,)dnl typedef int array [2 * (__GNU_GETTEXT_SUPPORTED_REVISION(0) >= 1) - 1]; changequote([,])dnl ' else gt_revision_test_code= fi if test $gt_api_version -ge 2; then gt_expression_test_code=' + * ngettext ("", "", 0)' else gt_expression_test_code= fi AC_CACHE_CHECK([for GNU gettext in libc], [$gt_func_gnugettext_libc], [AC_TRY_LINK([#include $gt_revision_test_code extern int _nl_msg_cat_cntr; extern int *_nl_domain_bindings;], [bindtextdomain ("", ""); return * gettext ("")$gt_expression_test_code + _nl_msg_cat_cntr + *_nl_domain_bindings], [eval "$gt_func_gnugettext_libc=yes"], [eval "$gt_func_gnugettext_libc=no"])]) if { eval "gt_val=\$$gt_func_gnugettext_libc"; test "$gt_val" != "yes"; }; then dnl Sometimes libintl requires libiconv, so first search for libiconv. ifelse(gt_included_intl, yes, , [ AM_ICONV_LINK ]) dnl Search for libintl and define LIBINTL, LTLIBINTL and INCINTL dnl accordingly. Don't use AC_LIB_LINKFLAGS_BODY([intl],[iconv]) dnl because that would add "-liconv" to LIBINTL and LTLIBINTL dnl even if libiconv doesn't exist. AC_LIB_LINKFLAGS_BODY([intl]) AC_CACHE_CHECK([for GNU gettext in libintl], [$gt_func_gnugettext_libintl], [gt_save_CPPFLAGS="$CPPFLAGS" CPPFLAGS="$CPPFLAGS $INCINTL" gt_save_LIBS="$LIBS" LIBS="$LIBS $LIBINTL" dnl Now see whether libintl exists and does not depend on libiconv. AC_TRY_LINK([#include $gt_revision_test_code extern int _nl_msg_cat_cntr; extern #ifdef __cplusplus "C" #endif const char *_nl_expand_alias (const char *);], [bindtextdomain ("", ""); return * gettext ("")$gt_expression_test_code + _nl_msg_cat_cntr + *_nl_expand_alias ("")], [eval "$gt_func_gnugettext_libintl=yes"], [eval "$gt_func_gnugettext_libintl=no"]) dnl Now see whether libintl exists and depends on libiconv. 
if { eval "gt_val=\$$gt_func_gnugettext_libintl"; test "$gt_val" != yes; } && test -n "$LIBICONV"; then LIBS="$LIBS $LIBICONV" AC_TRY_LINK([#include $gt_revision_test_code extern int _nl_msg_cat_cntr; extern #ifdef __cplusplus "C" #endif const char *_nl_expand_alias (const char *);], [bindtextdomain ("", ""); return * gettext ("")$gt_expression_test_code + _nl_msg_cat_cntr + *_nl_expand_alias ("")], [LIBINTL="$LIBINTL $LIBICONV" LTLIBINTL="$LTLIBINTL $LTLIBICONV" eval "$gt_func_gnugettext_libintl=yes" ]) fi CPPFLAGS="$gt_save_CPPFLAGS" LIBS="$gt_save_LIBS"]) fi dnl If an already present or preinstalled GNU gettext() is found, dnl use it. But if this macro is used in GNU gettext, and GNU dnl gettext is already preinstalled in libintl, we update this dnl libintl. (Cf. the install rule in intl/Makefile.in.) if { eval "gt_val=\$$gt_func_gnugettext_libc"; test "$gt_val" = "yes"; } \ || { { eval "gt_val=\$$gt_func_gnugettext_libintl"; test "$gt_val" = "yes"; } \ && test "$PACKAGE" != gettext-runtime \ && test "$PACKAGE" != gettext-tools; }; then gt_use_preinstalled_gnugettext=yes else dnl Reset the values set by searching for libintl. LIBINTL= LTLIBINTL= INCINTL= fi ifelse(gt_included_intl, yes, [ if test "$gt_use_preinstalled_gnugettext" != "yes"; then dnl GNU gettext is not found in the C library. dnl Fall back on included GNU gettext library. nls_cv_use_gnu_gettext=yes fi fi if test "$nls_cv_use_gnu_gettext" = "yes"; then dnl Mark actions used to generate GNU NLS library. BUILD_INCLUDED_LIBINTL=yes USE_INCLUDED_LIBINTL=yes LIBINTL="ifelse([$3],[],\${top_builddir}/intl,[$3])/libintl.[]gt_libtool_suffix_prefix[]a $LIBICONV $LIBTHREAD" LTLIBINTL="ifelse([$3],[],\${top_builddir}/intl,[$3])/libintl.[]gt_libtool_suffix_prefix[]a $LTLIBICONV $LTLIBTHREAD" LIBS=`echo " $LIBS " | sed -e 's/ -lintl / /' -e 's/^ //' -e 's/ $//'` fi CATOBJEXT= if test "$gt_use_preinstalled_gnugettext" = "yes" \ || test "$nls_cv_use_gnu_gettext" = "yes"; then dnl Mark actions to use GNU gettext tools. CATOBJEXT=.gmo fi ]) if test -n "$INTL_MACOSX_LIBS"; then if test "$gt_use_preinstalled_gnugettext" = "yes" \ || test "$nls_cv_use_gnu_gettext" = "yes"; then dnl Some extra flags are needed during linking. LIBINTL="$LIBINTL $INTL_MACOSX_LIBS" LTLIBINTL="$LTLIBINTL $INTL_MACOSX_LIBS" fi fi if test "$gt_use_preinstalled_gnugettext" = "yes" \ || test "$nls_cv_use_gnu_gettext" = "yes"; then AC_DEFINE([ENABLE_NLS], [1], [Define to 1 if translation of program messages to the user's native language is requested.]) else USE_NLS=no fi fi AC_MSG_CHECKING([whether to use NLS]) AC_MSG_RESULT([$USE_NLS]) if test "$USE_NLS" = "yes"; then AC_MSG_CHECKING([where the gettext function comes from]) if test "$gt_use_preinstalled_gnugettext" = "yes"; then if { eval "gt_val=\$$gt_func_gnugettext_libintl"; test "$gt_val" = "yes"; }; then gt_source="external libintl" else gt_source="libc" fi else gt_source="included intl directory" fi AC_MSG_RESULT([$gt_source]) fi if test "$USE_NLS" = "yes"; then if test "$gt_use_preinstalled_gnugettext" = "yes"; then if { eval "gt_val=\$$gt_func_gnugettext_libintl"; test "$gt_val" = "yes"; }; then AC_MSG_CHECKING([how to link with libintl]) AC_MSG_RESULT([$LIBINTL]) AC_LIB_APPENDTOVAR([CPPFLAGS], [$INCINTL]) fi dnl For backward compatibility. Some packages may be using this. 
AC_DEFINE([HAVE_GETTEXT], [1], [Define if the GNU gettext() function is already present or preinstalled.]) AC_DEFINE([HAVE_DCGETTEXT], [1], [Define if the GNU dcgettext() function is already present or preinstalled.]) fi dnl We need to process the po/ directory. POSUB=po fi ifelse(gt_included_intl, yes, [ dnl If this is used in GNU gettext we have to set BUILD_INCLUDED_LIBINTL dnl to 'yes' because some of the testsuite requires it. if test "$PACKAGE" = gettext-runtime || test "$PACKAGE" = gettext-tools; then BUILD_INCLUDED_LIBINTL=yes fi dnl Make all variables we use known to autoconf. AC_SUBST([BUILD_INCLUDED_LIBINTL]) AC_SUBST([USE_INCLUDED_LIBINTL]) AC_SUBST([CATOBJEXT]) dnl For backward compatibility. Some configure.ins may be using this. nls_cv_header_intl= nls_cv_header_libgt= dnl For backward compatibility. Some Makefiles may be using this. DATADIRNAME=share AC_SUBST([DATADIRNAME]) dnl For backward compatibility. Some Makefiles may be using this. INSTOBJEXT=.mo AC_SUBST([INSTOBJEXT]) dnl For backward compatibility. Some Makefiles may be using this. GENCAT=gencat AC_SUBST([GENCAT]) dnl For backward compatibility. Some Makefiles may be using this. INTLOBJS= if test "$USE_INCLUDED_LIBINTL" = yes; then INTLOBJS="\$(GETTOBJS)" fi AC_SUBST([INTLOBJS]) dnl Enable libtool support if the surrounding package wishes it. INTL_LIBTOOL_SUFFIX_PREFIX=gt_libtool_suffix_prefix AC_SUBST([INTL_LIBTOOL_SUFFIX_PREFIX]) ]) dnl For backward compatibility. Some Makefiles may be using this. INTLLIBS="$LIBINTL" AC_SUBST([INTLLIBS]) dnl Make all documented variables known to autoconf. AC_SUBST([LIBINTL]) AC_SUBST([LTLIBINTL]) AC_SUBST([POSUB]) ]) dnl gt_NEEDS_INIT ensures that the gt_needs variable is initialized. m4_define([gt_NEEDS_INIT], [ m4_divert_text([DEFAULTS], [gt_needs=]) m4_define([gt_NEEDS_INIT], []) ]) dnl Usage: AM_GNU_GETTEXT_NEED([NEEDSYMBOL]) AC_DEFUN([AM_GNU_GETTEXT_NEED], [ m4_divert_text([INIT_PREPARE], [gt_needs="$gt_needs $1"]) ]) dnl Usage: AM_GNU_GETTEXT_VERSION([gettext-version]) AC_DEFUN([AM_GNU_GETTEXT_VERSION], []) dar-2.4.8/m4/lib-ld.m40000644000175000017520000000660312023053123011137 00000000000000# lib-ld.m4 serial 4 (gettext-0.18) dnl Copyright (C) 1996-2003, 2009-2010 Free Software Foundation, Inc. dnl This file is free software; the Free Software Foundation dnl gives unlimited permission to copy and/or distribute it, dnl with or without modifications, as long as this notice is preserved. dnl Subroutines of libtool.m4, dnl with replacements s/AC_/AC_LIB/ and s/lt_cv/acl_cv/ to avoid collision dnl with libtool.m4. dnl From libtool-1.4. Sets the variable with_gnu_ld to yes or no. AC_DEFUN([AC_LIB_PROG_LD_GNU], [AC_CACHE_CHECK([if the linker ($LD) is GNU ld], [acl_cv_prog_gnu_ld], [# I'd rather use --version here, but apparently some GNU ld's only accept -v. case `$LD -v 2>&1 conf$$.sh echo "exit 0" >>conf$$.sh chmod +x conf$$.sh if (PATH="/nonexistent;."; conf$$.sh) >/dev/null 2>&1; then PATH_SEPARATOR=';' else PATH_SEPARATOR=: fi rm -f conf$$.sh fi ac_prog=ld if test "$GCC" = yes; then # Check if gcc -print-prog-name=ld gives a path. AC_MSG_CHECKING([for ld used by GCC]) case $host in *-*-mingw*) # gcc leaves a trailing carriage return which upsets mingw ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;; *) ac_prog=`($CC -print-prog-name=ld) 2>&5` ;; esac case $ac_prog in # Accept absolute paths. 
[[\\/]* | [A-Za-z]:[\\/]*)] [re_direlt='/[^/][^/]*/\.\./'] # Canonicalize the path of ld ac_prog=`echo $ac_prog| sed 's%\\\\%/%g'` while echo $ac_prog | grep "$re_direlt" > /dev/null 2>&1; do ac_prog=`echo $ac_prog| sed "s%$re_direlt%/%"` done test -z "$LD" && LD="$ac_prog" ;; "") # If it fails, then pretend we aren't using GCC. ac_prog=ld ;; *) # If it is relative, then search for the first ld in PATH. with_gnu_ld=unknown ;; esac elif test "$with_gnu_ld" = yes; then AC_MSG_CHECKING([for GNU ld]) else AC_MSG_CHECKING([for non-GNU ld]) fi AC_CACHE_VAL([acl_cv_path_LD], [if test -z "$LD"; then IFS="${IFS= }"; ac_save_ifs="$IFS"; IFS="${IFS}${PATH_SEPARATOR-:}" for ac_dir in $PATH; do test -z "$ac_dir" && ac_dir=. if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then acl_cv_path_LD="$ac_dir/$ac_prog" # Check to see if the program is GNU ld. I'd rather use --version, # but apparently some GNU ld's only accept -v. # Break only if it was the GNU/non-GNU ld that we prefer. case `"$acl_cv_path_LD" -v 2>&1 < /dev/null` in *GNU* | *'with BFD'*) test "$with_gnu_ld" != no && break ;; *) test "$with_gnu_ld" != yes && break ;; esac fi done IFS="$ac_save_ifs" else acl_cv_path_LD="$LD" # Let the user override the test with a path. fi]) LD="$acl_cv_path_LD" if test -n "$LD"; then AC_MSG_RESULT([$LD]) else AC_MSG_RESULT([no]) fi test -z "$LD" && AC_MSG_ERROR([no acceptable ld found in \$PATH]) AC_LIB_PROG_LD_GNU ]) dar-2.4.8/m4/po.m40000644000175000017520000004461612023053123010420 00000000000000# po.m4 serial 17 (gettext-0.18) dnl Copyright (C) 1995-2010 Free Software Foundation, Inc. dnl This file is free software; the Free Software Foundation dnl gives unlimited permission to copy and/or distribute it, dnl with or without modifications, as long as this notice is preserved. dnl dnl This file can can be used in projects which are not available under dnl the GNU General Public License or the GNU Library General Public dnl License but which still want to provide support for the GNU gettext dnl functionality. dnl Please note that the actual code of the GNU gettext library is covered dnl by the GNU Library General Public License, and the rest of the GNU dnl gettext package package is covered by the GNU General Public License. dnl They are *not* in the public domain. dnl Authors: dnl Ulrich Drepper , 1995-2000. dnl Bruno Haible , 2000-2003. AC_PREREQ([2.50]) dnl Checks for all prerequisites of the po subdirectory. AC_DEFUN([AM_PO_SUBDIRS], [ AC_REQUIRE([AC_PROG_MAKE_SET])dnl AC_REQUIRE([AC_PROG_INSTALL])dnl AC_REQUIRE([AM_PROG_MKDIR_P])dnl defined by automake AC_REQUIRE([AM_NLS])dnl dnl Release version of the gettext macros. This is used to ensure that dnl the gettext macros and po/Makefile.in.in are in sync. AC_SUBST([GETTEXT_MACRO_VERSION], [0.18]) dnl Perform the following tests also if --disable-nls has been given, dnl because they are needed for "make dist" to work. dnl Search for GNU msgfmt in the PATH. dnl The first test excludes Solaris msgfmt and early GNU msgfmt versions. dnl The second test excludes FreeBSD msgfmt. AM_PATH_PROG_WITH_TEST(MSGFMT, msgfmt, [$ac_dir/$ac_word --statistics /dev/null >&]AS_MESSAGE_LOG_FD[ 2>&1 && (if $ac_dir/$ac_word --statistics /dev/null 2>&1 >/dev/null | grep usage >/dev/null; then exit 1; else exit 0; fi)], :) AC_PATH_PROG([GMSGFMT], [gmsgfmt], [$MSGFMT]) dnl Test whether it is GNU msgfmt >= 0.15. 
changequote(,)dnl case `$MSGFMT --version | sed 1q | sed -e 's,^[^0-9]*,,'` in '' | 0.[0-9] | 0.[0-9].* | 0.1[0-4] | 0.1[0-4].*) MSGFMT_015=: ;; *) MSGFMT_015=$MSGFMT ;; esac changequote([,])dnl AC_SUBST([MSGFMT_015]) changequote(,)dnl case `$GMSGFMT --version | sed 1q | sed -e 's,^[^0-9]*,,'` in '' | 0.[0-9] | 0.[0-9].* | 0.1[0-4] | 0.1[0-4].*) GMSGFMT_015=: ;; *) GMSGFMT_015=$GMSGFMT ;; esac changequote([,])dnl AC_SUBST([GMSGFMT_015]) dnl Search for GNU xgettext 0.12 or newer in the PATH. dnl The first test excludes Solaris xgettext and early GNU xgettext versions. dnl The second test excludes FreeBSD xgettext. AM_PATH_PROG_WITH_TEST(XGETTEXT, xgettext, [$ac_dir/$ac_word --omit-header --copyright-holder= --msgid-bugs-address= /dev/null >&]AS_MESSAGE_LOG_FD[ 2>&1 && (if $ac_dir/$ac_word --omit-header --copyright-holder= --msgid-bugs-address= /dev/null 2>&1 >/dev/null | grep usage >/dev/null; then exit 1; else exit 0; fi)], :) dnl Remove leftover from FreeBSD xgettext call. rm -f messages.po dnl Test whether it is GNU xgettext >= 0.15. changequote(,)dnl case `$XGETTEXT --version | sed 1q | sed -e 's,^[^0-9]*,,'` in '' | 0.[0-9] | 0.[0-9].* | 0.1[0-4] | 0.1[0-4].*) XGETTEXT_015=: ;; *) XGETTEXT_015=$XGETTEXT ;; esac changequote([,])dnl AC_SUBST([XGETTEXT_015]) dnl Search for GNU msgmerge 0.11 or newer in the PATH. AM_PATH_PROG_WITH_TEST(MSGMERGE, msgmerge, [$ac_dir/$ac_word --update -q /dev/null /dev/null >&]AS_MESSAGE_LOG_FD[ 2>&1], :) dnl Installation directories. dnl Autoconf >= 2.60 defines localedir. For older versions of autoconf, we dnl have to define it here, so that it can be used in po/Makefile. test -n "$localedir" || localedir='${datadir}/locale' AC_SUBST([localedir]) dnl Support for AM_XGETTEXT_OPTION. test -n "${XGETTEXT_EXTRA_OPTIONS+set}" || XGETTEXT_EXTRA_OPTIONS= AC_SUBST([XGETTEXT_EXTRA_OPTIONS]) AC_CONFIG_COMMANDS([po-directories], [[ for ac_file in $CONFIG_FILES; do # Support "outfile[:infile[:infile...]]" case "$ac_file" in *:*) ac_file=`echo "$ac_file"|sed 's%:.*%%'` ;; esac # PO directories have a Makefile.in generated from Makefile.in.in. case "$ac_file" in */Makefile.in) # Adjust a relative srcdir. ac_dir=`echo "$ac_file"|sed 's%/[^/][^/]*$%%'` ac_dir_suffix="/`echo "$ac_dir"|sed 's%^\./%%'`" ac_dots=`echo "$ac_dir_suffix"|sed 's%/[^/]*%../%g'` # In autoconf-2.13 it is called $ac_given_srcdir. # In autoconf-2.50 it is called $srcdir. test -n "$ac_given_srcdir" || ac_given_srcdir="$srcdir" case "$ac_given_srcdir" in .) top_srcdir=`echo $ac_dots|sed 's%/$%%'` ;; /*) top_srcdir="$ac_given_srcdir" ;; *) top_srcdir="$ac_dots$ac_given_srcdir" ;; esac # Treat a directory as a PO directory if and only if it has a # POTFILES.in file. This allows packages to have multiple PO # directories under different names or in different locations. if test -f "$ac_given_srcdir/$ac_dir/POTFILES.in"; then rm -f "$ac_dir/POTFILES" test -n "$as_me" && echo "$as_me: creating $ac_dir/POTFILES" || echo "creating $ac_dir/POTFILES" cat "$ac_given_srcdir/$ac_dir/POTFILES.in" | sed -e "/^#/d" -e "/^[ ]*\$/d" -e "s,.*, $top_srcdir/& \\\\," | sed -e "\$s/\(.*\) \\\\/\1/" > "$ac_dir/POTFILES" POMAKEFILEDEPS="POTFILES.in" # ALL_LINGUAS, POFILES, UPDATEPOFILES, DUMMYPOFILES, GMOFILES depend # on $ac_dir but don't depend on user-specified configuration # parameters. if test -f "$ac_given_srcdir/$ac_dir/LINGUAS"; then # The LINGUAS file contains the set of available languages. 
if test -n "$OBSOLETE_ALL_LINGUAS"; then test -n "$as_me" && echo "$as_me: setting ALL_LINGUAS in configure.in is obsolete" || echo "setting ALL_LINGUAS in configure.in is obsolete" fi ALL_LINGUAS_=`sed -e "/^#/d" -e "s/#.*//" "$ac_given_srcdir/$ac_dir/LINGUAS"` # Hide the ALL_LINGUAS assigment from automake < 1.5. eval 'ALL_LINGUAS''=$ALL_LINGUAS_' POMAKEFILEDEPS="$POMAKEFILEDEPS LINGUAS" else # The set of available languages was given in configure.in. # Hide the ALL_LINGUAS assigment from automake < 1.5. eval 'ALL_LINGUAS''=$OBSOLETE_ALL_LINGUAS' fi # Compute POFILES # as $(foreach lang, $(ALL_LINGUAS), $(srcdir)/$(lang).po) # Compute UPDATEPOFILES # as $(foreach lang, $(ALL_LINGUAS), $(lang).po-update) # Compute DUMMYPOFILES # as $(foreach lang, $(ALL_LINGUAS), $(lang).nop) # Compute GMOFILES # as $(foreach lang, $(ALL_LINGUAS), $(srcdir)/$(lang).gmo) case "$ac_given_srcdir" in .) srcdirpre= ;; *) srcdirpre='$(srcdir)/' ;; esac POFILES= UPDATEPOFILES= DUMMYPOFILES= GMOFILES= for lang in $ALL_LINGUAS; do POFILES="$POFILES $srcdirpre$lang.po" UPDATEPOFILES="$UPDATEPOFILES $lang.po-update" DUMMYPOFILES="$DUMMYPOFILES $lang.nop" GMOFILES="$GMOFILES $srcdirpre$lang.gmo" done # CATALOGS depends on both $ac_dir and the user's LINGUAS # environment variable. INST_LINGUAS= if test -n "$ALL_LINGUAS"; then for presentlang in $ALL_LINGUAS; do useit=no if test "%UNSET%" != "$LINGUAS"; then desiredlanguages="$LINGUAS" else desiredlanguages="$ALL_LINGUAS" fi for desiredlang in $desiredlanguages; do # Use the presentlang catalog if desiredlang is # a. equal to presentlang, or # b. a variant of presentlang (because in this case, # presentlang can be used as a fallback for messages # which are not translated in the desiredlang catalog). case "$desiredlang" in "$presentlang"*) useit=yes;; esac done if test $useit = yes; then INST_LINGUAS="$INST_LINGUAS $presentlang" fi done fi CATALOGS= if test -n "$INST_LINGUAS"; then for lang in $INST_LINGUAS; do CATALOGS="$CATALOGS $lang.gmo" done fi test -n "$as_me" && echo "$as_me: creating $ac_dir/Makefile" || echo "creating $ac_dir/Makefile" sed -e "/^POTFILES =/r $ac_dir/POTFILES" -e "/^# Makevars/r $ac_given_srcdir/$ac_dir/Makevars" -e "s|@POFILES@|$POFILES|g" -e "s|@UPDATEPOFILES@|$UPDATEPOFILES|g" -e "s|@DUMMYPOFILES@|$DUMMYPOFILES|g" -e "s|@GMOFILES@|$GMOFILES|g" -e "s|@CATALOGS@|$CATALOGS|g" -e "s|@POMAKEFILEDEPS@|$POMAKEFILEDEPS|g" "$ac_dir/Makefile.in" > "$ac_dir/Makefile" for f in "$ac_given_srcdir/$ac_dir"/Rules-*; do if test -f "$f"; then case "$f" in *.orig | *.bak | *~) ;; *) cat "$f" >> "$ac_dir/Makefile" ;; esac fi done fi ;; esac done]], [# Capture the value of obsolete ALL_LINGUAS because we need it to compute # POFILES, UPDATEPOFILES, DUMMYPOFILES, GMOFILES, CATALOGS. But hide it # from automake < 1.5. eval 'OBSOLETE_ALL_LINGUAS''="$ALL_LINGUAS"' # Capture the value of LINGUAS because we need it to compute CATALOGS. LINGUAS="${LINGUAS-%UNSET%}" ]) ]) dnl Postprocesses a Makefile in a directory containing PO files. AC_DEFUN([AM_POSTPROCESS_PO_MAKEFILE], [ # When this code is run, in config.status, two variables have already been # set: # - OBSOLETE_ALL_LINGUAS is the value of LINGUAS set in configure.in, # - LINGUAS is the value of the environment variable LINGUAS at configure # time. changequote(,)dnl # Adjust a relative srcdir. ac_dir=`echo "$ac_file"|sed 's%/[^/][^/]*$%%'` ac_dir_suffix="/`echo "$ac_dir"|sed 's%^\./%%'`" ac_dots=`echo "$ac_dir_suffix"|sed 's%/[^/]*%../%g'` # In autoconf-2.13 it is called $ac_given_srcdir. 
# In autoconf-2.50 it is called $srcdir. test -n "$ac_given_srcdir" || ac_given_srcdir="$srcdir" case "$ac_given_srcdir" in .) top_srcdir=`echo $ac_dots|sed 's%/$%%'` ;; /*) top_srcdir="$ac_given_srcdir" ;; *) top_srcdir="$ac_dots$ac_given_srcdir" ;; esac # Find a way to echo strings without interpreting backslash. if test "X`(echo '\t') 2>/dev/null`" = 'X\t'; then gt_echo='echo' else if test "X`(printf '%s\n' '\t') 2>/dev/null`" = 'X\t'; then gt_echo='printf %s\n' else echo_func () { cat < "$ac_file.tmp" if grep -l '@TCLCATALOGS@' "$ac_file" > /dev/null; then # Add dependencies that cannot be formulated as a simple suffix rule. for lang in $ALL_LINGUAS; do frobbedlang=`echo $lang | sed -e 's/\..*$//' -e 'y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/'` cat >> "$ac_file.tmp" < /dev/null; then # Add dependencies that cannot be formulated as a simple suffix rule. for lang in $ALL_LINGUAS; do frobbedlang=`echo $lang | sed -e 's/_/-/g' -e 's/^sr-CS/sr-SP/' -e 's/@latin$/-Latn/' -e 's/@cyrillic$/-Cyrl/' -e 's/^sr-SP$/sr-SP-Latn/' -e 's/^uz-UZ$/uz-UZ-Latn/'` cat >> "$ac_file.tmp" <> "$ac_file.tmp" <, 1995-2000. dnl Bruno Haible , 2000-2003. AC_PREREQ([2.50]) AC_DEFUN([AM_NLS], [ AC_MSG_CHECKING([whether NLS is requested]) dnl Default is enabled NLS AC_ARG_ENABLE([nls], [ --disable-nls do not use Native Language Support], USE_NLS=$enableval, USE_NLS=yes) AC_MSG_RESULT([$USE_NLS]) AC_SUBST([USE_NLS]) ]) dar-2.4.8/m4/lib-prefix.m40000644000175000017520000002042212023053123012030 00000000000000# lib-prefix.m4 serial 7 (gettext-0.18) dnl Copyright (C) 2001-2005, 2008-2010 Free Software Foundation, Inc. dnl This file is free software; the Free Software Foundation dnl gives unlimited permission to copy and/or distribute it, dnl with or without modifications, as long as this notice is preserved. dnl From Bruno Haible. dnl AC_LIB_ARG_WITH is synonymous to AC_ARG_WITH in autoconf-2.13, and dnl similar to AC_ARG_WITH in autoconf 2.52...2.57 except that is doesn't dnl require excessive bracketing. ifdef([AC_HELP_STRING], [AC_DEFUN([AC_LIB_ARG_WITH], [AC_ARG_WITH([$1],[[$2]],[$3],[$4])])], [AC_DEFUN([AC_][LIB_ARG_WITH], [AC_ARG_WITH([$1],[$2],[$3],[$4])])]) dnl AC_LIB_PREFIX adds to the CPPFLAGS and LDFLAGS the flags that are needed dnl to access previously installed libraries. The basic assumption is that dnl a user will want packages to use other packages he previously installed dnl with the same --prefix option. dnl This macro is not needed if only AC_LIB_LINKFLAGS is used to locate dnl libraries, but is otherwise very convenient. AC_DEFUN([AC_LIB_PREFIX], [ AC_BEFORE([$0], [AC_LIB_LINKFLAGS]) AC_REQUIRE([AC_PROG_CC]) AC_REQUIRE([AC_CANONICAL_HOST]) AC_REQUIRE([AC_LIB_PREPARE_MULTILIB]) AC_REQUIRE([AC_LIB_PREPARE_PREFIX]) dnl By default, look in $includedir and $libdir. use_additional=yes AC_LIB_WITH_FINAL_PREFIX([ eval additional_includedir=\"$includedir\" eval additional_libdir=\"$libdir\" ]) AC_LIB_ARG_WITH([lib-prefix], [ --with-lib-prefix[=DIR] search for libraries in DIR/include and DIR/lib --without-lib-prefix don't search for libraries in includedir and libdir], [ if test "X$withval" = "Xno"; then use_additional=no else if test "X$withval" = "X"; then AC_LIB_WITH_FINAL_PREFIX([ eval additional_includedir=\"$includedir\" eval additional_libdir=\"$libdir\" ]) else additional_includedir="$withval/include" additional_libdir="$withval/$acl_libdirstem" fi fi ]) if test $use_additional = yes; then dnl Potentially add $additional_includedir to $CPPFLAGS. dnl But don't add it dnl 1. 
if it's the standard /usr/include, dnl 2. if it's already present in $CPPFLAGS, dnl 3. if it's /usr/local/include and we are using GCC on Linux, dnl 4. if it doesn't exist as a directory. if test "X$additional_includedir" != "X/usr/include"; then haveit= for x in $CPPFLAGS; do AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) if test "X$x" = "X-I$additional_includedir"; then haveit=yes break fi done if test -z "$haveit"; then if test "X$additional_includedir" = "X/usr/local/include"; then if test -n "$GCC"; then case $host_os in linux* | gnu* | k*bsd*-gnu) haveit=yes;; esac fi fi if test -z "$haveit"; then if test -d "$additional_includedir"; then dnl Really add $additional_includedir to $CPPFLAGS. CPPFLAGS="${CPPFLAGS}${CPPFLAGS:+ }-I$additional_includedir" fi fi fi fi dnl Potentially add $additional_libdir to $LDFLAGS. dnl But don't add it dnl 1. if it's the standard /usr/lib, dnl 2. if it's already present in $LDFLAGS, dnl 3. if it's /usr/local/lib and we are using GCC on Linux, dnl 4. if it doesn't exist as a directory. if test "X$additional_libdir" != "X/usr/$acl_libdirstem"; then haveit= for x in $LDFLAGS; do AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) if test "X$x" = "X-L$additional_libdir"; then haveit=yes break fi done if test -z "$haveit"; then if test "X$additional_libdir" = "X/usr/local/$acl_libdirstem"; then if test -n "$GCC"; then case $host_os in linux*) haveit=yes;; esac fi fi if test -z "$haveit"; then if test -d "$additional_libdir"; then dnl Really add $additional_libdir to $LDFLAGS. LDFLAGS="${LDFLAGS}${LDFLAGS:+ }-L$additional_libdir" fi fi fi fi fi ]) dnl AC_LIB_PREPARE_PREFIX creates variables acl_final_prefix, dnl acl_final_exec_prefix, containing the values to which $prefix and dnl $exec_prefix will expand at the end of the configure script. AC_DEFUN([AC_LIB_PREPARE_PREFIX], [ dnl Unfortunately, prefix and exec_prefix get only finally determined dnl at the end of configure. if test "X$prefix" = "XNONE"; then acl_final_prefix="$ac_default_prefix" else acl_final_prefix="$prefix" fi if test "X$exec_prefix" = "XNONE"; then acl_final_exec_prefix='${prefix}' else acl_final_exec_prefix="$exec_prefix" fi acl_save_prefix="$prefix" prefix="$acl_final_prefix" eval acl_final_exec_prefix=\"$acl_final_exec_prefix\" prefix="$acl_save_prefix" ]) dnl AC_LIB_WITH_FINAL_PREFIX([statement]) evaluates statement, with the dnl variables prefix and exec_prefix bound to the values they will have dnl at the end of the configure script. AC_DEFUN([AC_LIB_WITH_FINAL_PREFIX], [ acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" $1 exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" ]) dnl AC_LIB_PREPARE_MULTILIB creates dnl - a variable acl_libdirstem, containing the basename of the libdir, either dnl "lib" or "lib64" or "lib/64", dnl - a variable acl_libdirstem2, as a secondary possible value for dnl acl_libdirstem, either the same as acl_libdirstem or "lib/sparcv9" or dnl "lib/amd64". AC_DEFUN([AC_LIB_PREPARE_MULTILIB], [ dnl There is no formal standard regarding lib and lib64. dnl On glibc systems, the current practice is that on a system supporting dnl 32-bit and 64-bit instruction sets or ABIs, 64-bit libraries go under dnl $prefix/lib64 and 32-bit libraries go under $prefix/lib. We determine dnl the compiler's default mode by looking at the compiler's library search dnl path. If at least one of its elements ends in /lib64 or points to a dnl directory whose absolute pathname ends in /lib64, we assume a 64-bit ABI. 
dnl Otherwise we use the default, namely "lib". dnl On Solaris systems, the current practice is that on a system supporting dnl 32-bit and 64-bit instruction sets or ABIs, 64-bit libraries go under dnl $prefix/lib/64 (which is a symlink to either $prefix/lib/sparcv9 or dnl $prefix/lib/amd64) and 32-bit libraries go under $prefix/lib. AC_REQUIRE([AC_CANONICAL_HOST]) acl_libdirstem=lib acl_libdirstem2= case "$host_os" in solaris*) dnl See Solaris 10 Software Developer Collection > Solaris 64-bit Developer's Guide > The Development Environment dnl . dnl "Portable Makefiles should refer to any library directories using the 64 symbolic link." dnl But we want to recognize the sparcv9 or amd64 subdirectory also if the dnl symlink is missing, so we set acl_libdirstem2 too. AC_CACHE_CHECK([for 64-bit host], [gl_cv_solaris_64bit], [AC_EGREP_CPP([sixtyfour bits], [ #ifdef _LP64 sixtyfour bits #endif ], [gl_cv_solaris_64bit=yes], [gl_cv_solaris_64bit=no]) ]) if test $gl_cv_solaris_64bit = yes; then acl_libdirstem=lib/64 case "$host_cpu" in sparc*) acl_libdirstem2=lib/sparcv9 ;; i*86 | x86_64) acl_libdirstem2=lib/amd64 ;; esac fi ;; *) searchpath=`(LC_ALL=C $CC -print-search-dirs) 2>/dev/null | sed -n -e 's,^libraries: ,,p' | sed -e 's,^=,,'` if test -n "$searchpath"; then acl_save_IFS="${IFS= }"; IFS=":" for searchdir in $searchpath; do if test -d "$searchdir"; then case "$searchdir" in */lib64/ | */lib64 ) acl_libdirstem=lib64 ;; */../ | */.. ) # Better ignore directories of this form. They are misleading. ;; *) searchdir=`cd "$searchdir" && pwd` case "$searchdir" in */lib64 ) acl_libdirstem=lib64 ;; esac ;; esac fi done IFS="$acl_save_IFS" fi ;; esac test -n "$acl_libdirstem2" || acl_libdirstem2="$acl_libdirstem" ]) dar-2.4.8/m4/ChangeLog0000644000175000017520000000063512023053123011303 000000000000002012-09-09 gettextize * gettext.m4: New file, from gettext-0.18.1. * iconv.m4: New file, from gettext-0.18.1. * lib-ld.m4: New file, from gettext-0.18.1. * lib-link.m4: New file, from gettext-0.18.1. * lib-prefix.m4: New file, from gettext-0.18.1. * nls.m4: New file, from gettext-0.18.1. * po.m4: New file, from gettext-0.18.1. * progtest.m4: New file, from gettext-0.18.1. dar-2.4.8/m4/progtest.m40000644000175000017520000000557312023053123011650 00000000000000# progtest.m4 serial 6 (gettext-0.18) dnl Copyright (C) 1996-2003, 2005, 2008-2010 Free Software Foundation, Inc. dnl This file is free software; the Free Software Foundation dnl gives unlimited permission to copy and/or distribute it, dnl with or without modifications, as long as this notice is preserved. dnl dnl This file can can be used in projects which are not available under dnl the GNU General Public License or the GNU Library General Public dnl License but which still want to provide support for the GNU gettext dnl functionality. dnl Please note that the actual code of the GNU gettext library is covered dnl by the GNU Library General Public License, and the rest of the GNU dnl gettext package package is covered by the GNU General Public License. dnl They are *not* in the public domain. dnl Authors: dnl Ulrich Drepper , 1996. AC_PREREQ([2.50]) # Search path for a program which passes the given test. dnl AM_PATH_PROG_WITH_TEST(VARIABLE, PROG-TO-CHECK-FOR, dnl TEST-PERFORMED-ON-FOUND_PROGRAM [, VALUE-IF-NOT-FOUND [, PATH]]) AC_DEFUN([AM_PATH_PROG_WITH_TEST], [ # Prepare PATH_SEPARATOR. # The user is always right. if test "${PATH_SEPARATOR+set}" != set; then echo "#! 
/bin/sh" >conf$$.sh echo "exit 0" >>conf$$.sh chmod +x conf$$.sh if (PATH="/nonexistent;."; conf$$.sh) >/dev/null 2>&1; then PATH_SEPARATOR=';' else PATH_SEPARATOR=: fi rm -f conf$$.sh fi # Find out how to test for executable files. Don't use a zero-byte file, # as systems may use methods other than mode bits to determine executability. cat >conf$$.file <<_ASEOF #! /bin/sh exit 0 _ASEOF chmod +x conf$$.file if test -x conf$$.file >/dev/null 2>&1; then ac_executable_p="test -x" else ac_executable_p="test -f" fi rm -f conf$$.file # Extract the first word of "$2", so it can be a program name with args. set dummy $2; ac_word=[$]2 AC_MSG_CHECKING([for $ac_word]) AC_CACHE_VAL([ac_cv_path_$1], [case "[$]$1" in [[\\/]]* | ?:[[\\/]]*) ac_cv_path_$1="[$]$1" # Let the user override the test with a path. ;; *) ac_save_IFS="$IFS"; IFS=$PATH_SEPARATOR for ac_dir in ifelse([$5], , $PATH, [$5]); do IFS="$ac_save_IFS" test -z "$ac_dir" && ac_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $ac_executable_p "$ac_dir/$ac_word$ac_exec_ext"; then echo "$as_me: trying $ac_dir/$ac_word..." >&AS_MESSAGE_LOG_FD if [$3]; then ac_cv_path_$1="$ac_dir/$ac_word$ac_exec_ext" break 2 fi fi done done IFS="$ac_save_IFS" dnl If no 4th arg is given, leave the cache variable unset, dnl so AC_PATH_PROGS will keep looking. ifelse([$4], , , [ test -z "[$]ac_cv_path_$1" && ac_cv_path_$1="$4" ])dnl ;; esac])dnl $1="$ac_cv_path_$1" if test ifelse([$4], , [-n "[$]$1"], ["[$]$1" != "$4"]); then AC_MSG_RESULT([$][$1]) else AC_MSG_RESULT([no]) fi AC_SUBST([$1])dnl ]) dar-2.4.8/man/0000755000175000017520000000000012023053205010041 500000000000000dar-2.4.8/man/Makefile.in0000644000175000017520000003274212023053152012037 00000000000000# Makefile.in generated by automake 1.11.1 from Makefile.am. # @configure_input@ # Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, # 2003, 2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation, # Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. 
@SET_MAKE@ VPATH = @srcdir@ pkgdatadir = $(datadir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ pkglibdir = $(libdir)/@PACKAGE@ pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = man DIST_COMMON = $(dist_man_MANS) $(srcdir)/Makefile.am \ $(srcdir)/Makefile.in ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/iconv.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/nls.m4 $(top_srcdir)/m4/po.m4 \ $(top_srcdir)/m4/progtest.m4 $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = SOURCES = DIST_SOURCES = am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' man1dir = $(mandir)/man1 am__installdirs = "$(DESTDIR)$(man1dir)" NROFF = nroff MANS = $(dist_man_MANS) DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) ACLOCAL = @ACLOCAL@ AMTAR = @AMTAR@ AR = @AR@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DOXYGEN_PROG = @DOXYGEN_PROG@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GREP = @GREP@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LD = @LD@ LDFLAGS = @LDFLAGS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ NM = @NM@ NMEDIT = @NMEDIT@ OBJDUMP = @OBJDUMP@ 
OBJEXT = @OBJEXT@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ POSUB = @POSUB@ RANLIB = @RANLIB@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ STRIP = @STRIP@ UPX_PROG = @UPX_PROG@ USE_NLS = @USE_NLS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ bindir = @bindir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ datadir = @datadir@ datarootdir = @datarootdir@ docdir = @docdir@ doxygen = @doxygen@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ groff = @groff@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ localedir = @localedir@ localstatedir = @localstatedir@ lt_ECHO = @lt_ECHO@ mandir = @mandir@ mkdir_p = @mkdir_p@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ prefix = @prefix@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp = @tmp@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ upx = @upx@ dist_man_MANS = dar.1 dar_manager.1 dar_slave.1 dar_xform.1 dar_cp.1 all: all-am .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --gnu man/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --gnu man/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-man1: $(dist_man_MANS) @$(NORMAL_INSTALL) test -z "$(man1dir)" || $(MKDIR_P) "$(DESTDIR)$(man1dir)" @list=''; test -n "$(man1dir)" || exit 0; \ { for i in $$list; do echo "$$i"; done; \ l2='$(dist_man_MANS)'; for i in $$l2; do echo "$$i"; done | \ sed -n '/\.1[a-z]*$$/p'; \ } | while read p; do \ if test -f $$p; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; echo "$$p"; \ done | \ sed -e 'n;s,.*/,,;p;h;s,.*\.,,;s,^[^1][0-9a-z]*$$,1,;x' \ -e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,' | \ sed 'N;N;s,\n, ,g' | { \ list=; while read file base inst; do \ if test "$$base" = "$$inst"; then list="$$list $$file"; else \ echo " $(INSTALL_DATA) '$$file' '$(DESTDIR)$(man1dir)/$$inst'"; \ $(INSTALL_DATA) "$$file" "$(DESTDIR)$(man1dir)/$$inst" || exit $$?; \ fi; \ done; \ for i in $$list; do echo "$$i"; done | $(am__base_list) | \ while read files; do \ test -z "$$files" || { \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(man1dir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(man1dir)" || exit $$?; }; \ done; } uninstall-man1: @$(NORMAL_UNINSTALL) @list=''; test -n "$(man1dir)" || exit 0; \ files=`{ for i in $$list; do echo "$$i"; done; \ l2='$(dist_man_MANS)'; for i in $$l2; do echo "$$i"; done | \ sed -n '/\.1[a-z]*$$/p'; \ } | sed -e 's,.*/,,;h;s,.*\.,,;s,^[^1][0-9a-z]*$$,1,;x' \ -e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,'`; \ test -z "$$files" || { \ echo " ( cd '$(DESTDIR)$(man1dir)' && rm -f" $$files ")"; \ cd "$(DESTDIR)$(man1dir)" && rm -f $$files; } tags: TAGS TAGS: ctags: CTAGS CTAGS: distdir: $(DISTFILES) @list='$(MANS)'; if test -n "$$list"; then \ list=`for p in $$list; do \ if test -f $$p; then d=; else d="$(srcdir)/"; fi; \ if test -f "$$d$$p"; then echo "$$d$$p"; else :; fi; done`; \ if test -n "$$list" && \ grep 'ab help2man is required to generate this page' $$list >/dev/null; then \ echo "error: found man pages containing the \`missing help2man' replacement text:" >&2; \ grep -l 'ab help2man is required to generate this page' $$list | sed 's/^/ /' >&2; \ echo " to fix them, install help2man, remove and regenerate the man pages;" >&2; \ echo " typically \`make maintainer-clean' will remove them" >&2; \ exit 1; \ else :; fi; \ else :; fi @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ 
find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(MANS) installdirs: for dir in "$(DESTDIR)$(man1dir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ `test -z '$(STRIP)' || \ echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." clean: clean-am clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-am -rm -f Makefile distclean-am: clean-am distclean-generic dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-man install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-man1 install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-man uninstall-man: uninstall-man1 .MAKE: install-am install-strip .PHONY: all all-am check check-am clean clean-generic clean-libtool \ distclean distclean-generic distclean-libtool distdir dvi \ dvi-am html html-am info info-am install install-am \ install-data install-data-am install-dvi install-dvi-am \ install-exec install-exec-am install-html install-html-am \ install-info install-info-am install-man install-man1 \ install-pdf install-pdf-am install-ps install-ps-am \ install-strip installcheck installcheck-am installdirs \ maintainer-clean maintainer-clean-generic mostlyclean \ mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \ uninstall uninstall-am uninstall-man uninstall-man1 # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: dar-2.4.8/man/dar_xform.10000644000175000017430000001245712010477201012037 00000000000000.TH DAR_XFORM 1 "March 3rd, 2012" .UC 8 .SH NAME dar_xform \- disk archive "re-slicer" .SH SYNOPSIS dar_xform [options] [/]source [/]destination .P dar_xform -h .P dar_xform -V .SH DESCRIPTION .B dar_xform changes the size of slices of an existing archive. .PP Source is the basename of the existing archive, destination is the basename of the archive to be created. 
If the source basename is "-", the archive is read from standard input. If the destination basename is "-", the archive is written to standard output and the -s option is not available. .SH OPTIONS .PP .TP 20 -h displays help usage. .TP 20 -V displays version information. .TP 20 -b makes the terminal ring when user interaction is required (like for example the creation of a new slice when using the -p option). .TP 20 -s <number> Size of the slices in bytes. If the number is followed by k (or K), M, G, T or P the size is in kilobytes, megabytes, gigabytes, terabytes or petabytes respectively. Example: by default "20M" means 20 megabytes; it is the same as giving 20971520 as argument (see also the -aSI and -abinary options). If -s is not present the backup will be written to a single slice whatever the size of the backup may be (there is probably some filesystem limitation, thus you might expect problems with file sizes over 2 gigabytes, depending on your filesystem). .TP 20 -S <number> -S gives the size of the first slice, which may be chosen independently of the size of the following slices. This option needs -s and by default the size of the first slice is the same as the one of the following slices. .TP 20 -p [<integer>] pauses before writing to a new slice (this requires -s). By default there is no pause, all slices are output in the same directory, up to the end of the backup or until the filesystem is full. In this latter case, the user is informed of the lack of disk space and dar stops for user interaction. As soon as some disk space is available, the user can continue the backup. The optional integer that this option can receive tells dar to only pause every 'n' slices. Giving 3 for 'n' will make dar pause only after slices 3, 6, 9 and so on. If this integer is not specified, the behavior is as if '1' was given as argument, which makes dar pause after each slice. .TP 20 -n Do not allow overwriting of any slice. .TP 20 -w Do not warn before overwriting a slice. By default (no -n and no -w) overwriting is allowed but a warning is issued before proceeding. .TP 20 -E <string> the string is a command-line to be launched between the slices of the destination archive. See the dar(1) man page (same option) for more information. .TP 20 -F <string> the string is a command-line to be launched between the slices of the source archive. See the dar(1) man page (same option) for more information. .TP 20 -aSI[-unit[s]] when using k M G T E Z Y prefixes to define a size, use the SI meaning: multiples of 10^3 (a Mega is 1,000,000). .TP 20 -abinary[-unit[s]] when using k M G T E Z Y prefixes to define a size, use the historical computer science meaning: multiples of 2^10 (a Mega is 1,048,576). .P -aSI and -abinary can be used several times; they affect all prefixes which follow, even those found in files included by the -B option, up to the next -a... occurrence. Note that if an -abinary or -aSI is met in a file included by the -B option, it affects all the following prefixes, even those outside the included files (for example in the following "-B some.dcf -s 1K", 1K may be equal to 1000 or 1024 depending on the presence of an -aSI or -abinary in the file some.dcf). By default (before any -aSI/binary argument has been reached), binary interpretation of suffixes is done (for compatibility with older versions). .TP 20 -Q Do not display any message on stderr when not launched from a terminal (for example when launched from an at job or crontab). Note that any question to the user will then be assumed to have a 'no' answer, which most of the time will abort the program.
.TP 20 -j when virtual memory is exhausted, ask the user to make room before trying to continue. By default, when memory is exhausted dar aborts. .TP 20 -^ perm[:user[:group]] defines the permission and ownership to use for created slices. .TP 20 -3, --hash <algo> Beside each created slice, an on-fly hash file is generated using the specified algorithm. Available algorithms are "md5" and "sha1"; by default no hash file is generated. This option is described in more detail in the dar man page (where it has the same designation as here). .TP 20 -; <min-digits>[,<min-digits>] Defines the minimum number of digits to use for the source archive and for the destination archive. If the source has not been defined with a minimum number of digits and you want to define a value for the destination archive, use zero (or one) as value for src. See the same option in the dar man page for more details. .SH NOTES Dar_xform is not concerned with encryption or compression. It does not need to be aware of them to be able to change the slicing scheme. Thus, it is not able to uncompress or uncipher an archive. .SH EXIT CODES .B dar_xform uses the same exit status as dar does, see the dar(1) man page. .SH SIGNALS Any signal sent to dar_xform will abort the program immediately; there is no way to have a proper termination before the end of the process. .SH SEE ALSO dar(1), dar_slave(1), dar_manager(1), dar_cp(1) .SH KNOWN BUGS None known at this time. .SH AUTHOR .nf http://dar.linux.free.fr/ Denis Corbin France Europe dar-2.4.8/man/dar.10000644000175000017430000034703012010477201010622 00000000000000.TH DAR 1 "March 3rd, 2012" .UC 8 .SH NAME dar \- create, test, list, extract, compare, merge, isolate dar archives .SH SYNOPSIS dar [-c | -x | -l | -d | -t | -C | -+] [<path>/]<basename> [<options>] [<user targets>] .P dar -h .P dar -V .SH DESCRIPTION .B dar is a full featured backup tool, aimed at disks (floppy, CD-R(W), DVD-R(W), zip, jazz, etc.) and, since release 2.4.0, adapted to tapes. .PP .B dar can store a backup in several files (called "slices" in the following) of a given size, optionally pausing or running a user command/script before starting the next slice. This allows, for example, burning the last generated slice on a CD-R, or changing a floppy disk before continuing with the next one. Like its grand-brother, the great "tar" command, .B dar may also use compression, with the difference that compression is used inside the archive, to be able to have compressed slices of the defined size. But the most important feature of .B dar is its ability to make differential backups. In other words, backups that contain only new files or files that have changed from a backup of reference. Moreover, with differential backups, .B dar also stores files that have been deleted since the backup of reference. Thus, when restoring first a full backup, then additional differential backups, at each restoration you get the exact state of the filesystem at the time of the backup. And of course, the reference backup may be a full or a differential backup itself. .PP .B dar is the first backup program I know that can also remove files during restoration! By the way, in this document, "archive" and "backup" mean the same thing, and are used interchangeably. .PP Unlike the .B tar command, .B dar does not have to read a whole archive to know its contents: a dar archive contains a table of contents (aka "catalogue") located at the end of the archive, so it seeks back and forth into the archive to extract only the required files, which is much faster than what tar does.
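To make the full/differential cycle described above concrete, here is a minimal, hedged sketch of a backup and restore session (plain shell commands for a typical dar 2.4.x installation; -R designates the root of the tree to save and -A the backup of reference, both documented with the other options of this man page; the basenames, the path /home and the sizes are only examples to adapt to your needs):
    # Full backup of /home, compressed, cut into slices full_monday.1.dar, full_monday.2.dar, ...
    dar -c full_monday -R /home -z -s 100M
    # Differential backup: only files changed or deleted since the full backup are recorded
    dar -c diff_tuesday -R /home -z -s 100M -A full_monday
    # Restoration: restore the full backup first, then the differential one on top of it
    dar -x full_monday -R /home
    dar -x diff_tuesday -R /home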
Since release 2.4.0 dar can also use a sequential reading mode, in which dar acts like tar, just reading the whole archive byte by byte to know its contents and possibly extracting files at each step. In other words, the archive contents is located at both locations: all along the archive, used for tar-like behavior suitable for sequential access media (tapes), and at the end, for faster access, suitable for random access media (disks). However, note that tar archives and dar archives are not compatible. Dar does not know anything about the tar archive structure, nor does tar know anything about the dar archive structure. So keep using tar if you are used to it or find no advantage in using dar. Note also that the sequential reading mode lets you extract data from a partially written archive (one that failed to complete due to a lack of disk space, for example). .PP Since release 2.4.0, a "relax" reading mode is available that lets dar either ignore some incoherence in the archive structure, or use internal redundant information to overcome what seems to be data corruption, and as a last resort ask the user what to do when some archive structure information is missing. This relax mode can be used with both sequential and direct access read modes. Note however that you should rather use Parchive to protect your data than just rely on the "relax" mode, which has to be seen as a last chance solution, as this mode cannot repair the archive, but may only lead to partial archive extraction upon archive corruption. For immediate Parchive integration with dar, use the "par2" user target defined in /etc/darrc. .PP A few words about slices before going deeper in detail: a slice is just a simple file whose name is composed of a "basename" followed by a dot, then a number, again a dot and the extension (dar) to form the filename of that slice. On the command line you will never have to give the full file name of a slice, just the basename. The number between the dots is the slice number, which starts from 1 and may be arbitrarily large (as large as your system can support the corresponding filename). .TP 20 Let's take an example: considering the basename "joe", dar will make one or several slices during the backup process (depending on your choice). The filenames of these slices will be: joe.1.dar joe.2.dar ... joe.10.dar ... etc. If you want to extract, list, or use this backup as reference, you will only have to use the basename, which is the string "joe" in this example. .SH OPTIONS .PP .B COMMANDS: .PP Only seven commands define what action will be done by dar: archive creation, archive extraction, archive listing, archive testing, archive comparison with filesystem, catalogue isolation and archive merging. These commands are described here below. Once defined, a large set of options can be used to modify the way the command is performed. These options are described just after the commands chapter. Last, optional user targets may follow options; their use is described at the end of this document. .PP .B Important note: Not all systems actually support long options (Solaris, FreeBSD, ...). For example --create will not be available on these systems, and you will have to use -c instead. In the same way, not all systems support optional arguments (FreeBSD without GNU getopt for example); you then need to explicitly give the argument, for example in place of "-z" you will need to give "-z 9". See the "EXPLICIT OPTIONAL ARGUMENTS" paragraph near the end of this document for details on that point.
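.P
As a quick illustration of the commands and options described below (the basename "joe", the path /home and the slice size are only examples), a sliced full backup and a listing of its contents could be run as:
.RS
.nf
dar -c joe -R /home -s 100M
dar -l joe
.fi
.RE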
.TP 20 -c, --create [<path>/]<basename> creates a backup with the name based on <basename>. All the slices will be created in the directory <path> if specified, else in the current directory. If the destination filesystem is too small to contain all the slices of the backup, the -p option (pausing before starting new slices) might be of interest. Else, in the case the filesystem is full, dar will suspend the operation, asking the user to make free space, then continue its operation. To make free space, the only thing you cannot do is to touch the slice being written. If the filename is "-" *and* no slicing is asked for (no -s option) the archive is produced on the standard output allowing the user to send the resulting archive through a pipe (or into a tape device). .TP 20 -x, --extract [<path>/]<basename> extracts files from the given backup. Slices are expected to be in the current directory or in the directory given by <path>. It is also possible to use symbolic links to gather slices that are not in the same directory. The path may also point to a removable device (floppy, CD, etc.); in this case, to be able to mount/unmount the device, you must not launch dar from that directory. In other words, the current directory must not be that directory (see tutorial for details). The basename may be set to "-": in direct access mode (the default and historical mode), you will need dar_slave to work with dar (see -i and -o options, as well as the dar_slave man page). However in sequential read mode (--sequential-read is used on command-line), dar will read the archive from standard input (see also -i option). .TP 20 -l, --list [<path>/]<basename> lists the contents of the given backup. .B dar will only require the last slice of the archive in direct access mode. If however sequential mode is used, dar will read the overall archive, from the first slice to the last one. "-" can be used as basename, the behavior is the same as with -x option (read just above). .TP 20 -t, --test [<path>/]<basename> checks the backup integrity. Even without compression, dar is able to detect at least one error per file in the archive, thanks to a variable length CRC recorded per file in the catalogue. Same remark here, "-" may be used as basename (see -x option above for details). .TP 20 -d, --diff [<path>/]<basename> compares saved files in the backup with those in the filesystem. <basename> may also be "-" (see -x option above for details). .TP 20 -C, --isolate [<path>/]<basename> isolates a catalogue from its archive. The argument is the basename of the file that will contain the catalogue. The -A option is mandatory here to give the name of the archive to extract the catalogue from. Slicing is available (-s -S -p -b etc.). If the filename is "-" *and* no slice is asked for (no -s option) the isolated catalogue is produced on the standard output, allowing the user to send the resulting archive through a pipe. Note that there is no difference in concept between an isolated catalogue and an archive. Thus you can do all operations on an isolated catalogue, in particular take it as reference for a differential archive. An archive produced with -C is almost equivalent to a differential archive done right after a full backup (no data is in it). Since release 2.4.0 you can use an isolated catalogue to rescue the internal catalogue when it is corrupted (see -A option). .TP 20 -+, --merge [<path>/]<basename> creates a subset archive from one or two existing archives (the resulting archive name is the argument to this command). The dar file selection mechanism (see below) lets the user decide which files will be present in the resulting archive and which ones will be ignored.
This option thus lets the user merge two archives into a single one (with a filtering mechanism that accepts all files), as well as create a smaller archive whose data is taken from one or two archives of reference. Note that at no time is the contents of the archives of reference extracted to real files and directories: this is an archive to archive transfer, thus you may lack support for Extended Attributes while still being able to fully manipulate files with their Extended Attributes from one archive to the resulting one. If the basename is "-" *and* no slice is asked for (no -s option), the archive is produced on standard output allowing the user to send the resulting archive through a pipe. The first mandatory archive of reference is provided thanks to the -A option, while the second "auxiliary" (and optional) archive of reference is provided thanks to the -@ option. When a tie contention occurs (same file names from both archives have to be merged), the overwriting policy (-/ option) is used to define the one to keep in the resulting archive. By default, archive data selected for merging is uncompressed and re-compressed. Thus the merging operation can be used to change the compression algorithm of a given archive as well as change its encryption. But, for better performance, it is also possible thanks to the -ak option (see below the -ak option for usage restrictions) to merge files keeping them compressed, thus no decompression/re-compression is performed at all, which makes the operation faster. Last, it is not possible to merge two isolated catalogues. .TP 20 -h, --help displays help usage. .TP 20 -V, --version displays version information. .PP .B GENERAL OPTIONS: .TP 20 -v, --verbose[=s[kipped]] verbose output. --verbose and --verbose=skipped are independent. --verbose=skipped displays the files being excluded by filters, while --verbose displays actions under process. You can still use dar's exit status to know which way the operation ended (see EXIT CODES at the end of this document). .TP 20 -q, --quiet Suppress the final statistics report. If no verbose output is asked beside this option, nothing is displayed if the operation succeeds. .TP 20 -b, --beep makes the terminal ring when user action is required (like for example the creation of a new slice using the -p option) .TP 20 -n, --no-overwrite do not allow overwriting of any slice. If an overwriting policy is specified (see -/ option) the -n option only applies to slice overwriting; the overwriting of files during restoration or merging is handled by the overwriting policy. The -n option stays valid to forbid slice overwriting (merging, saving, isolation). .TP 20 -w, --no-warn Do not warn before overwriting (applied for slice overwriting and for overwriting decisions made by the overwriting policy). By default overwriting is allowed but a warning is issued before proceeding. This option may receive 'a' as argument (see just below): .TP 20 -wa, --no-warn=all This implies the -w option, and means that besides avoiding the warning for file overwriting, DAR also avoids signaling a file about to be removed when its type is not the expected one. Files are removed when they have been recorded as deleted since the archive of reference.
At restoration of the differential archive, if a file of the given name exists, it is removed, but if the type does not match the file that was present at the time of the archive of reference (directory, plain file, fifo, socket, char or block device, etc.), a warning is normally issued to prevent the accidental removal of data that was not saved in the backup of reference. (See also -k option) .TP 20 -R, --fs-root <path> The path points to the directory tree containing all the files that will be enrolled in the operation (backup, restoration or comparison). By default the current directory is used. All other paths used in -P or -g options on the command line are and must be relative to this path (or to the current directory if -R is not present). Note that -R is useless for testing (-t option), isolation (-C option) and merging (-+ option). .TP 20 -X, --exclude <mask> The mask is a string with wildcards (like * and ?, see glob(7) for details) which is applied to filenames which are not directories. If a given file matches the mask, it is excluded from the operation. By default (no -X on the command line), no file is excluded from the operation. -X may be present several times on the command line; in that case a file will not be considered for the given operation if it matches at least one -X mask. See also -ar and -am options. .TP 20 -I, --include <mask> The mask is applied to filenames which are not directories (see glob(7) for details on wildcard characters). If a given file matches the mask and does not match any mask given with -X, the file is selected for the operation. By default (no -I and no -X on the command line), all files are included for the operation. -I may be present several times on the command line; in that case all files that match one of the -I masks will be considered for the given operation, if they do not also match one of the -X masks. See also -ar and -am options. .TP 20 -P, --prune <path> Do not consider the file or directory sub-tree given by the path. -P may be present several times on the command line. The difference with -X is that the mask is not applied only to the filename, but also includes the path. Moreover it also applies to directories (-X does not). By default (no -P on the command-line), no sub-tree or file is excluded from the operation, and all the directory tree (as indicated by the -R option) is considered. Note that the path may contain wildcards like * or ?, see the .B glob(7) man page for more information. .TP 20 -g, --go-into <path> Files or directories to take into account only, as opposed to -P. -g may be present several times on the command-line. Same thing here, the difference with -I is that the mask is applied to the path+filename and also concerns directories. By default all files under the -R directory are considered. Else, if one or more -g options are given, just those are selected (if they do not match any -P option). All paths given this way must be relative to the -R directory. This is equivalent to giving the path as a plain argument, out of any option. .B Warning, the -g option cannot receive wildcards, these would not be interpreted. .TP 20 -[, --include-from-file <listing_file> Files listed in the listing file are included for the operation. No wildcard expression is interpreted in the listing file, the null character is not allowed and the carriage return is used to separate file names (one file name per line).
Note that this option applies to any files and directory exactly as -g does, with an important difference however: -g option only uses relative paths to the root directory (the directory given with the -R option), while -[ can use absolute path as well. Another difference is when the argument is a directory -g will include all the subdirectories under that directory, while when the same entry is found in a listing file given to -[ only that directory will be included, no subdirectory or subfile would be enrolled in the backup, with -[ you need to list the exact set of file you want to backup. You can thus generate a listing file with the 'find / -print > somefile' command and give 'somefile' as argument to -[ option. Note that however, dar will never save files out of the -R given root directory tree, even if some are listed in the 'somefile' file. .TP 20 -], --exclude-from-file Files listed in the listing file are excluded from the operation. If a directory is listed in the file all its contents is excluded (unless using ordered method and another mask includes some of its subfiles or subdirectories). This option is the opposite of -[ and acts the same was as -P option does (in particular it is compared to the whole path+filename and applies to files and directories). As for -[ option, -] listing file can contain absolute paths, but wildcards are not expanded, neither. .P .B File selection in brief: .P As seen above, -I -X -P, -g, -[ and -] options are used to select the files to operate on. -I and -X only use the name of files and do not apply to directories, while -P, -g -[ and -] use the filename *and* the path, they *do* apply to directories. .P since version 2.2.0 two modes of interpretation of these options exist. The normal original method and the ordered method: .RS .TP 5 the normal method is the default and is the one that has been presented above: A directory is elected for operation if no -P or -] option excludes it. If at least one -g or -[ option is given one command line, one -g option must cover it, else it is not elected for operation. If a directory is not selected, no recursion is done in it (the directory is pruned). For non directories files, the same is true (P, -g, -[ and -] do apply) and a second test must also be satisfied: no -X option must exclude the filename, and if at least one -I option is given, one must match the given filename (using or not wildcards). .TP 5 the ordered method (when -am option is given on command-line): The ordered method takes care of the order of presence between -X and -I in one hand and of -P, -g, -[ and -] in the other hand (note that it has also the same action concerning EA selection when using -u and -U options, but that's no more file selection). In the ordered method the last argument take precedence over all the previous ones, let's take an example: .RS .TP 5 -X "*.mp?" -I "*.mp3" -I "toto*" Here dar will include all files except file of name "*.mp?" (those ending with "mpX" where X is any character), but it will however include those ending with ".mp3". It will also include files which name begin by "toto" whatever they end with. This way, "toto.mp2" will be saved (while it matches "*.mp?" it also begins by "toto") as well as "toto.txt" as well as "joe.mp3" (while it matches "*.mp?" it also ends by "mp3"). But will not be saved "joe.mp2" (because it does not begin by "toto", nor ends by "mp3", and match "*.mp?" mask). As we see the last option (-I or -X) overcomes the previous one. 
-P, -g, -[ and -] act together the same way, but as seen above they do not act only on the filename, but on the whole path+filename. Note that (-g, -P, -[, -]) and (-X , -I) are independent concerning their relative order. You can mix -X -I -g -P -] -[ in any order, what will be important is the relative positions of -X options compared to -I options, and the relative positions of -g -[ -] and -P options between them. .RE .P In logical terms, if <previous_mask> is the mask generated by all previous masks on the command line, -I <mask> generates the new following mask: <previous_mask> or <mask>. While -X <mask> generates the new following mask: <previous_mask> and not <mask>. This is recursive each time you add a -I or -X option. Things work the same with -P, -g, -[ and -] options. .RE .B This ends the file selection explanation, let's continue with other options. .P .TP 20 -u, --exclude-ea <mask> Do not consider the Extended Attributes (EA) that are matched by the given mask. By default, no EA are excluded, if the support for EA has been activated at compilation time. This option can be used multiple times. .TP 20 -U, --include-ea <mask> Only consider the EA that match the given mask. By default, all EA are included if no -u or -U option is present and if the support for EA has been activated at compilation time. This option can be used multiple times. See also the -am and -ae options; they also apply to -U and -u options. Read also the Note concerning EA below. .P .B Note concerning Extended Attributes (EA) .P .RS Support for EA must be activated at compilation time (the configure script tries to do so if your system has all the required support for that). Thus you can get two binaries of dar (of the same version), one supporting EA and another which does not (dar -V to see whether EA support is activated). The archives they produce are the same and can be read by each other. The only difference is that the binary without EA support is not able to save or restore EAs, but is still able to test them and list their presence. .P In the following, when we speak about an Extended Attribute (EA) or EA entry, we only consider a particular Extended Attribute key and its value. By opposition, the set of all EA associated with a file will be designated by "EA set". .P Since version 2.3.x the name of EA entries includes the namespace, for dar to be able to consider any type of EA (not only "system" and "user" as previously). Thus the two previous options -u and -U have changed and now take an argument which is a mask applied to EA entry names written in the following form .I namespace.name where "namespace" is for example "user". Note that the mask may or may not include the dot (.) and may match an arbitrary part of the EA namespace+name; just remember that masks will be applied to the "namespace.name" global string. .P The -am flag here also enables the ordered method, for EA selection too. The ordered versus normal methods have been explained above in the file selection note, with some examples using -X and -I. Here this is the same with -U and -u (just replace -X by -u and -I by -U, and remember that the corresponding mask will apply to Extended Attribute selection in place of file selection). Another point: independently of the -am option, the -ae option can be used at restoration time only. If set, when a file is about to be overwritten, all EA will be first erased before restoring those selected for restoration in the archive (according to the -U and -u options given).
If not set, the EA of the existing file will be overwritten, those extra EA that are not in the archive or are not selected for restoration in regard to the -u and -U options will be preserved. If you have not used any -u/-U option at backup time and want to restore from a set of full/differential backups the EA exactly as they were, you have to use -ae for dar removes the EA before overwriting their set of EA as stored in the archive. Without -ae option dar will simply add EA to existing ones, thus get a different set of EA for a give file than those recorded at the time of the backup. .P Last point the -acase and -an options alters the case sensitivity of the -U and -u masks that follow them on the command-line/included files as they do for -I, -X, -P, -g, -[ and -] as well. Very last point ;-), if -ac option is used during backup dar set back the atime after having read each file (see -aa/-ac options), this has as side effect to modify the ctime date of each file. But ctime change is used by dar to detect EA changes. In brief, the next time you backup a file that had to be read (thus which contents changed), its EA will be saved even if they had not changed. To avoid this side effect, don't use the -ac option if not necessary. .RE .B This ends the Extended Attribute selection explication let's continue with other options. .P .TP 20 -i, --input is available when reading from pipe (basename is "-" for -x, -l, -t, -d or for -A when -c, -C or -+ is used). When reading from pipe, standard input is used, but with this option, the file (usually a named pipe) is used instead. This option is to receive output from dar_slave program (see doc/usage_notes.html for examples of use). Note that when --sequential-read is used, dar uses a single pipe and does no more rely on dar_slave, -i option can be used to tell dar which named pipe to read the archive from, instead of the standard input. .TP 20 -o, --output is available when reading from pipe (basename is "-" for -x, -l, -t, -d or for -A when -c, -C or -+ is used). When reading from pipe, standard output is used to send request to dar_slave, but with this option, the file (usually a named pipe) is used instead. When standard output is used, all messages goes to standard error (not only interactive messages). See doc/usage_notes.html for examples of use. .TP 20 -O, --comparison-field[=] When comparing with the archive of reference (-c -A) during a differential backup, when extracting (-x) or when comparing (-d) do only consider certain fields. The available flags are: .RS .TP 15 ignore-owner all fields are considered except ownership. .BR This is useful when dar is used by a non-privileged user. It will not consider a file has changed just because of a uid or gid mismatch and at restoration dar will not even try to set the file ownership. .TP 15 mtime only inode type and last modification date is considered as well as inode specific attributes like file size for plain files. Ownership is ignored, permission is ignored. During comparison, difference on ownership or permission is ignored and at restoration time dar will not try to set the inode permission and ownership. .TP 15 inode-type Only the inode type is considered. Ownership, permission and dates are ignored. Inode specific attributes are still considered (like file size for plain files). Thus comparison will ignore differences for ownership, permission, and dates and at restoration dar will not try to set the ownership, permission and dates. 
.RE .P When no flag is provided to this option, the -O option acts as if the "ignore-owner" flag was set, which is the behavior in older releases (< 2.3.0). Note also that for backward compatibility, the --ignore-owner option still exists and since version 2.3.0 is just an alias to the --comparison-field=ignore-owner option. Of course if this option is not used, all fields are used for comparison or restoration. .TP 20 -H[num], --hour[=num] if -H is used, two dates are considered equal if they differ by an integer number of hours, and that number is less than or equal to [num]. If not specified, num defaults to 1. This is used when making a differential backup, to compare the last_modification date of inodes; at restoration or merging time, if the overwriting policy is based on file's data or EA being more recent; and last, when comparing an archive with a filesystem (-d option). This is to work around some filesystems (like the Samba filesystem) that seem to change the dates of files after having gone from or to daylight saving time (winter/summer time). Note that the -H option has influence on the overwriting policy (see -/ option) only if it is found before it on the command-line or in an included file (using -B option). .TP 20 -E, --execute <string> the string is a .B user command-line to be launched between slices. For reading (thus using -t, -d, -l or -x options), the command is executed before the slice is read or even asked for; for writing instead (thus using -c, -C or -+ options), the command is executed once the slice has been completed. Some substitution strings can be used in the string: .RS .TP 10 %% will be replaced by % .TP 10 %p will be replaced by the slice path .TP 10 %b will be replaced by the slice basename .TP 10 %n will be replaced by the slice number (to be read or just written). For reading, dar often needs the last slice, but initially it does not know its number. If it cannot be found in the current directory, the user command-line is then called with %n equal to 0. This is a convenient way to inform the user command to provide the last slice. If after that it is still not present, dar asks the user (as usual) with a message on the terminal. Once the last slice is found, the user command-line is called a second time, with %n equal to the value of the last slice number. .TP 10 %N is the slice number with the leading zeros as defined by the --min-digits option. If this option is not used, %N is equivalent to %n. .TP 10 %e will be replaced by the slice extension (always substituted by "dar") .TP 10 %c will be replaced by the context. Actually three possible values exist: "init", "operation" and "last_slice". When reading an archive (for testing, extraction, diff, listing, or while reading the archive of reference, see the -F option below), the "init" context takes place from the beginning up to the time the catalogue is retrieved. On a multi-sliced archive this corresponds to the first slice request and to the last slice request. After that point comes the "operation" context. While creating an archive, the context is always "operation" except when the last slice has been created, in which case the context is set to "last_slice". .RE Several -E options can be given; the given commands will then be called in the order they appear on the command line and in included files. See also the environment variable DAR_DUC_PATH in the ENVIRONMENT section at the end of this document. .TP 20 -F, --ref-execute <string> same as -E but is applied between slices of the reference archive (-A option). --execute-ref is a synonym.
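.P
As a sketch of how the substitution strings above can be combined (the destination directory /mnt/backup and the basename "joe" are only illustrative assumptions), the following copies each slice to another location as soon as it has been completed:
.RS
.nf
dar -c joe -s 650M -E "cp %p/%b.%n.%e /mnt/backup/"
.fi
.RE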
.TP 20 -K, --key [[<algo>]:]<string> encrypt/decrypt the archive using the <algo> cipher with <string> as the pass phrase. An encrypted archive can only be read if the same pass phrase is given. Available ciphers are "blowfish" (alias "bf"), "aes", "twofish", "serpent" and "camellia" for strong encryption and "scrambling" (alias "scram") for a very weak encryption. By default, if no <algo> or no ':' is given, the blowfish cipher is assumed. If your password contains a colon ':' you need to specify the cipher to use (or at least use the initial ':' which is equivalent to 'bf:'). If the <string> is empty the pass phrase will be asked at execution time. Thus, the smallest argument that -K can receive is ':' which means blowfish cipher with the pass phrase asked at execution time. .P The old "blowfish_weak" implementation has been removed and is no longer supported. .P Note that giving the passphrase as argument to -K (or -J see below) may let other users learn the pass phrase (through the ps or top programs for example). It is thus wise to either use an empty pass phrase, which will make dar ask for the pass phrase when needed, or use -K (or the -J option) from a Dar Command File (see -B option), assuming it has the appropriate permissions to avoid other users reading it. For those who are really concerned about the security of their passwords, having a password read from a DCF is not that secure, because while the file gets parsed, dar makes use of "unsecured" memory (memory that can be swapped to disk under heavy memory load conditions). It is only when the passphrase has been identified that locked memory is used to store the parsed passphrase. So, the most secure way to transmit a passphrase to dar, then to libdar, then to libgcrypt, is to have dar ask for the passphrase at execution time; dar then makes use of secured (locked) memory from the time the password is received by dar from the user (through the operating system) up to its usage inside libgcrypt (after having been passed through libdar, which also makes use of locked memory). .TP 20 -J, --ref-key [[<algo>]:]<string> same as -K but the given key is used to decrypt the archive of reference (given with -A option). --key-ref is a synonym. .TP 20 -#, --crypto-block <number> to be able to randomly access data in an archive, it is not encrypted globally but block by block. You can define the encryption block size thanks to this argument, which defaults to 10240 bytes. Note that the syntax used for the -s option is also available here. Note also that crypto-block is stored as a 32-bit integer, thus values larger than 4GB will cause an error. Note last, that the block size given here must be provided when reading the resulting archive (through the -* or -# options). If it is not the correct one, it will not be possible to decrypt the archive; it is thus safe to keep the default value (and not use the -# option at all). .TP 20 -*, --ref-crypto-block <number> same as --crypto-block but for the archive of reference (same default value). --crypto-block-ref is a synonym.
Note that for a line to be considered as a comment the hash character must be the first character of the line (space or tab can still precede the hash). See .B Conditional Syntax below for a richer syntax in configuration files. See also the environment variable DAR_DCF_PATH in the ENVIRONMENT section at the end of this document. .TP 20 -N, --noconf Do not try to read either the ~/.darrc or the /etc/darrc configuration files. See the .B files section below. .TP 20 -e, --dry-run Do not perform any action (backup, restoration or merging), but display all messages as if it were for real ("dry run" action). The --empty option is a synonym. .TP 20 -aSI, --alter=SI[-unit[s]] when using k M G T E Z Y prefixes to define a size, use the SI meaning: multiple of 10^3 (a Mega is 1,000,000). .TP 20 -abinary, --alter=binary[-unit[s]] when using k M G T E Z Y prefixes to define a size, use the historical computer science meaning: multiple of 2^10 (a Mega is 1,048,576). .P The --alter=SI and --alter=binary options can be used several times on the command line. They affect all prefixes which follow, even those found in files included by the -B option, up to the next --alter=binary or --alter=SI occurrence. Note that if in a file included by the -B option, an --alter=binary or --alter=SI is encountered, it affects all the following prefixes, even those outside the included files. For example, when running with the parameters "-B some.dcf -s 1K", 1K may be equal to 1000 or 1024, depending on --alter=binary or --alter=SI being present in the some.dcf file. By default (before any --alter=SI/binary option is reached), binary interpretation of prefixes is done, for compatibility with older versions. .TP 20 -Q Do not display an initial warning on stderr when not launched from a terminal (when launched from a cronjob for example). This means that all questions to the user will be answered with 'no', which most of the time will abort the program. Please note that this option cannot be used in a configuration file, it must be given on the command line. Since version 2.2.2, giving this option also forces the non-interactive mode, even if dar is launched from a terminal. This makes it possible for dar to run in the background. When you do, it's recommended to redirect stdout and/or stderr to files. .TP 20 -ac, --alter=ctime When reading a filesystem (during a backup or comparison), restores the atime of all files to what it was before the file was read. This makes it appear as if it had not been read at all. However, because there is no system call to let applications change the ctime (last inode change) of a file, setting back the atime results in the ctime being changed (hence the alter=ctime). Some recent unix systems allow an application to get 'furtive read mode' access to the filesystem (see below). On older systems, however, for most users, having the atimes of the files changed shouldn't be a problem, since they can be changed by any other program (run by any user!) as well (like the content-index program Beagle). Ctimes, on the other hand, are the only way for security software to detect if files on your system have been replaced (by so called root-kits mostly). This means that, should you run dar with -ac, security software which uses ctimes to check will mark every file on your system as compromised after the backup. In short, this means this option should only be used by people who know what they are doing.
It's the opinion of this writer that any software susceptible to atime changes is flaky or even broken (because of the aforementioned reasons why atimes can change). But, that doesn't take away that there are programs that rely on atimes remaining the same, like the Leafnode NNTP caching software. Therefore this option exists. .TP 20 -aa, --alter=atime When specifying -aa (by opposition to -ac), the atime of every read file and directory is updated, and the ctime remains the same. In other words, Dar itself does nothing with atimes and ctimes, it only lets the system do its job of updating atimes when files are accessed for reading. This is in accordance with what atimes and ctimes were meant to represent. This is Dar's default (since version 2.4.0), unless 'furtive read mode' (see below) is supported by your system and dar has been compiled with this support activated. .P .B Furtive read mode is a mode in which neither atime nor ctime are modified while dar reads each file and directory. This also provides better performance as nothing has to be written back to disk. A known Unix kernel that supports this feature is Linux 2.6.8 and above (support must also be present in the standard C library of the system for dar to be able to activate this feature at compilation time). When this feature is activated, it becomes the default behavior of dar for the super user; for other users the default is -aa. If however, as the root user, you do not want to use "furtive read mode" (while it has been activated at compilation time), you need to explicitly specify either the -aa or the -ac option.
If -at is given, dar will not add this information to the archive, resulting in a slightly smaller archive and a faster backup. When reading an archive, the default behavior is to ignore these escape sequences and rather rely on the catalogue located at the end of the archive. If instead --sequential-read is given on the command-line (see below), dar will avoid using the catalogue at the end of the archive and will rely on these escape sequences to know the contents of the archive, which will lead to a sequential reading of the archive, an operation suitable for tape media. Note that it is not recommended to disable escape sequences (aka tape marks) by using the -at option, except if you are more concerned by the resulting size and execution speed of your backup (in particular if you have a lot of small files) than by the possibility to recover your data in case of a corrupted or partially written archive. Without escape sequences, dar cannot sequentially read an archive, which is the only way to use an archive that has a corrupted catalogue or has no catalogue at all, something that happens if a system crash occurred during the archive creation or due to lack of disk space to complete the archive. .TP 20 -0, --sequential-read Change dar's behavior when reading an archive. By default, the traditional way is used, which relies on the table of contents (aka "the catalogue") located at the end of the archive. With the --sequential-read option instead, dar will rely on escape sequences that are inserted all along the archive with each file's inode information. This will lead to a sequential reading of the archive, an operation suitable for tape media. However, this feature is only available for archive formats starting at revision "08" (i.e.: since release 2.4.0) and if the -at option has not been used during archive creation or merging. This option is available for archive testing (-t), comparison (-d), restoration (-x), listing (-l) and to read the archive of reference (-A option) for isolation (-C) or archive creation (-c). The sequential reading of an archive is always much slower than the usual reading method, so you should not use this option unless you really need it. .TP 20 -j, --jog when virtual memory is exhausted, ask the user to make room before trying to continue. By default, when memory is exhausted dar aborts. Note that on several systems, when memory is exhausted the kernel is likely to kill the process that failed to obtain virtual memory, thus on some systems, dar may not be able to ask the user what to do when memory is exhausted. .TP 20 -;, --min-digits <num>[,<num>[,<num>]] By default slice numbers contained in filenames do not have any padded zeros, which, when sorting a directory contents alphabetically, leads to reading all the slices starting with '1', then those starting with '2', and so on; for example, slice 1, 10, 11, 12, 13, ... 2, 20, 21, 23, ... etc. While dar is absolutely not perturbed by this display problem, some users would like to have the slices sorted in order. For that reason, the --min-digits option lets you ask dar to prepend enough zeros in the slice number for it to be as wide as the argument passed to --min-digits. For example, if you provide 3 for that number, dar will store the slice number as 001, 002, 003, ... 999. Well, the next slice will be 1000, thus it will again break the alphabetical sorting order. You are thus advised to use a number large enough to cover the number of slices you expect to use. Then, when reading your archive, you will also need to provide this same argument, else dar will fail to find the slices.
In effect, when looking for slice 1 for example, dar should try opening the file "basename.1.dar", but if it fails, it should try opening the file "basename.01.dar", then "basename.001.dar", ... up to infinity. If the slice is just missing, dar would never ask you to provide it, being still looking for a slice name with an additional leading zero. The problem also arise when doing differential backup, merging or on-fly isolation, dar must know the number of zero to prepend for each of these archive. This is why the --min-digits option may receive up to three integer values, the first for the archive to create or read, the second for the archive of reference (-A option), the third for the auxiliary archive of reference (-@ option). By default, no zero is added, and it is also well working this way. But you might well set for example "--min-digits 5,5,5" in your ($HOME)/.darrc file to do it once and for all. Last important point, on command-line (not in DCF files), the short form of this option (-;) need to be quoted ('-;') to avoid the shell interpreting the ';' character. .TP 20 --pipe-fd will read further arguments from the file-descriptor . The arguments read through this file-descriptor must follow a TLV (Type/Length/Value) list format. This option is not intended for human use, but for other programs launching dar like dar_manager. This feature has been added to overcome the command line length limit. .PP .B SAVING, ISOLATION AND MERGING OPTIONS (to use with -c, -C or -+) .PP .TP 20 -z[[algo:]level], --compression[=[algo][:][level]] add compression within slices using gzip, bzip2 or lzo algorithm (if -z is not specified, no compression is performed). The compression level (an integer from 1 to 9) is optional, and is 9 by default, which is max compression/slow processing. At the opposite, 1 means less compression and faster processing. "Algo" is optional, it specifies the compression algorithm to use and can take the following values "gzip" "bzip2" or "lzo". "gzip" algorithm is used by default (for historical reasons see --gzip below). If both algorithm and compression are given, a ':' must be placed between them. Valid usage of -z option is for example: -z, -z9, -zlzo, -zgzip, -zbzip2, -zlzo:6, -zbzip2:2, -zgzip:1 and so on. Usage for long option is the same: --compression, --compression=9, --compression=lzo, --compression=gzip, --compression=bzip2, --compression=lzo:6, --compression=bzip2:2, --compression=gzip:1 and so on. .TP 20 --gzip[=level] Same as -z (see just above). Historically -z/--gzip was for gzip while -y/--bzip2 was for bzip2. But due to the lack of available unused letter for command line options, lzo compression could not be added without extending -z option grammar. For backward compatibility --gzip is kept, but is deprecated. Rather use --compression[=level] or -z[level]. .TP 20 -y[level], --bzip2[=level] compresses using bzip2 algorithm. See -z above for usage details. This option is DEPRECATED and WILL DISAPPEAR in a future version. Please use -zbzip2:level or --compression=bzip2:level. .TP 20 -s, --slice Size of the slices in bytes. If the number is appended by k (or K), M, G, T, P E, Z or Y the size is in kilobytes, megabytes, gigabytes, terabytes, petabytes, exabytes, zettabytes or yottabytes respectively. Example: "20M" means 20 megabytes, by default, it is the same as giving 20971520 as argument (see also -aSI and -abinary options). 
If -s is not present the backup will be written to a single slice whatever the size of the backup may be (there is probably some filesystem limitation, thus you might expect problems for file sizes over 2 gigabytes, depending on your filesystem, but this is not a limitation of dar). .TP 20 -S, --first-slice <number> -S gives the size of the first slice which may be chosen independently of the size of following slices. This option needs -s and by default, the size of the first slice is the same as the one of the following slices. .TP 20 -p [<integer>], --pause[=<integer>] pauses before writing to a new slice (this requires -s). By default there is no pause, all slices are written in the same directory, up to the end of the backup or until the filesystem is full. In this latter case, the user is informed of the lack of disk space and dar stops for user action. As soon as some disk space is available, the user can continue the backup. The optional integer that this option can receive tells dar to pause only every 'n' slices. Giving 3 for 'n' will make dar pause only after slices 3, 6, 9 and so on. If this integer is not specified, the behavior is as if '1' was given as argument, which makes dar pause after each slice. .TP 20 -A, --ref [<path>]/<basename> Depending on the context, it specifies the archive to use as reference (mandatory with -C and -+) or the rescue catalogue to use (when used with -x, -t or -d). All slices of the reference backup are expected to be in the same directory, given by <path> or the current directory by default. Usually only the last slice is required to extract the catalogue of reference. If necessary the use of symbolic links is also possible here to gather slices that do not reside in the same directory. You can also point to a floppy or any other mounted directory, because .B dar will pause and ask the user for required slices if they are not present. The argument to -A may be of four types: .RS .TP 5 - An existing archive basename, which will be taken as reference .TP 5 - a dash ("-"): in direct access mode (default mode) it may imply the use of -o and -i options, this allows the archive of reference to be read from a pair of pipes with dar_slave at the other end. Dar_slave can be run through ssh on a remote host for example. Note that this type of argument ("-") is not available when -A is used with -x, -d or -t. In sequential mode (--sequential-read is used), the archive of reference is read from standard input or from the named pipe specified by the -i option. The -o option has no use in sequential mode. Note that merging (-+ option) cannot read the archive of reference in sequential mode. .TP 5 - a plus sign ("+") which makes the reference be the current directory status (only available with -c option). In other words, no file's data will be saved, just the current status of the inodes will be recorded in the catalogue. This is like an extracted catalogue from a virtual full backup, it can be taken for further reference without having to make the full backup itself. This feature is known as the "snapshot" backup. .TP 5 - a <date>, if the -af option has been placed before -A on the command-line (or in an included file, see -B option). For more about that feature see the -af option below. .PP During backup (-c option) the archive of reference given thanks to this option is used to compare with existing files on the filesystem. Dar will then backup only files that have changed since the archive of reference was done.
During merging (-+ option), the contents of the given archive will be taken together with the contents of the archive specified with the -@ option (see below). During catalogue isolation (-C option), dar will create the isolated catalogue from the one given with the -A option. During testing, diff or extraction (-t, -d or -x options respectively), the table of contents (the catalogue) will be read from the archive given with -A instead of using the internal catalogue of the archive (given with -t, -d or -x option). The archive given for rescue must have been previously isolated from this same archive (else the contents will not match and dar will refuse to proceed with this operation). This gives a solution to the case of corruption inside an archive's catalogue, while the best way is still to use Parchive to protect your data against media errors. .RE .TP 20 -@, --aux [<path>]/<basename>, --on-fly-isolate [<path>]/<basename> specifies an auxiliary archive of reference (merging context) or the name of the on-fly isolated catalogue (creation context). This option is thus only available with the -+ option (merging) and the -c option (archive creation). In addition to the -A option, which is mandatory with the -+ option, you may give a second archive of reference thanks to the -@ option (merging context). This allows you to merge two archives into a single one. See also -$, -~ and -% for other options concerning the auxiliary archive of reference. While creating an archive (backup context) this option lets the user specify the archive name for an on-fly isolation (former -G option); you can also use -$ and -~ to define encryption of the archive containing the on-fly isolated catalogue. The on-fly isolated catalogue is always compressed with bzip2 if possible, else with gzip, else with lzo (using compression level 9), else not compressed, and it is also always a single sliced archive. Due to command-line length limitations, it is not possible to change the compression algorithm nor the slice size for the on-fly isolation. If you need a more complicated isolation, either look for a GUI over libdar, or do a normal (= not an on-fly) isolation operation (by the way it is possible to isolate an already isolated catalogue, this is equivalent to doing a copy, but you can change encryption, compression or slicing, for example); you can also use dar_xform if you only want to change the slice size (this is faster as no decompression/re-compression is done). Using the merging operation on an isolated catalogue instead of isolating the isolated catalogue leads to a resulting archive that cannot be used as a rescue for the internal catalogue of the original archive. --aux-ref is a synonym. .TP 20 -D, --empty-dir At backup time, when excluding directories either explicitly using -P or -] options, or implicitly by giving -g or -[ options (a directory is excluded if it does not match a mask given with -g or -[ options), .B dar does not store anything about these. But with the -D option, dar stores them as empty directories. This can be useful if excluding a mount point (like /proc or /dev/pts). At restoration time, dar will then recreate these directories (if necessary). This option has no meaning with -C and is ignored in that case. Independently of that, -D can also be used at restoration time, but it activates a slightly different feature (see restoration options below). .TP 20 -Z, --exclude-compression <mask> Filenames covered by this mask are not compressed. It is only useful with the -z option. By default, all files are compressed (if compression is used).
This option can be used several times, in which case a file that matches one of the -Z masks will not be compressed. The argument given to -Z must not include any path, just the filename (possibly using wildcards). .TP 20 -Y, --include-compression <mask> Filenames covered by this mask (and not covered by -Z) are the only ones to be compressed. It is only available with the -z option. By default all files are compressed. This option can be used several times, in which case all files that match one of the -Y masks will be compressed, if they do not also match one of the -Z masks. The ordered method applies here too when activated (with the -am option); it works exactly the same as the -I and -X options, but applies to file compression, not file selection. In other words, it matches only on the file name, not on the path of files. .TP 20 -m, --mincompr <number> files whose size is below this value will not be compressed. If -m is not specified it is equivalent to giving .B -m 100 as argument. If you want to compress all files whatever their size is you thus need to type .B -m 0 on the command line. The same number extensions as those used with -s or -S are available here, if you want to specify the size in kilobytes, megabytes, gigabytes etc. .TP 20 -1, --sparse-file-min-size <number> Define the minimum length of zeroed bytes to replace by "holes". By default, this feature is activated with a value of 15 bytes. To completely disable it, set the size to zero. Disabling this feature will bring some noticeable speed improvement but will probably make the archive slightly bigger (depending on the nature of the data). Sparse files are files that contain so called holes. On a filesystem, the portion of zeroed bytes is not stored on disk, thus an arbitrarily large file with huge portions of zeros may only require a few bytes of disk storage. While dar cannot detect how a given file is allocated, because it works on a filesystem abstraction (it does not know the implementation of any particular filesystem, hence its portability), when it finds a sequence of zeroed bytes larger than the given threshold it can assume that it is in presence of a hole. Doing so, it does not store the given zeroed bytes into the archive, but places a tag beside the saved data to record the size of the hole and thus where to place the next non-zeroed bytes. This makes the dar archive disk space requirement much smaller when a sparse file is met. At restoration time, dar will restore holes by writing normal data and seeking over each hole to write the normal data after it. If the underlying file system supports sparse files, this will restore the holes. Note that there is no difference for applications whether a file is sparse or not, thus dar may well transform normal files into sparse files and vice versa, only the disk requirement will change. Last point, while dar can reduce the disk requirement of an archive for holes as small as 15 bytes (smaller values work but the overhead costs more than what is required to store the zeroed bytes normally), it may not be the same at restoration time, because the filesystem allocation unit is usually several kilobytes; however a restored file will never be larger than it could be without holes. The only drawback of this feature is the additional CPU cycles it requires.
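.P
The compression related options above can be combined; as a sketch (the basename, root directory, file masks and sizes are arbitrary examples):
.RS
.nf
# bzip2 level 6, skip already compressed files, do not compress
# files smaller than 1k, and disable sparse file detection
dar -c joe -R /home -zbzip2:6 -Z "*.gz" -Z "*.bz2" -m 1k -1 0
.fi
.RE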
.TP 20 -ak, --alter=keep-compressed During a merging operation, keep files compressed. This has several restrictions: -z, -Z, -Y, -m are ignored; if two archives have to be merged, both must use the same compression algorithm or one of them must not use compression at all (this last restriction will probably disappear in a future version). The advantage of this option is a greater speed of execution (compression is usually CPU intensive). .TP 20 -ah, --alter=holes-recheck For merging, the sparse file detection mechanism is disabled by default. However if you want to activate it (assuming you have an old archive you want to convert to the current archive format, taking care of sparse files), you need to use the -ah option to reactivate the sparse file detection mechanism. Then for merging --sparse-file-min-size can be used as described above for archive creation. In particular, setting --sparse-file-min-size to zero beside -ah during merging may also be used to convert files saved as sparse files into plain normal files. .TP 20 -af, --alter=fixed-date Modifies the -A option behavior, making it receive a <date> as argument in place of the default [<path>]/<basename> argument. The <date> is used to define which files to save (files whose modification date is newer than or equal to <date>) and which to consider unchanged (those older than <date>). This option only makes sense when creating an archive (not when merging or isolating). .P <date> must be a date in one of the following formats: .TP 30 - a number of seconds since Jan 1st, 1970 .TP 30 - a date in the following form [[[year/]month/]day-]hour:minute[:second] .P Here are some examples of dates: .RS 91836383927108078 .P 2005/11/19-19:38:48 Which is 38 minutes and 48 seconds past 7 PM, the 19th of November 2005 .P 20:20 Which is 8:20 PM of the current day .P 2-00:08 Which is 8 past midnight, on the second day of the current month .P 2/2-14:59 Which is one minute to 3 PM, on the 2nd of February in the current year .RE .TP 20 --nodump do not save files which have the 'd' flag set (see chattr(1) lsattr(1) ext2 commands). This option may not be available if the system dar has been compiled on did not provide support for ext2 flags. Note that this option does nothing with the -+ option (merging) as no filesystem is used for that operation. .TP 20 -M, --no-mount-points stay in the same filesystem as the root directory (see -R option); subdirectories that are mount points for other filesystems will not be saved (or will be saved empty if the -D option is used). This option is useless and ignored for the merging operation. .TP 20 -, , --cache-directory-tagging don't save the contents of directories that use the Cache Directory Tagging Standard. See http://www.brynosaurus.com/cachedir/spec.html for details. (this option is useless with the -+ option) .TP 20 -$, --aux-key [[<algo>]:]<string> same as -J but for the auxiliary archive of reference (-@ option). .TP 20 -~, --aux-execute <string> same as -F but for the auxiliary archive of reference (-@ option). .TP 20 -%, --aux-crypto-block <number> same as -* but for the auxiliary archive of reference (-@ option). .TP 20 -/ <string>, --overwriting-policy <string> This option lets the user define when and how file overwriting can occur at restoration or archive merging time. It does not apply to slice overwriting, which is driven by the -n option; it instead applies to files during extraction and to files inside archives when merging two of them. When considering overwriting, a file is said to be 'in place' while another is known as 'new' or 'to be added'.
At restoration time, the 'in place' file is the one that is present in the filesystem while the 'to be added' one is the one from the archive. At merging time, the 'in place' file is the one of the '-A' archive of reference while the 'to be added' one is the one from the auxiliary '-@' archive of reference. .P As soon as you use the -/ option, -n applies only to slice overwriting and the -r, -k and -ae options (restoration options) are ignored. .P The given argument is composed of actions and possibly of conditional expressions. Actions define how to solve an overwriting conflict for a file's data and for its EA. An action is thus a pair of actions, one for Data and one for EA. Actions for Data are represented by uppercase letters, while actions for EA are defined by lowercase letters. Both actions are independent of each other: .RS .TP 5 P means 'Preserve'. When merging two archives, the data of the resulting archive will be taken from the 'in place' file, while when extracting, the data of the inode in the filesystem will be preserved (thus no overwriting will occur for the data). .TP 5 O means 'Overwrite'. When merging two archives, the data of the resulting archive will be taken from the 'to be added' file, while when extracting, the data of the inode in the filesystem will be overwritten by data from the archive. .TP 5 S means 'mark Saved and preserve'. When merging two archives, the data of the resulting archive will be marked as already saved in the archive of reference (thus making a differential archive, even if none of the original archives were differential archives). All data will be dropped from the resulting archive, but the last modification date [aka mtime] (used to detect changes in a file's data) will be taken from the 'in place' file. This action does not apply when extracting files, it is thus considered equal to "Preserve" (P) in that situation. .TP 5 T means 'mark Saved and overwrite'. When merging two archives, the data of the resulting archive will be marked as already saved (same as the 'S' action): all data will be dropped from the resulting archive, however the last modification date [aka mtime] (used to detect changes in a file's data) will be taken from the 'to be added' file. This action does not apply when extracting files, it is thus considered equal to "Overwrite" (O) in that situation. .TP 5 R means 'Remove'. When merging two archives, the resulting archive will not contain any entry corresponding to the files that were in conflict. This also implies that no EA will be stored for such an entry, as the entry will no longer exist in the resulting archive (as if it had never existed). When extracting files, this will lead to the file's removal from the filesystem. .TP 5 p means 'Preserve', same as 'P' (but a lowercase letter): preserve the whole EA set. When merging two archives, the EA set of the resulting file will be the EA of the 'in place' file (whatever overwriting action is taken for its data). When extracting files to the filesystem, the EA of the file in the filesystem will not be changed (whatever overwriting action is taken for its data, unless the file is removed using the 'R' policy). .TP 5 o means 'Overwrite', same as 'O' (but a lowercase letter): overwrite the whole EA set. When merging two archives, the EA set of the resulting file will be taken from the 'to be added' file. When extracting files, the file in the filesystem will have its EA set erased and replaced by that of the file in the archive (still independent of which overwriting action is taken for the file's data).
.TP 5 s means 'mark Saved and preserve', same as 'S' (but a lowercase letter), for EA instead of data. When merging two archives, the EA of the resulting file are marked as already saved in the archive of reference, thus they are dropped, but the date of last inode change [aka ctime] (used to detect changes in a file's EA) will be taken from the 'in place' file. This action does not apply when extracting files, it is thus considered equivalent to "Preserve" (p) in that situation. .TP 5 t means 'mark Saved and overwrite', same as 'T' (but a lowercase letter), for EA instead of data. When merging two archives, the EA of the resulting file are marked as already saved in the archive of reference, thus they are dropped, but the date of last inode change [aka ctime] (used to track changes in EA) will be taken from the 'to be added' file. This action does not apply when extracting files, it is thus considered equivalent to "Overwrite" (o) in that situation. .TP 5 m means 'merge EA and preserve'. The resulting file in the merged archive will have EA entries from both the 'in place' and the 'to be added' files. If both files share a same EA entry (same key for a given association), the one of the 'in place' file is kept (hence the 'preserve' notion). When extracting a file, the file in the filesystem will have its EA set enriched by the EA entries of the file in the archive that do not exist in the filesystem, but its already existing EA will stay untouched. .TP 5 n means 'merge EA and overwrite'. The resulting file in the merged archive will have EA entries from both the 'in place' and the 'to be added' files. If both files share a same EA entry (same key for a given association), the one of the 'to be added' file will be kept (hence the 'overwrite' notion). When extracting a file, the file in the filesystem will have its EA set enriched by the EA entries of the file in the archive, with some EA entries possibly being overwritten. .TP 5 r means 'remove', same as 'R' but for the EA set (thus all EA entries) of a given file ('r' is a lowercase letter here). During a merging operation the file of the resulting archive will not own any EA, even if the 'in place' and/or the 'to be added' files did have some. For file extraction, this means that the file in the filesystem will lose its whole EA set. As for all the previous actions, this EA operation is independent of the operation chosen for the file's data (uppercase letters). .TP 5 d means 'delete'. When a same EA entry is found in both the 'in place' and 'to be added' files, this entry will be absent from the resulting archive. In other words, when merging, the EA set will only contain EA entries specific to the 'in place' file and those specific to the 'to be added' file; entries in common will not be present. When extracting a file from an archive, the file in the filesystem will have its EA set enriched by the entries of the 'to be added' file that are new to the 'in place' file. The other entries (which are thus present in both archive and filesystem) will be removed from the EA set. .TP 5 * is valid for both EA and data. It tells that the action is not yet defined at this step of the evaluation and that further evaluation is required (see the 'chain' operator below). .TP 5 A means 'Ask for user decision'. This uppercase letter concerns Data overwriting. An interactive dialog lets the user define the action for each file in conflict. Note that this action, if used alone, may become very tedious or painful.
The idea is to use it in conditional statements (which are described below) to have dar ask only for non-obvious cases. .TP 5 a means 'Ask for user decision'. This lowercase letter is the equivalent for EA of the 'A' action. It is intended to be used in the same conditional statements described below. .P An action is thus a pair of letters, the first being uppercase (for the file's data), the second being lowercase (for the file's EA). When the -/ option is not given, the action is equivalent to '-/ Oo', making dar proceed to file and EA overwriting. This is to stay as close as possible to the former default action where neither -n nor -w were specified. Note that the -w option stays untouched; consequently, with this default condition for the -/ option, a confirmation will be asked of the user before dar proceeds to any overwriting. The former -n option (still used to handle slice overwriting) can be replaced by its equivalent '-/ Pp' for resolving file overwriting conflicts (never overwrite). Here follow some examples of actions; all these apply to any entry found in conflict during archive merging or archive extraction, and we will see further how to define conditional actions. .TP 5 -/ Rr will lead dar to remove from the filesystem any file that ought to be restored(!). Note that the action given for EA is useless here: with 'R' the EA will always be erased as well as the data. Thus '-/ Rp' would lead to the same result. .TP 5 -/ Po will keep the data of the 'in place' file and the EA set of the 'to be added' file. .TP 5 -/ Ss Using this option when merging an archive with itself (used both as archive of reference (-A option) and auxiliary archive of reference (-@ option) ) will provide the same result as an archive isolation of the archive of reference, but using twice as much memory (so keep using the isolation operation as before! This is just an illustration of the possibility). .P As seen previously, the -u and -U options can be used to filter which EA entries to consider and which to ignore. The question here is to explain how this filtering mechanism interacts with the different policies we just presented above. For files that are not in conflict (found only as 'in place' or as 'to be added'), only the EA entries matching the EA filter are kept. For files in conflict, the overwriting policy is evaluated first, then the filtering mechanism is applied *after* it. Thus for example, using the following [ -/ "Po" -u "*test" ], when merging two archives, only EA ending with "test" will be retained, and when a conflict takes place, these "*test" ending EA will be taken from the 'to be added' file if it has some EA of that type; its other EA entries will be ignored, as well as any EA entry of the 'in place' file, even those ending with "test". At restoration time using the same options, files without conflict will get restored but only their EA entries ending with "test" will be restored, and for files in conflict (already present in the filesystem), the EA set of the file in the filesystem will be removed and replaced by the EA entries of the file in the archive that end with "test", if any exist. .P Well, now let's see how to bring some more fun using conditional statements in all these actions. The structure to use is the following: .TP 5 {<condition>}[<action>] This syntax lets you place an action (as the ones we saw just above) inside the brackets '[' and ']' (for example [Pp]) that will take effect only if the evaluation of the <condition> is true. Given that such a statement is itself a new type of action, you may have guessed that you can use it recursively: {<condition1>}[{<condition2>}[<action>]]. Well, so far it seems useless.
But instead of the "if <condition> then <action1> else <action2>" paradigm common to programming languages, due to the command-line context it has been chosen to use an implicit "OR" operator between actions. Thus you can "stack" conditional statements this way: {<condition1>}[<action1>] {<condition2>}[<action2>] <action3>. In this example, if <condition1> is true then <action1> will be used, ELSE if <condition2> is true then <action2> will be used, ELSE <action3> will be used. This leads to the same possibilities as what is available with programming languages, but with a slightly simpler syntax. Seen in this light, the recursion of the conditional syntax becomes more interesting. For readability, you are allowed to add any space or tab in the overwriting policy, but the resulting overwriting policy must be given as a single argument to dar, thus the use of quotes (either simple ' or double ") is necessary. .P The last operator we will see is the 'chain' operator. Once an expression is evaluated, the resulting pair of actions may contain an '*' (undefined action for EA or data). Further evaluation must then be done. The chain operator, which is represented by a semi-colon ';', lets one separate several independent expressions that will be evaluated in turn until the pair of actions is fully defined. Once an action (for EA or for Data) is defined, it can be redefined by a subsequent evaluation in the chain, however a defined action cannot be set back to undefined, thus '*' will never overwrite a previously defined action. If at the end of the policy the pair of actions is not fully defined, the 'preserve' action is used ('P' or 'p' depending on which of EA or Data is left undefined). Here follows an example of syntax: .TP 5 -/ "{<condition1>}[P*] O* ; {<condition2>}[*p] *o ; Rr" The first expression will evaluate to either P* or O*. At this step, as the action is not completely defined, the second part of the chain is evaluated; it will end with either *p or *o. In any case, after this second statement of the chain we have a fully defined action for both data and EA (either Pp, Po, Op or Oo). Thus the evaluation stops here and the "Rr" policy will never be evaluated. .P We now have one last thing to see: the available conditions (what to place between braces '{' and '}'). Conditions are each defined by a letter, possibly followed by an argument between parentheses. The usual logical operators are available: negation (!), conjunction (&) and disjunction (|). These characters must be escaped or quoted in order not to be interpreted by the shell when used on the command line. In particular the '!' under most shells must be quoted and escaped (-/ '{\\!R}[..]..'). The escape character '\\' is not necessary inside DCF files (those given to the -B option) as no shell is used to interpret these files. To these usual operators has been added a new one: the "inversion" operator, noted '~'. Like the negation, it is a unary operator, but unlike the negation it swaps the roles of 'in place' and 'to be added' for the evaluation, which is slightly different from taking the negation of the result of the evaluation. All these operators follow the usual precedence: unary operators ('!' and '~') are evaluated first, then the conjunction '&', then the disjunction '|'. To override this, you can use parentheses '(' and ')' inside the condition. On top of these logical operators, the conditions are based on atomic operators that compare the 'in place' file to the 'to be added' file. Here they follow: .TP 5 I true only if the 'in place' entry is an inode (a 'detruit', which records the fact that a file has been removed since the archive of reference, is not an inode, for example).
This condition does not take the 'to be added' object into consideration, unlike some (but not all) of the conditions below. .TP 5 D true only if the 'in place' entry is a directory. To know whether the 'to be added' entry is a directory or not, one would use the "inversion" operator: ~D .TP 5 F true only if the 'in place' entry is a plain file (true also if this plain file is a 'hard link', that is if its inode is linked several times to the directory tree) .TP 5 H true only if the 'in place' entry is an inode linked several times to the directory tree (= hard link); it may be a plain file, a Unix socket, a pipe, a char device or a block device, for example. .TP 5 A same as H but the current 'in place' entry is the first link we meet pointing to that hard linked inode. .TP 5 R true if the 'in place' entry is more recent than or of the same date as the 'to be added' entry. The last modification date [aka mtime] is used for this comparison. If the 'to be added' entry is not an inode (and thus has no mtime), the 'in place' entry is considered to be more recent than the 'to be added' entry. The same applies if the 'in place' entry is not an inode (and has no mtime available for comparison): it is here too assumed to be more recent. .TP 5 R(<date>) true if the 'in place' entry is more recent than or of the same date as the fixed <date> given in argument. No consideration is given to the 'to be added' element. The <date> format is the same as the one used with the -af option. If an entry has no mtime (it is not an inode, for example) it is assumed to have a virtual mtime of zero. .TP 5 B true only if both 'in place' and 'to be added' are plain files (hard linked or not) and if the 'in place' file's data is larger than or equal to the 'to be added' file's data. If one or both entries are not plain files (or hard links to plain files) and thus the file size comparison is not possible, the 'in place' entry is assumed to be 'bigger' than the 'to be added' entry. .TP 5 S true only if the 'in place' data is saved in the archive (not marked as unchanged since the archive of reference). Note that while extracting files from an archive, the 'in place' file is the one in the filesystem, which always has its data 'saved' (from libdar's point of view). The 'inversion' of this atomic operator, ~S, may still be interesting in the context of restoration. .TP 5 Y true only if the 'in place' data is saved but dirty (a plain file whose data changed at the time it was read for backup). Note that when restoring in sequential read mode, it is not possible to know whether a file is dirty (it is possible to know it once its data has been read, but sequential reading does not allow then skipping forward to get the dirty state of the file and skipping backward to possibly restore that file, depending on the overwriting policy result). .TP 5 X true only if the 'in place' data is a sparse file .TP 5 T true only if the 'in place' and 'to be added' entries are of the same type (plain file, Unix socket, named pipe, block device, char device, symlink, directory, 'detruit' (which stands for a file deleted since the archive of reference was done), and so on). Note that the number of links to the inode (i.e. whether this is a hard link or not) is not taken into account. .TP 5 e true if the 'in place' entry has EA (whether they are saved or just recorded as existing). .TP 5 r true if the 'in place' entry has EA more recent than or of the same date as those of the 'to be added' entry. If 'to be added' has no EA or is not even an inode, true is returned.
If 'in place' has no EA or is not even an inode, true is returned unless 'to be added' has some EA. The comparison is done on ctime dates. .TP 5 r(<date>) true if the 'in place' entry has EA more recent than or of the same date as the fixed <date> given in argument. No consideration is given to the 'to be added' element. The <date> format is the same as the one used with the -af option. If an entry has no such date (ctime) (when it is not an inode, for example) it is assumed to have a virtual ctime of zero. .TP 5 m true only if 'in place' has a number of EA entries in its EA set greater than or equal to that of 'to be added'. If an entry has no EA or is not even an inode, it is assumed to have zero entries. The comparison is done on this number. Note that the number of EA entries is not the size used to store these entries. For example, the EA entry "user.test" counts for 1, whatever the length of the value associated to it. .TP 5 b true if the 'in place' entry has an EA set bigger than or equal in size to that of the 'to be added' entry. If an entry has no EA or is not even an inode, it is assumed to have a zero byte length EA set. The comparison is then done on this number. Note that the comparison is done on the bytes used to store the whole EA set associated to a given file. .TP 5 s true if the 'in place' entry is an inode (or a hard linked inode) and has its EA saved in the archive of reference, not only marked as present but unchanged since the last backup. This test does not take the 'to be added' entry into account. .P Well, you've seen that uppercase letters are used when the comparison is based on the inode or its data, while lowercase letters are used for atomic operators based on EA. Now that we have completed our tour of this feature, let's see some examples: .TP 5 -/ Pp as seen previously, this is what the -n option does for files when no overwriting policy is defined, which avoids any overwriting for Data as well as for EA. .TP 5 -/ "{!T}[Pp] {R}[{r}[Pp]Po] {r}[Op] Oo" Spaces and tabs are allowed to ease readability. Here the policy stands for: If the files in conflict are not of the same type then keep the Data and EA of the 'in place' entry. Else, if 'in place' has the more recent data then, if 'in place' has the more recent EA, keep both its Data and EA, else keep only its Data and overwrite its EA. Else (if 'in place' does not have the more recent data), if it has the more recent EA then overwrite the data but keep its EA, else overwrite both its data and EA. This policy tends to preserve the most recent data or EA, but it does not take into account whether the EA or Data is effectively saved in the archive or just marked as unchanged since the archive of reference. .TP 5 -/ "{!T}[{~D}[Oo] Pp]" If the entries are not of the same type, then if the 'to be added' entry is a directory we keep it and overwrite the 'in place' entry, else we keep the 'in place' entry. If the entries are of the same type, the policy does not provide any action, thus the default action is used: "Pp". You can change this default action easily using the chain operator: .TP 5 -/ "{!T}[{~D}[Oo] Pp] ; Aa" In this case instead, if the entries are of the same type, the user will be asked what to do. .TP 5 -/ "{!T|!I}[{R}[Pp] Oo] {S}[{~S}[{R}[P*] O*] P*] {~S}[O*] {R}[P*] O*] ; {s}[{~s}[{r}[*p] *o] *p] {~s}[*o] {r}[*p] *o]" Well, this may seem a bit too complex, but just see it as an illustration of what it is possible to do: If 'in place' and 'to be added' are not of the same type, we keep the data and EA of the most recent file (last modification date). Else, both are of the same type.
If both are inodes we evaluate a chain of two expressions (expressions are separated by a semi-colon ';'), which we will see in detail further on. Else, if they are of the same type but are not inodes, we take the EA and data of the most recent entry (this is the last 10 characters of the string). Well, now let's see the inode case: the first expression in the chain sets the action for data and keeps the action for EA undefined, while the second is the exact equivalent for EA: it leaves the action for data undefined ('*') and sets the action for EA. These two expressions follow the same principle: if both entries are saved in the archives (as opposed to being marked as unchanged since the archive of reference), the most recent EA/Data is kept; else, that of the inode that is saved is kept; and if none is saved in the archive, the most recent entry (mtime/ctime) is kept. .RE .TP 20 -^, --slice-mode perm[:user[:group]] defines the permission and ownership to use for created slices. By default, dar creates slices with read and write access available for anyone, letting the umask variable remove some privileges according to the user's preferences. If you need more restricted permissions, you can provide the permission as an octal value (thus beginning with a zero), like 0600 to only grant read and write access to the user. Be careful not to prevent dar from writing to its own slices, which would happen if, for example, you provided a permission such as 0400. Note also that the umask is always applied, thus specifying -^ 0777 will not grant world wide read-write access unless your umask is 0000. .TP 20 -_, --retry-on-change count[:max-byte] When a file has changed while it was being read for backup, you can ask dar to retry saving it. To activate this feature you must provide the maximum number of times a given file can be re-saved (this is the 'count' field). Optionally, the overall maximum amount of bytes allowed to be wasted due to retrying the backup of changing files can be given after a colon character (:), this is the 'max-byte' field. By default (no --retry-on-change option specified) no retry is done. If the 'max-byte' field is not specified, no limit on the bytes is used and each changing file will be saved up to 'count' times if necessary. A file is considered as changed when its last modification time has changed between the time the file was opened for backup and the time it was completely read. Retrying a backup cannot replace the already saved data: a second copy of the file is added just after the previous try and the previous try becomes inaccessible; however it still holds some place in the archive, hence the designation of wasted bytes. You can remove all wasted bytes from an archive using the merging/filtering feature: dar -+ new_arch -A old_arch -ak .P .TP 20 -ad, --alter=decremental This flag is to be used only when merging two archives. Instead of the usual merging, where the files of both archives are added to the resulting archive with ties possibly resolved using the overwriting policy (see -/ option), here the merging builds an archive which corresponds to the decremental backup made from two full backups. The -A option is expected to receive the older archive while -@ is expected to point to the more recent one. If this option is used, any overwriting policy given is ignored and replaced by -/ "{T&R&~R&(A|!H)}[S*] P* ; {(e&~e&r&~r)|(!e&!~e)}[*s] *p". Additionally, files found in the newer archive that did not exist in the older one are replaced by a 'detruit' entry, which marks them to be removed at restoration time.
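.P As a hedged illustration of this option (the archive names are purely illustrative), assuming full_jan is an older full backup and full_feb a more recent one, a decremental backup could be built this way:
.RS .P dar -+ decr_jan -A full_jan -@ full_feb -ad .RE
.P Restoring full_feb and then decr_jan over it would then bring the data back to the state it had when full_jan was made.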
For more information about decremental backups read the usage_notes.html file in the documentation. .TP 20 -asecu, --alter=secu This option disables the ctime check done by default during an incremental backup: if the ctime of an inode has changed since the archive of reference was done while all other values are unchanged (inode type, ownership, permission, last modification date, file size), dar issues a "SECURITY WARNING", as this may be the sign of the presence of a rootkit. You should use this option to disable this type of warning if you are doing a differential backup of just restored data (a differential backup with the archive used for restoration taken as reference): as it is not possible to restore ctime, the restored data's ctime will have changed while the other parameters will be unchanged for all restored files, leading dar to issue a warning for all restored files. This security check is disabled (implicitly) if dar is run with the -ac option. Last, if a file has only its EA changed since the archive of reference was done, the security warning will show (false positive). .TP 20 -., --user-comment "<string>" This option lets the user add an arbitrary message into the archive header. Warning! this message is always stored in clear text, even if the archive is encrypted. You can see the message inserted in an archive by displaying the archive summary (dar -l <archive> -q). Some macros can be used inside the <string>: .RS .TP 5 %c is replaced by the command line used. Note that for security, any option related to archive encryption is removed (-K, -J, -$, -#, -*, -%). Commands included from a DCF file (see -B option) are never added by this macro. As a consequence, if you do not want to see --user-comment stored in the user comment, you can put the --user-comment definition in an included file like ~/.darrc for example. .TP 5 %d this is the current date and time .TP 5 %u this is the uid under which dar has been run .TP 5 %g this is the gid under which dar has been run .TP 5 %h the hostname on which the archive has been created .TP 5 %% the % character. .RE .TP 20 -3, --hash <algo> When creating, isolating or merging an archive, an on-fly hash file is generated beside each slice using the specified algorithm. Available algorithms are "md5" and "sha1"; by default no hash file is generated. The generated hash file is named after the name of the slice, with the .md5 or .sha1 extension added at the end. These hash files can be processed by the usual md5sum and sha1sum commands (md5sum -c <hash file>) to verify that the slice has not been corrupted. Note that the result is different from generating the hash file with md5sum or sha1sum once the slice has been created, in particular if the media is faulty: calling md5sum or sha1sum on the written slice makes you compute the hash of an already corrupted file, thus the corruption would not be seen when testing the file against the hash at a later time. Note also that the creation of a hash file is not available when producing the archive on a pipe ("dar -c -"). .TP 20 -<, --backup-hook-include <mask> The mask is applied to path+filename during the backup operation only. If a given file matches the mask, a user command (see -= option below) will be run before proceeding to the backup of that file and once its backup is completed. See also the -> option below. IMPORTANT: if using the short option, you need to enclose it between quotes: '-<' for the shell not to interpret the < as a redirection. .TP 20 ->, --backup-hook-exclude <mask> The mask is applied to path+filename during the backup operation only.
If a given file matches the mask, even if it also matches a mask given after the -< option, no user command will be executed before and after its backup. The -< and -> options act like -g and -P: they can receive wildcard expressions and thus have their behavior driven by the --alter=globe and --alter=regex expressions seen above, as well as by the --alter=mask option. Last, the --alter=case and --alter=no-case options also modify the way case sensitivity is considered for these masks. By default (no -> or -< option), no file gets selected for a backup hook. IMPORTANT: if using the short option, you need to enclose it between quotes: '->' for the shell not to interpret the > as a redirection. .TP 20 -=, --backup-hook-execute <string> for files covered by the masks provided thanks to the -< and -> options, the given string is executed by a shell before the backup of that file starts and once it has completed. Several macros can be used; they are substituted at run time: .RS .TP 10 %% will be replaced by a literal % .TP 10 %p will be replaced by the full path under backup .TP 10 %f will be replaced by the filename (without the path) .TP 10 %u will be replaced by the UID of the file .TP 10 %g will be replaced by the GID of the file .TP 10 %c and most interesting, %c (c for context) will be replaced by "start" or by "end" when the command is executed before or after the backup, respectively. .RE This way, one can stop a database just before it is backed up, and restart it once the backup is completed. Note that the masks seen above that drive the execution of this command can be applied to a directory or to a plain file, for example. When a directory is selected for this feature, the command is run once before starting (with the context "start") the backup of any file located in that directory or in a subdirectory of it, and once all files in that directory or its subdirectories have been saved, the command is run a second time (with the context "end"). In between, even if some files match the backup-hook masks, no command will be executed for them. It is assumed that when a directory has been selected for a backup hook, this hook (or user command) prepares for backup all the data located in that directory. The environment variable DAR_DUC_PATH also applies to these user commands (see -E above, or the ENVIRONMENT paragraph below). .TP 20 -ai, --alter=ignore-unknown-inode-type When dar meets an inode type it is not aware of (some time ago this was the case for Door inodes on Solaris, for example; Door inodes are handled by dar since release 2.4.0), it issues a warning about its inability to handle such an inode. This warning occurs even if that entry is filtered out by means of the -X, -I, -P, -g, -[ or -] options, as soon as some other entry in that same directory has to be considered for backup, leading dar to read that directory's contents and to fail on that unknown inode type (filtering is done based on the result of the directory listing). This option is there to avoid dar issuing such a warning in that situation. .PP .PP .B RESTORATION OPTIONS (to use with -x) .TP 20 -k[{ignore|only}], --deleted[={ignore|only}] Without argument or with the "ignore" argument, this option leads dar at restoration time not to delete files that have been deleted since the backup of reference (file overwriting can still occur). By default, files that have been destroyed since the backup of reference are deleted during restoration, but a warning is issued before proceeding, except if -w is used.
If -n is used, no file will be deleted (nor overwritten), thus -k is useless when using -n. If the -/ option is used, this option without argument is ignored! With the "only" argument, this option makes dar consider only the files marked as to be removed in the archive to restore: no file is restored but some files are removed. When -konly (or --deleted=only) is used, the -/ option is ignored (contrary to the "--no-delete=ignore" behavior, which is ignored when -/ is used). Of course "--no-delete=ignore" and "--no-delete=only" are mutually exclusive, because if both of them were honored at the same time dar would do nothing at all. .TP 20 -r, --recent only restore files that are absent from the filesystem or more recent than those present in it. If the -/ option is used, this option is ignored! .TP 20 -f, --flat do not restore the directory structure. All files will be restored in the directory given to -R; if two files of the same name have to be restored, the usual scheme for warning (-w option) and overwriting (-n option) is used. No renaming scheme is currently provided. When this option is set, dar does not remove files that have been recorded as deleted since the last backup (-f thus implicitly implies -k). .TP 20 -ae, --alter=erase_ea [DEPRECATED use -/ instead] Drop all existing EA of files present in the filesystem that will have to be restored. This way, the restored files will have the exact set of EA they had at the time of the backup. If this option is not given, a file to restore will have its EA overwritten by those present in the backup, and if some extra EA are present they will remain untouched. See the .B Note concerning Extended Attributes (EA) above for a detailed explanation about this behavior. If the -/ option is used, this option is ignored! .TP 20 -A, --ref [<path>]/<basename> -A takes a different meaning when used with -x, -d or -t. The archive given after -A must be an extracted catalogue based on the archive given after -x, -d or -t. This extracted catalogue will be used for the operation in place of the catalogue located in the archive. This feature is intended for the case of corruption within an archive that affects the internal catalogue. If you have a sane extracted catalogue of a given archive you can use it as a replacement for the one located in the archive itself. Note that with the -A option the following options are also available: -F, -J and -*. .TP 20 -D, --empty-dir At restoration time, if -D is not specified (default), any file and directory is restored with regard to the filtering mechanism specified (see -I, -X, -P, -g, -[ and -] options). But if the -D option is provided, the restoration skips directory trees that do not contain saved files. This avoids having a huge empty tree with a few restored files, especially when restoring a differential archive in an empty place. Note: this feature cannot work when --sequential-read is used, as it is not possible to know whether or not a directory contains some saved files at the time the directory inode is read from the archive in sequential reading mode. .TP 20 -2, --dirty-behavior { ignore | no-warn } At restoration time, if a file in the archive is flagged as "dirty" (meaning that it had changed at the time it was saved), the user is asked for confirmation before restoring it. Specifying "ignore" will skip those dirty files, while "no-warn" will restore them without user confirmation.
This feature is incompatible with sequential reading mode: in that mode dar cannot know whether a file is dirty before having restored it; consequently, dar can neither warn about nor skip a dirty file before restoring it. In that situation, once restored, a dirty file will be removed unless dirty-behavior is set to "no-warn". .TP 20 -al, --alter=lax Dar will try to work around data corruption of the slice header, archive header and catalogue. This option is to be used as a last resort solution when facing media corruption. It is rather, and still strongly, encouraged to test archives before relying on them, as well as to use Parchive to build parity data for each slice, in order to be able to recover from data corruption in a much more effective manner and with a much better chance of success. Dar also offers the possibility to back up the catalogue by means of an isolated catalogue, but this does not address slice header corruption or even corruption of a saved file's data (dar will detect but will not correct such an event). .TP 20 -/, --overwriting-policy The overwriting policy can be used for archive restoration to define when and how file overwriting can occur. See the description of this option above. .PP .B TESTING AND DIFFERENCE OPTIONS (to use with -t or -d) .PP .TP 20 -ado-not-compare-symlink-mtime, --alter=do-not-compare-symlink-mtime With this option set, when comparing a symlink, no message shows when the symlink in the archive and the symlink on the filesystem only differ by their mtime. See also the -O option. .P There is no other specific option, but all general options are available except, for example, -w which is useless, as testing and comparing only read data. The -A option is available as described just above for extraction (backup of the internal catalogue). .P Doing a difference in sequential read mode is allowed, but hard linked inodes can only be compared to the filesystem the first time they are met; subsequent hard links to this same inode cannot obtain the corresponding data because skipping backward in sequential read mode is forbidden. In that situation, the hard links are reported as skipped, meaning that data comparison could not be performed. .PP .B LISTING OPTIONS (to use with -l) .PP .TP 20 -T, --list-format=<normal|tree|xml>, --tree-format By default, listing provides a tar-like output (the 'normal' output). You can however get a tree-like output (the 'tree' output) or an XML structured output (the 'xml' output). Providing -T without argument gives the same as providing the 'tree' argument to it. The option --tree-format is an alias to --list-format=tree (backward compatibility). Note that the files doc/dar-catalog-*.dtd define the format of the XML output listing (these files are also installed under $PREFIX/share/doc). .TP 20 -as, --alter=saved list only saved files .TP 20 -alist-ea, --alter=list-ea list Extended Attribute names for each file that has some. .TP 20 -I, -X, -P, -g, -[, -] can be used to filter the files to list based on their name or path. .P Else only -v and -b from the general options are useful. Note that -v displays an archive summary first, where detailed information about the archive can be obtained. If you want to display only this summary use -q with the -l option. .TP 10 displayed fields .RS .TP 10 [data] possible values are [ ] or [Saved] or [InRef] or [DIRTY]. [ ] means that the data has not been saved because there is no change since the backup of reference. [Saved] means that the data has been saved, and thus this archive is able to restore the file.
[InRef] was used in archives generated by dar version 2.3.x and before, when isolating a catalogue from an archive, and means that the file was saved in the reference archive. Last, [DIRTY] means that the data is saved (like [Saved]) but has changed at the time dar was reading it for backup, leading dar to possibly store the file in a state it never had. .TP 10 [EA] possible values are " " (empty string) or [ ] or [InRef], [Saved] or [Suppr]. It shows whether Extended Attributes are present and saved ([Saved]), are present but not saved ([ ]) which means there is no change since the backup of reference, whether there is no EA saved for this file (empty string), or whether some EA were present in the archive of reference but none is currently available ([Suppr]). [InRef] was used when isolating a catalogue (release 2.3.x and before) from an archive and means that the file was saved in the reference archive. .TP 10 [compr] possible values are [....%] or [-----] or [ ] or [worse]. Shows if the file has been compressed and the compression ratio "compressed/uncompressed" ([...%]; for example [ 33%] means that the compressed data takes a third of the corresponding uncompressed data, thus the lower this ratio, the better the compression), or if the file is stored without compression ([ ], see -Y and -Z options), or if the file is not subject to compression because it is not a saved regular file ([----]), or if the file takes more space compressed than its original size ([worse]), due to compression overhead. Note that the compression ratio used here is the inverse of what compression tools usually provide (uncompressed/compressed). The reason for this choice is that the ratio used here has the advantage of always staying between 0 and 100%, which is much easier to work with to provide a well formatted output. .TP 10 [S] possible values are [ ] or [X]. [X] only applies to saved plain files, and tells that the file is stored using the sparse file data structure: not all data is stored, long sequences of zeros are skipped. This also means that at restoration time, if the filesystem supports it, holes will be restored. To store hole information libdar uses escape sequences (special sequences of bytes), but to avoid real data being considered as such an escape sequence, a special escape sequence is used when data looks like an escape sequence. So if some data contains such an escape sequence, it must be read as if it contained holes in order to restore the data to its original form. For that reason, in some rare circumstances (saving a dar archive inside a dar archive without compression or encryption, for example) a file without holes may be marked [X] as if it had holes and will be longer by one byte for each data sequence looking like an escape sequence. .TP 10 permission see the ls man page. Note that a star (*) is prepended to the permission string if the corresponding inode is linked several times to the directory structure (hard link). .TP 10 user owner of the file .TP 10 group group owner of the file .TP 10 size size in bytes of the file (if compression is enabled, the real size in the archive is "compression ratio" times smaller). .TP 10 date the last modification date of the file. The last access time is also saved and restored, but not displayed. .TP 10 filename The name of the file. .TP 10 When using the -alist-ea option, for hard linked inodes, the filename is followed by an integer between braces: entries with the same number point to the same inode.
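.P For example (the archive name my_backup is purely illustrative), the following commands would respectively display the archive summary only, the classical listing restricted to saved files, and an XML structured listing:
.RS .P dar -l my_backup -q .br dar -l my_backup -as .br dar -l my_backup --list-format=xml .RE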
.PP .SH EXPLICIT OPTIONAL ARGUMENTS When dar has not been compiled with GNU getopt, which is not present by default on some systems like FreeBSD, you may lack the optional argument syntax. For example "-z" will create a parse error on the command line, or in -B configuration files. The solution is to explicitly give the argument. Here follows a list of explicit arguments to use in place of optional ones: .TP 20 -z must be replaced by -z 9 .TP 20 -w must be replaced by -w d or -w default .TP 20 -H must be replaced by -H 1 .TP 20 -0 must be replaced by -0 ref .P .B important ! When using GNU getopt(), optional arguments are available by sticking the argument to the short option: "-z" for example is available as well as "-z9". But "-z 9" is wrong: it will be read as the "-z" option and "9", a command line argument (not an argument to the -z option). On the other side, when using a non GNU getopt, "-z" becomes an option that always requires an argument, and thus "-z 9" is read as the "-z" option with "9" as argument, while "-z9" will be rejected as an unknown option, and "-z" alone will generate an error as no argument is provided. Consequently, you .B need a space between the option (like "-z") and its argument (like "9") when dar does not rely on a GNU getopt() call, which also implies explicitly giving arguments to the options listed just above. .SH EXIT CODES .B dar exits with the following codes: .TP 10 0 Operation successful. .TP 10 1 Syntax error on command-line. .TP 10 2 Error due to a hardware problem or a lack of memory. .TP 10 3 Detection of a condition that should never happen, and which is considered a bug of the application. .TP 10 4 Code issued when the user has aborted the program at a question asked by dar. This also happens when dar is not run from a terminal (for example when launched from crontab) and dar has a question for the user. In that case, dar aborts the same way as if the user had pressed the escape key at the question prompt. .TP 10 5 is returned when an error concerning the treated data has been detected. While saving, this is the case when a file could not be opened or read. While restoring, it is the case when a file could not be created or replaced. While comparing, it is the case when a file in the archive does not match the one in the filesystem. While testing, it is the case when a file is corrupted in the archive. .TP 10 6 an error occurred while executing a user command (given with the -E or -F option), mainly because the creation of a new process is not possible (process table is full) or the user command returned an error code (exit status different from zero). .TP 10 7 an error has occurred when calling a libdar routine. This means the caller (the dar program) did not respect the specification of the API (and this can be considered a particular case of a bug). .TP 10 8 the version of dar used is based on finite length integers (it has been compiled with the option --enable-mode=...). This code is returned when an integer overflow occurred. Use the full version (based on the so-called "infinint" class) to avoid this error. .TP 10 9 this code indicates an unknown error. I have probably forgotten to update the exception catching code to take care of a new exception... this is a minor bug you are welcome to report. .TP 10 10 you have tried to use a feature that has been disabled at compilation time. .TP 10 11 some saved files have changed while dar was reading them; this may lead to the data saved for such a file not corresponding to a valid state of that file.
For example, if the beginning and the end of the file have been modified at the same time (while dar is reading it), only the change at the end will be saved (the beginning had already been read); the resulting state of the file as recorded by dar never existed and may cause problems to the application using it. .SH SIGNALS If dar receives a signal (see the kill(2) man page) it will take the default behavior, which most of the time will abruptly abort the program, except for the following signals: .TP 10 SIGINT This signal is generated by the terminal when hitting CTRL-C (with the terminal's default settings), it can also be generated with the kill command .TP 10 SIGTERM This signal is generated by the system when changing run-level, in particular when doing a shutdown, it can also be generated with the kill command .TP 10 SIGHUP Depending on the system, this signal may be sent before the SIGTERM signal at shutdown time, it can also be generated with the kill command .TP 10 SIGQUIT This signal is generated by the terminal when hitting CTRL-\\ (with the terminal's default settings), it can also be generated with the kill command .TP 10 SIGUSR1 This signal can be generated by the kill command .TP 10 SIGUSR2 This signal can be generated by the kill command .P For those previous signals, two behaviors exist. For SIGHUP, SIGINT, SIGQUIT, SIGTERM and SIGUSR1, a .B delayed termination is done: the backup or isolation operation is stopped, the catalogue is appended to the archive and the archive is properly completed with the correct terminator string; this way the generated archive is usable and can be used as reference for a differential backup at a later time. Note that if an on-fly isolation had been asked, it will *not* be performed, and no user command will be launched even if dar has been configured for it (-E option). For SIGUSR2 instead a .B fast termination is done: in case of backup or isolation, the archive is not completed at all, only memory and mutexes are released properly. .P For both types of termination, and for operations other than backup or isolation, dar's behavior is the same: for restoration, all opened directories are closed and permissions are set back to their original values (if they had to be changed for restoration). For listing, comparison and testing, the program aborts immediately. .P Another point: when using one of the previous signals, dar will return with the exit status .B 4 meaning that the user has aborted the operation. Note that answering "no" to a question from dar may also lead dar to exit this way. Last, if the same signal is received a second time before the end of the program, dar will abort immediately. .SH FILES .B $HOME/.darrc and .B /etc/darrc if present are read for configuration options. They share the same syntax as files given to the -B option. If $HOME/.darrc is not present, and only in that case, /etc/darrc is consulted. You can still launch /etc/darrc from .darrc using a statement like .B -B /etc/darrc. None of these files need to be present, but if they are, they are parsed AFTER any option on the command line and AFTER files included from the command line (files given to the -B option). NOTE: if $HOME is not defined, $HOME/.darrc defaults to /.darrc (at the root of the filesystem). See also the .B conditional syntax below, and the -N option above, which leads dar to ignore the /etc/darrc and $HOME/.darrc files.
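.P As a minimal hedged illustration (the options chosen here are arbitrary), a $HOME/.darrc could simply contain a few general options and then hand over to the system wide file:
.RS .P # be verbose for any operation .br -v .br # then read the system wide defaults as well .br -B /etc/darrc .RE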
.SH CONDITIONAL SYNTAX Configuration files (-B option, $HOME/.darrc and /etc/darrc) usually contain a simple list of command-line arguments, split or not over several lines, and possibly mixed with comments (see -B option for more). But you can also use make-like targets to ask for a particular set of commands to be used under certain conditions. A condition takes the form of a reserved word immediately followed by a colon ':'. This word + colon must stand alone on its line, possibly with spaces or tabs beside it. The available conditions are: .TP 20 extract: all options listed after this condition get used if the -x option has been used previously on the command line or in a file .TP 20 create: all options listed after this condition get used if the -c option has been used previously on the command line or in a file (-B option) .TP 20 list: (or listing:) if the -l option has been used .TP 20 test: if the -t option has been used .TP 20 diff: if the -d option has been used .TP 20 isolate: if the -C option has been used .TP 20 merge: if the -+ option has been used .TP 20 reference: if the -A option has been used (except when -A is used for the snapshot feature or in conjunction with -af) .TP 20 auxiliary: if the -@ option has been used .TP 20 all: in any case .TP 20 default: if no -c, -d, -x, -t, -C, -l or -+ option has been used at this point of the parsing. .P A condition stops when the next condition starts, or at End of File. The commands inserted before any condition are equivalent to those inserted after the "all:" condition. Remark: -c, -d, -x, -t, -C and -l are mutually exclusive, only one of them can be used when calling dar. .P Here is an example of conditional syntax .RS .P create: # upon creation exclude the .br # following files from compression .br -Z "*.mp3" -Z "*.mpg" .P all: .br -b .br -p .P default: .br # this will get read if no .br # command has been set yet .br -V .br # thus by default dar shows its version .P all: .br -v .br # for any command we also ask to be verbose .br # this is added to the previous all: condition .RE .P Last point, you may have the same condition several times (several .B all: for example). They will be concatenated together. .SH USER TARGETS User targets are arbitrary words found on the command line that do not start with a dash ('-'). On most systems they should be placed after the command and options. They are collected from the command line first, then comes the parsing of the command and optional arguments. Their purpose is to extend the conditional syntax described just above, letting a set of options be activated by the user just by adding a single word on the command line. Of course user targets must not be equal to one of the reserved words of the conditional syntax (extract, create, ... all, default). A valid target is a word (thus without spaces) composed of lowercase or uppercase letters (case is sensitive), possibly with digits, dashes '-' or underscore '_' characters. .P Let's see an example of use: .P first a DCF file named 'example.dcf' that will be given on the command line: .P .RS # normal set of files considered for backup .P create: -R / -P proc -P sys -P mnt -D # if the "home" user target is given on the command line the following commands get added .P home: -g home # if the "verbose" user target is used, we will have some more verbosity ... .P verbose: -v -vs .RE .P Then we could run dar in the following ways: .TP 20 dar -c test -B example.dcf in that case only the commands in the "create:" section of example.dcf would be used.
.TP 20 dar -c test -B example.dcf verbose here, in addition to the "create:" target, the commands under the "verbose:" target (-v and -vs) would also be used .TP 20 dar -c test -B example.dcf verbose home last, we use the two user targets "verbose:" and "home:" in addition to the "create:" target of the usual conditional syntax. .P Note that if the last option *may* receive an argument, the first user target that follows it will be taken as an argument to that option. To avoid this, either change the order of the options on the command line so that the last option is one that never or always uses an argument (for example -b never has an argument while -s always has one), or separate the options from the user targets with the -- word. And of course you can also use the explicit argument of the last option (see the EXPLICIT OPTIONAL ARGUMENTS section above). .P Second point: it is allowed to have user targets inside a DCF file. Note however that targets are collected in a first phase, which leads some parts of the file to be hidden (because the corresponding conditional syntax or user target is not present). Then the remaining part of the file is parsed and the action for each option found is taken. At that time, new user targets found are just recorded, but they do not modify the current DCF file layout; in particular, hidden parts of the file stay hidden even if the corresponding user target is read in this same file. The next DCF parsing (which may be triggered by a second -B option on the command line, or by a -B option inside the currently parsed DCF file) will thus be done with the additional targets found in that first DCF file, so in a way you may have user targets that activate other user targets. Here follows an example of two DCF files, first.dcf and second.dcf: .RS # cat first.dcf target3: -K toto target1: target2 -B second.dcf target3 target2: #never reached -s 10k # cat second.dcf target2: -v target3: -b .RE In that example, target1 activates both target2 and target3, but at the time of the parsing of first.dcf, neither target2 nor target3 was yet activated, thus '-K toto' and '-s 10k' will never be given to dar (unless they were activated beside target1 before first.dcf got parsed); however, when the time comes to parse second.dcf, target2 *and* target3 are activated, thus both '-v' and '-b' will be passed to dar, even though 'target3' is located after '-B second.dcf' in the file first.dcf .SH ENVIRONMENT .TP 10 DAR_DCF_PATH if set, dar looks for Dar Configuration Files (DCF files, see -B option) that do not have a fully qualified path in the directories listed in the DAR_DCF_PATH environment variable. This variable receives a colon (:) separated list of paths; dar looks in each of them in turn, up to the first file found under the requested name. .TP 10 DAR_DUC_PATH if set, dar looks for Dar User Commands (DUC files, see -E, -F, -~, -= options) that do not have a fully qualified path in the directories listed in DAR_DUC_PATH. This variable receives a colon (:) separated list of paths; dar looks in each of them in turn, up to the first file found under the requested name. .SH EXAMPLES You can find some more examples of use in the tutorial, mini-howto, sample scripts, and other related documentation. All these are available in dar's source package, and are also installed beside dar in the <--prefix>/share/dar directory.
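.P As a quick hedged sketch (the archive names and paths below are purely illustrative), a full backup followed later by a differential backup based on it could look like:
.RS .P dar -c full_backup -R /home/user -z .br dar -c diff_backup -A full_backup -R /home/user -z .RE
.P A single file could then be restored from the differential archive with something like:
.RS .P dar -x diff_backup -R /tmp/restore -g some/file .RE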
This documentation is also available on-line at http://dar.linux.free.fr/doc/index.html#2 .SH SEE ALSO dar_xform(1), dar_slave(1), dar_manager(1), dar_cp(1), TUTORIAL and NOTES included in the source package and also available at http://dar.linux.free.fr/doc/index.html .SH KNOWN BUGS dar cannot restore the time of symbolic links. Many (all?) UNIX systems do not provide any way to do that: the utime() system call changes the file pointed to by the link rather than the date of the link itself. .P dar saves and restores atime and mtime, but cannot restore ctime (last inode change); there does not seem to be a standard call to do that under UNIX. .SH AUTHOR .nf http://dar.linux.free.fr/ Denis Corbin France Europe dar-2.4.8/man/dar_slave.10000644000175000017430000000405312010477201012007 00000000000000.TH DAR_SLAVE 1 "March 3rd, 2012" .UC 8 .SH NAME dar_slave \- disk archive slave for remote access to a dar archive .SH SYNOPSIS dar_slave [options] [<path>/]source .P dar_slave -h .P dar_slave -V .SH DESCRIPTION .B dar_slave reads an archive and listens for .B dar orders on standard input, sending the requested parts of the archive to standard output. .PP Source is the basename of the archive to read. By default, .B dar_slave uses standard input and output to communicate with a .B dar process, which may be running on the local host or on a remote host. It is possible to change the behavior of .B dar_slave to use named pipes instead of standard input and output. .SH OPTIONS .PP .TP 20 -h Displays help usage. .TP 20 -V Displays version information. .TP 20 -i <filename> reads the orders from the named file instead of standard input. In most cases the filename will be a named pipe. .TP 20 -o <filename> sends the data to the named file instead of standard output. In most cases the filename will be a named pipe. .TP 20 -E string is a command to be launched between slices. See the dar(1) man page (same option) for more information. .TP 20 -Q Do not display any message on stderr when not launched from a terminal (for example when launched from an at job or crontab). It remains that any question to the user will be assumed to have a 'no' answer, which most of the time will abort the program. .TP 20 -j when virtual memory is exhausted, ask the user to make room before trying to continue. By default, when memory is exhausted dar aborts. .TP 20 -; Defines the minimum number of digits to use for archive slice numbers. See the same option in the dar man page for more details. .SH EXIT CODES .B dar_slave exits with the same codes as dar does, see the dar(1) man page. .SH SIGNALS Any signal sent to dar_slave will abort the program immediately; there is no way to have a proper termination before the end of the process. .SH SEE ALSO dar(1), dar_xform(1), dar_manager(1), dar_cp(1) .SH KNOWN BUGS None currently. .SH AUTHOR .nf http://dar.linux.free.fr/ Denis Corbin France Europe dar-2.4.8/man/dar_cp.10000644000175000017430000000302112010477201011271 00000000000000.TH DAR_CP 1 "March 3rd, 2012" .UC 8 .SH NAME dar_cp \- 'cp' clone that does not stop at the first I/O error met .SH SYNOPSIS dar_cp <src> <dst> .P dar_cp -h .P dar_cp -V .SH DESCRIPTION .B dar_cp is similar to the standard 'cp' command, except it does not stop copying when an I/O error is met. Instead, it skips a bit further and continues to copy the rest of the file, as much as possible, filling the gaps with zeroed bytes. .PP .B dar_cp only accepts full filenames (not directories) as arguments. The reason for its existence in the dar package is that you need it if you use Parchive with dar and have a corruption on a CD-R or any other read-only medium.
You need to copy the corrupted slice onto a read-write filesystem, for Parchive to be able to repair it using the slice's associated redundancy files. .SH OPTIONS .PP .TP 20 -h Displays help usage. .TP 20 -V Displays version information. .SH EXIT CODES .B dar_cp exits with the following codes: .TP 10 0 upon normal execution (whether some corrupted parts were skipped or not) .TP 10 1 syntax error on command-line .TP 10 2 could not open source or destination files .TP 10 3 any other system error met during the copy .TP 10 5 when some data could not be copied due to I/O error .SH SIGNALS Any signal sent to dar_cp will abort the program immediately; there is no way to have a proper termination before the end of the process. .SH SEE ALSO dar(1), dar_xform(1), dar_manager(1), dar_slave(1) .SH KNOWN BUGS None currently. .SH AUTHOR .nf http://dar.linux.free.fr/ Denis Corbin France Europe dar-2.4.8/man/Makefile.am0000644000175000017430000000010512003275573012025 00000000000000dist_man_MANS = dar.1 dar_manager.1 dar_slave.1 dar_xform.1 dar_cp.1 dar-2.4.8/man/dar_manager.10000644000175000017430000003447512010477201012322 00000000000000.TH DAR_MANAGER 1 "March 3rd, 2012" .UC 8 .SH NAME dar_manager \- compiles several archives' contents into a database to ease file restoration .SH SYNOPSIS dar_manager [-v] [-j] -C [<path>/]<database> dar_manager [-v] [-j] -B [<path>/]<database> -A [<path>/]<archive> [-; <min-digits>] [[<path>/]<archive_basename>] dar_manager [-v] [-j] -B [<path>/]<database> -l dar_manager [-v] [-j] -B [<path>/]<database> -D <number>[-<number>] dar_manager [-v] [-j] -B [<path>/]<database> -b <number> <new_archive_basename> dar_manager [-v] [-j] -B [<path>/]<database> -p <number> <path> dar_manager [-v] [-j] -B [<path>/]<database> -o [list of options to pass to dar] dar_manager [-v] [-j] -B [<path>/]<database> -d [<path to dar command>] dar_manager [-v] [-j] -B [<path>/]<database> [-N] [-k] [-w <date>] [-e "<extra options to dar>"] -r [list of files to restore] dar_manager [-v] [-j] -B [<path>/]<database> -u <number> dar_manager [-v] [-j] -B [<path>/]<database> -f <file> dar_manager [-v] [-j] -B [<path>/]<database> -s dar_manager [-v] [-j] -B [<path>/]<database> -m <number> <number> dar_manager [-v] [-j] -B [<path>/]<database> -c dar_manager [-v] [-j] -B [<path>/]<database> -i dar_manager [-v] [-j] -B [<path>/]<database> -@ { <batch_file> | "-" } dar_manager -h dar_manager -V .SH DESCRIPTION .B dar_manager is part of the Disk Archive suite. Its purpose is to simplify the restoration of a small set of files spread over many backups, full or differential. This is achieved by gathering the catalogue of each archive (this has to be done only once). At any time you just have to give the relative paths to the files you want to restore, and dar_manager will call dar with the proper options to restore the last version of each file (or the last version before a given date). Note that dar_manager is to be used when you removed some files by accident some time ago and wish to recover them. It is thus not adapted to restoring the state a directory tree had at a given time, in particular when some files have to be removed. For that you must use dar directly with the archive corresponding to the date for which you wish to restore the state. .PP You can restore any file by hand without .B dar_manager , but if you make a lot of differential backups, you may spend much time finding the archive that contains the last version of your file, as dar will not save a file that has not changed since the previous backup. .B dar_manager simplifies the process by looking in its internal database, built from the archives' "catalogues". .SH OPTIONS .TP 20 -C, --create [<path>/]<database> creates an empty database that will collect information about several archives. The <database> is a filename that is required by the -B option. To destroy a <database>, just remove the file. .TP 20 -B, --base [<path>/]<database> specifies the database to read or modify. The file must exist and have a database structure (see -C option).
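.P
As an illustration (the database and archive names below are only examples, not defaults), a database is created once, then fed with the catalogue of each new archive, and can be listed at any time:
.P
.RS
.nf
dar_manager -C /var/backups/base.dmd
dar_manager -B /var/backups/base.dmd -A /var/backups/full_monday
dar_manager -B /var/backups/base.dmd -A /var/backups/diff_tuesday
dar_manager -B /var/backups/base.dmd -l
.fi
.RE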
.TP 20 -i, --interactive use a keyboard interactive text menu to do operations on the given database. This way you may avoid reading the other options described in this manual page, if you wish, and just use the interactive option. You will however always have to create an empty database (-C option) and restore files manually (-r option). .TP 20 -A, --add [<path>/]<archive> [[<path>/]<archive_basename>] adds an archive to the database. An isolated catalogue can also be used, but only if it has been produced by dar version 1.2.0 or above. Why? Because an isolated catalogue produced by an older version will always report that no files are saved in the archive of reference; in that case the solution is to provide the archive itself as argument. An optional second argument is the basename of the archive, if it is different from the first argument (needed for extraction of files). For example, you could give an isolated catalogue as first argument and the basename of the original archive (where the data is stored) as second argument. By default, .B dar_manager will look for an archive of reference in the command line used to create each archive, but in some cases it may be necessary to specify the archive name (for example if you have changed its name). .TP 20 -;, --min-digits <number> the zero-padded number of digits to use to build the slice filenames (for more details see the dar man page at this same option). .TP 20 -l, --list displays the information about the archives compiled in the database. In particular, a number is given to each archive, which is required by some other options to designate a particular archive within the database. Nothing prevents you from feeding the database with several archives of the same basename! You will just have to guess which one is meant under this name. :-) .TP 20 -D, --delete <number>[-<number>] removes an archive (or a range of archives) from the database. The number of the archive (or the min and max numbers of the archive range) corresponds to those given by the -l option. Note that all archive numbers greater than the one(s) deleted will be decremented to keep a continuous numbering of the archives inside the database. If a single number is given (not a range), it may also be a negative number, meaning counting from the end. For example, -1 means the last archive of the base, -2 the penultimate, etc. .TP 20 -b, --base <number> <new_archive_basename> this option allows you to rename the archive basename (used when restoring files from it). Here too, the number may be a negative number. .TP 20 -p, --path <number> <path> this option allows you to change the location of a given archive (used when restoring files from it). Here too, a negative number is allowed. .TP 20 -o, --options [list of options to pass to dar] Specifies the options to use when calling dar. Each call erases the previous setting. Possible .B dar options are all the available ones except "-x" and simple arguments (the [list of paths]), which will be added by .B dar_manager itself. .TP 20 -d, --dar [<path>] Sets the path to dar. If no argument is given, dar is expected to be located in the PATH. .TP 20 -r, --restore [list of files or directories to restore] .B dar_manager will restore all (and only) the given files or directories, in their latest recorded state, or in the latest state before the date given with the -w option. If a directory is given, all subfiles and subdirectories are restored recursively in it. You can filter out some files from this recursion thanks to dar's usual filtering options (see the dar man page), which you can provide beside -r using the -e option (see below).
Dar_manager never leads dar to remove any file: if a file is recorded as having been removed at the date requested for restoration, it is simply not restored. Thus if you restore into an empty directory you will get all the files and directories you provided to dar_manager in the state they had at the date you asked for. Files that did not exist at that time will not be restored. However, you can restore over an existing installation; dar will then warn you before overwriting files (see the -w and -n options of dar) but will still not remove files that were recorded as removed in a previous archive of reference. Note that files listed after the -r option must never have an absolute path. They will be restored under the directory specified with the -R option of dar (passed to dar using the -o or -e options), or, by default, under the current directory. .TP 20 -w, --when <date> alters the behavior of the -r option: it still restores the files in the most recent version available, but only before the given date (versions with more recent dates are ignored). The <date> must respect the following format: [[[year/]month/]day-]hour:minute[:second]. For example, "22:10" means 10:10 PM of the current day, "7-22:10" means 10:10 PM on the 7th of the current month, "3/07-22:10" means the 7th of March at 22:10 of the current year, and "2002/03/31-14:00:00" is the date of dar's first release ;-). The given date must be in the past, of course, and is compared to the "last modification" date of the saved files, not to the date at which the archives were made. Thus if a file was changed long ago but saved in a recent (full) archive, it will be selected for restoration even for dates older than the creation of that archive. Conversely, a file saved long ago with an mtime set to a date in the future will not be selected for restoration when you give the date at which the archive was made. .TP 20 -e, --extra <options> passes some more options to dar. While the -o option takes all that follows it on the command line as arguments to pass to dar and writes these to the database, the -e option does not alter the database and has only one argument. In other words, if you need to pass several options to dar through the -e option, you need to use quotes (simple quotes ' or double quotes ") to enclose these options. Example: .P .RS .RS dar_manager -B database.dmd -e "-w -v -p -b -r -H 1" -r some/files .RE .P while using the -o option you must not use quotes: .P .RS dar_manager -B database.dmd -o -w -v -p -b -r -H 1 .RE .RE .TP 20 -u, --used <number> lists the files for which the given archive holds the last available version. Thus when no file is listed, the given archive is no longer useful in the database and can be safely removed (-D option). If <number> is zero, all available files are listed; the status provided for each file present in the database is the most recent status. A negative number is allowed for this option (see -D option for details). .TP 20 -f, --file <file> displays in which archives the given file is saved, along with its modification date (mtime) and change date (ctime). .TP 20 -s, --stats shows the number of most recent files per archive. This helps to determine which archives can be safely removed from the database. .TP 20 -m, --move <number> <number> changes the order of archives in the database. The first number is the number of the archive to move, while the second is the place it must be shifted to. Archive order is important: an old archive must have a smaller index than a recent archive. If you add archives to a database in the order they have been created, all should be fine.
Otherwise, if a file has a more recent version in an archive whose index is smaller, a warning will be issued (unless the -ai option is used). This can occur if by mistake you added an archive to the database in the wrong order (an old archive added after a recent one); in that case simply using the -m option will let you fix this mistake. If instead the problem concerns a single file (or a small set of files), you should wonder why this file has its modification date altered in a way that makes it pretend to be older than it really is. Checking for the signs of a rootkit may be a good idea. .TP 20 -c, --check checks the database consistency; in particular the date ordering is verified and warnings are issued for each file having a more recent version located in an archive with a smaller index inside the database. The -ai option makes the -c option useless. .TP 20 -N, --ignore-options-in-base Do not use the options stored in the database when calling dar for restoration. This option is only useful when restoring files from dar_manager, either directly (-r option) or using a batch file (-@ option, see below). .TP 20 -k, --ignore-when-removed By default, dar_manager does not ask dar to restore files that had been removed at the requested date (or in the latest state available). This is useful for example to restore a directory in the state it had at a given date (only files that existed at that time are restored). However, when you want to restore a file that was destroyed by accident, you need to use the -k option so that you do not have to determine at which date that file still existed in order to ask dar_manager to restore it in the state it had before that date. In other words, the -k option gives dar_manager a behavior backward compatible with the dar_manager released beside version 2.3.x of dar. .TP 20 -ai, --alter=ignore-order prevents dar_manager from issuing a warning for each file whose modification dates do not follow a chronological order as the archive number grows in the database. .TP 20 -@, --batch <batch_file> allows you to do several operations on a given database. All operations are defined in the provided <batch_file> and refer to the same database as defined by the -B switch on the command line. The batch file must thus contain neither the -B, -C, -i, -j nor -ai options (-j and -ai are global to the batch operation). The expected layout of the batch file is one command per line; several arguments (like -l -v for example) may thus take place on a given line of the file (-v can appear both on the command line, for verbose output about the batch operation steps, and inside the batch file, for verbose output of a particular batched command). Arguments are separated by spaces or tabs; other characters are passed as-is. Consequently, you should only need quotes (using " or ') if you intend to use an argument containing spaces. Last, comments may be placed on any line beginning with a hash character (#). See the example below. .TP 20 -Q Do not display any message on stderr when not launched from a terminal (for example when launched from an at job or crontab). Note that any question to the user will then be assumed to have a 'no' answer, which most of the time will abort the program. .TP 20 -j, --jog when virtual memory is exhausted, ask the user to make room before trying to continue. By default, when memory is exhausted the program aborts. .TP 20 -v, --verbose displays additional information about what it is doing.
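.P
As an illustration of the -@ option above (file and archive names are only examples), a batch file could contain:
.P
.RS
.nf
# add the latest differential archive, then list the database and show statistics
-A /var/backups/diff_friday
-l -v
-s
.fi
.RE
.P
and would be run with "dar_manager -B /var/backups/base.dmd -@ batch_file". Similarly, a direct restoration limited to a given date (see the -r and -w options above) could look like "dar_manager -B /var/backups/base.dmd -w 2012/03/07-22:10 -r home/user/lost_file".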
.TP 20 -h, --help displays help usage .TP 20 -V, --version displays software version .SH EXIT CODES .B dar_manager exits with the following codes: .TP 10 0 Operation successful. .TP 10 1 see the dar man page for its meaning .TP 10 2 see the dar man page for its meaning .TP 10 3 see the dar man page for its meaning .TP 10 5 see the dar man page for its meaning .TP 10 7 see the dar man page for its meaning .TP 10 8 see the dar man page for its meaning .TP 10 11 and above .B dar called from dar_manager has exited with a non-zero status. Subtract 10 from this exit code to get dar's exit code. .SH SIGNALS dar_manager acts like dar (see the dar man page for the list of signals); upon reception of certain signals it aborts cleanly. .SH SEE ALSO dar(1), dar_xform(1), dar_slave(1), dar_cp(1) .SH LIMITATIONS At most 65534 archives can be compiled in a given database, which should be enough for most users. Dar_manager does not support encrypted archives for now, nor can the database itself be encrypted. See the FAQ for a workaround. .SH KNOWN BUGS None currently. .SH AUTHOR .nf http://dar.linux.free.fr/ Denis Corbin France Europe dar-2.4.8/config.guess0000755000175000000000000012763711344453613011533 00000000000000#! /bin/sh # Attempt to guess a canonical system name. # Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, # 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 # Free Software Foundation, Inc. timestamp='2009-12-30' # This file is free software; you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street - Fifth Floor, Boston, MA # 02110-1301, USA. # # As a special exception to the GNU General Public License, if you # distribute this file as part of a program that contains a # configuration script generated by Autoconf, you may include it under # the same distribution terms that you use for the rest of that program. # Originally written by Per Bothner. Please send patches (context # diff format) to <config-patches@gnu.org> and include a ChangeLog # entry. # # This script attempts to guess a canonical system name similar to # config.sub. If it succeeds, it prints the system name on stdout, and # exits with 0. Otherwise, it exits with 1. # # You can get the latest version of this script from: # http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess;hb=HEAD me=`echo "$0" | sed -e 's,.*/,,'` usage="\ Usage: $0 [OPTION] Output the configuration name of the system \`$me' is run on. Operation modes: -h, --help print this help, then exit -t, --time-stamp print date of last modification, then exit -v, --version print version number, then exit Report bugs and patches to <config-patches@gnu.org>." version="\ GNU config.guess ($timestamp) Originally written by Per Bothner. Copyright (C) 1992, 1993, 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free Software Foundation, Inc. This is free software; see the source for copying conditions.
There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." help=" Try \`$me --help' for more information." # Parse command line while test $# -gt 0 ; do case $1 in --time-stamp | --time* | -t ) echo "$timestamp" ; exit ;; --version | -v ) echo "$version" ; exit ;; --help | --h* | -h ) echo "$usage"; exit ;; -- ) # Stop option processing shift; break ;; - ) # Use stdin as input. break ;; -* ) echo "$me: invalid option $1$help" >&2 exit 1 ;; * ) break ;; esac done if test $# != 0; then echo "$me: too many arguments$help" >&2 exit 1 fi trap 'exit 1' 1 2 15 # CC_FOR_BUILD -- compiler used by this script. Note that the use of a # compiler to aid in system detection is discouraged as it requires # temporary files to be created and, as you can see below, it is a # headache to deal with in a portable fashion. # Historically, `CC_FOR_BUILD' used to be named `HOST_CC'. We still # use `HOST_CC' if defined, but it is deprecated. # Portable tmp directory creation inspired by the Autoconf team. set_cc_for_build=' trap "exitcode=\$?; (rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null) && exit \$exitcode" 0 ; trap "rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null; exit 1" 1 2 13 15 ; : ${TMPDIR=/tmp} ; { tmp=`(umask 077 && mktemp -d "$TMPDIR/cgXXXXXX") 2>/dev/null` && test -n "$tmp" && test -d "$tmp" ; } || { test -n "$RANDOM" && tmp=$TMPDIR/cg$$-$RANDOM && (umask 077 && mkdir $tmp) ; } || { tmp=$TMPDIR/cg-$$ && (umask 077 && mkdir $tmp) && echo "Warning: creating insecure temp directory" >&2 ; } || { echo "$me: cannot create a temporary directory in $TMPDIR" >&2 ; exit 1 ; } ; dummy=$tmp/dummy ; tmpfiles="$dummy.c $dummy.o $dummy.rel $dummy" ; case $CC_FOR_BUILD,$HOST_CC,$CC in ,,) echo "int x;" > $dummy.c ; for c in cc gcc c89 c99 ; do if ($c -c -o $dummy.o $dummy.c) >/dev/null 2>&1 ; then CC_FOR_BUILD="$c"; break ; fi ; done ; if test x"$CC_FOR_BUILD" = x ; then CC_FOR_BUILD=no_compiler_found ; fi ;; ,,*) CC_FOR_BUILD=$CC ;; ,*,*) CC_FOR_BUILD=$HOST_CC ;; esac ; set_cc_for_build= ;' # This is needed to find uname on a Pyramid OSx when run in the BSD universe. # (ghazi@noc.rutgers.edu 1994-08-24) if (test -f /.attbin/uname) >/dev/null 2>&1 ; then PATH=$PATH:/.attbin ; export PATH fi UNAME_MACHINE=`(uname -m) 2>/dev/null` || UNAME_MACHINE=unknown UNAME_RELEASE=`(uname -r) 2>/dev/null` || UNAME_RELEASE=unknown UNAME_SYSTEM=`(uname -s) 2>/dev/null` || UNAME_SYSTEM=unknown UNAME_VERSION=`(uname -v) 2>/dev/null` || UNAME_VERSION=unknown # Note: order is significant - the case branches are not exclusive. case "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" in *:NetBSD:*:*) # NetBSD (nbsd) targets should (where applicable) match one or # more of the tupples: *-*-netbsdelf*, *-*-netbsdaout*, # *-*-netbsdecoff* and *-*-netbsd*. For targets that recently # switched to ELF, *-*-netbsd* would select the old # object file format. This provides both forward # compatibility and a consistent mechanism for selecting the # object file format. # # Note: NetBSD doesn't particularly care about the vendor # portion of the name. We always set it to "unknown". 
sysctl="sysctl -n hw.machine_arch" UNAME_MACHINE_ARCH=`(/sbin/$sysctl 2>/dev/null || \ /usr/sbin/$sysctl 2>/dev/null || echo unknown)` case "${UNAME_MACHINE_ARCH}" in armeb) machine=armeb-unknown ;; arm*) machine=arm-unknown ;; sh3el) machine=shl-unknown ;; sh3eb) machine=sh-unknown ;; sh5el) machine=sh5le-unknown ;; *) machine=${UNAME_MACHINE_ARCH}-unknown ;; esac # The Operating System including object format, if it has switched # to ELF recently, or will in the future. case "${UNAME_MACHINE_ARCH}" in arm*|i386|m68k|ns32k|sh3*|sparc|vax) eval $set_cc_for_build if echo __ELF__ | $CC_FOR_BUILD -E - 2>/dev/null \ | grep -q __ELF__ then # Once all utilities can be ECOFF (netbsdecoff) or a.out (netbsdaout). # Return netbsd for either. FIX? os=netbsd else os=netbsdelf fi ;; *) os=netbsd ;; esac # The OS release # Debian GNU/NetBSD machines have a different userland, and # thus, need a distinct triplet. However, they do not need # kernel version information, so it can be replaced with a # suitable tag, in the style of linux-gnu. case "${UNAME_VERSION}" in Debian*) release='-gnu' ;; *) release=`echo ${UNAME_RELEASE}|sed -e 's/[-_].*/\./'` ;; esac # Since CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM: # contains redundant information, the shorter form: # CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM is used. echo "${machine}-${os}${release}" exit ;; *:OpenBSD:*:*) UNAME_MACHINE_ARCH=`arch | sed 's/OpenBSD.//'` echo ${UNAME_MACHINE_ARCH}-unknown-openbsd${UNAME_RELEASE} exit ;; *:ekkoBSD:*:*) echo ${UNAME_MACHINE}-unknown-ekkobsd${UNAME_RELEASE} exit ;; *:SolidBSD:*:*) echo ${UNAME_MACHINE}-unknown-solidbsd${UNAME_RELEASE} exit ;; macppc:MirBSD:*:*) echo powerpc-unknown-mirbsd${UNAME_RELEASE} exit ;; *:MirBSD:*:*) echo ${UNAME_MACHINE}-unknown-mirbsd${UNAME_RELEASE} exit ;; alpha:OSF1:*:*) case $UNAME_RELEASE in *4.0) UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $3}'` ;; *5.*) UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $4}'` ;; esac # According to Compaq, /usr/sbin/psrinfo has been available on # OSF/1 and Tru64 systems produced since 1995. I hope that # covers most systems running today. This code pipes the CPU # types through head -n 1, so we only detect the type of CPU 0. ALPHA_CPU_TYPE=`/usr/sbin/psrinfo -v | sed -n -e 's/^ The alpha \(.*\) processor.*$/\1/p' | head -n 1` case "$ALPHA_CPU_TYPE" in "EV4 (21064)") UNAME_MACHINE="alpha" ;; "EV4.5 (21064)") UNAME_MACHINE="alpha" ;; "LCA4 (21066/21068)") UNAME_MACHINE="alpha" ;; "EV5 (21164)") UNAME_MACHINE="alphaev5" ;; "EV5.6 (21164A)") UNAME_MACHINE="alphaev56" ;; "EV5.6 (21164PC)") UNAME_MACHINE="alphapca56" ;; "EV5.7 (21164PC)") UNAME_MACHINE="alphapca57" ;; "EV6 (21264)") UNAME_MACHINE="alphaev6" ;; "EV6.7 (21264A)") UNAME_MACHINE="alphaev67" ;; "EV6.8CB (21264C)") UNAME_MACHINE="alphaev68" ;; "EV6.8AL (21264B)") UNAME_MACHINE="alphaev68" ;; "EV6.8CX (21264D)") UNAME_MACHINE="alphaev68" ;; "EV6.9A (21264/EV69A)") UNAME_MACHINE="alphaev69" ;; "EV7 (21364)") UNAME_MACHINE="alphaev7" ;; "EV7.9 (21364A)") UNAME_MACHINE="alphaev79" ;; esac # A Pn.n version is a patched version. # A Vn.n version is a released version. # A Tn.n version is a released field test version. # A Xn.n version is an unreleased experimental baselevel. # 1.2 uses "1.2" for uname -r. echo ${UNAME_MACHINE}-dec-osf`echo ${UNAME_RELEASE} | sed -e 's/^[PVTX]//' | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'` exit ;; Alpha\ *:Windows_NT*:*) # How do we know it's Interix rather than the generic POSIX subsystem? 
# Should we change UNAME_MACHINE based on the output of uname instead # of the specific Alpha model? echo alpha-pc-interix exit ;; 21064:Windows_NT:50:3) echo alpha-dec-winnt3.5 exit ;; Amiga*:UNIX_System_V:4.0:*) echo m68k-unknown-sysv4 exit ;; *:[Aa]miga[Oo][Ss]:*:*) echo ${UNAME_MACHINE}-unknown-amigaos exit ;; *:[Mm]orph[Oo][Ss]:*:*) echo ${UNAME_MACHINE}-unknown-morphos exit ;; *:OS/390:*:*) echo i370-ibm-openedition exit ;; *:z/VM:*:*) echo s390-ibm-zvmoe exit ;; *:OS400:*:*) echo powerpc-ibm-os400 exit ;; arm:RISC*:1.[012]*:*|arm:riscix:1.[012]*:*) echo arm-acorn-riscix${UNAME_RELEASE} exit ;; arm:riscos:*:*|arm:RISCOS:*:*) echo arm-unknown-riscos exit ;; SR2?01:HI-UX/MPP:*:* | SR8000:HI-UX/MPP:*:*) echo hppa1.1-hitachi-hiuxmpp exit ;; Pyramid*:OSx*:*:* | MIS*:OSx*:*:* | MIS*:SMP_DC-OSx*:*:*) # akee@wpdis03.wpafb.af.mil (Earle F. Ake) contributed MIS and NILE. if test "`(/bin/universe) 2>/dev/null`" = att ; then echo pyramid-pyramid-sysv3 else echo pyramid-pyramid-bsd fi exit ;; NILE*:*:*:dcosx) echo pyramid-pyramid-svr4 exit ;; DRS?6000:unix:4.0:6*) echo sparc-icl-nx6 exit ;; DRS?6000:UNIX_SV:4.2*:7* | DRS?6000:isis:4.2*:7*) case `/usr/bin/uname -p` in sparc) echo sparc-icl-nx7; exit ;; esac ;; s390x:SunOS:*:*) echo ${UNAME_MACHINE}-ibm-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` exit ;; sun4H:SunOS:5.*:*) echo sparc-hal-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` exit ;; sun4*:SunOS:5.*:* | tadpole*:SunOS:5.*:*) echo sparc-sun-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` exit ;; i86pc:AuroraUX:5.*:* | i86xen:AuroraUX:5.*:*) echo i386-pc-auroraux${UNAME_RELEASE} exit ;; i86pc:SunOS:5.*:* | i86xen:SunOS:5.*:*) eval $set_cc_for_build SUN_ARCH="i386" # If there is a compiler, see if it is configured for 64-bit objects. # Note that the Sun cc does not turn __LP64__ into 1 like gcc does. # This test works for both compilers. if [ "$CC_FOR_BUILD" != 'no_compiler_found' ]; then if (echo '#ifdef __amd64'; echo IS_64BIT_ARCH; echo '#endif') | \ (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | \ grep IS_64BIT_ARCH >/dev/null then SUN_ARCH="x86_64" fi fi echo ${SUN_ARCH}-pc-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` exit ;; sun4*:SunOS:6*:*) # According to config.sub, this is the proper way to canonicalize # SunOS6. Hard to guess exactly what SunOS6 will be like, but # it's likely to be more like Solaris than SunOS4. echo sparc-sun-solaris3`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` exit ;; sun4*:SunOS:*:*) case "`/usr/bin/arch -k`" in Series*|S4*) UNAME_RELEASE=`uname -v` ;; esac # Japanese Language versions have a version number like `4.1.3-JL'. echo sparc-sun-sunos`echo ${UNAME_RELEASE}|sed -e 's/-/_/'` exit ;; sun3*:SunOS:*:*) echo m68k-sun-sunos${UNAME_RELEASE} exit ;; sun*:*:4.2BSD:*) UNAME_RELEASE=`(sed 1q /etc/motd | awk '{print substr($5,1,3)}') 2>/dev/null` test "x${UNAME_RELEASE}" = "x" && UNAME_RELEASE=3 case "`/bin/arch`" in sun3) echo m68k-sun-sunos${UNAME_RELEASE} ;; sun4) echo sparc-sun-sunos${UNAME_RELEASE} ;; esac exit ;; aushp:SunOS:*:*) echo sparc-auspex-sunos${UNAME_RELEASE} exit ;; # The situation for MiNT is a little confusing. The machine name # can be virtually everything (everything which is not # "atarist" or "atariste" at least should have a processor # > m68000). The system name ranges from "MiNT" over "FreeMiNT" # to the lowercase version "mint" (or "freemint"). Finally # the system name "TOS" denotes a system which is actually not # MiNT. But MiNT is downward compatible to TOS, so this should # be no problem. 
atarist[e]:*MiNT:*:* | atarist[e]:*mint:*:* | atarist[e]:*TOS:*:*) echo m68k-atari-mint${UNAME_RELEASE} exit ;; atari*:*MiNT:*:* | atari*:*mint:*:* | atarist[e]:*TOS:*:*) echo m68k-atari-mint${UNAME_RELEASE} exit ;; *falcon*:*MiNT:*:* | *falcon*:*mint:*:* | *falcon*:*TOS:*:*) echo m68k-atari-mint${UNAME_RELEASE} exit ;; milan*:*MiNT:*:* | milan*:*mint:*:* | *milan*:*TOS:*:*) echo m68k-milan-mint${UNAME_RELEASE} exit ;; hades*:*MiNT:*:* | hades*:*mint:*:* | *hades*:*TOS:*:*) echo m68k-hades-mint${UNAME_RELEASE} exit ;; *:*MiNT:*:* | *:*mint:*:* | *:*TOS:*:*) echo m68k-unknown-mint${UNAME_RELEASE} exit ;; m68k:machten:*:*) echo m68k-apple-machten${UNAME_RELEASE} exit ;; powerpc:machten:*:*) echo powerpc-apple-machten${UNAME_RELEASE} exit ;; RISC*:Mach:*:*) echo mips-dec-mach_bsd4.3 exit ;; RISC*:ULTRIX:*:*) echo mips-dec-ultrix${UNAME_RELEASE} exit ;; VAX*:ULTRIX*:*:*) echo vax-dec-ultrix${UNAME_RELEASE} exit ;; 2020:CLIX:*:* | 2430:CLIX:*:*) echo clipper-intergraph-clix${UNAME_RELEASE} exit ;; mips:*:*:UMIPS | mips:*:*:RISCos) eval $set_cc_for_build sed 's/^ //' << EOF >$dummy.c #ifdef __cplusplus #include /* for printf() prototype */ int main (int argc, char *argv[]) { #else int main (argc, argv) int argc; char *argv[]; { #endif #if defined (host_mips) && defined (MIPSEB) #if defined (SYSTYPE_SYSV) printf ("mips-mips-riscos%ssysv\n", argv[1]); exit (0); #endif #if defined (SYSTYPE_SVR4) printf ("mips-mips-riscos%ssvr4\n", argv[1]); exit (0); #endif #if defined (SYSTYPE_BSD43) || defined(SYSTYPE_BSD) printf ("mips-mips-riscos%sbsd\n", argv[1]); exit (0); #endif #endif exit (-1); } EOF $CC_FOR_BUILD -o $dummy $dummy.c && dummyarg=`echo "${UNAME_RELEASE}" | sed -n 's/\([0-9]*\).*/\1/p'` && SYSTEM_NAME=`$dummy $dummyarg` && { echo "$SYSTEM_NAME"; exit; } echo mips-mips-riscos${UNAME_RELEASE} exit ;; Motorola:PowerMAX_OS:*:*) echo powerpc-motorola-powermax exit ;; Motorola:*:4.3:PL8-*) echo powerpc-harris-powermax exit ;; Night_Hawk:*:*:PowerMAX_OS | Synergy:PowerMAX_OS:*:*) echo powerpc-harris-powermax exit ;; Night_Hawk:Power_UNIX:*:*) echo powerpc-harris-powerunix exit ;; m88k:CX/UX:7*:*) echo m88k-harris-cxux7 exit ;; m88k:*:4*:R4*) echo m88k-motorola-sysv4 exit ;; m88k:*:3*:R3*) echo m88k-motorola-sysv3 exit ;; AViiON:dgux:*:*) # DG/UX returns AViiON for all architectures UNAME_PROCESSOR=`/usr/bin/uname -p` if [ $UNAME_PROCESSOR = mc88100 ] || [ $UNAME_PROCESSOR = mc88110 ] then if [ ${TARGET_BINARY_INTERFACE}x = m88kdguxelfx ] || \ [ ${TARGET_BINARY_INTERFACE}x = x ] then echo m88k-dg-dgux${UNAME_RELEASE} else echo m88k-dg-dguxbcs${UNAME_RELEASE} fi else echo i586-dg-dgux${UNAME_RELEASE} fi exit ;; M88*:DolphinOS:*:*) # DolphinOS (SVR3) echo m88k-dolphin-sysv3 exit ;; M88*:*:R3*:*) # Delta 88k system running SVR3 echo m88k-motorola-sysv3 exit ;; XD88*:*:*:*) # Tektronix XD88 system running UTekV (SVR3) echo m88k-tektronix-sysv3 exit ;; Tek43[0-9][0-9]:UTek:*:*) # Tektronix 4300 system running UTek (BSD) echo m68k-tektronix-bsd exit ;; *:IRIX*:*:*) echo mips-sgi-irix`echo ${UNAME_RELEASE}|sed -e 's/-/_/g'` exit ;; ????????:AIX?:[12].1:2) # AIX 2.2.1 or AIX 2.1.1 is RT/PC AIX. 
echo romp-ibm-aix # uname -m gives an 8 hex-code CPU id exit ;; # Note that: echo "'`uname -s`'" gives 'AIX ' i*86:AIX:*:*) echo i386-ibm-aix exit ;; ia64:AIX:*:*) if [ -x /usr/bin/oslevel ] ; then IBM_REV=`/usr/bin/oslevel` else IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE} fi echo ${UNAME_MACHINE}-ibm-aix${IBM_REV} exit ;; *:AIX:2:3) if grep bos325 /usr/include/stdio.h >/dev/null 2>&1; then eval $set_cc_for_build sed 's/^ //' << EOF >$dummy.c #include main() { if (!__power_pc()) exit(1); puts("powerpc-ibm-aix3.2.5"); exit(0); } EOF if $CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy` then echo "$SYSTEM_NAME" else echo rs6000-ibm-aix3.2.5 fi elif grep bos324 /usr/include/stdio.h >/dev/null 2>&1; then echo rs6000-ibm-aix3.2.4 else echo rs6000-ibm-aix3.2 fi exit ;; *:AIX:*:[456]) IBM_CPU_ID=`/usr/sbin/lsdev -C -c processor -S available | sed 1q | awk '{ print $1 }'` if /usr/sbin/lsattr -El ${IBM_CPU_ID} | grep ' POWER' >/dev/null 2>&1; then IBM_ARCH=rs6000 else IBM_ARCH=powerpc fi if [ -x /usr/bin/oslevel ] ; then IBM_REV=`/usr/bin/oslevel` else IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE} fi echo ${IBM_ARCH}-ibm-aix${IBM_REV} exit ;; *:AIX:*:*) echo rs6000-ibm-aix exit ;; ibmrt:4.4BSD:*|romp-ibm:BSD:*) echo romp-ibm-bsd4.4 exit ;; ibmrt:*BSD:*|romp-ibm:BSD:*) # covers RT/PC BSD and echo romp-ibm-bsd${UNAME_RELEASE} # 4.3 with uname added to exit ;; # report: romp-ibm BSD 4.3 *:BOSX:*:*) echo rs6000-bull-bosx exit ;; DPX/2?00:B.O.S.:*:*) echo m68k-bull-sysv3 exit ;; 9000/[34]??:4.3bsd:1.*:*) echo m68k-hp-bsd exit ;; hp300:4.4BSD:*:* | 9000/[34]??:4.3bsd:2.*:*) echo m68k-hp-bsd4.4 exit ;; 9000/[34678]??:HP-UX:*:*) HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'` case "${UNAME_MACHINE}" in 9000/31? ) HP_ARCH=m68000 ;; 9000/[34]?? ) HP_ARCH=m68k ;; 9000/[678][0-9][0-9]) if [ -x /usr/bin/getconf ]; then sc_cpu_version=`/usr/bin/getconf SC_CPU_VERSION 2>/dev/null` sc_kernel_bits=`/usr/bin/getconf SC_KERNEL_BITS 2>/dev/null` case "${sc_cpu_version}" in 523) HP_ARCH="hppa1.0" ;; # CPU_PA_RISC1_0 528) HP_ARCH="hppa1.1" ;; # CPU_PA_RISC1_1 532) # CPU_PA_RISC2_0 case "${sc_kernel_bits}" in 32) HP_ARCH="hppa2.0n" ;; 64) HP_ARCH="hppa2.0w" ;; '') HP_ARCH="hppa2.0" ;; # HP-UX 10.20 esac ;; esac fi if [ "${HP_ARCH}" = "" ]; then eval $set_cc_for_build sed 's/^ //' << EOF >$dummy.c #define _HPUX_SOURCE #include #include int main () { #if defined(_SC_KERNEL_BITS) long bits = sysconf(_SC_KERNEL_BITS); #endif long cpu = sysconf (_SC_CPU_VERSION); switch (cpu) { case CPU_PA_RISC1_0: puts ("hppa1.0"); break; case CPU_PA_RISC1_1: puts ("hppa1.1"); break; case CPU_PA_RISC2_0: #if defined(_SC_KERNEL_BITS) switch (bits) { case 64: puts ("hppa2.0w"); break; case 32: puts ("hppa2.0n"); break; default: puts ("hppa2.0"); break; } break; #else /* !defined(_SC_KERNEL_BITS) */ puts ("hppa2.0"); break; #endif default: puts ("hppa1.0"); break; } exit (0); } EOF (CCOPTS= $CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null) && HP_ARCH=`$dummy` test -z "$HP_ARCH" && HP_ARCH=hppa fi ;; esac if [ ${HP_ARCH} = "hppa2.0w" ] then eval $set_cc_for_build # hppa2.0w-hp-hpux* has a 64-bit kernel and a compiler generating # 32-bit code. hppa64-hp-hpux* has the same kernel and a compiler # generating 64-bit code. 
GNU and HP use different nomenclature: # # $ CC_FOR_BUILD=cc ./config.guess # => hppa2.0w-hp-hpux11.23 # $ CC_FOR_BUILD="cc +DA2.0w" ./config.guess # => hppa64-hp-hpux11.23 if echo __LP64__ | (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | grep -q __LP64__ then HP_ARCH="hppa2.0w" else HP_ARCH="hppa64" fi fi echo ${HP_ARCH}-hp-hpux${HPUX_REV} exit ;; ia64:HP-UX:*:*) HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'` echo ia64-hp-hpux${HPUX_REV} exit ;; 3050*:HI-UX:*:*) eval $set_cc_for_build sed 's/^ //' << EOF >$dummy.c #include int main () { long cpu = sysconf (_SC_CPU_VERSION); /* The order matters, because CPU_IS_HP_MC68K erroneously returns true for CPU_PA_RISC1_0. CPU_IS_PA_RISC returns correct results, however. */ if (CPU_IS_PA_RISC (cpu)) { switch (cpu) { case CPU_PA_RISC1_0: puts ("hppa1.0-hitachi-hiuxwe2"); break; case CPU_PA_RISC1_1: puts ("hppa1.1-hitachi-hiuxwe2"); break; case CPU_PA_RISC2_0: puts ("hppa2.0-hitachi-hiuxwe2"); break; default: puts ("hppa-hitachi-hiuxwe2"); break; } } else if (CPU_IS_HP_MC68K (cpu)) puts ("m68k-hitachi-hiuxwe2"); else puts ("unknown-hitachi-hiuxwe2"); exit (0); } EOF $CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy` && { echo "$SYSTEM_NAME"; exit; } echo unknown-hitachi-hiuxwe2 exit ;; 9000/7??:4.3bsd:*:* | 9000/8?[79]:4.3bsd:*:* ) echo hppa1.1-hp-bsd exit ;; 9000/8??:4.3bsd:*:*) echo hppa1.0-hp-bsd exit ;; *9??*:MPE/iX:*:* | *3000*:MPE/iX:*:*) echo hppa1.0-hp-mpeix exit ;; hp7??:OSF1:*:* | hp8?[79]:OSF1:*:* ) echo hppa1.1-hp-osf exit ;; hp8??:OSF1:*:*) echo hppa1.0-hp-osf exit ;; i*86:OSF1:*:*) if [ -x /usr/sbin/sysversion ] ; then echo ${UNAME_MACHINE}-unknown-osf1mk else echo ${UNAME_MACHINE}-unknown-osf1 fi exit ;; parisc*:Lites*:*:*) echo hppa1.1-hp-lites exit ;; C1*:ConvexOS:*:* | convex:ConvexOS:C1*:*) echo c1-convex-bsd exit ;; C2*:ConvexOS:*:* | convex:ConvexOS:C2*:*) if getsysinfo -f scalar_acc then echo c32-convex-bsd else echo c2-convex-bsd fi exit ;; C34*:ConvexOS:*:* | convex:ConvexOS:C34*:*) echo c34-convex-bsd exit ;; C38*:ConvexOS:*:* | convex:ConvexOS:C38*:*) echo c38-convex-bsd exit ;; C4*:ConvexOS:*:* | convex:ConvexOS:C4*:*) echo c4-convex-bsd exit ;; CRAY*Y-MP:*:*:*) echo ymp-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' exit ;; CRAY*[A-Z]90:*:*:*) echo ${UNAME_MACHINE}-cray-unicos${UNAME_RELEASE} \ | sed -e 's/CRAY.*\([A-Z]90\)/\1/' \ -e y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/ \ -e 's/\.[^.]*$/.X/' exit ;; CRAY*TS:*:*:*) echo t90-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' exit ;; CRAY*T3E:*:*:*) echo alphaev5-cray-unicosmk${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' exit ;; CRAY*SV1:*:*:*) echo sv1-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' exit ;; *:UNICOS/mp:*:*) echo craynv-cray-unicosmp${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' exit ;; F30[01]:UNIX_System_V:*:* | F700:UNIX_System_V:*:*) FUJITSU_PROC=`uname -m | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz'` FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'` FUJITSU_REL=`echo ${UNAME_RELEASE} | sed -e 's/ /_/'` echo "${FUJITSU_PROC}-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}" exit ;; 5000:UNIX_System_V:4.*:*) FUJITSU_SYS=`uname -p | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/\///'` FUJITSU_REL=`echo ${UNAME_RELEASE} | tr 'ABCDEFGHIJKLMNOPQRSTUVWXYZ' 'abcdefghijklmnopqrstuvwxyz' | sed -e 's/ /_/'` echo "sparc-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}" exit ;; i*86:BSD/386:*:* | i*86:BSD/OS:*:* | *:Ascend\ Embedded/OS:*:*) echo 
${UNAME_MACHINE}-pc-bsdi${UNAME_RELEASE} exit ;; sparc*:BSD/OS:*:*) echo sparc-unknown-bsdi${UNAME_RELEASE} exit ;; *:BSD/OS:*:*) echo ${UNAME_MACHINE}-unknown-bsdi${UNAME_RELEASE} exit ;; *:FreeBSD:*:*) case ${UNAME_MACHINE} in pc98) echo i386-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;; amd64) echo x86_64-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;; *) echo ${UNAME_MACHINE}-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;; esac exit ;; i*:CYGWIN*:*) echo ${UNAME_MACHINE}-pc-cygwin exit ;; *:MINGW*:*) echo ${UNAME_MACHINE}-pc-mingw32 exit ;; i*:windows32*:*) # uname -m includes "-pc" on this system. echo ${UNAME_MACHINE}-mingw32 exit ;; i*:PW*:*) echo ${UNAME_MACHINE}-pc-pw32 exit ;; *:Interix*:*) case ${UNAME_MACHINE} in x86) echo i586-pc-interix${UNAME_RELEASE} exit ;; authenticamd | genuineintel | EM64T) echo x86_64-unknown-interix${UNAME_RELEASE} exit ;; IA64) echo ia64-unknown-interix${UNAME_RELEASE} exit ;; esac ;; [345]86:Windows_95:* | [345]86:Windows_98:* | [345]86:Windows_NT:*) echo i${UNAME_MACHINE}-pc-mks exit ;; 8664:Windows_NT:*) echo x86_64-pc-mks exit ;; i*:Windows_NT*:* | Pentium*:Windows_NT*:*) # How do we know it's Interix rather than the generic POSIX subsystem? # It also conflicts with pre-2.0 versions of AT&T UWIN. Should we # UNAME_MACHINE based on the output of uname instead of i386? echo i586-pc-interix exit ;; i*:UWIN*:*) echo ${UNAME_MACHINE}-pc-uwin exit ;; amd64:CYGWIN*:*:* | x86_64:CYGWIN*:*:*) echo x86_64-unknown-cygwin exit ;; p*:CYGWIN*:*) echo powerpcle-unknown-cygwin exit ;; prep*:SunOS:5.*:*) echo powerpcle-unknown-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` exit ;; *:GNU:*:*) # the GNU system echo `echo ${UNAME_MACHINE}|sed -e 's,[-/].*$,,'`-unknown-gnu`echo ${UNAME_RELEASE}|sed -e 's,/.*$,,'` exit ;; *:GNU/*:*:*) # other systems with GNU libc and userland echo ${UNAME_MACHINE}-unknown-`echo ${UNAME_SYSTEM} | sed 's,^[^/]*/,,' | tr '[A-Z]' '[a-z]'``echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`-gnu exit ;; i*86:Minix:*:*) echo ${UNAME_MACHINE}-pc-minix exit ;; alpha:Linux:*:*) case `sed -n '/^cpu model/s/^.*: \(.*\)/\1/p' < /proc/cpuinfo` in EV5) UNAME_MACHINE=alphaev5 ;; EV56) UNAME_MACHINE=alphaev56 ;; PCA56) UNAME_MACHINE=alphapca56 ;; PCA57) UNAME_MACHINE=alphapca56 ;; EV6) UNAME_MACHINE=alphaev6 ;; EV67) UNAME_MACHINE=alphaev67 ;; EV68*) UNAME_MACHINE=alphaev68 ;; esac objdump --private-headers /bin/sh | grep -q ld.so.1 if test "$?" 
= 0 ; then LIBC="libc1" ; else LIBC="" ; fi echo ${UNAME_MACHINE}-unknown-linux-gnu${LIBC} exit ;; arm*:Linux:*:*) eval $set_cc_for_build if echo __ARM_EABI__ | $CC_FOR_BUILD -E - 2>/dev/null \ | grep -q __ARM_EABI__ then echo ${UNAME_MACHINE}-unknown-linux-gnu else echo ${UNAME_MACHINE}-unknown-linux-gnueabi fi exit ;; avr32*:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-gnu exit ;; cris:Linux:*:*) echo cris-axis-linux-gnu exit ;; crisv32:Linux:*:*) echo crisv32-axis-linux-gnu exit ;; frv:Linux:*:*) echo frv-unknown-linux-gnu exit ;; i*86:Linux:*:*) LIBC=gnu eval $set_cc_for_build sed 's/^ //' << EOF >$dummy.c #ifdef __dietlibc__ LIBC=dietlibc #endif EOF eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^LIBC'` echo "${UNAME_MACHINE}-pc-linux-${LIBC}" exit ;; ia64:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-gnu exit ;; m32r*:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-gnu exit ;; m68*:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-gnu exit ;; mips:Linux:*:* | mips64:Linux:*:*) eval $set_cc_for_build sed 's/^ //' << EOF >$dummy.c #undef CPU #undef ${UNAME_MACHINE} #undef ${UNAME_MACHINE}el #if defined(__MIPSEL__) || defined(__MIPSEL) || defined(_MIPSEL) || defined(MIPSEL) CPU=${UNAME_MACHINE}el #else #if defined(__MIPSEB__) || defined(__MIPSEB) || defined(_MIPSEB) || defined(MIPSEB) CPU=${UNAME_MACHINE} #else CPU= #endif #endif EOF eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^CPU'` test x"${CPU}" != x && { echo "${CPU}-unknown-linux-gnu"; exit; } ;; or32:Linux:*:*) echo or32-unknown-linux-gnu exit ;; padre:Linux:*:*) echo sparc-unknown-linux-gnu exit ;; parisc64:Linux:*:* | hppa64:Linux:*:*) echo hppa64-unknown-linux-gnu exit ;; parisc:Linux:*:* | hppa:Linux:*:*) # Look for CPU level case `grep '^cpu[^a-z]*:' /proc/cpuinfo 2>/dev/null | cut -d' ' -f2` in PA7*) echo hppa1.1-unknown-linux-gnu ;; PA8*) echo hppa2.0-unknown-linux-gnu ;; *) echo hppa-unknown-linux-gnu ;; esac exit ;; ppc64:Linux:*:*) echo powerpc64-unknown-linux-gnu exit ;; ppc:Linux:*:*) echo powerpc-unknown-linux-gnu exit ;; s390:Linux:*:* | s390x:Linux:*:*) echo ${UNAME_MACHINE}-ibm-linux exit ;; sh64*:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-gnu exit ;; sh*:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-gnu exit ;; sparc:Linux:*:* | sparc64:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-gnu exit ;; vax:Linux:*:*) echo ${UNAME_MACHINE}-dec-linux-gnu exit ;; x86_64:Linux:*:*) echo x86_64-unknown-linux-gnu exit ;; xtensa*:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-gnu exit ;; i*86:DYNIX/ptx:4*:*) # ptx 4.0 does uname -s correctly, with DYNIX/ptx in there. # earlier versions are messed up and put the nodename in both # sysname and nodename. echo i386-sequent-sysv4 exit ;; i*86:UNIX_SV:4.2MP:2.*) # Unixware is an offshoot of SVR4, but it has its own version # number series starting with 2... # I am not positive that other SVR4 systems won't match this, # I just have to hope. -- rms. # Use sysv4.2uw... so that sysv4* matches it. echo ${UNAME_MACHINE}-pc-sysv4.2uw${UNAME_VERSION} exit ;; i*86:OS/2:*:*) # If we were able to find `uname', then EMX Unix compatibility # is probably installed. 
echo ${UNAME_MACHINE}-pc-os2-emx exit ;; i*86:XTS-300:*:STOP) echo ${UNAME_MACHINE}-unknown-stop exit ;; i*86:atheos:*:*) echo ${UNAME_MACHINE}-unknown-atheos exit ;; i*86:syllable:*:*) echo ${UNAME_MACHINE}-pc-syllable exit ;; i*86:LynxOS:2.*:* | i*86:LynxOS:3.[01]*:* | i*86:LynxOS:4.[02]*:*) echo i386-unknown-lynxos${UNAME_RELEASE} exit ;; i*86:*DOS:*:*) echo ${UNAME_MACHINE}-pc-msdosdjgpp exit ;; i*86:*:4.*:* | i*86:SYSTEM_V:4.*:*) UNAME_REL=`echo ${UNAME_RELEASE} | sed 's/\/MP$//'` if grep Novell /usr/include/link.h >/dev/null 2>/dev/null; then echo ${UNAME_MACHINE}-univel-sysv${UNAME_REL} else echo ${UNAME_MACHINE}-pc-sysv${UNAME_REL} fi exit ;; i*86:*:5:[678]*) # UnixWare 7.x, OpenUNIX and OpenServer 6. case `/bin/uname -X | grep "^Machine"` in *486*) UNAME_MACHINE=i486 ;; *Pentium) UNAME_MACHINE=i586 ;; *Pent*|*Celeron) UNAME_MACHINE=i686 ;; esac echo ${UNAME_MACHINE}-unknown-sysv${UNAME_RELEASE}${UNAME_SYSTEM}${UNAME_VERSION} exit ;; i*86:*:3.2:*) if test -f /usr/options/cb.name; then UNAME_REL=`sed -n 's/.*Version //p' /dev/null >/dev/null ; then UNAME_REL=`(/bin/uname -X|grep Release|sed -e 's/.*= //')` (/bin/uname -X|grep i80486 >/dev/null) && UNAME_MACHINE=i486 (/bin/uname -X|grep '^Machine.*Pentium' >/dev/null) \ && UNAME_MACHINE=i586 (/bin/uname -X|grep '^Machine.*Pent *II' >/dev/null) \ && UNAME_MACHINE=i686 (/bin/uname -X|grep '^Machine.*Pentium Pro' >/dev/null) \ && UNAME_MACHINE=i686 echo ${UNAME_MACHINE}-pc-sco$UNAME_REL else echo ${UNAME_MACHINE}-pc-sysv32 fi exit ;; pc:*:*:*) # Left here for compatibility: # uname -m prints for DJGPP always 'pc', but it prints nothing about # the processor, so we play safe by assuming i586. # Note: whatever this is, it MUST be the same as what config.sub # prints for the "djgpp" host, or else GDB configury will decide that # this is a cross-build. echo i586-pc-msdosdjgpp exit ;; Intel:Mach:3*:*) echo i386-pc-mach3 exit ;; paragon:*:*:*) echo i860-intel-osf1 exit ;; i860:*:4.*:*) # i860-SVR4 if grep Stardent /usr/include/sys/uadmin.h >/dev/null 2>&1 ; then echo i860-stardent-sysv${UNAME_RELEASE} # Stardent Vistra i860-SVR4 else # Add other i860-SVR4 vendors below as they are discovered. 
echo i860-unknown-sysv${UNAME_RELEASE} # Unknown i860-SVR4 fi exit ;; mini*:CTIX:SYS*5:*) # "miniframe" echo m68010-convergent-sysv exit ;; mc68k:UNIX:SYSTEM5:3.51m) echo m68k-convergent-sysv exit ;; M680?0:D-NIX:5.3:*) echo m68k-diab-dnix exit ;; M68*:*:R3V[5678]*:*) test -r /sysV68 && { echo 'm68k-motorola-sysv'; exit; } ;; 3[345]??:*:4.0:3.0 | 3[34]??A:*:4.0:3.0 | 3[34]??,*:*:4.0:3.0 | 3[34]??/*:*:4.0:3.0 | 4400:*:4.0:3.0 | 4850:*:4.0:3.0 | SKA40:*:4.0:3.0 | SDS2:*:4.0:3.0 | SHG2:*:4.0:3.0 | S7501*:*:4.0:3.0) OS_REL='' test -r /etc/.relid \ && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid` /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ && { echo i486-ncr-sysv4.3${OS_REL}; exit; } /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \ && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;; 3[34]??:*:4.0:* | 3[34]??,*:*:4.0:*) /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ && { echo i486-ncr-sysv4; exit; } ;; NCR*:*:4.2:* | MPRAS*:*:4.2:*) OS_REL='.3' test -r /etc/.relid \ && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid` /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ && { echo i486-ncr-sysv4.3${OS_REL}; exit; } /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \ && { echo i586-ncr-sysv4.3${OS_REL}; exit; } /bin/uname -p 2>/dev/null | /bin/grep pteron >/dev/null \ && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;; m68*:LynxOS:2.*:* | m68*:LynxOS:3.0*:*) echo m68k-unknown-lynxos${UNAME_RELEASE} exit ;; mc68030:UNIX_System_V:4.*:*) echo m68k-atari-sysv4 exit ;; TSUNAMI:LynxOS:2.*:*) echo sparc-unknown-lynxos${UNAME_RELEASE} exit ;; rs6000:LynxOS:2.*:*) echo rs6000-unknown-lynxos${UNAME_RELEASE} exit ;; PowerPC:LynxOS:2.*:* | PowerPC:LynxOS:3.[01]*:* | PowerPC:LynxOS:4.[02]*:*) echo powerpc-unknown-lynxos${UNAME_RELEASE} exit ;; SM[BE]S:UNIX_SV:*:*) echo mips-dde-sysv${UNAME_RELEASE} exit ;; RM*:ReliantUNIX-*:*:*) echo mips-sni-sysv4 exit ;; RM*:SINIX-*:*:*) echo mips-sni-sysv4 exit ;; *:SINIX-*:*:*) if uname -p 2>/dev/null >/dev/null ; then UNAME_MACHINE=`(uname -p) 2>/dev/null` echo ${UNAME_MACHINE}-sni-sysv4 else echo ns32k-sni-sysv fi exit ;; PENTIUM:*:4.0*:*) # Unisys `ClearPath HMP IX 4000' SVR4/MP effort # says echo i586-unisys-sysv4 exit ;; *:UNIX_System_V:4*:FTX*) # From Gerald Hewes . # How about differentiating between stratus architectures? -djm echo hppa1.1-stratus-sysv4 exit ;; *:*:*:FTX*) # From seanf@swdc.stratus.com. echo i860-stratus-sysv4 exit ;; i*86:VOS:*:*) # From Paul.Green@stratus.com. echo ${UNAME_MACHINE}-stratus-vos exit ;; *:VOS:*:*) # From Paul.Green@stratus.com. echo hppa1.1-stratus-vos exit ;; mc68*:A/UX:*:*) echo m68k-apple-aux${UNAME_RELEASE} exit ;; news*:NEWS-OS:6*:*) echo mips-sony-newsos6 exit ;; R[34]000:*System_V*:*:* | R4000:UNIX_SYSV:*:* | R*000:UNIX_SV:*:*) if [ -d /usr/nec ]; then echo mips-nec-sysv${UNAME_RELEASE} else echo mips-unknown-sysv${UNAME_RELEASE} fi exit ;; BeBox:BeOS:*:*) # BeOS running on hardware made by Be, PPC only. echo powerpc-be-beos exit ;; BeMac:BeOS:*:*) # BeOS running on Mac or Mac clone, PPC only. echo powerpc-apple-beos exit ;; BePC:BeOS:*:*) # BeOS running on Intel PC compatible. echo i586-pc-beos exit ;; BePC:Haiku:*:*) # Haiku running on Intel PC compatible. 
echo i586-pc-haiku exit ;; SX-4:SUPER-UX:*:*) echo sx4-nec-superux${UNAME_RELEASE} exit ;; SX-5:SUPER-UX:*:*) echo sx5-nec-superux${UNAME_RELEASE} exit ;; SX-6:SUPER-UX:*:*) echo sx6-nec-superux${UNAME_RELEASE} exit ;; SX-7:SUPER-UX:*:*) echo sx7-nec-superux${UNAME_RELEASE} exit ;; SX-8:SUPER-UX:*:*) echo sx8-nec-superux${UNAME_RELEASE} exit ;; SX-8R:SUPER-UX:*:*) echo sx8r-nec-superux${UNAME_RELEASE} exit ;; Power*:Rhapsody:*:*) echo powerpc-apple-rhapsody${UNAME_RELEASE} exit ;; *:Rhapsody:*:*) echo ${UNAME_MACHINE}-apple-rhapsody${UNAME_RELEASE} exit ;; *:Darwin:*:*) UNAME_PROCESSOR=`uname -p` || UNAME_PROCESSOR=unknown case $UNAME_PROCESSOR in i386) eval $set_cc_for_build if [ "$CC_FOR_BUILD" != 'no_compiler_found' ]; then if (echo '#ifdef __LP64__'; echo IS_64BIT_ARCH; echo '#endif') | \ (CCOPTS= $CC_FOR_BUILD -E - 2>/dev/null) | \ grep IS_64BIT_ARCH >/dev/null then UNAME_PROCESSOR="x86_64" fi fi ;; unknown) UNAME_PROCESSOR=powerpc ;; esac echo ${UNAME_PROCESSOR}-apple-darwin${UNAME_RELEASE} exit ;; *:procnto*:*:* | *:QNX:[0123456789]*:*) UNAME_PROCESSOR=`uname -p` if test "$UNAME_PROCESSOR" = "x86"; then UNAME_PROCESSOR=i386 UNAME_MACHINE=pc fi echo ${UNAME_PROCESSOR}-${UNAME_MACHINE}-nto-qnx${UNAME_RELEASE} exit ;; *:QNX:*:4*) echo i386-pc-qnx exit ;; NSE-?:NONSTOP_KERNEL:*:*) echo nse-tandem-nsk${UNAME_RELEASE} exit ;; NSR-?:NONSTOP_KERNEL:*:*) echo nsr-tandem-nsk${UNAME_RELEASE} exit ;; *:NonStop-UX:*:*) echo mips-compaq-nonstopux exit ;; BS2000:POSIX*:*:*) echo bs2000-siemens-sysv exit ;; DS/*:UNIX_System_V:*:*) echo ${UNAME_MACHINE}-${UNAME_SYSTEM}-${UNAME_RELEASE} exit ;; *:Plan9:*:*) # "uname -m" is not consistent, so use $cputype instead. 386 # is converted to i386 for consistency with other x86 # operating systems. if test "$cputype" = "386"; then UNAME_MACHINE=i386 else UNAME_MACHINE="$cputype" fi echo ${UNAME_MACHINE}-unknown-plan9 exit ;; *:TOPS-10:*:*) echo pdp10-unknown-tops10 exit ;; *:TENEX:*:*) echo pdp10-unknown-tenex exit ;; KS10:TOPS-20:*:* | KL10:TOPS-20:*:* | TYPE4:TOPS-20:*:*) echo pdp10-dec-tops20 exit ;; XKL-1:TOPS-20:*:* | TYPE5:TOPS-20:*:*) echo pdp10-xkl-tops20 exit ;; *:TOPS-20:*:*) echo pdp10-unknown-tops20 exit ;; *:ITS:*:*) echo pdp10-unknown-its exit ;; SEI:*:*:SEIUX) echo mips-sei-seiux${UNAME_RELEASE} exit ;; *:DragonFly:*:*) echo ${UNAME_MACHINE}-unknown-dragonfly`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` exit ;; *:*VMS:*:*) UNAME_MACHINE=`(uname -p) 2>/dev/null` case "${UNAME_MACHINE}" in A*) echo alpha-dec-vms ; exit ;; I*) echo ia64-dec-vms ; exit ;; V*) echo vax-dec-vms ; exit ;; esac ;; *:XENIX:*:SysV) echo i386-pc-xenix exit ;; i*86:skyos:*:*) echo ${UNAME_MACHINE}-pc-skyos`echo ${UNAME_RELEASE}` | sed -e 's/ .*$//' exit ;; i*86:rdos:*:*) echo ${UNAME_MACHINE}-pc-rdos exit ;; i*86:AROS:*:*) echo ${UNAME_MACHINE}-pc-aros exit ;; esac #echo '(No uname command or uname output not recognized.)' 1>&2 #echo "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" 1>&2 eval $set_cc_for_build cat >$dummy.c < # include #endif main () { #if defined (sony) #if defined (MIPSEB) /* BFD wants "bsd" instead of "newsos". Perhaps BFD should be changed, I don't know.... 
*/ printf ("mips-sony-bsd\n"); exit (0); #else #include printf ("m68k-sony-newsos%s\n", #ifdef NEWSOS4 "4" #else "" #endif ); exit (0); #endif #endif #if defined (__arm) && defined (__acorn) && defined (__unix) printf ("arm-acorn-riscix\n"); exit (0); #endif #if defined (hp300) && !defined (hpux) printf ("m68k-hp-bsd\n"); exit (0); #endif #if defined (NeXT) #if !defined (__ARCHITECTURE__) #define __ARCHITECTURE__ "m68k" #endif int version; version=`(hostinfo | sed -n 's/.*NeXT Mach \([0-9]*\).*/\1/p') 2>/dev/null`; if (version < 4) printf ("%s-next-nextstep%d\n", __ARCHITECTURE__, version); else printf ("%s-next-openstep%d\n", __ARCHITECTURE__, version); exit (0); #endif #if defined (MULTIMAX) || defined (n16) #if defined (UMAXV) printf ("ns32k-encore-sysv\n"); exit (0); #else #if defined (CMU) printf ("ns32k-encore-mach\n"); exit (0); #else printf ("ns32k-encore-bsd\n"); exit (0); #endif #endif #endif #if defined (__386BSD__) printf ("i386-pc-bsd\n"); exit (0); #endif #if defined (sequent) #if defined (i386) printf ("i386-sequent-dynix\n"); exit (0); #endif #if defined (ns32000) printf ("ns32k-sequent-dynix\n"); exit (0); #endif #endif #if defined (_SEQUENT_) struct utsname un; uname(&un); if (strncmp(un.version, "V2", 2) == 0) { printf ("i386-sequent-ptx2\n"); exit (0); } if (strncmp(un.version, "V1", 2) == 0) { /* XXX is V1 correct? */ printf ("i386-sequent-ptx1\n"); exit (0); } printf ("i386-sequent-ptx\n"); exit (0); #endif #if defined (vax) # if !defined (ultrix) # include # if defined (BSD) # if BSD == 43 printf ("vax-dec-bsd4.3\n"); exit (0); # else # if BSD == 199006 printf ("vax-dec-bsd4.3reno\n"); exit (0); # else printf ("vax-dec-bsd\n"); exit (0); # endif # endif # else printf ("vax-dec-bsd\n"); exit (0); # endif # else printf ("vax-dec-ultrix\n"); exit (0); # endif #endif #if defined (alliant) && defined (i860) printf ("i860-alliant-bsd\n"); exit (0); #endif exit (1); } EOF $CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null && SYSTEM_NAME=`$dummy` && { echo "$SYSTEM_NAME"; exit; } # Apollos put the system type in the environment. test -d /usr/apollo && { echo ${ISP}-apollo-${SYSTYPE}; exit; } # Convex versions that predate uname can use getsysinfo(1) if [ -x /usr/convex/getsysinfo ] then case `getsysinfo -f cpu_type` in c1*) echo c1-convex-bsd exit ;; c2*) if getsysinfo -f scalar_acc then echo c32-convex-bsd else echo c2-convex-bsd fi exit ;; c34*) echo c34-convex-bsd exit ;; c38*) echo c38-convex-bsd exit ;; c4*) echo c4-convex-bsd exit ;; esac fi cat >&2 < in order to provide the needed information to handle your system. 
config.guess timestamp = $timestamp uname -m = `(uname -m) 2>/dev/null || echo unknown` uname -r = `(uname -r) 2>/dev/null || echo unknown` uname -s = `(uname -s) 2>/dev/null || echo unknown` uname -v = `(uname -v) 2>/dev/null || echo unknown` /usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null` /bin/uname -X = `(/bin/uname -X) 2>/dev/null` hostinfo = `(hostinfo) 2>/dev/null` /bin/universe = `(/bin/universe) 2>/dev/null` /usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null` /bin/arch = `(/bin/arch) 2>/dev/null` /usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null` /usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null` UNAME_MACHINE = ${UNAME_MACHINE} UNAME_RELEASE = ${UNAME_RELEASE} UNAME_SYSTEM = ${UNAME_SYSTEM} UNAME_VERSION = ${UNAME_VERSION} EOF exit 1 # Local variables: # eval: (add-hook 'write-file-hooks 'time-stamp) # time-stamp-start: "timestamp='" # time-stamp-format: "%:y-%02m-%02d" # time-stamp-end: "'" # End: dar-2.4.8/configure0000755000175000017520000271557412023053151011142 00000000000000#! /bin/sh # Guess values for system-dependent variables and create Makefiles. # Generated by GNU Autoconf 2.67 for DAR 2.4.8. # # Report bugs to . # # # Copyright (C) 1992, 1993, 1994, 1995, 1996, 1998, 1999, 2000, 2001, # 2002, 2003, 2004, 2005, 2006, 2007, 2008, 2009, 2010 Free Software # Foundation, Inc. # # # This configure script is free software; the Free Software Foundation # gives unlimited permission to copy, distribute and modify it. ## -------------------- ## ## M4sh Initialization. ## ## -------------------- ## # Be more Bourne compatible DUALCASE=1; export DUALCASE # for MKS sh if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : emulate sh NULLCMD=: # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which # is contrary to our usage. Disable this feature. alias -g '${1+"$@"}'='"$@"' setopt NO_GLOB_SUBST else case `(set -o) 2>/dev/null` in #( *posix*) : set -o posix ;; #( *) : ;; esac fi as_nl=' ' export as_nl # Printing a long string crashes Solaris 7 /usr/bin/printf. as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo # Prefer a ksh shell builtin over an external printf program on Solaris, # but without wasting forks for bash or zsh. if test -z "$BASH_VERSION$ZSH_VERSION" \ && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then as_echo='print -r --' as_echo_n='print -rn --' elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then as_echo='printf %s\n' as_echo_n='printf %s' else if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' as_echo_n='/usr/ucb/echo -n' else as_echo_body='eval expr "X$1" : "X\\(.*\\)"' as_echo_n_body='eval arg=$1; case $arg in #( *"$as_nl"*) expr "X$arg" : "X\\(.*\\)$as_nl"; arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; esac; expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" ' export as_echo_n_body as_echo_n='sh -c $as_echo_n_body as_echo' fi export as_echo_body as_echo='sh -c $as_echo_body as_echo' fi # The user is always right. if test "${PATH_SEPARATOR+set}" != set; then PATH_SEPARATOR=: (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || PATH_SEPARATOR=';' } fi # IFS # We need space, tab and new line, in precisely that order. Quoting is # there to prevent editors from complaining about space-tab. 
# (If _AS_PATH_WALK were called with IFS unset, it would disable word # splitting by setting IFS to empty value.) IFS=" "" $as_nl" # Find who we are. Look in the path if we contain no directory separator. case $0 in #(( *[\\/]* ) as_myself=$0 ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break done IFS=$as_save_IFS ;; esac # We did not find ourselves, most probably we were run as `sh COMMAND' # in which case we are not to be found in the path. if test "x$as_myself" = x; then as_myself=$0 fi if test ! -f "$as_myself"; then $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 exit 1 fi # Unset variables that we do not need and which cause bugs (e.g. in # pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" # suppresses any "Segmentation fault" message there. '((' could # trigger a bug in pdksh 5.2.14. for as_var in BASH_ENV ENV MAIL MAILPATH do eval test x\${$as_var+set} = xset \ && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : done PS1='$ ' PS2='> ' PS4='+ ' # NLS nuisances. LC_ALL=C export LC_ALL LANGUAGE=C export LANGUAGE # CDPATH. (unset CDPATH) >/dev/null 2>&1 && unset CDPATH if test "x$CONFIG_SHELL" = x; then as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then : emulate sh NULLCMD=: # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which # is contrary to our usage. Disable this feature. alias -g '\${1+\"\$@\"}'='\"\$@\"' setopt NO_GLOB_SUBST else case \`(set -o) 2>/dev/null\` in #( *posix*) : set -o posix ;; #( *) : ;; esac fi " as_required="as_fn_return () { (exit \$1); } as_fn_success () { as_fn_return 0; } as_fn_failure () { as_fn_return 1; } as_fn_ret_success () { return 0; } as_fn_ret_failure () { return 1; } exitcode=0 as_fn_success || { exitcode=1; echo as_fn_success failed.; } as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; } as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; } as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; } if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then : else exitcode=1; echo positional parameters were not saved. fi test x\$exitcode = x0 || exit 1" as_suggested=" as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" && test \"x\`expr \$as_lineno_1'\$as_run' + 1\`\" = \"x\$as_lineno_2'\$as_run'\"' || exit 1 test \$(( 1 + 1 )) = 2 || exit 1" if (eval "$as_required") 2>/dev/null; then : as_have_required=yes else as_have_required=no fi if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then : else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR as_found=false for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. as_found=: case $as_dir in #( /*) for as_base in sh bash ksh sh5; do # Try only shells that exist, to save several forks. 
as_shell=$as_dir/$as_base if { test -f "$as_shell" || test -f "$as_shell.exe"; } && { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then : CONFIG_SHELL=$as_shell as_have_required=yes if { $as_echo "$as_bourne_compatible""$as_suggested" | as_run=a "$as_shell"; } 2>/dev/null; then : break 2 fi fi done;; esac as_found=false done $as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } && { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then : CONFIG_SHELL=$SHELL as_have_required=yes fi; } IFS=$as_save_IFS if test "x$CONFIG_SHELL" != x; then : # We cannot yet assume a decent shell, so we have to provide a # neutralization value for shells without unset; and this also # works around shells that cannot unset nonexistent variables. BASH_ENV=/dev/null ENV=/dev/null (unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV export CONFIG_SHELL exec "$CONFIG_SHELL" "$as_myself" ${1+"$@"} fi if test x$as_have_required = xno; then : $as_echo "$0: This script requires a shell more modern than all" $as_echo "$0: the shells that I found on your system." if test x${ZSH_VERSION+set} = xset ; then $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should" $as_echo "$0: be upgraded to zsh 4.3.4 or later." else $as_echo "$0: Please tell bug-autoconf@gnu.org and $0: [http://sourceforge.net/tracker/?group_id=65612&atid=511612] $0: about your system, including any error possibly output $0: before this message. Then install a modern shell, or $0: manually run the script under such a shell if you do $0: have one." fi exit 1 fi fi fi SHELL=${CONFIG_SHELL-/bin/sh} export SHELL # Unset more variables known to interfere with behavior of common tools. CLICOLOR_FORCE= GREP_OPTIONS= unset CLICOLOR_FORCE GREP_OPTIONS ## --------------------- ## ## M4sh Shell Functions. ## ## --------------------- ## # as_fn_unset VAR # --------------- # Portably unset VAR. as_fn_unset () { { eval $1=; unset $1;} } as_unset=as_fn_unset # as_fn_set_status STATUS # ----------------------- # Set $? to STATUS, without forking. as_fn_set_status () { return $1 } # as_fn_set_status # as_fn_exit STATUS # ----------------- # Exit the shell with STATUS, even in a "trap 0" or "set -e" context. as_fn_exit () { set +e as_fn_set_status $1 exit $1 } # as_fn_exit # as_fn_mkdir_p # ------------- # Create "$as_dir" as a directory, including parents if necessary. as_fn_mkdir_p () { case $as_dir in #( -*) as_dir=./$as_dir;; esac test -d "$as_dir" || eval $as_mkdir_p || { as_dirs= while :; do case $as_dir in #( *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( *) as_qdir=$as_dir;; esac as_dirs="'$as_qdir' $as_dirs" as_dir=`$as_dirname -- "$as_dir" || $as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_dir" : 'X\(//\)[^/]' \| \ X"$as_dir" : 'X\(//\)$' \| \ X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$as_dir" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` test -d "$as_dir" && break done test -z "$as_dirs" || eval "mkdir $as_dirs" } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" } # as_fn_mkdir_p # as_fn_append VAR VALUE # ---------------------- # Append the text in VALUE to the end of the definition contained in VAR. Take # advantage of any shell optimizations that allow amortized linear growth over # repeated appends, instead of the typical quadratic growth present in naive # implementations. 
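# A minimal usage sketch for as_fn_append (illustration only; the variable
# name `my_msg' is hypothetical and not used by this script). Once the
# definition just below is in effect, repeated appends stay cheap on shells
# whose `+=' operator is detected:
#
#   my_msg="checking"
#   as_fn_append my_msg " for libdar"
#   as_fn_append my_msg "... done"
#   $as_echo "$my_msg"   # prints: checking for libdar... done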
if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : eval 'as_fn_append () { eval $1+=\$2 }' else as_fn_append () { eval $1=\$$1\$2 } fi # as_fn_append # as_fn_arith ARG... # ------------------ # Perform arithmetic evaluation on the ARGs, and store the result in the # global $as_val. Take advantage of shells that can avoid forks. The arguments # must be portable across $(()) and expr. if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : eval 'as_fn_arith () { as_val=$(( $* )) }' else as_fn_arith () { as_val=`expr "$@" || test $? -eq 1` } fi # as_fn_arith # as_fn_error STATUS ERROR [LINENO LOG_FD] # ---------------------------------------- # Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are # provided, also output the error to LOG_FD, referencing LINENO. Then exit the # script with STATUS, using 1 if that was 0. as_fn_error () { as_status=$1; test $as_status -eq 0 && as_status=1 if test "$4"; then as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 fi $as_echo "$as_me: error: $2" >&2 as_fn_exit $as_status } # as_fn_error if expr a : '\(a\)' >/dev/null 2>&1 && test "X`expr 00001 : '.*\(...\)'`" = X001; then as_expr=expr else as_expr=false fi if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then as_basename=basename else as_basename=false fi if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then as_dirname=dirname else as_dirname=false fi as_me=`$as_basename -- "$0" || $as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ X"$0" : 'X\(//\)$' \| \ X"$0" : 'X\(/\)' \| . 2>/dev/null || $as_echo X/"$0" | sed '/^.*\/\([^/][^/]*\)\/*$/{ s//\1/ q } /^X\/\(\/\/\)$/{ s//\1/ q } /^X\/\(\/\).*/{ s//\1/ q } s/.*/./; q'` # Avoid depending upon Character Ranges. as_cr_letters='abcdefghijklmnopqrstuvwxyz' as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' as_cr_Letters=$as_cr_letters$as_cr_LETTERS as_cr_digits='0123456789' as_cr_alnum=$as_cr_Letters$as_cr_digits as_lineno_1=$LINENO as_lineno_1a=$LINENO as_lineno_2=$LINENO as_lineno_2a=$LINENO eval 'test "x$as_lineno_1'$as_run'" != "x$as_lineno_2'$as_run'" && test "x`expr $as_lineno_1'$as_run' + 1`" = "x$as_lineno_2'$as_run'"' || { # Blame Lee E. McMahon (1931-1989) for sed's syntax. :-) sed -n ' p /[$]LINENO/= ' <$as_myself | sed ' s/[$]LINENO.*/&-/ t lineno b :lineno N :loop s/[$]LINENO\([^'$as_cr_alnum'_].*\n\)\(.*\)/\2\1\2/ t loop s/-\n.*// ' >$as_me.lineno && chmod +x "$as_me.lineno" || { $as_echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; } # Don't try to exec as it changes $[0], causing all sort of problems # (the dirname of $[0] is not the place where we might find the # original and so on. Autoconf is especially sensitive to this). . "./$as_me.lineno" # Exit status is that of the last command. exit } ECHO_C= ECHO_N= ECHO_T= case `echo -n x` in #((((( -n*) case `echo 'xy\c'` in *c*) ECHO_T=' ';; # ECHO_T is single tab character. xy) ECHO_C='\c';; *) echo `echo ksh88 bug on AIX 6.1` > /dev/null ECHO_T=' ';; esac;; *) ECHO_N='-n';; esac rm -f conf$$ conf$$.exe conf$$.file if test -d conf$$.dir; then rm -f conf$$.dir/conf$$.file else rm -f conf$$.dir mkdir conf$$.dir 2>/dev/null fi if (echo >conf$$.file) 2>/dev/null; then if ln -s conf$$.file conf$$ 2>/dev/null; then as_ln_s='ln -s' # ... but there are two gotchas: # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. 
# In both cases, we have to default to `cp -p'. ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || as_ln_s='cp -p' elif ln conf$$.file conf$$ 2>/dev/null; then as_ln_s=ln else as_ln_s='cp -p' fi else as_ln_s='cp -p' fi rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file rmdir conf$$.dir 2>/dev/null if mkdir -p . 2>/dev/null; then as_mkdir_p='mkdir -p "$as_dir"' else test -d ./-p && rmdir ./-p as_mkdir_p=false fi if test -x / >/dev/null 2>&1; then as_test_x='test -x' else if ls -dL / >/dev/null 2>&1; then as_ls_L_option=L else as_ls_L_option= fi as_test_x=' eval sh -c '\'' if test -d "$1"; then test -d "$1/."; else case $1 in #( -*)set "./$1";; esac; case `ls -ld'$as_ls_L_option' "$1" 2>/dev/null` in #(( ???[sx]*):;;*)false;;esac;fi '\'' sh ' fi as_executable_p=$as_test_x # Sed expression to map a string onto a valid CPP name. as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" # Sed expression to map a string onto a valid variable name. as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" # Check that we are running under the correct shell. SHELL=${CONFIG_SHELL-/bin/sh} case X$lt_ECHO in X*--fallback-echo) # Remove one level of quotation (which was required for Make). ECHO=`echo "$lt_ECHO" | sed 's,\\\\\$\\$0,'$0','` ;; esac ECHO=${lt_ECHO-echo} if test "X$1" = X--no-reexec; then # Discard the --no-reexec flag, and continue. shift elif test "X$1" = X--fallback-echo; then # Avoid inline document here, it may be left over : elif test "X`{ $ECHO '\t'; } 2>/dev/null`" = 'X\t' ; then # Yippee, $ECHO works! : else # Restart under the correct shell. exec $SHELL "$0" --no-reexec ${1+"$@"} fi if test "X$1" = X--fallback-echo; then # used as fallback echo shift cat <<_LT_EOF $* _LT_EOF exit 0 fi # The HP-UX ksh and POSIX shell print the target directory to stdout # if CDPATH is set. (unset CDPATH) >/dev/null 2>&1 && unset CDPATH if test -z "$lt_ECHO"; then if test "X${echo_test_string+set}" != Xset; then # find a string as large as possible, as long as the shell can cope with it for cmd in 'sed 50q "$0"' 'sed 20q "$0"' 'sed 10q "$0"' 'sed 2q "$0"' 'echo test'; do # expected sizes: less than 2Kb, 1Kb, 512 bytes, 16 bytes, ... if { echo_test_string=`eval $cmd`; } 2>/dev/null && { test "X$echo_test_string" = "X$echo_test_string"; } 2>/dev/null then break fi done fi if test "X`{ $ECHO '\t'; } 2>/dev/null`" = 'X\t' && echo_testing_string=`{ $ECHO "$echo_test_string"; } 2>/dev/null` && test "X$echo_testing_string" = "X$echo_test_string"; then : else # The Solaris, AIX, and Digital Unix default echo programs unquote # backslashes. This makes it impossible to quote backslashes using # echo "$something" | sed 's/\\/\\\\/g' # # So, first we look for a working echo in the user's PATH. lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR for dir in $PATH /usr/ucb; do IFS="$lt_save_ifs" if (test -f $dir/echo || test -f $dir/echo$ac_exeext) && test "X`($dir/echo '\t') 2>/dev/null`" = 'X\t' && echo_testing_string=`($dir/echo "$echo_test_string") 2>/dev/null` && test "X$echo_testing_string" = "X$echo_test_string"; then ECHO="$dir/echo" break fi done IFS="$lt_save_ifs" if test "X$ECHO" = Xecho; then # We didn't find a better echo, so look for alternatives. if test "X`{ print -r '\t'; } 2>/dev/null`" = 'X\t' && echo_testing_string=`{ print -r "$echo_test_string"; } 2>/dev/null` && test "X$echo_testing_string" = "X$echo_test_string"; then # This shell has a builtin print -r that does the trick. 
ECHO='print -r' elif { test -f /bin/ksh || test -f /bin/ksh$ac_exeext; } && test "X$CONFIG_SHELL" != X/bin/ksh; then # If we have ksh, try running configure again with it. ORIGINAL_CONFIG_SHELL=${CONFIG_SHELL-/bin/sh} export ORIGINAL_CONFIG_SHELL CONFIG_SHELL=/bin/ksh export CONFIG_SHELL exec $CONFIG_SHELL "$0" --no-reexec ${1+"$@"} else # Try using printf. ECHO='printf %s\n' if test "X`{ $ECHO '\t'; } 2>/dev/null`" = 'X\t' && echo_testing_string=`{ $ECHO "$echo_test_string"; } 2>/dev/null` && test "X$echo_testing_string" = "X$echo_test_string"; then # Cool, printf works : elif echo_testing_string=`($ORIGINAL_CONFIG_SHELL "$0" --fallback-echo '\t') 2>/dev/null` && test "X$echo_testing_string" = 'X\t' && echo_testing_string=`($ORIGINAL_CONFIG_SHELL "$0" --fallback-echo "$echo_test_string") 2>/dev/null` && test "X$echo_testing_string" = "X$echo_test_string"; then CONFIG_SHELL=$ORIGINAL_CONFIG_SHELL export CONFIG_SHELL SHELL="$CONFIG_SHELL" export SHELL ECHO="$CONFIG_SHELL $0 --fallback-echo" elif echo_testing_string=`($CONFIG_SHELL "$0" --fallback-echo '\t') 2>/dev/null` && test "X$echo_testing_string" = 'X\t' && echo_testing_string=`($CONFIG_SHELL "$0" --fallback-echo "$echo_test_string") 2>/dev/null` && test "X$echo_testing_string" = "X$echo_test_string"; then ECHO="$CONFIG_SHELL $0 --fallback-echo" else # maybe with a smaller string... prev=: for cmd in 'echo test' 'sed 2q "$0"' 'sed 10q "$0"' 'sed 20q "$0"' 'sed 50q "$0"'; do if { test "X$echo_test_string" = "X`eval $cmd`"; } 2>/dev/null then break fi prev="$cmd" done if test "$prev" != 'sed 50q "$0"'; then echo_test_string=`eval $prev` export echo_test_string exec ${ORIGINAL_CONFIG_SHELL-${CONFIG_SHELL-/bin/sh}} "$0" ${1+"$@"} else # Oops. We lost completely, so just stick with echo. ECHO=echo fi fi fi fi fi fi # Copy echo and quote the copy suitably for passing to libtool from # the Makefile, instead of quoting the original, which is used later. lt_ECHO=$ECHO if test "X$lt_ECHO" = "X$CONFIG_SHELL $0 --fallback-echo"; then lt_ECHO="$CONFIG_SHELL \\\$\$0 --fallback-echo" fi test -n "$DJDIR" || exec 7<&0 &1 # Name of the host. # hostname on some systems (SVR3.2, old GNU/Linux) returns a bogus exit status, # so uname gets run too. ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q` # # Initializations. # ac_default_prefix=/usr/local ac_clean_files= ac_config_libobj_dir=. LIBOBJS= cross_compiling=no subdirs= MFLAGS= MAKEFLAGS= # Identity of this package. PACKAGE_NAME='DAR' PACKAGE_TARNAME='dar' PACKAGE_VERSION='2.4.8' PACKAGE_STRING='DAR 2.4.8' PACKAGE_BUGREPORT='http://sourceforge.net/tracker/?group_id=65612&atid=511612' PACKAGE_URL='' ac_unique_file="src/libdar/catalogue.cpp" gt_needs= # Factoring default headers for most tests. 
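# The ac_includes_default string assembled below is what later tests pass as
# their INCLUDES argument, e.g. to ac_fn_c_check_header_compile (defined
# further down with the signature LINENO HEADER VAR INCLUDES). A typical
# call, sketched here with an arbitrary header, looks like:
#
#   ac_fn_c_check_header_compile "$LINENO" "sys/types.h" "ac_cv_header_sys_types_h" "$ac_includes_default"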
ac_includes_default="\ #include <stdio.h> #ifdef HAVE_SYS_TYPES_H # include <sys/types.h> #endif #ifdef HAVE_SYS_STAT_H # include <sys/stat.h> #endif #ifdef STDC_HEADERS # include <stdlib.h> # include <stddef.h> #else # ifdef HAVE_STDLIB_H # include <stdlib.h> # endif #endif #ifdef HAVE_STRING_H # if !defined STDC_HEADERS && defined HAVE_MEMORY_H # include <memory.h> # endif # include <string.h> #endif #ifdef HAVE_STRINGS_H # include <strings.h> #endif #ifdef HAVE_INTTYPES_H # include <inttypes.h> #endif #ifdef HAVE_STDINT_H # include <stdint.h> #endif #ifdef HAVE_UNISTD_H # include <unistd.h> #endif" ac_header_list= ac_default_prefix=/usr/local ac_subst_vars='am__EXEEXT_FALSE am__EXEEXT_TRUE LTLIBOBJS DOXYGEN_PROG UPX_PROG PROFILING_FALSE PROFILING_TRUE USE_GROFF_FALSE USE_GROFF_TRUE USE_DOXYGEN_FALSE USE_DOXYGEN_TRUE USE_UPX_FALSE USE_UPX_TRUE BUILD_MODE64_FALSE BUILD_MODE64_TRUE BUILD_MODE32_FALSE BUILD_MODE32_TRUE DEBUG_STATIC_FALSE DEBUG_STATIC_TRUE BUILD_DAR_STATIC_FALSE BUILD_DAR_STATIC_TRUE MAKE_USAGE_FALSE MAKE_USAGE_TRUE MAKE_ALL_DIR_FALSE MAKE_ALL_DIR_TRUE upx groff tmp doxygen LIBOBJS CPP OTOOL64 OTOOL LIPO NMEDIT DSYMUTIL lt_ECHO RANLIB AR OBJDUMP LN_S NM ac_ct_DUMPBIN DUMPBIN LD FGREP SED LIBTOOL POSUB LTLIBINTL LIBINTL INTLLIBS LTLIBICONV LIBICONV INTL_MACOSX_LIBS EGREP GREP CXXCPP am__fastdepCXX_FALSE am__fastdepCXX_TRUE CXXDEPMODE ac_ct_CXX CXXFLAGS CXX host_os host_vendor host_cpu host build_os build_vendor build_cpu build am__fastdepCC_FALSE am__fastdepCC_TRUE CCDEPMODE AMDEPBACKSLASH AMDEP_FALSE AMDEP_TRUE am__quote am__include DEPDIR OBJEXT EXEEXT ac_ct_CC CPPFLAGS LDFLAGS CFLAGS CC XGETTEXT_EXTRA_OPTIONS MSGMERGE XGETTEXT_015 XGETTEXT GMSGFMT_015 MSGFMT_015 GMSGFMT MSGFMT GETTEXT_MACRO_VERSION USE_NLS am__untar am__tar AMTAR am__leading_dot SET_MAKE AWK mkdir_p MKDIR_P INSTALL_STRIP_PROGRAM STRIP install_sh MAKEINFO AUTOHEADER AUTOMAKE AUTOCONF ACLOCAL VERSION PACKAGE CYGPATH_W am__isrc INSTALL_DATA INSTALL_SCRIPT INSTALL_PROGRAM target_alias host_alias build_alias LIBS ECHO_T ECHO_N ECHO_C DEFS mandir localedir libdir psdir pdfdir dvidir htmldir infodir docdir oldincludedir includedir localstatedir sharedstatedir sysconfdir datadir datarootdir libexecdir sbindir bindir program_transform_name prefix exec_prefix PACKAGE_URL PACKAGE_BUGREPORT PACKAGE_STRING PACKAGE_VERSION PACKAGE_TARNAME PACKAGE_NAME PATH_SEPARATOR SHELL' ac_subst_files='' ac_user_opts=' enable_option_checking enable_nls enable_dependency_tracking with_gnu_ld enable_rpath with_libiconv_prefix with_libintl_prefix enable_shared enable_static with_pic enable_fast_install enable_libtool_lock enable_libdl_linking enable_largefile enable_libz_linking enable_libbz2_linking enable_liblzo2_linking enable_libgcrypt_linking enable_ea_support enable_nodump_flag enable_gnugetopt enable_examples enable_os_bits enable_mode enable_furtive_read enable_debug enable_pedantic enable_build_usage enable_build_html enable_special_alloc enable_upx enable_fast_dir enable_thread_safe enable_execinfo enable_profiling enable_debug_memory enable_dar_static ' ac_precious_vars='build_alias host_alias target_alias CC CFLAGS LDFLAGS LIBS CPPFLAGS CXX CXXFLAGS CCC CXXCPP CPP' # Initialize some variables set by options. ac_init_help= ac_init_version=false ac_unrecognized_opts= ac_unrecognized_sep= # The variables have the same names as the options, with # dashes changed to underlines. cache_file=/dev/null exec_prefix=NONE no_create= no_recursion= prefix=NONE program_prefix=NONE program_suffix=NONE program_transform_name=s,x,x, silent= site= srcdir= verbose= x_includes=NONE x_libraries=NONE # Installation directory options.
# These are left unexpanded so users can "make install exec_prefix=/foo" # and all the variables that are supposed to be based on exec_prefix # by default will actually change. # Use braces instead of parens because sh, perl, etc. also accept them. # (The list follows the same order as the GNU Coding Standards.) bindir='${exec_prefix}/bin' sbindir='${exec_prefix}/sbin' libexecdir='${exec_prefix}/libexec' datarootdir='${prefix}/share' datadir='${datarootdir}' sysconfdir='${prefix}/etc' sharedstatedir='${prefix}/com' localstatedir='${prefix}/var' includedir='${prefix}/include' oldincludedir='/usr/include' docdir='${datarootdir}/doc/${PACKAGE_TARNAME}' infodir='${datarootdir}/info' htmldir='${docdir}' dvidir='${docdir}' pdfdir='${docdir}' psdir='${docdir}' libdir='${exec_prefix}/lib' localedir='${datarootdir}/locale' mandir='${datarootdir}/man' ac_prev= ac_dashdash= for ac_option do # If the previous option needs an argument, assign it. if test -n "$ac_prev"; then eval $ac_prev=\$ac_option ac_prev= continue fi case $ac_option in *=?*) ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;; *=) ac_optarg= ;; *) ac_optarg=yes ;; esac # Accept the important Cygnus configure options, so we can diagnose typos. case $ac_dashdash$ac_option in --) ac_dashdash=yes ;; -bindir | --bindir | --bindi | --bind | --bin | --bi) ac_prev=bindir ;; -bindir=* | --bindir=* | --bindi=* | --bind=* | --bin=* | --bi=*) bindir=$ac_optarg ;; -build | --build | --buil | --bui | --bu) ac_prev=build_alias ;; -build=* | --build=* | --buil=* | --bui=* | --bu=*) build_alias=$ac_optarg ;; -cache-file | --cache-file | --cache-fil | --cache-fi \ | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c) ac_prev=cache_file ;; -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \ | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*) cache_file=$ac_optarg ;; --config-cache | -C) cache_file=config.cache ;; -datadir | --datadir | --datadi | --datad) ac_prev=datadir ;; -datadir=* | --datadir=* | --datadi=* | --datad=*) datadir=$ac_optarg ;; -datarootdir | --datarootdir | --datarootdi | --datarootd | --dataroot \ | --dataroo | --dataro | --datar) ac_prev=datarootdir ;; -datarootdir=* | --datarootdir=* | --datarootdi=* | --datarootd=* \ | --dataroot=* | --dataroo=* | --dataro=* | --datar=*) datarootdir=$ac_optarg ;; -disable-* | --disable-*) ac_useropt=`expr "x$ac_option" : 'x-*disable-\(.*\)'` # Reject names that are not valid shell variable names. expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && as_fn_error $? "invalid feature name: $ac_useropt" ac_useropt_orig=$ac_useropt ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` case $ac_user_opts in *" "enable_$ac_useropt" "*) ;; *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--disable-$ac_useropt_orig" ac_unrecognized_sep=', ';; esac eval enable_$ac_useropt=no ;; -docdir | --docdir | --docdi | --doc | --do) ac_prev=docdir ;; -docdir=* | --docdir=* | --docdi=* | --doc=* | --do=*) docdir=$ac_optarg ;; -dvidir | --dvidir | --dvidi | --dvid | --dvi | --dv) ac_prev=dvidir ;; -dvidir=* | --dvidir=* | --dvidi=* | --dvid=* | --dvi=* | --dv=*) dvidir=$ac_optarg ;; -enable-* | --enable-*) ac_useropt=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'` # Reject names that are not valid shell variable names. expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && as_fn_error $? 
"invalid feature name: $ac_useropt" ac_useropt_orig=$ac_useropt ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` case $ac_user_opts in *" "enable_$ac_useropt" "*) ;; *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--enable-$ac_useropt_orig" ac_unrecognized_sep=', ';; esac eval enable_$ac_useropt=\$ac_optarg ;; -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \ | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \ | --exec | --exe | --ex) ac_prev=exec_prefix ;; -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \ | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \ | --exec=* | --exe=* | --ex=*) exec_prefix=$ac_optarg ;; -gas | --gas | --ga | --g) # Obsolete; use --with-gas. with_gas=yes ;; -help | --help | --hel | --he | -h) ac_init_help=long ;; -help=r* | --help=r* | --hel=r* | --he=r* | -hr*) ac_init_help=recursive ;; -help=s* | --help=s* | --hel=s* | --he=s* | -hs*) ac_init_help=short ;; -host | --host | --hos | --ho) ac_prev=host_alias ;; -host=* | --host=* | --hos=* | --ho=*) host_alias=$ac_optarg ;; -htmldir | --htmldir | --htmldi | --htmld | --html | --htm | --ht) ac_prev=htmldir ;; -htmldir=* | --htmldir=* | --htmldi=* | --htmld=* | --html=* | --htm=* \ | --ht=*) htmldir=$ac_optarg ;; -includedir | --includedir | --includedi | --included | --include \ | --includ | --inclu | --incl | --inc) ac_prev=includedir ;; -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \ | --includ=* | --inclu=* | --incl=* | --inc=*) includedir=$ac_optarg ;; -infodir | --infodir | --infodi | --infod | --info | --inf) ac_prev=infodir ;; -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*) infodir=$ac_optarg ;; -libdir | --libdir | --libdi | --libd) ac_prev=libdir ;; -libdir=* | --libdir=* | --libdi=* | --libd=*) libdir=$ac_optarg ;; -libexecdir | --libexecdir | --libexecdi | --libexecd | --libexec \ | --libexe | --libex | --libe) ac_prev=libexecdir ;; -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \ | --libexe=* | --libex=* | --libe=*) libexecdir=$ac_optarg ;; -localedir | --localedir | --localedi | --localed | --locale) ac_prev=localedir ;; -localedir=* | --localedir=* | --localedi=* | --localed=* | --locale=*) localedir=$ac_optarg ;; -localstatedir | --localstatedir | --localstatedi | --localstated \ | --localstate | --localstat | --localsta | --localst | --locals) ac_prev=localstatedir ;; -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \ | --localstate=* | --localstat=* | --localsta=* | --localst=* | --locals=*) localstatedir=$ac_optarg ;; -mandir | --mandir | --mandi | --mand | --man | --ma | --m) ac_prev=mandir ;; -mandir=* | --mandir=* | --mandi=* | --mand=* | --man=* | --ma=* | --m=*) mandir=$ac_optarg ;; -nfp | --nfp | --nf) # Obsolete; use --without-fp. 
with_fp=no ;; -no-create | --no-create | --no-creat | --no-crea | --no-cre \ | --no-cr | --no-c | -n) no_create=yes ;; -no-recursion | --no-recursion | --no-recursio | --no-recursi \ | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r) no_recursion=yes ;; -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \ | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \ | --oldin | --oldi | --old | --ol | --o) ac_prev=oldincludedir ;; -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \ | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \ | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*) oldincludedir=$ac_optarg ;; -prefix | --prefix | --prefi | --pref | --pre | --pr | --p) ac_prev=prefix ;; -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*) prefix=$ac_optarg ;; -program-prefix | --program-prefix | --program-prefi | --program-pref \ | --program-pre | --program-pr | --program-p) ac_prev=program_prefix ;; -program-prefix=* | --program-prefix=* | --program-prefi=* \ | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*) program_prefix=$ac_optarg ;; -program-suffix | --program-suffix | --program-suffi | --program-suff \ | --program-suf | --program-su | --program-s) ac_prev=program_suffix ;; -program-suffix=* | --program-suffix=* | --program-suffi=* \ | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*) program_suffix=$ac_optarg ;; -program-transform-name | --program-transform-name \ | --program-transform-nam | --program-transform-na \ | --program-transform-n | --program-transform- \ | --program-transform | --program-transfor \ | --program-transfo | --program-transf \ | --program-trans | --program-tran \ | --progr-tra | --program-tr | --program-t) ac_prev=program_transform_name ;; -program-transform-name=* | --program-transform-name=* \ | --program-transform-nam=* | --program-transform-na=* \ | --program-transform-n=* | --program-transform-=* \ | --program-transform=* | --program-transfor=* \ | --program-transfo=* | --program-transf=* \ | --program-trans=* | --program-tran=* \ | --progr-tra=* | --program-tr=* | --program-t=*) program_transform_name=$ac_optarg ;; -pdfdir | --pdfdir | --pdfdi | --pdfd | --pdf | --pd) ac_prev=pdfdir ;; -pdfdir=* | --pdfdir=* | --pdfdi=* | --pdfd=* | --pdf=* | --pd=*) pdfdir=$ac_optarg ;; -psdir | --psdir | --psdi | --psd | --ps) ac_prev=psdir ;; -psdir=* | --psdir=* | --psdi=* | --psd=* | --ps=*) psdir=$ac_optarg ;; -q | -quiet | --quiet | --quie | --qui | --qu | --q \ | -silent | --silent | --silen | --sile | --sil) silent=yes ;; -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb) ac_prev=sbindir ;; -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \ | --sbi=* | --sb=*) sbindir=$ac_optarg ;; -sharedstatedir | --sharedstatedir | --sharedstatedi \ | --sharedstated | --sharedstate | --sharedstat | --sharedsta \ | --sharedst | --shareds | --shared | --share | --shar \ | --sha | --sh) ac_prev=sharedstatedir ;; -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \ | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \ | --sharedst=* | --shareds=* | --shared=* | --share=* | --shar=* \ | --sha=* | --sh=*) sharedstatedir=$ac_optarg ;; -site | --site | --sit) ac_prev=site ;; -site=* | --site=* | --sit=*) site=$ac_optarg ;; -srcdir | --srcdir | --srcdi | --srcd | --src | --sr) ac_prev=srcdir ;; -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | --sr=*) srcdir=$ac_optarg ;; 
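# Illustration of how this case statement normalizes a command line; the
# option names come from the help text further below, the values are just
# examples:
#
#   ./configure --prefix=/opt/dar --disable-upx --enable-mode=64
#
# matches, in turn, the `--prefix=*' arm above (prefix=/opt/dar), the
# `--disable-*' arm (enable_upx=no) and the `--enable-*' arm (enable_mode=64),
# before falling through to the directory checks that follow this loop.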
-sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \ | --syscon | --sysco | --sysc | --sys | --sy) ac_prev=sysconfdir ;; -sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \ | --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*) sysconfdir=$ac_optarg ;; -target | --target | --targe | --targ | --tar | --ta | --t) ac_prev=target_alias ;; -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*) target_alias=$ac_optarg ;; -v | -verbose | --verbose | --verbos | --verbo | --verb) verbose=yes ;; -version | --version | --versio | --versi | --vers | -V) ac_init_version=: ;; -with-* | --with-*) ac_useropt=`expr "x$ac_option" : 'x-*with-\([^=]*\)'` # Reject names that are not valid shell variable names. expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && as_fn_error $? "invalid package name: $ac_useropt" ac_useropt_orig=$ac_useropt ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` case $ac_user_opts in *" "with_$ac_useropt" "*) ;; *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--with-$ac_useropt_orig" ac_unrecognized_sep=', ';; esac eval with_$ac_useropt=\$ac_optarg ;; -without-* | --without-*) ac_useropt=`expr "x$ac_option" : 'x-*without-\(.*\)'` # Reject names that are not valid shell variable names. expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && as_fn_error $? "invalid package name: $ac_useropt" ac_useropt_orig=$ac_useropt ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` case $ac_user_opts in *" "with_$ac_useropt" "*) ;; *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--without-$ac_useropt_orig" ac_unrecognized_sep=', ';; esac eval with_$ac_useropt=no ;; --x) # Obsolete; use --with-x. with_x=yes ;; -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \ | --x-incl | --x-inc | --x-in | --x-i) ac_prev=x_includes ;; -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \ | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*) x_includes=$ac_optarg ;; -x-libraries | --x-libraries | --x-librarie | --x-librari \ | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l) ac_prev=x_libraries ;; -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \ | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*) x_libraries=$ac_optarg ;; -*) as_fn_error $? "unrecognized option: \`$ac_option' Try \`$0 --help' for more information" ;; *=*) ac_envvar=`expr "x$ac_option" : 'x\([^=]*\)='` # Reject names that are not valid shell variable names. case $ac_envvar in #( '' | [0-9]* | *[!_$as_cr_alnum]* ) as_fn_error $? "invalid variable name: \`$ac_envvar'" ;; esac eval $ac_envvar=\$ac_optarg export $ac_envvar ;; *) # FIXME: should be removed in autoconf 3.0. $as_echo "$as_me: WARNING: you should use --build, --host, --target" >&2 expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null && $as_echo "$as_me: WARNING: invalid host type: $ac_option" >&2 : ${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option} ;; esac done if test -n "$ac_prev"; then ac_option=--`echo $ac_prev | sed 's/_/-/g'` as_fn_error $? "missing argument to $ac_option" fi if test -n "$ac_unrecognized_opts"; then case $enable_option_checking in no) ;; fatal) as_fn_error $? "unrecognized options: $ac_unrecognized_opts" ;; *) $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;; esac fi # Check all directory arguments for consistency. 
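# For instance (values are only examples): a trailing slash as in
# --prefix=/opt/dar/ is stripped to /opt/dar by the loop below, while a
# relative path such as --prefix=opt/dar aborts with "expected an absolute
# directory name for --prefix: opt/dar".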
for ac_var in exec_prefix prefix bindir sbindir libexecdir datarootdir \ datadir sysconfdir sharedstatedir localstatedir includedir \ oldincludedir docdir infodir htmldir dvidir pdfdir psdir \ libdir localedir mandir do eval ac_val=\$$ac_var # Remove trailing slashes. case $ac_val in */ ) ac_val=`expr "X$ac_val" : 'X\(.*[^/]\)' \| "X$ac_val" : 'X\(.*\)'` eval $ac_var=\$ac_val;; esac # Be sure to have absolute directory names. case $ac_val in [\\/$]* | ?:[\\/]* ) continue;; NONE | '' ) case $ac_var in *prefix ) continue;; esac;; esac as_fn_error $? "expected an absolute directory name for --$ac_var: $ac_val" done # There might be people who depend on the old broken behavior: `$host' # used to hold the argument of --host etc. # FIXME: To remove some day. build=$build_alias host=$host_alias target=$target_alias # FIXME: To remove some day. if test "x$host_alias" != x; then if test "x$build_alias" = x; then cross_compiling=maybe $as_echo "$as_me: WARNING: if you wanted to set the --build type, don't use --host. If a cross compiler is detected then cross compile mode will be used" >&2 elif test "x$build_alias" != "x$host_alias"; then cross_compiling=yes fi fi ac_tool_prefix= test -n "$host_alias" && ac_tool_prefix=$host_alias- test "$silent" = yes && exec 6>/dev/null ac_pwd=`pwd` && test -n "$ac_pwd" && ac_ls_di=`ls -di .` && ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` || as_fn_error $? "working directory cannot be determined" test "X$ac_ls_di" = "X$ac_pwd_ls_di" || as_fn_error $? "pwd does not report name of working directory" # Find the source files, if location was not specified. if test -z "$srcdir"; then ac_srcdir_defaulted=yes # Try the directory containing this script, then the parent directory. ac_confdir=`$as_dirname -- "$as_myself" || $as_expr X"$as_myself" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_myself" : 'X\(//\)[^/]' \| \ X"$as_myself" : 'X\(//\)$' \| \ X"$as_myself" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$as_myself" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` srcdir=$ac_confdir if test ! -r "$srcdir/$ac_unique_file"; then srcdir=.. fi else ac_srcdir_defaulted=no fi if test ! -r "$srcdir/$ac_unique_file"; then test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .." as_fn_error $? "cannot find sources ($ac_unique_file) in $srcdir" fi ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work" ac_abs_confdir=`( cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error $? "$ac_msg" pwd)` # When building in place, set srcdir=. if test "$ac_abs_confdir" = "$ac_pwd"; then srcdir=. fi # Remove unnecessary trailing slashes from srcdir. # Double slashes in file names in object file debugging info # mess up M-x gdb in Emacs. case $srcdir in */) srcdir=`expr "X$srcdir" : 'X\(.*[^/]\)' \| "X$srcdir" : 'X\(.*\)'`;; esac for ac_var in $ac_precious_vars; do eval ac_env_${ac_var}_set=\${${ac_var}+set} eval ac_env_${ac_var}_value=\$${ac_var} eval ac_cv_env_${ac_var}_set=\${${ac_var}+set} eval ac_cv_env_${ac_var}_value=\$${ac_var} done # # Report the --help message. # if test "$ac_init_help" = "long"; then # Omit some internal or obsolete options to make the list less imposing. # This message is too long to be a string in the A/UX 3.1 sh. cat <<_ACEOF \`configure' configures DAR 2.4.8 to adapt to many kinds of systems. Usage: $0 [OPTION]... [VAR=VALUE]... To assign environment variables (e.g., CC, CFLAGS...), specify them as VAR=VALUE. 
See below for descriptions of some of the useful variables. Defaults for the options are specified in brackets. Configuration: -h, --help display this help and exit --help=short display options specific to this package --help=recursive display the short help of all the included packages -V, --version display version information and exit -q, --quiet, --silent do not print \`checking ...' messages --cache-file=FILE cache test results in FILE [disabled] -C, --config-cache alias for \`--cache-file=config.cache' -n, --no-create do not create output files --srcdir=DIR find the sources in DIR [configure dir or \`..'] Installation directories: --prefix=PREFIX install architecture-independent files in PREFIX [$ac_default_prefix] --exec-prefix=EPREFIX install architecture-dependent files in EPREFIX [PREFIX] By default, \`make install' will install all the files in \`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc. You can specify an installation prefix other than \`$ac_default_prefix' using \`--prefix', for instance \`--prefix=\$HOME'. For better control, use the options below. Fine tuning of the installation directories: --bindir=DIR user executables [EPREFIX/bin] --sbindir=DIR system admin executables [EPREFIX/sbin] --libexecdir=DIR program executables [EPREFIX/libexec] --sysconfdir=DIR read-only single-machine data [PREFIX/etc] --sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com] --localstatedir=DIR modifiable single-machine data [PREFIX/var] --libdir=DIR object code libraries [EPREFIX/lib] --includedir=DIR C header files [PREFIX/include] --oldincludedir=DIR C header files for non-gcc [/usr/include] --datarootdir=DIR read-only arch.-independent data root [PREFIX/share] --datadir=DIR read-only architecture-independent data [DATAROOTDIR] --infodir=DIR info documentation [DATAROOTDIR/info] --localedir=DIR locale-dependent data [DATAROOTDIR/locale] --mandir=DIR man documentation [DATAROOTDIR/man] --docdir=DIR documentation root [DATAROOTDIR/doc/dar] --htmldir=DIR html documentation [DOCDIR] --dvidir=DIR dvi documentation [DOCDIR] --pdfdir=DIR pdf documentation [DOCDIR] --psdir=DIR ps documentation [DOCDIR] _ACEOF cat <<\_ACEOF Program names: --program-prefix=PREFIX prepend PREFIX to installed program names --program-suffix=SUFFIX append SUFFIX to installed program names --program-transform-name=PROGRAM run sed PROGRAM on installed program names System types: --build=BUILD configure for building on BUILD [guessed] --host=HOST cross-compile to build programs to run on HOST [BUILD] _ACEOF fi if test -n "$ac_init_help"; then case $ac_init_help in short | recursive ) echo "Configuration of DAR 2.4.8:";; esac cat <<\_ACEOF Optional Features: --disable-option-checking ignore unrecognized --enable/--with options --disable-FEATURE do not include FEATURE (same as --enable-FEATURE=no) --enable-FEATURE[=ARG] include FEATURE [ARG=yes] --disable-nls do not use Native Language Support --disable-dependency-tracking speeds up one-time build --enable-dependency-tracking do not reject slow dependency extractors --disable-rpath do not hardcode runtime library paths --enable-shared[=PKGS] build shared libraries [default=yes] --enable-static[=PKGS] build static libraries [default=yes] --enable-fast-install[=PKGS] optimize for fast installation [default=yes] --disable-libtool-lock avoid locking (might break parallel builds) --disable-libdl-linking ignore any libdl and avoid linking against it --disable-largefile omit support for large files --disable-libz-linking disable linking with libz and 
disable libz compression support --disable-libbz2-linking disable linking with libbz2 and disables libbz2 compression support --disable-liblzo2-linking disable linking with liblzo2 and disables lzo compression support --disable-libgcrypt-linking disable linking with libgcrypt which disables strong encryption support --disable-ea-support disable Extended Attributes support --disable-nodump-flag deactivate the "ext2/3 nodump flag" feature check --disable-gnugetopt avoid linking with libgnugetopt --enable-examples build example and testing programs --enable-os-bits=arg arg is 32 or 64, if for some reason one wants to override the detected system value --enable-mode=arg where arg is 32 or 64. Makes dar internally use 32 bits or 64 bits integers in place of infinint (which is the case if this option is not given) --disable-furtive-read ignore furtive read mode availability on systems that support it --enable-debug build targets with debugging option and no optimization --enable-pedantic enable pedantic syntax check at compilation, use only for debugging purposes! --enable-build-usage re-build C++ generated usage files with Chris Martin's dar-help program, non-developers should not activate that option --disable-build-html don't build programming documentation (in particular libdar API documentation) and html man page --disable-special-alloc special memory allocation scheme makes dar use less memory and run a bit faster, in particular in conjunction with --enable-mode=32 or 64, you can disable it if you like --disable-upx by default configure looks for UPX and if available makes executables compressed at installation time, you can disable this feature --disable-fast-dir disable optimization for large directories, doing so has a little positive impact on memory requirement but a huge drawback on execution time --disable-thread-safe libdar is thread safe if POSIX mutexes are available, you can manually disable the use of POSIX mutexes, the resulting libdar library will not be thread-safe anymore unless special-alloc is disabled too --disable-execinfo disable reporting stack information on self diagnostic bugs even --enable-profiling enable executable profiling --enable-debug-memory log memory allocations and releases to /tmp/dar_debug_mem_allocation.txt this debugging option leads to a slow executable --disable-dar-static avoids building dar_static, a dar statically linked version Optional Packages: --with-PACKAGE[=ARG] use PACKAGE [ARG=yes] --without-PACKAGE do not use PACKAGE (same as --with-PACKAGE=no) --with-gnu-ld assume the C compiler uses GNU ld default=no --with-libiconv-prefix[=DIR] search for libiconv in DIR/include and DIR/lib --without-libiconv-prefix don't search for libiconv in includedir and libdir --with-libintl-prefix[=DIR] search for libintl in DIR/include and DIR/lib --without-libintl-prefix don't search for libintl in includedir and libdir --with-pic try to use only PIC/non-PIC objects [default=use both] --with-gnu-ld assume the C compiler uses GNU ld [default=no] Some influential environment variables: CC C compiler command CFLAGS C compiler flags LDFLAGS linker flags, e.g. -L<lib dir> if you have libraries in a nonstandard directory LIBS libraries to pass to the linker, e.g. -l<library> CPPFLAGS (Objective) C/C++ preprocessor flags, e.g.
-I if you have headers in a nonstandard directory CXX C++ compiler command CXXFLAGS C++ compiler flags CXXCPP C++ preprocessor CPP C preprocessor Use these variables to override the choices made by `configure' or to help it to find libraries and programs with nonstandard names/locations. Report bugs to . _ACEOF ac_status=$? fi if test "$ac_init_help" = "recursive"; then # If there are subdirs, report their specific --help. for ac_dir in : $ac_subdirs_all; do test "x$ac_dir" = x: && continue test -d "$ac_dir" || { cd "$srcdir" && ac_pwd=`pwd` && srcdir=. && test -d "$ac_dir"; } || continue ac_builddir=. case "$ac_dir" in .) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; *) ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` # A ".." for each directory in $ac_dir_suffix. ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` case $ac_top_builddir_sub in "") ac_top_builddir_sub=. ac_top_build_prefix= ;; *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; esac ;; esac ac_abs_top_builddir=$ac_pwd ac_abs_builddir=$ac_pwd$ac_dir_suffix # for backward compatibility: ac_top_builddir=$ac_top_build_prefix case $srcdir in .) # We are building in place. ac_srcdir=. ac_top_srcdir=$ac_top_builddir_sub ac_abs_top_srcdir=$ac_pwd ;; [\\/]* | ?:[\\/]* ) # Absolute name. ac_srcdir=$srcdir$ac_dir_suffix; ac_top_srcdir=$srcdir ac_abs_top_srcdir=$srcdir ;; *) # Relative name. ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix ac_top_srcdir=$ac_top_build_prefix$srcdir ac_abs_top_srcdir=$ac_pwd/$srcdir ;; esac ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix cd "$ac_dir" || { ac_status=$?; continue; } # Check for guested configure. if test -f "$ac_srcdir/configure.gnu"; then echo && $SHELL "$ac_srcdir/configure.gnu" --help=recursive elif test -f "$ac_srcdir/configure"; then echo && $SHELL "$ac_srcdir/configure" --help=recursive else $as_echo "$as_me: WARNING: no configuration information is in $ac_dir" >&2 fi || ac_status=$? cd "$ac_pwd" || { ac_status=$?; break; } done fi test -n "$ac_init_help" && exit $ac_status if $ac_init_version; then cat <<\_ACEOF DAR configure 2.4.8 generated by GNU Autoconf 2.67 Copyright (C) 2010 Free Software Foundation, Inc. This configure script is free software; the Free Software Foundation gives unlimited permission to copy, distribute and modify it. _ACEOF exit fi ## ------------------------ ## ## Autoconf initialization. ## ## ------------------------ ## # ac_fn_c_try_compile LINENO # -------------------------- # Try to compile conftest.$ac_ext, and return whether this succeeded. ac_fn_c_try_compile () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack rm -f conftest.$ac_objext if { { ac_try="$ac_compile" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_compile") 2>conftest.err ac_status=$? if test -s conftest.err; then grep -v '^ *+' conftest.err >conftest.er1 cat conftest.er1 >&5 mv -f conftest.er1 conftest.err fi $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && { test -z "$ac_c_werror_flag" || test ! 
-s conftest.err } && test -s conftest.$ac_objext; then : ac_retval=0 else $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=1 fi eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} as_fn_set_status $ac_retval } # ac_fn_c_try_compile # ac_fn_cxx_try_compile LINENO # ---------------------------- # Try to compile conftest.$ac_ext, and return whether this succeeded. ac_fn_cxx_try_compile () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack rm -f conftest.$ac_objext if { { ac_try="$ac_compile" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_compile") 2>conftest.err ac_status=$? if test -s conftest.err; then grep -v '^ *+' conftest.err >conftest.er1 cat conftest.er1 >&5 mv -f conftest.er1 conftest.err fi $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && { test -z "$ac_cxx_werror_flag" || test ! -s conftest.err } && test -s conftest.$ac_objext; then : ac_retval=0 else $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=1 fi eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} as_fn_set_status $ac_retval } # ac_fn_cxx_try_compile # ac_fn_cxx_try_cpp LINENO # ------------------------ # Try to preprocess conftest.$ac_ext, and return whether this succeeded. ac_fn_cxx_try_cpp () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack if { { ac_try="$ac_cpp conftest.$ac_ext" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_cpp conftest.$ac_ext") 2>conftest.err ac_status=$? if test -s conftest.err; then grep -v '^ *+' conftest.err >conftest.er1 cat conftest.er1 >&5 mv -f conftest.er1 conftest.err fi $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } > conftest.i && { test -z "$ac_cxx_preproc_warn_flag$ac_cxx_werror_flag" || test ! -s conftest.err }; then : ac_retval=0 else $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=1 fi eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} as_fn_set_status $ac_retval } # ac_fn_cxx_try_cpp # ac_fn_cxx_try_link LINENO # ------------------------- # Try to link conftest.$ac_ext, and return whether this succeeded. ac_fn_cxx_try_link () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack rm -f conftest.$ac_objext conftest$ac_exeext if { { ac_try="$ac_link" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_link") 2>conftest.err ac_status=$? if test -s conftest.err; then grep -v '^ *+' conftest.err >conftest.er1 cat conftest.er1 >&5 mv -f conftest.er1 conftest.err fi $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && { test -z "$ac_cxx_werror_flag" || test ! 
-s conftest.err } && test -s conftest$ac_exeext && { test "$cross_compiling" = yes || $as_test_x conftest$ac_exeext }; then : ac_retval=0 else $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=1 fi # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would # interfere with the next link command; also delete a directory that is # left behind by Apple's compiler. We do this before executing the actions. rm -rf conftest.dSYM conftest_ipa8_conftest.oo eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} as_fn_set_status $ac_retval } # ac_fn_cxx_try_link # ac_fn_cxx_try_run LINENO # ------------------------ # Try to link conftest.$ac_ext, and return whether this succeeded. Assumes # that executables *can* be run. ac_fn_cxx_try_run () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack if { { ac_try="$ac_link" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_link") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && { ac_try='./conftest$ac_exeext' { { case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_try") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; }; then : ac_retval=0 else $as_echo "$as_me: program exited with status $ac_status" >&5 $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=$ac_status fi rm -rf conftest.dSYM conftest_ipa8_conftest.oo eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} as_fn_set_status $ac_retval } # ac_fn_cxx_try_run # ac_fn_c_try_link LINENO # ----------------------- # Try to link conftest.$ac_ext, and return whether this succeeded. ac_fn_c_try_link () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack rm -f conftest.$ac_objext conftest$ac_exeext if { { ac_try="$ac_link" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_link") 2>conftest.err ac_status=$? if test -s conftest.err; then grep -v '^ *+' conftest.err >conftest.er1 cat conftest.er1 >&5 mv -f conftest.er1 conftest.err fi $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && { test -z "$ac_c_werror_flag" || test ! -s conftest.err } && test -s conftest$ac_exeext && { test "$cross_compiling" = yes || $as_test_x conftest$ac_exeext }; then : ac_retval=0 else $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=1 fi # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would # interfere with the next link command; also delete a directory that is # left behind by Apple's compiler. We do this before executing the actions. 
rm -rf conftest.dSYM conftest_ipa8_conftest.oo eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} as_fn_set_status $ac_retval } # ac_fn_c_try_link # ac_fn_c_check_header_compile LINENO HEADER VAR INCLUDES # ------------------------------------------------------- # Tests whether HEADER exists and can be compiled using the include files in # INCLUDES, setting the cache variable VAR accordingly. ac_fn_c_check_header_compile () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 $as_echo_n "checking for $2... " >&6; } if eval "test \"\${$3+set}\"" = set; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $4 #include <$2> _ACEOF if ac_fn_c_try_compile "$LINENO"; then : eval "$3=yes" else eval "$3=no" fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi eval ac_res=\$$3 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} } # ac_fn_c_check_header_compile # ac_fn_c_try_cpp LINENO # ---------------------- # Try to preprocess conftest.$ac_ext, and return whether this succeeded. ac_fn_c_try_cpp () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack if { { ac_try="$ac_cpp conftest.$ac_ext" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_cpp conftest.$ac_ext") 2>conftest.err ac_status=$? if test -s conftest.err; then grep -v '^ *+' conftest.err >conftest.er1 cat conftest.er1 >&5 mv -f conftest.er1 conftest.err fi $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } > conftest.i && { test -z "$ac_c_preproc_warn_flag$ac_c_werror_flag" || test ! -s conftest.err }; then : ac_retval=0 else $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=1 fi eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} as_fn_set_status $ac_retval } # ac_fn_c_try_cpp # ac_fn_c_try_run LINENO # ---------------------- # Try to link conftest.$ac_ext, and return whether this succeeded. Assumes # that executables *can* be run. ac_fn_c_try_run () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack if { { ac_try="$ac_link" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_link") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && { ac_try='./conftest$ac_exeext' { { case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_try") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 test $ac_status = 0; }; }; then : ac_retval=0 else $as_echo "$as_me: program exited with status $ac_status" >&5 $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=$ac_status fi rm -rf conftest.dSYM conftest_ipa8_conftest.oo eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} as_fn_set_status $ac_retval } # ac_fn_c_try_run # ac_fn_c_check_func LINENO FUNC VAR # ---------------------------------- # Tests whether FUNC exists, setting the cache variable VAR accordingly ac_fn_c_check_func () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 $as_echo_n "checking for $2... " >&6; } if eval "test \"\${$3+set}\"" = set; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Define $2 to an innocuous variant, in case declares $2. For example, HP-UX 11i declares gettimeofday. */ #define $2 innocuous_$2 /* System header to define __stub macros and hopefully few prototypes, which can conflict with char $2 (); below. Prefer to if __STDC__ is defined, since exists even on freestanding compilers. */ #ifdef __STDC__ # include #else # include #endif #undef $2 /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char $2 (); /* The GNU C library defines this for functions which it implements to always fail with ENOSYS. Some functions are actually named something starting with __ and the normal name is an alias. */ #if defined __stub_$2 || defined __stub___$2 choke me #endif int main () { return $2 (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$3=yes" else eval "$3=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi eval ac_res=\$$3 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} } # ac_fn_c_check_func # ac_fn_cxx_check_header_mongrel LINENO HEADER VAR INCLUDES # --------------------------------------------------------- # Tests whether HEADER exists, giving a warning if it cannot be compiled using # the include files in INCLUDES and setting the cache variable VAR # accordingly. ac_fn_cxx_check_header_mongrel () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack if eval "test \"\${$3+set}\"" = set; then : { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 $as_echo_n "checking for $2... " >&6; } if eval "test \"\${$3+set}\"" = set; then : $as_echo_n "(cached) " >&6 fi eval ac_res=\$$3 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } else # Is the header compilable? { $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 usability" >&5 $as_echo_n "checking $2 usability... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $4 #include <$2> _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_header_compiler=yes else ac_header_compiler=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_compiler" >&5 $as_echo "$ac_header_compiler" >&6; } # Is the header present? { $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 presence" >&5 $as_echo_n "checking $2 presence... 
" >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include <$2> _ACEOF if ac_fn_cxx_try_cpp "$LINENO"; then : ac_header_preproc=yes else ac_header_preproc=no fi rm -f conftest.err conftest.i conftest.$ac_ext { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_preproc" >&5 $as_echo "$ac_header_preproc" >&6; } # So? What about this header? case $ac_header_compiler:$ac_header_preproc:$ac_cxx_preproc_warn_flag in #(( yes:no: ) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&5 $as_echo "$as_me: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 $as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} ;; no:yes:* ) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: present but cannot be compiled" >&5 $as_echo "$as_me: WARNING: $2: present but cannot be compiled" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: check for missing prerequisite headers?" >&5 $as_echo "$as_me: WARNING: $2: check for missing prerequisite headers?" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: see the Autoconf documentation" >&5 $as_echo "$as_me: WARNING: $2: see the Autoconf documentation" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&5 $as_echo "$as_me: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 $as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} ( $as_echo "## ------------------------------------------------------------------------- ## ## Report this to http://sourceforge.net/tracker/?group_id=65612&atid=511612 ## ## ------------------------------------------------------------------------- ##" ) | sed "s/^/$as_me: WARNING: /" >&2 ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 $as_echo_n "checking for $2... " >&6; } if eval "test \"\${$3+set}\"" = set; then : $as_echo_n "(cached) " >&6 else eval "$3=\$ac_header_compiler" fi eval ac_res=\$$3 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } fi eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} } # ac_fn_cxx_check_header_mongrel # ac_fn_cxx_check_type LINENO TYPE VAR INCLUDES # --------------------------------------------- # Tests whether TYPE exists after having included INCLUDES, setting cache # variable VAR accordingly. ac_fn_cxx_check_type () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 $as_echo_n "checking for $2... " >&6; } if eval "test \"\${$3+set}\"" = set; then : $as_echo_n "(cached) " >&6 else eval "$3=no" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $4 int main () { if (sizeof ($2)) return 0; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ $4 int main () { if (sizeof (($2))) return 0; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : else eval "$3=yes" fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi eval ac_res=\$$3 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} } # ac_fn_cxx_check_type # ac_fn_cxx_check_member LINENO AGGR MEMBER VAR INCLUDES # ------------------------------------------------------ # Tries to find if the field MEMBER exists in type AGGR, after including # INCLUDES, setting cache variable VAR accordingly. ac_fn_cxx_check_member () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2.$3" >&5 $as_echo_n "checking for $2.$3... " >&6; } if eval "test \"\${$4+set}\"" = set; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $5 int main () { static $2 ac_aggr; if (ac_aggr.$3) return 0; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : eval "$4=yes" else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $5 int main () { static $2 ac_aggr; if (sizeof ac_aggr.$3) return 0; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : eval "$4=yes" else eval "$4=no" fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi eval ac_res=\$$4 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} } # ac_fn_cxx_check_member # ac_fn_cxx_check_decl LINENO SYMBOL VAR INCLUDES # ----------------------------------------------- # Tests whether SYMBOL is declared in INCLUDES, setting cache variable VAR # accordingly. ac_fn_cxx_check_decl () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack as_decl_name=`echo $2|sed 's/ *(.*//'` as_decl_use=`echo $2|sed -e 's/(/((/' -e 's/)/) 0&/' -e 's/,/) 0& (/g'` { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $as_decl_name is declared" >&5 $as_echo_n "checking whether $as_decl_name is declared... " >&6; } if eval "test \"\${$3+set}\"" = set; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $4 int main () { #ifndef $as_decl_name #ifdef __cplusplus (void) $as_decl_use; #else (void) $as_decl_name; #endif #endif ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : eval "$3=yes" else eval "$3=no" fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi eval ac_res=\$$3 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} } # ac_fn_cxx_check_decl # ac_fn_cxx_compute_int LINENO EXPR VAR INCLUDES # ---------------------------------------------- # Tries to find the compile-time value of EXPR in a program that includes # INCLUDES, setting VAR accordingly. Returns whether the value could be # computed ac_fn_cxx_compute_int () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack if test "$cross_compiling" = yes; then # Depending upon the size, compute the lo and hi bounds. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ $4 int main () { static int test_array [1 - 2 * !(($2) >= 0)]; test_array [0] = 0 ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_lo=0 ac_mid=0 while :; do cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $4 int main () { static int test_array [1 - 2 * !(($2) <= $ac_mid)]; test_array [0] = 0 ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_hi=$ac_mid; break else as_fn_arith $ac_mid + 1 && ac_lo=$as_val if test $ac_lo -le $ac_mid; then ac_lo= ac_hi= break fi as_fn_arith 2 '*' $ac_mid + 1 && ac_mid=$as_val fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext done else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $4 int main () { static int test_array [1 - 2 * !(($2) < 0)]; test_array [0] = 0 ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_hi=-1 ac_mid=-1 while :; do cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $4 int main () { static int test_array [1 - 2 * !(($2) >= $ac_mid)]; test_array [0] = 0 ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_lo=$ac_mid; break else as_fn_arith '(' $ac_mid ')' - 1 && ac_hi=$as_val if test $ac_mid -le $ac_hi; then ac_lo= ac_hi= break fi as_fn_arith 2 '*' $ac_mid && ac_mid=$as_val fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext done else ac_lo= ac_hi= fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext # Binary search between lo and hi bounds. while test "x$ac_lo" != "x$ac_hi"; do as_fn_arith '(' $ac_hi - $ac_lo ')' / 2 + $ac_lo && ac_mid=$as_val cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $4 int main () { static int test_array [1 - 2 * !(($2) <= $ac_mid)]; test_array [0] = 0 ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_hi=$ac_mid else as_fn_arith '(' $ac_mid ')' + 1 && ac_lo=$as_val fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext done case $ac_lo in #(( ?*) eval "$3=\$ac_lo"; ac_retval=0 ;; '') ac_retval=1 ;; esac else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $4 static long int longval () { return $2; } static unsigned long int ulongval () { return $2; } #include <stdio.h> #include <stdlib.h> int main () { FILE *f = fopen ("conftest.val", "w"); if (! f) return 1; if (($2) < 0) { long int i = longval (); if (i != ($2)) return 1; fprintf (f, "%ld", i); } else { unsigned long int i = ulongval (); if (i != ($2)) return 1; fprintf (f, "%lu", i); } /* Do not output a trailing newline, as this causes \r\n confusion on some platforms. */ return ferror (f) || fclose (f) != 0; ; return 0; } _ACEOF if ac_fn_cxx_try_run "$LINENO"; then : echo >>conftest.val; read $3 <conftest.val; ac_retval=0 else ac_retval=1 fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext rm -f conftest.val fi eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} as_fn_set_status $ac_retval } # ac_fn_cxx_compute_int # ac_fn_cxx_check_func LINENO FUNC VAR # ------------------------------------ # Tests whether FUNC exists, setting the cache variable VAR accordingly ac_fn_cxx_check_func () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 $as_echo_n "checking for $2... " >&6; } if eval "test \"\${$3+set}\"" = set; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Define $2 to an innocuous variant, in case <limits.h> declares $2. For example, HP-UX 11i <limits.h> declares gettimeofday. */ #define $2 innocuous_$2 /* System header to define __stub macros and hopefully few prototypes, which can conflict with char $2 (); below. Prefer <limits.h> to <assert.h> if __STDC__ is defined, since <limits.h> exists even on freestanding compilers. */ #ifdef __STDC__ # include <limits.h> #else # include <assert.h> #endif #undef $2 /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char $2 (); /* The GNU C library defines this for functions which it implements to always fail with ENOSYS. Some functions are actually named something starting with __ and the normal name is an alias. */ #if defined __stub_$2 || defined __stub___$2 choke me #endif int main () { return $2 (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : eval "$3=yes" else eval "$3=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi eval ac_res=\$$3 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} } # ac_fn_cxx_check_func # ac_fn_cxx_check_header_compile LINENO HEADER VAR INCLUDES # --------------------------------------------------------- # Tests whether HEADER exists and can be compiled using the include files in # INCLUDES, setting the cache variable VAR accordingly. ac_fn_cxx_check_header_compile () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 $as_echo_n "checking for $2... " >&6; } if eval "test \"\${$3+set}\"" = set; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $4 #include <$2> _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : eval "$3=yes" else eval "$3=no" fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi eval ac_res=\$$3 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } eval $as_lineno_stack; test "x$as_lineno_stack" = x && { as_lineno=; unset as_lineno;} } # ac_fn_cxx_check_header_compile cat >config.log <<_ACEOF This file contains any messages produced by compilers while running configure, to aid debugging if configure makes a mistake. It was created by DAR $as_me 2.4.8, which was generated by GNU Autoconf 2.67. Invocation command line was $ $0 $@ _ACEOF exec 5>>config.log { cat <<_ASUNAME ## --------- ## ## Platform. ## ## --------- ## hostname = `(hostname || uname -n) 2>/dev/null | sed 1q` uname -m = `(uname -m) 2>/dev/null || echo unknown` uname -r = `(uname -r) 2>/dev/null || echo unknown` uname -s = `(uname -s) 2>/dev/null || echo unknown` uname -v = `(uname -v) 2>/dev/null || echo unknown` /usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown` /bin/uname -X = `(/bin/uname -X) 2>/dev/null || echo unknown` /bin/arch = `(/bin/arch) 2>/dev/null || echo unknown` /usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null || echo unknown` /usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown` /usr/bin/hostinfo = `(/usr/bin/hostinfo) 2>/dev/null || echo unknown` /bin/machine = `(/bin/machine) 2>/dev/null || echo unknown` /usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null || echo unknown` /bin/universe = `(/bin/universe) 2>/dev/null || echo unknown` _ASUNAME as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. $as_echo "PATH: $as_dir" done IFS=$as_save_IFS } >&5 cat >&5 <<_ACEOF ## ----------- ## ## Core tests. ## ## ----------- ## _ACEOF # Keep a trace of the command line. # Strip out --no-create and --no-recursion so they do not pile up. # Strip out --silent because we don't want to record it for future runs. # Also quote any args containing shell meta-characters. # Make two passes to allow for proper duplicate-argument suppression. 
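# For illustration only (this comment is not generated by Autoconf, and the
# argument value is hypothetical): the quoting pass below rewrites embedded
# single quotes, so an argument such as
#   ./configure --prefix=/opt/o'brien
# is recorded in $ac_configure_args as '--prefix=/opt/o'\''brien', which
# `config.status --recheck' can later re-execute verbatim.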
ac_configure_args= ac_configure_args0= ac_configure_args1= ac_must_keep_next=false for ac_pass in 1 2 do for ac_arg do case $ac_arg in -no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;; -q | -quiet | --quiet | --quie | --qui | --qu | --q \ | -silent | --silent | --silen | --sile | --sil) continue ;; *\'*) ac_arg=`$as_echo "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;; esac case $ac_pass in 1) as_fn_append ac_configure_args0 " '$ac_arg'" ;; 2) as_fn_append ac_configure_args1 " '$ac_arg'" if test $ac_must_keep_next = true; then ac_must_keep_next=false # Got value, back to normal. else case $ac_arg in *=* | --config-cache | -C | -disable-* | --disable-* \ | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \ | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \ | -with-* | --with-* | -without-* | --without-* | --x) case "$ac_configure_args0 " in "$ac_configure_args1"*" '$ac_arg' "* ) continue ;; esac ;; -* ) ac_must_keep_next=true ;; esac fi as_fn_append ac_configure_args " '$ac_arg'" ;; esac done done { ac_configure_args0=; unset ac_configure_args0;} { ac_configure_args1=; unset ac_configure_args1;} # When interrupted or exit'd, cleanup temporary files, and complete # config.log. We remove comments because anyway the quotes in there # would cause problems or look ugly. # WARNING: Use '\'' to represent an apostrophe within the trap. # WARNING: Do not start the trap code with a newline, due to a FreeBSD 4.0 bug. trap 'exit_status=$? # Save into config.log some information that might help in debugging. { echo $as_echo "## ---------------- ## ## Cache variables. ## ## ---------------- ##" echo # The following way of writing the cache mishandles newlines in values, ( for ac_var in `(set) 2>&1 | sed -n '\''s/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'\''`; do eval ac_val=\$$ac_var case $ac_val in #( *${as_nl}*) case $ac_var in #( *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 $as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; esac case $ac_var in #( _ | IFS | as_nl) ;; #( BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( *) { eval $ac_var=; unset $ac_var;} ;; esac ;; esac done (set) 2>&1 | case $as_nl`(ac_space='\'' '\''; set) 2>&1` in #( *${as_nl}ac_space=\ *) sed -n \ "s/'\''/'\''\\\\'\'''\''/g; s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\''\\2'\''/p" ;; #( *) sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" ;; esac | sort ) echo $as_echo "## ----------------- ## ## Output variables. ## ## ----------------- ##" echo for ac_var in $ac_subst_vars do eval ac_val=\$$ac_var case $ac_val in *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; esac $as_echo "$ac_var='\''$ac_val'\''" done | sort echo if test -n "$ac_subst_files"; then $as_echo "## ------------------- ## ## File substitutions. ## ## ------------------- ##" echo for ac_var in $ac_subst_files do eval ac_val=\$$ac_var case $ac_val in *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; esac $as_echo "$ac_var='\''$ac_val'\''" done | sort echo fi if test -s confdefs.h; then $as_echo "## ----------- ## ## confdefs.h. 
## ## ----------- ##" echo cat confdefs.h echo fi test "$ac_signal" != 0 && $as_echo "$as_me: caught signal $ac_signal" $as_echo "$as_me: exit $exit_status" } >&5 rm -f core *.core core.conftest.* && rm -f -r conftest* confdefs* conf$$* $ac_clean_files && exit $exit_status ' 0 for ac_signal in 1 2 13 15; do trap 'ac_signal='$ac_signal'; as_fn_exit 1' $ac_signal done ac_signal=0 # confdefs.h avoids OS command line length limits that DEFS can exceed. rm -f -r conftest* confdefs.h $as_echo "/* confdefs.h */" > confdefs.h # Predefined preprocessor variables. cat >>confdefs.h <<_ACEOF #define PACKAGE_NAME "$PACKAGE_NAME" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_TARNAME "$PACKAGE_TARNAME" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_VERSION "$PACKAGE_VERSION" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_STRING "$PACKAGE_STRING" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_BUGREPORT "$PACKAGE_BUGREPORT" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_URL "$PACKAGE_URL" _ACEOF # Let the site file select an alternate cache file if it wants to. # Prefer an explicitly selected file to automatically selected ones. ac_site_file1=NONE ac_site_file2=NONE if test -n "$CONFIG_SITE"; then # We do not want a PATH search for config.site. case $CONFIG_SITE in #(( -*) ac_site_file1=./$CONFIG_SITE;; */*) ac_site_file1=$CONFIG_SITE;; *) ac_site_file1=./$CONFIG_SITE;; esac elif test "x$prefix" != xNONE; then ac_site_file1=$prefix/share/config.site ac_site_file2=$prefix/etc/config.site else ac_site_file1=$ac_default_prefix/share/config.site ac_site_file2=$ac_default_prefix/etc/config.site fi for ac_site_file in "$ac_site_file1" "$ac_site_file2" do test "x$ac_site_file" = xNONE && continue if test /dev/null != "$ac_site_file" && test -r "$ac_site_file"; then { $as_echo "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5 $as_echo "$as_me: loading site script $ac_site_file" >&6;} sed 's/^/| /' "$ac_site_file" >&5 . "$ac_site_file" \ || { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "failed to load site script $ac_site_file See \`config.log' for more details" "$LINENO" 5 ; } fi done if test -r "$cache_file"; then # Some versions of bash will fail to source /dev/null (special files # actually), so we avoid doing that. DJGPP emulates it as a regular file. if test /dev/null != "$cache_file" && test -f "$cache_file"; then { $as_echo "$as_me:${as_lineno-$LINENO}: loading cache $cache_file" >&5 $as_echo "$as_me: loading cache $cache_file" >&6;} case $cache_file in [\\/]* | ?:[\\/]* ) . "$cache_file";; *) . "./$cache_file";; esac fi else { $as_echo "$as_me:${as_lineno-$LINENO}: creating cache $cache_file" >&5 $as_echo "$as_me: creating cache $cache_file" >&6;} >$cache_file fi gt_needs="$gt_needs " as_fn_append ac_header_list " utime.h" # Check that the precious variables saved in the cache have kept the same # value. 
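# For illustration only (not part of the Autoconf output): the "precious"
# variables checked below are those declared with AC_ARG_VAR (CC, CXX,
# CFLAGS, CPPFLAGS, ...). If the cache was created with CC=gcc and configure
# is later re-run with CC=clang, the loop reports that `CC' has changed since
# the previous run and stops with the advice to run `make distclean' and/or
# remove the cache file.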
ac_cache_corrupted=false for ac_var in $ac_precious_vars; do eval ac_old_set=\$ac_cv_env_${ac_var}_set eval ac_new_set=\$ac_env_${ac_var}_set eval ac_old_val=\$ac_cv_env_${ac_var}_value eval ac_new_val=\$ac_env_${ac_var}_value case $ac_old_set,$ac_new_set in set,) { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&5 $as_echo "$as_me: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&2;} ac_cache_corrupted=: ;; ,set) { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was not set in the previous run" >&5 $as_echo "$as_me: error: \`$ac_var' was not set in the previous run" >&2;} ac_cache_corrupted=: ;; ,);; *) if test "x$ac_old_val" != "x$ac_new_val"; then # differences in whitespace do not lead to failure. ac_old_val_w=`echo x $ac_old_val` ac_new_val_w=`echo x $ac_new_val` if test "$ac_old_val_w" != "$ac_new_val_w"; then { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' has changed since the previous run:" >&5 $as_echo "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;} ac_cache_corrupted=: else { $as_echo "$as_me:${as_lineno-$LINENO}: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&5 $as_echo "$as_me: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&2;} eval $ac_var=\$ac_old_val fi { $as_echo "$as_me:${as_lineno-$LINENO}: former value: \`$ac_old_val'" >&5 $as_echo "$as_me: former value: \`$ac_old_val'" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: current value: \`$ac_new_val'" >&5 $as_echo "$as_me: current value: \`$ac_new_val'" >&2;} fi;; esac # Pass precious variables to config.status. if test "$ac_new_set" = set; then case $ac_new_val in *\'*) ac_arg=$ac_var=`$as_echo "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;; *) ac_arg=$ac_var=$ac_new_val ;; esac case " $ac_configure_args " in *" '$ac_arg' "*) ;; # Avoid dups. Use of quotes ensures accuracy. *) as_fn_append ac_configure_args " '$ac_arg'" ;; esac fi done if $ac_cache_corrupted; then { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5 $as_echo "$as_me: error: changes in the environment can compromise the build" >&2;} as_fn_error $? "run \`make distclean' and/or \`rm $cache_file' and start over" "$LINENO" 5 fi ## -------------------- ## ## Main body of script. ## ## -------------------- ## ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu ac_config_headers="$ac_config_headers config.h" ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu cat >>confdefs.h <<_ACEOF #define DAR_VERSION "2.4.8" _ACEOF am__api_version='1.11' ac_aux_dir= for ac_dir in "$srcdir" "$srcdir/.." "$srcdir/../.."; do if test -f "$ac_dir/install-sh"; then ac_aux_dir=$ac_dir ac_install_sh="$ac_aux_dir/install-sh -c" break elif test -f "$ac_dir/install.sh"; then ac_aux_dir=$ac_dir ac_install_sh="$ac_aux_dir/install.sh -c" break elif test -f "$ac_dir/shtool"; then ac_aux_dir=$ac_dir ac_install_sh="$ac_aux_dir/shtool install -c" break fi done if test -z "$ac_aux_dir"; then as_fn_error $? 
"cannot find install-sh, install.sh, or shtool in \"$srcdir\" \"$srcdir/..\" \"$srcdir/../..\"" "$LINENO" 5 fi # These three variables are undocumented and unsupported, # and are intended to be withdrawn in a future Autoconf release. # They can cause serious problems if a builder's source tree is in a directory # whose full name contains unusual characters. ac_config_guess="$SHELL $ac_aux_dir/config.guess" # Please don't use this var. ac_config_sub="$SHELL $ac_aux_dir/config.sub" # Please don't use this var. ac_configure="$SHELL $ac_aux_dir/configure" # Please don't use this var. # Find a good install program. We prefer a C program (faster), # so one script is as good as another. But avoid the broken or # incompatible versions: # SysV /etc/install, /usr/sbin/install # SunOS /usr/etc/install # IRIX /sbin/install # AIX /bin/install # AmigaOS /C/install, which installs bootblocks on floppy discs # AIX 4 /usr/bin/installbsd, which doesn't work without a -g flag # AFS /usr/afsws/bin/install, which mishandles nonexistent args # SVR4 /usr/ucb/install, which tries to use the nonexistent group "staff" # OS/2's system install, which has a completely different semantic # ./install, which can be erroneously created by make from ./install.sh. # Reject install programs that cannot install multiple files. { $as_echo "$as_me:${as_lineno-$LINENO}: checking for a BSD-compatible install" >&5 $as_echo_n "checking for a BSD-compatible install... " >&6; } if test -z "$INSTALL"; then if test "${ac_cv_path_install+set}" = set; then : $as_echo_n "(cached) " >&6 else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. # Account for people who put trailing slashes in PATH elements. case $as_dir/ in #(( ./ | .// | /[cC]/* | \ /etc/* | /usr/sbin/* | /usr/etc/* | /sbin/* | /usr/afsws/bin/* | \ ?:[\\/]os2[\\/]install[\\/]* | ?:[\\/]OS2[\\/]INSTALL[\\/]* | \ /usr/ucb/* ) ;; *) # OSF1 and SCO ODT 3.0 have their own names for install. # Don't use installbsd from OSF since it installs stuff as root # by default. for ac_prog in ginstall scoinst install; do for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_prog$ac_exec_ext" && $as_test_x "$as_dir/$ac_prog$ac_exec_ext"; }; then if test $ac_prog = install && grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then # AIX install. It has an incompatible calling convention. : elif test $ac_prog = install && grep pwplus "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then # program-specific install script used by HP pwplus--don't use. : else rm -rf conftest.one conftest.two conftest.dir echo one > conftest.one echo two > conftest.two mkdir conftest.dir if "$as_dir/$ac_prog$ac_exec_ext" -c conftest.one conftest.two "`pwd`/conftest.dir" && test -s conftest.one && test -s conftest.two && test -s conftest.dir/conftest.one && test -s conftest.dir/conftest.two then ac_cv_path_install="$as_dir/$ac_prog$ac_exec_ext -c" break 3 fi fi fi done done ;; esac done IFS=$as_save_IFS rm -rf conftest.one conftest.two conftest.dir fi if test "${ac_cv_path_install+set}" = set; then INSTALL=$ac_cv_path_install else # As a last resort, use the slow shell script. Don't cache a # value for INSTALL within a source directory, because that will # break other packages using the cache if that directory is # removed, or if the value is a relative name. 
INSTALL=$ac_install_sh fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $INSTALL" >&5 $as_echo "$INSTALL" >&6; } # Use test -z because SunOS4 sh mishandles braces in ${var-val}. # It thinks the first close brace ends the variable substitution. test -z "$INSTALL_PROGRAM" && INSTALL_PROGRAM='${INSTALL}' test -z "$INSTALL_SCRIPT" && INSTALL_SCRIPT='${INSTALL}' test -z "$INSTALL_DATA" && INSTALL_DATA='${INSTALL} -m 644' { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether build environment is sane" >&5 $as_echo_n "checking whether build environment is sane... " >&6; } # Just in case sleep 1 echo timestamp > conftest.file # Reject unsafe characters in $srcdir or the absolute working directory # name. Accept space and tab only in the latter. am_lf=' ' case `pwd` in *[\\\"\#\$\&\'\`$am_lf]*) as_fn_error $? "unsafe absolute working directory name" "$LINENO" 5 ;; esac case $srcdir in *[\\\"\#\$\&\'\`$am_lf\ \ ]*) as_fn_error $? "unsafe srcdir value: \`$srcdir'" "$LINENO" 5 ;; esac # Do `set' in a subshell so we don't clobber the current shell's # arguments. Must try -L first in case configure is actually a # symlink; some systems play weird games with the mod time of symlinks # (eg FreeBSD returns the mod time of the symlink's containing # directory). if ( set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null` if test "$*" = "X"; then # -L didn't work. set X `ls -t "$srcdir/configure" conftest.file` fi rm -f conftest.file if test "$*" != "X $srcdir/configure conftest.file" \ && test "$*" != "X conftest.file $srcdir/configure"; then # If neither matched, then we have a broken ls. This can happen # if, for instance, CONFIG_SHELL is bash and it inherits a # broken ls alias from the environment. This has actually # happened. Such a system could not be considered "sane". as_fn_error $? "ls -t appears to fail. Make sure there is not a broken alias in your environment" "$LINENO" 5 fi test "$2" = conftest.file ) then # Ok. : else as_fn_error $? "newly created file is older than distributed files! Check your system clock" "$LINENO" 5 fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } test "$program_prefix" != NONE && program_transform_name="s&^&$program_prefix&;$program_transform_name" # Use a double $ so make ignores it. test "$program_suffix" != NONE && program_transform_name="s&\$&$program_suffix&;$program_transform_name" # Double any \ or $. # By default was `s,x,x', remove it if useless. ac_script='s/[\\$]/&&/g;s/;s,x,x,$//' program_transform_name=`$as_echo "$program_transform_name" | sed "$ac_script"` # expand $ac_aux_dir to an absolute path am_aux_dir=`cd $ac_aux_dir && pwd` if test x"${MISSING+set}" != xset; then case $am_aux_dir in *\ * | *\ *) MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;; *) MISSING="\${SHELL} $am_aux_dir/missing" ;; esac fi # Use eval to expand $SHELL if eval "$MISSING --run true"; then am_missing_run="$MISSING --run " else am_missing_run= { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`missing' script is too old or missing" >&5 $as_echo "$as_me: WARNING: \`missing' script is too old or missing" >&2;} fi if test x"${install_sh}" != xset; then case $am_aux_dir in *\ * | *\ *) install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;; *) install_sh="\${SHELL} $am_aux_dir/install-sh" esac fi # Installed binaries are usually stripped using `strip' when the user # run `make install-strip'. 
However `strip' might not be the right # tool to use in cross-compilation environments, therefore Automake # will honor the `STRIP' environment variable to overrule this program. if test "$cross_compiling" != no; then if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args. set dummy ${ac_tool_prefix}strip; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_STRIP+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$STRIP"; then ac_cv_prog_STRIP="$STRIP" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_STRIP="${ac_tool_prefix}strip" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi STRIP=$ac_cv_prog_STRIP if test -n "$STRIP"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5 $as_echo "$STRIP" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_STRIP"; then ac_ct_STRIP=$STRIP # Extract the first word of "strip", so it can be a program name with args. set dummy strip; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_ac_ct_STRIP+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_STRIP"; then ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_ac_ct_STRIP="strip" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP if test -n "$ac_ct_STRIP"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5 $as_echo "$ac_ct_STRIP" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_STRIP" = x; then STRIP=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac STRIP=$ac_ct_STRIP fi else STRIP="$ac_cv_prog_STRIP" fi fi INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s" { $as_echo "$as_me:${as_lineno-$LINENO}: checking for a thread-safe mkdir -p" >&5 $as_echo_n "checking for a thread-safe mkdir -p... " >&6; } if test -z "$MKDIR_P"; then if test "${ac_cv_path_mkdir+set}" = set; then : $as_echo_n "(cached) " >&6 else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH$PATH_SEPARATOR/opt/sfw/bin do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_prog in mkdir gmkdir; do for ac_exec_ext in '' $ac_executable_extensions; do { test -f "$as_dir/$ac_prog$ac_exec_ext" && $as_test_x "$as_dir/$ac_prog$ac_exec_ext"; } || continue case `"$as_dir/$ac_prog$ac_exec_ext" --version 2>&1` in #( 'mkdir (GNU coreutils) '* | \ 'mkdir (coreutils) '* | \ 'mkdir (fileutils) '4.1*) ac_cv_path_mkdir=$as_dir/$ac_prog$ac_exec_ext break 3;; esac done done done IFS=$as_save_IFS fi test -d ./--version && rmdir ./--version if test "${ac_cv_path_mkdir+set}" = set; then MKDIR_P="$ac_cv_path_mkdir -p" else # As a last resort, use the slow shell script. Don't cache a # value for MKDIR_P within a source directory, because that will # break other packages using the cache if that directory is # removed, or if the value is a relative name. MKDIR_P="$ac_install_sh -d" fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MKDIR_P" >&5 $as_echo "$MKDIR_P" >&6; } mkdir_p="$MKDIR_P" case $mkdir_p in [\\/$]* | ?:[\\/]*) ;; */*) mkdir_p="\$(top_builddir)/$mkdir_p" ;; esac for ac_prog in gawk mawk nawk awk do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_AWK+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$AWK"; then ac_cv_prog_AWK="$AWK" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_AWK="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi AWK=$ac_cv_prog_AWK if test -n "$AWK"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AWK" >&5 $as_echo "$AWK" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$AWK" && break done { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ${MAKE-make} sets \$(MAKE)" >&5 $as_echo_n "checking whether ${MAKE-make} sets \$(MAKE)... " >&6; } set x ${MAKE-make} ac_make=`$as_echo "$2" | sed 's/+/p/g; s/[^a-zA-Z0-9_]/_/g'` if eval "test \"\${ac_cv_prog_make_${ac_make}_set+set}\"" = set; then : $as_echo_n "(cached) " >&6 else cat >conftest.make <<\_ACEOF SHELL = /bin/sh all: @echo '@@@%%%=$(MAKE)=@@@%%%' _ACEOF # GNU make sometimes prints "make[1]: Entering ...", which would confuse us. case `${MAKE-make} -f conftest.make 2>/dev/null` in *@@@%%%=?*=@@@%%%*) eval ac_cv_prog_make_${ac_make}_set=yes;; *) eval ac_cv_prog_make_${ac_make}_set=no;; esac rm -f conftest.make fi if eval test \$ac_cv_prog_make_${ac_make}_set = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } SET_MAKE= else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } SET_MAKE="MAKE=${MAKE-make}" fi rm -rf .tst 2>/dev/null mkdir .tst 2>/dev/null if test -d .tst; then am__leading_dot=. else am__leading_dot=_ fi rmdir .tst 2>/dev/null if test "`cd $srcdir && pwd`" != "`pwd`"; then # Use -I$(srcdir) only when $(srcdir) != ., so that make's output # is not polluted with repeated "-I." am__isrc=' -I$(srcdir)' # test to see if srcdir already configured if test -f $srcdir/config.status; then as_fn_error $? 
"source directory already configured; run \"make distclean\" there first" "$LINENO" 5 fi fi # test whether we have cygpath if test -z "$CYGPATH_W"; then if (cygpath --version) >/dev/null 2>/dev/null; then CYGPATH_W='cygpath -w' else CYGPATH_W=echo fi fi # Define the identity of the package. PACKAGE='dar' VERSION='2.4.8' cat >>confdefs.h <<_ACEOF #define PACKAGE "$PACKAGE" _ACEOF cat >>confdefs.h <<_ACEOF #define VERSION "$VERSION" _ACEOF # Some tools Automake needs. ACLOCAL=${ACLOCAL-"${am_missing_run}aclocal-${am__api_version}"} AUTOCONF=${AUTOCONF-"${am_missing_run}autoconf"} AUTOMAKE=${AUTOMAKE-"${am_missing_run}automake-${am__api_version}"} AUTOHEADER=${AUTOHEADER-"${am_missing_run}autoheader"} MAKEINFO=${MAKEINFO-"${am_missing_run}makeinfo"} # We need awk for the "check" target. The system "awk" is bad on # some platforms. # Always define AMTAR for backward compatibility. AMTAR=${AMTAR-"${am_missing_run}tar"} am__tar='${AMTAR} chof - "$$tardir"'; am__untar='${AMTAR} xf -' { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether NLS is requested" >&5 $as_echo_n "checking whether NLS is requested... " >&6; } # Check whether --enable-nls was given. if test "${enable_nls+set}" = set; then : enableval=$enable_nls; USE_NLS=$enableval else USE_NLS=yes fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $USE_NLS" >&5 $as_echo "$USE_NLS" >&6; } GETTEXT_MACRO_VERSION=0.18 # Prepare PATH_SEPARATOR. # The user is always right. if test "${PATH_SEPARATOR+set}" != set; then echo "#! /bin/sh" >conf$$.sh echo "exit 0" >>conf$$.sh chmod +x conf$$.sh if (PATH="/nonexistent;."; conf$$.sh) >/dev/null 2>&1; then PATH_SEPARATOR=';' else PATH_SEPARATOR=: fi rm -f conf$$.sh fi # Find out how to test for executable files. Don't use a zero-byte file, # as systems may use methods other than mode bits to determine executability. cat >conf$$.file <<_ASEOF #! /bin/sh exit 0 _ASEOF chmod +x conf$$.file if test -x conf$$.file >/dev/null 2>&1; then ac_executable_p="test -x" else ac_executable_p="test -f" fi rm -f conf$$.file # Extract the first word of "msgfmt", so it can be a program name with args. set dummy msgfmt; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_path_MSGFMT+set}" = set; then : $as_echo_n "(cached) " >&6 else case "$MSGFMT" in [\\/]* | ?:[\\/]*) ac_cv_path_MSGFMT="$MSGFMT" # Let the user override the test with a path. ;; *) ac_save_IFS="$IFS"; IFS=$PATH_SEPARATOR for ac_dir in $PATH; do IFS="$ac_save_IFS" test -z "$ac_dir" && ac_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $ac_executable_p "$ac_dir/$ac_word$ac_exec_ext"; then echo "$as_me: trying $ac_dir/$ac_word..." >&5 if $ac_dir/$ac_word --statistics /dev/null >&5 2>&1 && (if $ac_dir/$ac_word --statistics /dev/null 2>&1 >/dev/null | grep usage >/dev/null; then exit 1; else exit 0; fi); then ac_cv_path_MSGFMT="$ac_dir/$ac_word$ac_exec_ext" break 2 fi fi done done IFS="$ac_save_IFS" test -z "$ac_cv_path_MSGFMT" && ac_cv_path_MSGFMT=":" ;; esac fi MSGFMT="$ac_cv_path_MSGFMT" if test "$MSGFMT" != ":"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MSGFMT" >&5 $as_echo "$MSGFMT" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi # Extract the first word of "gmsgfmt", so it can be a program name with args. set dummy gmsgfmt; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... 
" >&6; } if test "${ac_cv_path_GMSGFMT+set}" = set; then : $as_echo_n "(cached) " >&6 else case $GMSGFMT in [\\/]* | ?:[\\/]*) ac_cv_path_GMSGFMT="$GMSGFMT" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_path_GMSGFMT="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS test -z "$ac_cv_path_GMSGFMT" && ac_cv_path_GMSGFMT="$MSGFMT" ;; esac fi GMSGFMT=$ac_cv_path_GMSGFMT if test -n "$GMSGFMT"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $GMSGFMT" >&5 $as_echo "$GMSGFMT" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi case `$MSGFMT --version | sed 1q | sed -e 's,^[^0-9]*,,'` in '' | 0.[0-9] | 0.[0-9].* | 0.1[0-4] | 0.1[0-4].*) MSGFMT_015=: ;; *) MSGFMT_015=$MSGFMT ;; esac case `$GMSGFMT --version | sed 1q | sed -e 's,^[^0-9]*,,'` in '' | 0.[0-9] | 0.[0-9].* | 0.1[0-4] | 0.1[0-4].*) GMSGFMT_015=: ;; *) GMSGFMT_015=$GMSGFMT ;; esac # Prepare PATH_SEPARATOR. # The user is always right. if test "${PATH_SEPARATOR+set}" != set; then echo "#! /bin/sh" >conf$$.sh echo "exit 0" >>conf$$.sh chmod +x conf$$.sh if (PATH="/nonexistent;."; conf$$.sh) >/dev/null 2>&1; then PATH_SEPARATOR=';' else PATH_SEPARATOR=: fi rm -f conf$$.sh fi # Find out how to test for executable files. Don't use a zero-byte file, # as systems may use methods other than mode bits to determine executability. cat >conf$$.file <<_ASEOF #! /bin/sh exit 0 _ASEOF chmod +x conf$$.file if test -x conf$$.file >/dev/null 2>&1; then ac_executable_p="test -x" else ac_executable_p="test -f" fi rm -f conf$$.file # Extract the first word of "xgettext", so it can be a program name with args. set dummy xgettext; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_path_XGETTEXT+set}" = set; then : $as_echo_n "(cached) " >&6 else case "$XGETTEXT" in [\\/]* | ?:[\\/]*) ac_cv_path_XGETTEXT="$XGETTEXT" # Let the user override the test with a path. ;; *) ac_save_IFS="$IFS"; IFS=$PATH_SEPARATOR for ac_dir in $PATH; do IFS="$ac_save_IFS" test -z "$ac_dir" && ac_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $ac_executable_p "$ac_dir/$ac_word$ac_exec_ext"; then echo "$as_me: trying $ac_dir/$ac_word..." >&5 if $ac_dir/$ac_word --omit-header --copyright-holder= --msgid-bugs-address= /dev/null >&5 2>&1 && (if $ac_dir/$ac_word --omit-header --copyright-holder= --msgid-bugs-address= /dev/null 2>&1 >/dev/null | grep usage >/dev/null; then exit 1; else exit 0; fi); then ac_cv_path_XGETTEXT="$ac_dir/$ac_word$ac_exec_ext" break 2 fi fi done done IFS="$ac_save_IFS" test -z "$ac_cv_path_XGETTEXT" && ac_cv_path_XGETTEXT=":" ;; esac fi XGETTEXT="$ac_cv_path_XGETTEXT" if test "$XGETTEXT" != ":"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $XGETTEXT" >&5 $as_echo "$XGETTEXT" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi rm -f messages.po case `$XGETTEXT --version | sed 1q | sed -e 's,^[^0-9]*,,'` in '' | 0.[0-9] | 0.[0-9].* | 0.1[0-4] | 0.1[0-4].*) XGETTEXT_015=: ;; *) XGETTEXT_015=$XGETTEXT ;; esac # Prepare PATH_SEPARATOR. # The user is always right. 
if test "${PATH_SEPARATOR+set}" != set; then echo "#! /bin/sh" >conf$$.sh echo "exit 0" >>conf$$.sh chmod +x conf$$.sh if (PATH="/nonexistent;."; conf$$.sh) >/dev/null 2>&1; then PATH_SEPARATOR=';' else PATH_SEPARATOR=: fi rm -f conf$$.sh fi # Find out how to test for executable files. Don't use a zero-byte file, # as systems may use methods other than mode bits to determine executability. cat >conf$$.file <<_ASEOF #! /bin/sh exit 0 _ASEOF chmod +x conf$$.file if test -x conf$$.file >/dev/null 2>&1; then ac_executable_p="test -x" else ac_executable_p="test -f" fi rm -f conf$$.file # Extract the first word of "msgmerge", so it can be a program name with args. set dummy msgmerge; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_path_MSGMERGE+set}" = set; then : $as_echo_n "(cached) " >&6 else case "$MSGMERGE" in [\\/]* | ?:[\\/]*) ac_cv_path_MSGMERGE="$MSGMERGE" # Let the user override the test with a path. ;; *) ac_save_IFS="$IFS"; IFS=$PATH_SEPARATOR for ac_dir in $PATH; do IFS="$ac_save_IFS" test -z "$ac_dir" && ac_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $ac_executable_p "$ac_dir/$ac_word$ac_exec_ext"; then echo "$as_me: trying $ac_dir/$ac_word..." >&5 if $ac_dir/$ac_word --update -q /dev/null /dev/null >&5 2>&1; then ac_cv_path_MSGMERGE="$ac_dir/$ac_word$ac_exec_ext" break 2 fi fi done done IFS="$ac_save_IFS" test -z "$ac_cv_path_MSGMERGE" && ac_cv_path_MSGMERGE=":" ;; esac fi MSGMERGE="$ac_cv_path_MSGMERGE" if test "$MSGMERGE" != ":"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MSGMERGE" >&5 $as_echo "$MSGMERGE" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$localedir" || localedir='${datadir}/locale' test -n "${XGETTEXT_EXTRA_OPTIONS+set}" || XGETTEXT_EXTRA_OPTIONS= ac_config_commands="$ac_config_commands po-directories" if test "X$prefix" = "XNONE"; then acl_final_prefix="$ac_default_prefix" else acl_final_prefix="$prefix" fi if test "X$exec_prefix" = "XNONE"; then acl_final_exec_prefix='${prefix}' else acl_final_exec_prefix="$exec_prefix" fi acl_save_prefix="$prefix" prefix="$acl_final_prefix" eval acl_final_exec_prefix=\"$acl_final_exec_prefix\" prefix="$acl_save_prefix" DEPDIR="${am__leading_dot}deps" ac_config_commands="$ac_config_commands depfiles" am_make=${MAKE-make} cat > confinc << 'END' am__doit: @echo this is the am__doit target .PHONY: am__doit END # If we don't find an include directive, just comment out the code. { $as_echo "$as_me:${as_lineno-$LINENO}: checking for style of include used by $am_make" >&5 $as_echo_n "checking for style of include used by $am_make... " >&6; } am__include="#" am__quote= _am_result=none # First try GNU make style include. echo "include confinc" > confmf # Ignore all kinds of additional output from `make'. case `$am_make -s -f confmf 2> /dev/null` in #( *the\ am__doit\ target*) am__include=include am__quote= _am_result=GNU ;; esac # Now try BSD make style include. if test "$am__include" = "#"; then echo '.include "confinc"' > confmf case `$am_make -s -f confmf 2> /dev/null` in #( *the\ am__doit\ target*) am__include=.include am__quote="\"" _am_result=BSD ;; esac fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $_am_result" >&5 $as_echo "$_am_result" >&6; } rm -f confinc confmf # Check whether --enable-dependency-tracking was given. 
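# For illustration only (not part of the Autoconf output): running
#   ./configure --disable-dependency-tracking
# sets enable_dependency_tracking=no, so am_depcomp stays unset and
# AMDEP_TRUE='#', which effectively comments the automatic dependency-tracking
# rules out of the generated Makefiles (a common speed-up for one-off builds).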
if test "${enable_dependency_tracking+set}" = set; then : enableval=$enable_dependency_tracking; fi if test "x$enable_dependency_tracking" != xno; then am_depcomp="$ac_aux_dir/depcomp" AMDEPBACKSLASH='\' fi if test "x$enable_dependency_tracking" != xno; then AMDEP_TRUE= AMDEP_FALSE='#' else AMDEP_TRUE='#' AMDEP_FALSE= fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args. set dummy ${ac_tool_prefix}gcc; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_CC+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_CC="${ac_tool_prefix}gcc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 $as_echo "$CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_CC"; then ac_ct_CC=$CC # Extract the first word of "gcc", so it can be a program name with args. set dummy gcc; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_ac_ct_CC+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_CC"; then ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_ac_ct_CC="gcc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_CC=$ac_cv_prog_ac_ct_CC if test -n "$ac_ct_CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 $as_echo "$ac_ct_CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_CC" = x; then CC="" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac CC=$ac_ct_CC fi else CC="$ac_cv_prog_CC" fi if test -z "$CC"; then if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args. set dummy ${ac_tool_prefix}cc; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_CC+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. 
else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_CC="${ac_tool_prefix}cc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 $as_echo "$CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi fi if test -z "$CC"; then # Extract the first word of "cc", so it can be a program name with args. set dummy cc; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_CC+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else ac_prog_rejected=no as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then ac_prog_rejected=yes continue fi ac_cv_prog_CC="cc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS if test $ac_prog_rejected = yes; then # We found a bogon in the path, so make sure we never use it. set dummy $ac_cv_prog_CC shift if test $# != 0; then # We chose a different compiler from the bogus one. # However, it has the same basename, so the bogon will be chosen # first if we set CC to just the basename; use the full file name. shift ac_cv_prog_CC="$as_dir/$ac_word${1+' '}$@" fi fi fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 $as_echo "$CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$CC"; then if test -n "$ac_tool_prefix"; then for ac_prog in cl.exe do # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. set dummy $ac_tool_prefix$ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_CC+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_CC="$ac_tool_prefix$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 $as_echo "$CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$CC" && break done fi if test -z "$CC"; then ac_ct_CC=$CC for ac_prog in cl.exe do # Extract the first word of "$ac_prog", so it can be a program name with args. 
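# For illustration only (editorial comment, not Autoconf output): the
# `set dummy $ac_prog; ac_word=$2' idiom on the next line pads the positional
# parameters so that $2 is always the bare program name (here "cl.exe"), even
# if the value starts with `-' or carries extra arguments.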
set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_ac_ct_CC+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_CC"; then ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_ac_ct_CC="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_CC=$ac_cv_prog_ac_ct_CC if test -n "$ac_ct_CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 $as_echo "$ac_ct_CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$ac_ct_CC" && break done if test "x$ac_ct_CC" = x; then CC="" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac CC=$ac_ct_CC fi fi fi test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "no acceptable C compiler found in \$PATH See \`config.log' for more details" "$LINENO" 5 ; } # Provide some information about the compiler. $as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5 set X $ac_compile ac_compiler=$2 for ac_option in --version -v -V -qversion; do { { ac_try="$ac_compiler $ac_option >&5" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_compiler $ac_option >&5") 2>conftest.err ac_status=$? if test -s conftest.err; then sed '10a\ ... rest of stderr output deleted ... 10q' conftest.err >conftest.er1 cat conftest.er1 >&5 fi rm -f conftest.er1 conftest.err $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } done cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF ac_clean_files_save=$ac_clean_files ac_clean_files="$ac_clean_files a.out a.out.dSYM a.exe b.out" # Try to create an executable without -o first, disregard a.out. # It will help us diagnose broken compilers, and finding out an intuition # of exeext. { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler works" >&5 $as_echo_n "checking whether the C compiler works... " >&6; } ac_link_default=`$as_echo "$ac_link" | sed 's/ -o *conftest[^ ]*//'` # The possible output files: ac_files="a.out conftest.exe conftest a.exe a_out.exe b.out conftest.*" ac_rmfiles= for ac_file in $ac_files do case $ac_file in *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; * ) ac_rmfiles="$ac_rmfiles $ac_file";; esac done rm -f $ac_rmfiles if { { ac_try="$ac_link_default" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_link_default") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 test $ac_status = 0; }; then : # Autoconf-2.13 could set the ac_cv_exeext variable to `no'. # So ignore a value of `no', otherwise this would lead to `EXEEXT = no' # in a Makefile. We should not override ac_cv_exeext if it was cached, # so that the user can short-circuit this test for compilers unknown to # Autoconf. for ac_file in $ac_files '' do test -f "$ac_file" || continue case $ac_file in *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; [ab].out ) # We found the default executable, but exeext='' is most # certainly right. break;; *.* ) if test "${ac_cv_exeext+set}" = set && test "$ac_cv_exeext" != no; then :; else ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` fi # We set ac_cv_exeext here because the later test for it is not # safe: cross compilers may not add the suffix if given an `-o' # argument, so we may need to know it at that point already. # Even if this section looks crufty: it has the advantage of # actually working. break;; * ) break;; esac done test "$ac_cv_exeext" = no && ac_cv_exeext= else ac_file='' fi if test -z "$ac_file"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error 77 "C compiler cannot create executables See \`config.log' for more details" "$LINENO" 5 ; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler default output file name" >&5 $as_echo_n "checking for C compiler default output file name... " >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_file" >&5 $as_echo "$ac_file" >&6; } ac_exeext=$ac_cv_exeext rm -f -r a.out a.out.dSYM a.exe conftest$ac_cv_exeext b.out ac_clean_files=$ac_clean_files_save { $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of executables" >&5 $as_echo_n "checking for suffix of executables... " >&6; } if { { ac_try="$ac_link" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_link") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then : # If both `conftest.exe' and `conftest' are `present' (well, observable) # catch `conftest.exe'. For instance with Cygwin, `ls conftest' will # work properly (i.e., refer to `conftest.exe'), while it won't with # `rm'. for ac_file in conftest.exe conftest conftest.*; do test -f "$ac_file" || continue case $ac_file in *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; *.* ) ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` break;; * ) break;; esac done else { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "cannot compute suffix of executables: cannot compile and link See \`config.log' for more details" "$LINENO" 5 ; } fi rm -f conftest conftest$ac_cv_exeext { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_exeext" >&5 $as_echo "$ac_cv_exeext" >&6; } rm -f conftest.$ac_ext EXEEXT=$ac_cv_exeext ac_exeext=$EXEEXT cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include <stdio.h> int main () { FILE *f = fopen ("conftest.out", "w"); return ferror (f) || fclose (f) != 0; ; return 0; } _ACEOF ac_clean_files="$ac_clean_files conftest.out" # Check that the compiler produces executables we can run. If not, either # the compiler is broken, or we cross compile. { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are cross compiling" >&5 $as_echo_n "checking whether we are cross compiling... " >&6; } if test "$cross_compiling" != yes; then { { ac_try="$ac_link" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_link") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } if { ac_try='./conftest$ac_cv_exeext' { { case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_try") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; }; then cross_compiling=no else if test "$cross_compiling" = maybe; then cross_compiling=yes else { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "cannot run C compiled programs. If you meant to cross compile, use \`--host'. See \`config.log' for more details" "$LINENO" 5 ; } fi fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $cross_compiling" >&5 $as_echo "$cross_compiling" >&6; } rm -f conftest.$ac_ext conftest$ac_cv_exeext conftest.out ac_clean_files=$ac_clean_files_save { $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of object files" >&5 $as_echo_n "checking for suffix of object files... " >&6; } if test "${ac_cv_objext+set}" = set; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF rm -f conftest.o conftest.obj if { { ac_try="$ac_compile" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_compile") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then : for ac_file in conftest.o conftest.obj conftest.*; do test -f "$ac_file" || continue; case $ac_file in *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM ) ;; *) ac_cv_objext=`expr "$ac_file" : '.*\.\(.*\)'` break;; esac done else $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "cannot compute suffix of object files: cannot compile See \`config.log' for more details" "$LINENO" 5 ; } fi rm -f conftest.$ac_cv_objext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_objext" >&5 $as_echo "$ac_cv_objext" >&6; } OBJEXT=$ac_cv_objext ac_objext=$OBJEXT { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C compiler" >&5 $as_echo_n "checking whether we are using the GNU C compiler... " >&6; } if test "${ac_cv_c_compiler_gnu+set}" = set; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h.
*/ int main () { #ifndef __GNUC__ choke me #endif ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_compiler_gnu=yes else ac_compiler_gnu=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_cv_c_compiler_gnu=$ac_compiler_gnu fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5 $as_echo "$ac_cv_c_compiler_gnu" >&6; } if test $ac_compiler_gnu = yes; then GCC=yes else GCC= fi ac_test_CFLAGS=${CFLAGS+set} ac_save_CFLAGS=$CFLAGS { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5 $as_echo_n "checking whether $CC accepts -g... " >&6; } if test "${ac_cv_prog_cc_g+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_save_c_werror_flag=$ac_c_werror_flag ac_c_werror_flag=yes ac_cv_prog_cc_g=no CFLAGS="-g" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_prog_cc_g=yes else CFLAGS="" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : else ac_c_werror_flag=$ac_save_c_werror_flag CFLAGS="-g" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_prog_cc_g=yes fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_c_werror_flag=$ac_save_c_werror_flag fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5 $as_echo "$ac_cv_prog_cc_g" >&6; } if test "$ac_test_CFLAGS" = set; then CFLAGS=$ac_save_CFLAGS elif test $ac_cv_prog_cc_g = yes; then if test "$GCC" = yes; then CFLAGS="-g -O2" else CFLAGS="-g" fi else if test "$GCC" = yes; then CFLAGS="-O2" else CFLAGS= fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5 $as_echo_n "checking for $CC option to accept ISO C89... " >&6; } if test "${ac_cv_prog_cc_c89+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_cv_prog_cc_c89=no ac_save_CC=$CC cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include <stdarg.h> #include <stdio.h> #include <sys/types.h> #include <sys/stat.h> /* Most of the following tests are stolen from RCS 5.7's src/conf.sh. */ struct buf { int x; }; FILE * (*rcsopen) (struct buf *, struct stat *, int); static char *e (p, i) char **p; int i; { return p[i]; } static char *f (char * (*g) (char **, int), char **p, ...) { char *s; va_list v; va_start (v,p); s = g (p, va_arg (v,int)); va_end (v); return s; } /* OSF 4.0 Compaq cc is some sort of almost-ANSI by default. It has function prototypes and stuff, but not '\xHH' hex character constants. These don't provoke an error unfortunately, instead are silently treated as 'x'. The following induces an error, until -std is added to get proper ANSI mode. Curiously '\x00'!='x' always comes out true, for an array size at least. It's necessary to write '\x00'==0 to get something that's true only with -std. */ int osf4_cc_array ['\x00' == 0 ? 1 : -1]; /* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters inside strings and character constants. */ #define FOO(x) 'x' int xlc6_cc_array[FOO(a) == 'x' ?
1 : -1]; int test (int i, double x); struct s1 {int (*f) (int a);}; struct s2 {int (*f) (double a);}; int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int); int argc; char **argv; int main () { return f (e, argv, 0) != argv[0] || f (e, argv, 1) != argv[1]; ; return 0; } _ACEOF for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std \ -Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__" do CC="$ac_save_CC $ac_arg" if ac_fn_c_try_compile "$LINENO"; then : ac_cv_prog_cc_c89=$ac_arg fi rm -f core conftest.err conftest.$ac_objext test "x$ac_cv_prog_cc_c89" != "xno" && break done rm -f conftest.$ac_ext CC=$ac_save_CC fi # AC_CACHE_VAL case "x$ac_cv_prog_cc_c89" in x) { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5 $as_echo "none needed" >&6; } ;; xno) { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5 $as_echo "unsupported" >&6; } ;; *) CC="$CC $ac_cv_prog_cc_c89" { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5 $as_echo "$ac_cv_prog_cc_c89" >&6; } ;; esac if test "x$ac_cv_prog_cc_c89" != xno; then : fi ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu depcc="$CC" am_compiler_list= { $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5 $as_echo_n "checking dependency style of $depcc... " >&6; } if test "${am_cv_CC_dependencies_compiler_type+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then # We make a subdir and do the tests there. Otherwise we can end up # making bogus files that we don't know about and never remove. For # instance it was reported that on HP-UX the gcc test will end up # making a dummy file named `D' -- because `-MD' means `put the output # in D'. mkdir conftest.dir # Copy depcomp to subdir because otherwise we won't find it if we're # using a relative directory. cp "$am_depcomp" conftest.dir cd conftest.dir # We will build objects and dependencies in a subdirectory because # it helps to detect inapplicable dependency modes. For instance # both Tru64's cc and ICC support -MD to output dependencies as a # side effect of compilation, but ICC will put the dependencies in # the current directory while Tru64 will put them in the object # directory. mkdir sub am_cv_CC_dependencies_compiler_type=none if test "$am_compiler_list" = ""; then am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp` fi am__universal=false case " $depcc " in #( *\ -arch\ *\ -arch\ *) am__universal=true ;; esac for depmode in $am_compiler_list; do # Setup a source with many dependencies, because some compilers # like to wrap large dependency lists on column 80 (with \), and # we should not choose a depcomp mode which is confused by this. # # We need to recreate these files for each test, as the compiler may # overwrite some of them when testing with obscure command lines. # This happens at least with the AIX C compiler. : > sub/conftest.c for i in 1 2 3 4 5 6; do echo '#include "conftst'$i'.h"' >> sub/conftest.c # Using `: > sub/conftst$i.h' creates only sub/conftst1.h with # Solaris 8's {/usr,}/bin/sh. touch sub/conftst$i.h done echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf # We check with `-c' and `-o' for the sake of the "dashmstdout" # mode. 
It turns out that the SunPro C++ compiler does not properly # handle `-M -o', and we need to detect this. Also, some Intel # versions had trouble with output in subdirs am__obj=sub/conftest.${OBJEXT-o} am__minus_obj="-o $am__obj" case $depmode in gcc) # This depmode causes a compiler race in universal mode. test "$am__universal" = false || continue ;; nosideeffect) # after this tag, mechanisms are not by side-effect, so they'll # only be used when explicitly requested if test "x$enable_dependency_tracking" = xyes; then continue else break fi ;; msvisualcpp | msvcmsys) # This compiler won't grok `-c -o', but also, the minuso test has # not run yet. These depmodes are late enough in the game, and # so weak that their functioning should not be impacted. am__obj=conftest.${OBJEXT-o} am__minus_obj= ;; none) break ;; esac if depmode=$depmode \ source=sub/conftest.c object=$am__obj \ depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \ $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \ >/dev/null 2>conftest.err && grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 && grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 && grep $am__obj sub/conftest.Po > /dev/null 2>&1 && ${MAKE-make} -s -f confmf > /dev/null 2>&1; then # icc doesn't choke on unknown options, it will just issue warnings # or remarks (even with -Werror). So we grep stderr for any message # that says an option was ignored or not supported. # When given -MP, icc 7.0 and 7.1 complain thusly: # icc: Command line warning: ignoring option '-M'; no argument required # The diagnosis changed in icc 8.0: # icc: Command line remark: option '-MP' not supported if (grep 'ignoring option' conftest.err || grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else am_cv_CC_dependencies_compiler_type=$depmode break fi fi done cd .. rm -rf conftest.dir else am_cv_CC_dependencies_compiler_type=none fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_CC_dependencies_compiler_type" >&5 $as_echo "$am_cv_CC_dependencies_compiler_type" >&6; } CCDEPMODE=depmode=$am_cv_CC_dependencies_compiler_type if test "x$enable_dependency_tracking" != xno \ && test "$am_cv_CC_dependencies_compiler_type" = gcc3; then am__fastdepCC_TRUE= am__fastdepCC_FALSE='#' else am__fastdepCC_TRUE='#' am__fastdepCC_FALSE= fi # Make sure we can run config.sub. $SHELL "$ac_aux_dir/config.sub" sun4 >/dev/null 2>&1 || as_fn_error $? "cannot run $SHELL $ac_aux_dir/config.sub" "$LINENO" 5 { $as_echo "$as_me:${as_lineno-$LINENO}: checking build system type" >&5 $as_echo_n "checking build system type... " >&6; } if test "${ac_cv_build+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_build_alias=$build_alias test "x$ac_build_alias" = x && ac_build_alias=`$SHELL "$ac_aux_dir/config.guess"` test "x$ac_build_alias" = x && as_fn_error $? "cannot guess build type; you must specify one" "$LINENO" 5 ac_cv_build=`$SHELL "$ac_aux_dir/config.sub" $ac_build_alias` || as_fn_error $? "$SHELL $ac_aux_dir/config.sub $ac_build_alias failed" "$LINENO" 5 fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_build" >&5 $as_echo "$ac_cv_build" >&6; } case $ac_cv_build in *-*-*) ;; *) as_fn_error $? 
"invalid value of canonical build" "$LINENO" 5 ;; esac build=$ac_cv_build ac_save_IFS=$IFS; IFS='-' set x $ac_cv_build shift build_cpu=$1 build_vendor=$2 shift; shift # Remember, the first character of IFS is used to create $*, # except with old shells: build_os=$* IFS=$ac_save_IFS case $build_os in *\ *) build_os=`echo "$build_os" | sed 's/ /-/g'`;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: checking host system type" >&5 $as_echo_n "checking host system type... " >&6; } if test "${ac_cv_host+set}" = set; then : $as_echo_n "(cached) " >&6 else if test "x$host_alias" = x; then ac_cv_host=$ac_cv_build else ac_cv_host=`$SHELL "$ac_aux_dir/config.sub" $host_alias` || as_fn_error $? "$SHELL $ac_aux_dir/config.sub $host_alias failed" "$LINENO" 5 fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_host" >&5 $as_echo "$ac_cv_host" >&6; } case $ac_cv_host in *-*-*) ;; *) as_fn_error $? "invalid value of canonical host" "$LINENO" 5 ;; esac host=$ac_cv_host ac_save_IFS=$IFS; IFS='-' set x $ac_cv_host shift host_cpu=$1 host_vendor=$2 shift; shift # Remember, the first character of IFS is used to create $*, # except with old shells: host_os=$* IFS=$ac_save_IFS case $host_os in *\ *) host_os=`echo "$host_os" | sed 's/ /-/g'`;; esac # Check whether --with-gnu-ld was given. if test "${with_gnu_ld+set}" = set; then : withval=$with_gnu_ld; test "$withval" = no || with_gnu_ld=yes else with_gnu_ld=no fi # Prepare PATH_SEPARATOR. # The user is always right. if test "${PATH_SEPARATOR+set}" != set; then echo "#! /bin/sh" >conf$$.sh echo "exit 0" >>conf$$.sh chmod +x conf$$.sh if (PATH="/nonexistent;."; conf$$.sh) >/dev/null 2>&1; then PATH_SEPARATOR=';' else PATH_SEPARATOR=: fi rm -f conf$$.sh fi ac_prog=ld if test "$GCC" = yes; then # Check if gcc -print-prog-name=ld gives a path. { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ld used by GCC" >&5 $as_echo_n "checking for ld used by GCC... " >&6; } case $host in *-*-mingw*) # gcc leaves a trailing carriage return which upsets mingw ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;; *) ac_prog=`($CC -print-prog-name=ld) 2>&5` ;; esac case $ac_prog in # Accept absolute paths. [\\/]* | [A-Za-z]:[\\/]*) re_direlt='/[^/][^/]*/\.\./' # Canonicalize the path of ld ac_prog=`echo $ac_prog| sed 's%\\\\%/%g'` while echo $ac_prog | grep "$re_direlt" > /dev/null 2>&1; do ac_prog=`echo $ac_prog| sed "s%$re_direlt%/%"` done test -z "$LD" && LD="$ac_prog" ;; "") # If it fails, then pretend we aren't using GCC. ac_prog=ld ;; *) # If it is relative, then search for the first ld in PATH. with_gnu_ld=unknown ;; esac elif test "$with_gnu_ld" = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU ld" >&5 $as_echo_n "checking for GNU ld... " >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for non-GNU ld" >&5 $as_echo_n "checking for non-GNU ld... " >&6; } fi if test "${acl_cv_path_LD+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -z "$LD"; then IFS="${IFS= }"; ac_save_ifs="$IFS"; IFS="${IFS}${PATH_SEPARATOR-:}" for ac_dir in $PATH; do test -z "$ac_dir" && ac_dir=. if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then acl_cv_path_LD="$ac_dir/$ac_prog" # Check to see if the program is GNU ld. I'd rather use --version, # but apparently some GNU ld's only accept -v. # Break only if it was the GNU/non-GNU ld that we prefer. 
case `"$acl_cv_path_LD" -v 2>&1 < /dev/null` in *GNU* | *'with BFD'*) test "$with_gnu_ld" != no && break ;; *) test "$with_gnu_ld" != yes && break ;; esac fi done IFS="$ac_save_ifs" else acl_cv_path_LD="$LD" # Let the user override the test with a path. fi fi LD="$acl_cv_path_LD" if test -n "$LD"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LD" >&5 $as_echo "$LD" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -z "$LD" && as_fn_error $? "no acceptable ld found in \$PATH" "$LINENO" 5 { $as_echo "$as_me:${as_lineno-$LINENO}: checking if the linker ($LD) is GNU ld" >&5 $as_echo_n "checking if the linker ($LD) is GNU ld... " >&6; } if test "${acl_cv_prog_gnu_ld+set}" = set; then : $as_echo_n "(cached) " >&6 else # I'd rather use --version here, but apparently some GNU ld's only accept -v. case `$LD -v 2>&1 &5 $as_echo "$acl_cv_prog_gnu_ld" >&6; } with_gnu_ld=$acl_cv_prog_gnu_ld { $as_echo "$as_me:${as_lineno-$LINENO}: checking for shared library run path origin" >&5 $as_echo_n "checking for shared library run path origin... " >&6; } if test "${acl_cv_rpath+set}" = set; then : $as_echo_n "(cached) " >&6 else CC="$CC" GCC="$GCC" LDFLAGS="$LDFLAGS" LD="$LD" with_gnu_ld="$with_gnu_ld" \ ${CONFIG_SHELL-/bin/sh} "$ac_aux_dir/config.rpath" "$host" > conftest.sh . ./conftest.sh rm -f ./conftest.sh acl_cv_rpath=done fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $acl_cv_rpath" >&5 $as_echo "$acl_cv_rpath" >&6; } wl="$acl_cv_wl" acl_libext="$acl_cv_libext" acl_shlibext="$acl_cv_shlibext" acl_libname_spec="$acl_cv_libname_spec" acl_library_names_spec="$acl_cv_library_names_spec" acl_hardcode_libdir_flag_spec="$acl_cv_hardcode_libdir_flag_spec" acl_hardcode_libdir_separator="$acl_cv_hardcode_libdir_separator" acl_hardcode_direct="$acl_cv_hardcode_direct" acl_hardcode_minus_L="$acl_cv_hardcode_minus_L" # Check whether --enable-rpath was given. if test "${enable_rpath+set}" = set; then : enableval=$enable_rpath; : else enable_rpath=yes fi ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu if test -z "$CXX"; then if test -n "$CCC"; then CXX=$CCC else if test -n "$ac_tool_prefix"; then for ac_prog in g++ c++ gpp aCC CC cxx cc++ cl.exe FCC KCC RCC xlC_r xlC do # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. set dummy $ac_tool_prefix$ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_CXX+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$CXX"; then ac_cv_prog_CXX="$CXX" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_CXX="$ac_tool_prefix$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi CXX=$ac_cv_prog_CXX if test -n "$CXX"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CXX" >&5 $as_echo "$CXX" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$CXX" && break done fi if test -z "$CXX"; then ac_ct_CXX=$CXX for ac_prog in g++ c++ gpp aCC CC cxx cc++ cl.exe FCC KCC RCC xlC_r xlC do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_ac_ct_CXX+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_CXX"; then ac_cv_prog_ac_ct_CXX="$ac_ct_CXX" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_ac_ct_CXX="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_CXX=$ac_cv_prog_ac_ct_CXX if test -n "$ac_ct_CXX"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CXX" >&5 $as_echo "$ac_ct_CXX" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$ac_ct_CXX" && break done if test "x$ac_ct_CXX" = x; then CXX="g++" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac CXX=$ac_ct_CXX fi fi fi fi # Provide some information about the compiler. $as_echo "$as_me:${as_lineno-$LINENO}: checking for C++ compiler version" >&5 set X $ac_compile ac_compiler=$2 for ac_option in --version -v -V -qversion; do { { ac_try="$ac_compiler $ac_option >&5" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_compiler $ac_option >&5") 2>conftest.err ac_status=$? if test -s conftest.err; then sed '10a\ ... rest of stderr output deleted ... 10q' conftest.err >conftest.er1 cat conftest.er1 >&5 fi rm -f conftest.er1 conftest.err $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } done { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C++ compiler" >&5 $as_echo_n "checking whether we are using the GNU C++ compiler... " >&6; } if test "${ac_cv_cxx_compiler_gnu+set}" = set; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ int main () { #ifndef __GNUC__ choke me #endif ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_compiler_gnu=yes else ac_compiler_gnu=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_cv_cxx_compiler_gnu=$ac_compiler_gnu fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_cxx_compiler_gnu" >&5 $as_echo "$ac_cv_cxx_compiler_gnu" >&6; } if test $ac_compiler_gnu = yes; then GXX=yes else GXX= fi ac_test_CXXFLAGS=${CXXFLAGS+set} ac_save_CXXFLAGS=$CXXFLAGS { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CXX accepts -g" >&5 $as_echo_n "checking whether $CXX accepts -g... " >&6; } if test "${ac_cv_prog_cxx_g+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_save_cxx_werror_flag=$ac_cxx_werror_flag ac_cxx_werror_flag=yes ac_cv_prog_cxx_g=no CXXFLAGS="-g" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_prog_cxx_g=yes else CXXFLAGS="" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : else ac_cxx_werror_flag=$ac_save_cxx_werror_flag CXXFLAGS="-g" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_prog_cxx_g=yes fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_cxx_werror_flag=$ac_save_cxx_werror_flag fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cxx_g" >&5 $as_echo "$ac_cv_prog_cxx_g" >&6; } if test "$ac_test_CXXFLAGS" = set; then CXXFLAGS=$ac_save_CXXFLAGS elif test $ac_cv_prog_cxx_g = yes; then if test "$GXX" = yes; then CXXFLAGS="-g -O2" else CXXFLAGS="-g" fi else if test "$GXX" = yes; then CXXFLAGS="-O2" else CXXFLAGS= fi fi ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu depcc="$CXX" am_compiler_list= { $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5 $as_echo_n "checking dependency style of $depcc... " >&6; } if test "${am_cv_CXX_dependencies_compiler_type+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then # We make a subdir and do the tests there. Otherwise we can end up # making bogus files that we don't know about and never remove. For # instance it was reported that on HP-UX the gcc test will end up # making a dummy file named `D' -- because `-MD' means `put the output # in D'. mkdir conftest.dir # Copy depcomp to subdir because otherwise we won't find it if we're # using a relative directory. cp "$am_depcomp" conftest.dir cd conftest.dir # We will build objects and dependencies in a subdirectory because # it helps to detect inapplicable dependency modes. For instance # both Tru64's cc and ICC support -MD to output dependencies as a # side effect of compilation, but ICC will put the dependencies in # the current directory while Tru64 will put them in the object # directory. 
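# --- Editorial sketch (assumption about the generated Makefiles, added for
# illustration; not part of this configure test) ------------------------------
# The probe below mirrors what automake's rules do at build time when fast
# dependency tracking is unavailable: depcomp wraps the real compile and
# captures header dependencies as a side effect, conceptually
#   depmode=gcc3 source=foo.cpp object=foo.o \
#     depfile=.deps/foo.Po tmpdepfile=.deps/foo.TPo \
#     $SHELL ./depcomp $CXX -c -o foo.o foo.cpp
# (foo.cpp, foo.o and .deps/ are placeholder names).
# ----------------------------------------------------------------------------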
mkdir sub am_cv_CXX_dependencies_compiler_type=none if test "$am_compiler_list" = ""; then am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp` fi am__universal=false case " $depcc " in #( *\ -arch\ *\ -arch\ *) am__universal=true ;; esac for depmode in $am_compiler_list; do # Setup a source with many dependencies, because some compilers # like to wrap large dependency lists on column 80 (with \), and # we should not choose a depcomp mode which is confused by this. # # We need to recreate these files for each test, as the compiler may # overwrite some of them when testing with obscure command lines. # This happens at least with the AIX C compiler. : > sub/conftest.c for i in 1 2 3 4 5 6; do echo '#include "conftst'$i'.h"' >> sub/conftest.c # Using `: > sub/conftst$i.h' creates only sub/conftst1.h with # Solaris 8's {/usr,}/bin/sh. touch sub/conftst$i.h done echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf # We check with `-c' and `-o' for the sake of the "dashmstdout" # mode. It turns out that the SunPro C++ compiler does not properly # handle `-M -o', and we need to detect this. Also, some Intel # versions had trouble with output in subdirs am__obj=sub/conftest.${OBJEXT-o} am__minus_obj="-o $am__obj" case $depmode in gcc) # This depmode causes a compiler race in universal mode. test "$am__universal" = false || continue ;; nosideeffect) # after this tag, mechanisms are not by side-effect, so they'll # only be used when explicitly requested if test "x$enable_dependency_tracking" = xyes; then continue else break fi ;; msvisualcpp | msvcmsys) # This compiler won't grok `-c -o', but also, the minuso test has # not run yet. These depmodes are late enough in the game, and # so weak that their functioning should not be impacted. am__obj=conftest.${OBJEXT-o} am__minus_obj= ;; none) break ;; esac if depmode=$depmode \ source=sub/conftest.c object=$am__obj \ depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \ $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \ >/dev/null 2>conftest.err && grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 && grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 && grep $am__obj sub/conftest.Po > /dev/null 2>&1 && ${MAKE-make} -s -f confmf > /dev/null 2>&1; then # icc doesn't choke on unknown options, it will just issue warnings # or remarks (even with -Werror). So we grep stderr for any message # that says an option was ignored or not supported. # When given -MP, icc 7.0 and 7.1 complain thusly: # icc: Command line warning: ignoring option '-M'; no argument required # The diagnosis changed in icc 8.0: # icc: Command line remark: option '-MP' not supported if (grep 'ignoring option' conftest.err || grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else am_cv_CXX_dependencies_compiler_type=$depmode break fi fi done cd .. 
rm -rf conftest.dir else am_cv_CXX_dependencies_compiler_type=none fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_CXX_dependencies_compiler_type" >&5 $as_echo "$am_cv_CXX_dependencies_compiler_type" >&6; } CXXDEPMODE=depmode=$am_cv_CXX_dependencies_compiler_type if test "x$enable_dependency_tracking" != xno \ && test "$am_cv_CXX_dependencies_compiler_type" = gcc3; then am__fastdepCXX_TRUE= am__fastdepCXX_FALSE='#' else am__fastdepCXX_TRUE='#' am__fastdepCXX_FALSE= fi ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to run the C++ preprocessor" >&5 $as_echo_n "checking how to run the C++ preprocessor... " >&6; } if test -z "$CXXCPP"; then if test "${ac_cv_prog_CXXCPP+set}" = set; then : $as_echo_n "(cached) " >&6 else # Double quotes because CXXCPP needs to be expanded for CXXCPP in "$CXX -E" "/lib/cpp" do ac_preproc_ok=false for ac_cxx_preproc_warn_flag in '' yes do # Use a header file that comes with gcc, so configuring glibc # with a fresh cross-compiler works. # Prefer <limits.h> to <assert.h> if __STDC__ is defined, since # <limits.h> exists even on freestanding compilers. # On the NeXT, cc -E runs the code through the compiler's parser, # not just through cpp. "Syntax error" is here to catch this case. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #ifdef __STDC__ # include <limits.h> #else # include <assert.h> #endif Syntax error _ACEOF if ac_fn_cxx_try_cpp "$LINENO"; then : else # Broken: fails on valid input. continue fi rm -f conftest.err conftest.i conftest.$ac_ext # OK, works on sane cases. Now check whether nonexistent headers # can be detected and how. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include <ac_nonexistent.h> _ACEOF if ac_fn_cxx_try_cpp "$LINENO"; then : # Broken: success on invalid input. continue else # Passes both tests. ac_preproc_ok=: break fi rm -f conftest.err conftest.i conftest.$ac_ext done # Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. rm -f conftest.i conftest.err conftest.$ac_ext if $ac_preproc_ok; then : break fi done ac_cv_prog_CXXCPP=$CXXCPP fi CXXCPP=$ac_cv_prog_CXXCPP else ac_cv_prog_CXXCPP=$CXXCPP fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CXXCPP" >&5 $as_echo "$CXXCPP" >&6; } ac_preproc_ok=false for ac_cxx_preproc_warn_flag in '' yes do # Use a header file that comes with gcc, so configuring glibc # with a fresh cross-compiler works. # Prefer <limits.h> to <assert.h> if __STDC__ is defined, since # <limits.h> exists even on freestanding compilers. # On the NeXT, cc -E runs the code through the compiler's parser, # not just through cpp. "Syntax error" is here to catch this case. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #ifdef __STDC__ # include <limits.h> #else # include <assert.h> #endif Syntax error _ACEOF if ac_fn_cxx_try_cpp "$LINENO"; then : else # Broken: fails on valid input. continue fi rm -f conftest.err conftest.i conftest.$ac_ext # OK, works on sane cases. Now check whether nonexistent headers # can be detected and how. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include <ac_nonexistent.h> _ACEOF if ac_fn_cxx_try_cpp "$LINENO"; then : # Broken: success on invalid input. continue else # Passes both tests. ac_preproc_ok=: break fi rm -f conftest.err conftest.i conftest.$ac_ext done # Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped.
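# --- Editorial sketch (not emitted by autoconf; added for illustration) -----
# The preprocessor sanity check above is a two-sided probe; by hand it is
# roughly (conftest.cpp being a placeholder file name):
#   printf '#include <limits.h>\n'         >conftest.cpp && $CXXCPP conftest.cpp >/dev/null  # must succeed
#   printf '#include <ac_nonexistent.h>\n' >conftest.cpp && $CXXCPP conftest.cpp >/dev/null  # must fail
# Only a preprocessor that accepts the real header and rejects the bogus one
# is recorded in ac_cv_prog_CXXCPP.
# ----------------------------------------------------------------------------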
rm -f conftest.i conftest.err conftest.$ac_ext if $ac_preproc_ok; then : else { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "C++ preprocessor \"$CXXCPP\" fails sanity check See \`config.log' for more details" "$LINENO" 5 ; } fi ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu { $as_echo "$as_me:${as_lineno-$LINENO}: checking for grep that handles long lines and -e" >&5 $as_echo_n "checking for grep that handles long lines and -e... " >&6; } if test "${ac_cv_path_GREP+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -z "$GREP"; then ac_path_GREP_found=false # Loop through the user's path and test for each of PROGNAME-LIST as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_prog in grep ggrep; do for ac_exec_ext in '' $ac_executable_extensions; do ac_path_GREP="$as_dir/$ac_prog$ac_exec_ext" { test -f "$ac_path_GREP" && $as_test_x "$ac_path_GREP"; } || continue # Check for GNU ac_path_GREP and select it if it is found. # Check for GNU $ac_path_GREP case `"$ac_path_GREP" --version 2>&1` in *GNU*) ac_cv_path_GREP="$ac_path_GREP" ac_path_GREP_found=:;; *) ac_count=0 $as_echo_n 0123456789 >"conftest.in" while : do cat "conftest.in" "conftest.in" >"conftest.tmp" mv "conftest.tmp" "conftest.in" cp "conftest.in" "conftest.nl" $as_echo 'GREP' >> "conftest.nl" "$ac_path_GREP" -e 'GREP$' -e '-(cannot match)-' < "conftest.nl" >"conftest.out" 2>/dev/null || break diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break as_fn_arith $ac_count + 1 && ac_count=$as_val if test $ac_count -gt ${ac_path_GREP_max-0}; then # Best one so far, save it but keep looking for a better one ac_cv_path_GREP="$ac_path_GREP" ac_path_GREP_max=$ac_count fi # 10*(2^10) chars as input seems more than enough test $ac_count -gt 10 && break done rm -f conftest.in conftest.tmp conftest.nl conftest.out;; esac $ac_path_GREP_found && break 3 done done done IFS=$as_save_IFS if test -z "$ac_cv_path_GREP"; then as_fn_error $? "no acceptable grep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 fi else ac_cv_path_GREP=$GREP fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_GREP" >&5 $as_echo "$ac_cv_path_GREP" >&6; } GREP="$ac_cv_path_GREP" { $as_echo "$as_me:${as_lineno-$LINENO}: checking for egrep" >&5 $as_echo_n "checking for egrep... " >&6; } if test "${ac_cv_path_EGREP+set}" = set; then : $as_echo_n "(cached) " >&6 else if echo a | $GREP -E '(a|b)' >/dev/null 2>&1 then ac_cv_path_EGREP="$GREP -E" else if test -z "$EGREP"; then ac_path_EGREP_found=false # Loop through the user's path and test for each of PROGNAME-LIST as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_prog in egrep; do for ac_exec_ext in '' $ac_executable_extensions; do ac_path_EGREP="$as_dir/$ac_prog$ac_exec_ext" { test -f "$ac_path_EGREP" && $as_test_x "$ac_path_EGREP"; } || continue # Check for GNU ac_path_EGREP and select it if it is found. 
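# --- Editorial sketch (not emitted by autoconf; added for illustration) -----
# Candidate grep/egrep programs are ranked by how long an input line they can
# handle: a GNU --version banner wins immediately; otherwise the probe keeps
# doubling a line of digits (up to a cap) and re-checks the candidate after
# each doubling, keeping the one that survives the most rounds. The core
# doubling step is simply:
#   cat conftest.in conftest.in >conftest.tmp && mv conftest.tmp conftest.in
# ----------------------------------------------------------------------------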
# Check for GNU $ac_path_EGREP case `"$ac_path_EGREP" --version 2>&1` in *GNU*) ac_cv_path_EGREP="$ac_path_EGREP" ac_path_EGREP_found=:;; *) ac_count=0 $as_echo_n 0123456789 >"conftest.in" while : do cat "conftest.in" "conftest.in" >"conftest.tmp" mv "conftest.tmp" "conftest.in" cp "conftest.in" "conftest.nl" $as_echo 'EGREP' >> "conftest.nl" "$ac_path_EGREP" 'EGREP$' < "conftest.nl" >"conftest.out" 2>/dev/null || break diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break as_fn_arith $ac_count + 1 && ac_count=$as_val if test $ac_count -gt ${ac_path_EGREP_max-0}; then # Best one so far, save it but keep looking for a better one ac_cv_path_EGREP="$ac_path_EGREP" ac_path_EGREP_max=$ac_count fi # 10*(2^10) chars as input seems more than enough test $ac_count -gt 10 && break done rm -f conftest.in conftest.tmp conftest.nl conftest.out;; esac $ac_path_EGREP_found && break 3 done done done IFS=$as_save_IFS if test -z "$ac_cv_path_EGREP"; then as_fn_error $? "no acceptable egrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 fi else ac_cv_path_EGREP=$EGREP fi fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_EGREP" >&5 $as_echo "$ac_cv_path_EGREP" >&6; } EGREP="$ac_cv_path_EGREP" acl_libdirstem=lib acl_libdirstem2= case "$host_os" in solaris*) { $as_echo "$as_me:${as_lineno-$LINENO}: checking for 64-bit host" >&5 $as_echo_n "checking for 64-bit host... " >&6; } if test "${gl_cv_solaris_64bit+set}" = set; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #ifdef _LP64 sixtyfour bits #endif _ACEOF if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | $EGREP "sixtyfour bits" >/dev/null 2>&1; then : gl_cv_solaris_64bit=yes else gl_cv_solaris_64bit=no fi rm -f conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gl_cv_solaris_64bit" >&5 $as_echo "$gl_cv_solaris_64bit" >&6; } if test $gl_cv_solaris_64bit = yes; then acl_libdirstem=lib/64 case "$host_cpu" in sparc*) acl_libdirstem2=lib/sparcv9 ;; i*86 | x86_64) acl_libdirstem2=lib/amd64 ;; esac fi ;; *) searchpath=`(LC_ALL=C $CC -print-search-dirs) 2>/dev/null | sed -n -e 's,^libraries: ,,p' | sed -e 's,^=,,'` if test -n "$searchpath"; then acl_save_IFS="${IFS= }"; IFS=":" for searchdir in $searchpath; do if test -d "$searchdir"; then case "$searchdir" in */lib64/ | */lib64 ) acl_libdirstem=lib64 ;; */../ | */.. ) # Better ignore directories of this form. They are misleading. ;; *) searchdir=`cd "$searchdir" && pwd` case "$searchdir" in */lib64 ) acl_libdirstem=lib64 ;; esac ;; esac fi done IFS="$acl_save_IFS" fi ;; esac test -n "$acl_libdirstem2" || acl_libdirstem2="$acl_libdirstem" use_additional=yes acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval additional_includedir=\"$includedir\" eval additional_libdir=\"$libdir\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" # Check whether --with-libiconv-prefix was given. 
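# --- Editorial sketch (not emitted by gettext's iconv macros; added for
# illustration) ---------------------------------------------------------------
# --with-libiconv-prefix points configure at a privately installed libiconv;
# the prefix is expanded into search paths using the lib/lib64 (or Solaris
# lib/64) stem detected above, typically:
#   ./configure --with-libiconv-prefix=/opt/libiconv
#   #  -> INCICONV gains -I/opt/libiconv/include
#   #  -> LIBICONV/LTLIBICONV gain -L/opt/libiconv/lib (or .../lib64) -liconv
# (/opt/libiconv is a placeholder path.)
# ----------------------------------------------------------------------------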
if test "${with_libiconv_prefix+set}" = set; then : withval=$with_libiconv_prefix; if test "X$withval" = "Xno"; then use_additional=no else if test "X$withval" = "X"; then acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval additional_includedir=\"$includedir\" eval additional_libdir=\"$libdir\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" else additional_includedir="$withval/include" additional_libdir="$withval/$acl_libdirstem" if test "$acl_libdirstem2" != "$acl_libdirstem" \ && ! test -d "$withval/$acl_libdirstem"; then additional_libdir="$withval/$acl_libdirstem2" fi fi fi fi LIBICONV= LTLIBICONV= INCICONV= LIBICONV_PREFIX= HAVE_LIBICONV= rpathdirs= ltrpathdirs= names_already_handled= names_next_round='iconv ' while test -n "$names_next_round"; do names_this_round="$names_next_round" names_next_round= for name in $names_this_round; do already_handled= for n in $names_already_handled; do if test "$n" = "$name"; then already_handled=yes break fi done if test -z "$already_handled"; then names_already_handled="$names_already_handled $name" uppername=`echo "$name" | sed -e 'y|abcdefghijklmnopqrstuvwxyz./-|ABCDEFGHIJKLMNOPQRSTUVWXYZ___|'` eval value=\"\$HAVE_LIB$uppername\" if test -n "$value"; then if test "$value" = yes; then eval value=\"\$LIB$uppername\" test -z "$value" || LIBICONV="${LIBICONV}${LIBICONV:+ }$value" eval value=\"\$LTLIB$uppername\" test -z "$value" || LTLIBICONV="${LTLIBICONV}${LTLIBICONV:+ }$value" else : fi else found_dir= found_la= found_so= found_a= eval libname=\"$acl_libname_spec\" # typically: libname=lib$name if test -n "$acl_shlibext"; then shrext=".$acl_shlibext" # typically: shrext=.so else shrext= fi if test $use_additional = yes; then dir="$additional_libdir" if test -n "$acl_shlibext"; then if test -f "$dir/$libname$shrext"; then found_dir="$dir" found_so="$dir/$libname$shrext" else if test "$acl_library_names_spec" = '$libname$shrext$versuffix'; then ver=`(cd "$dir" && \ for f in "$libname$shrext".*; do echo "$f"; done \ | sed -e "s,^$libname$shrext\\\\.,," \ | sort -t '.' -n -r -k1,1 -k2,2 -k3,3 -k4,4 -k5,5 \ | sed 1q ) 2>/dev/null` if test -n "$ver" && test -f "$dir/$libname$shrext.$ver"; then found_dir="$dir" found_so="$dir/$libname$shrext.$ver" fi else eval library_names=\"$acl_library_names_spec\" for f in $library_names; do if test -f "$dir/$f"; then found_dir="$dir" found_so="$dir/$f" break fi done fi fi fi if test "X$found_dir" = "X"; then if test -f "$dir/$libname.$acl_libext"; then found_dir="$dir" found_a="$dir/$libname.$acl_libext" fi fi if test "X$found_dir" != "X"; then if test -f "$dir/$libname.la"; then found_la="$dir/$libname.la" fi fi fi if test "X$found_dir" = "X"; then for x in $LDFLAGS $LTLIBICONV; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" case "$x" in -L*) dir=`echo "X$x" | sed -e 's/^X-L//'` if test -n "$acl_shlibext"; then if test -f "$dir/$libname$shrext"; then found_dir="$dir" found_so="$dir/$libname$shrext" else if test "$acl_library_names_spec" = '$libname$shrext$versuffix'; then ver=`(cd "$dir" && \ for f in "$libname$shrext".*; do echo "$f"; done \ | sed -e "s,^$libname$shrext\\\\.,," \ | sort -t '.' 
-n -r -k1,1 -k2,2 -k3,3 -k4,4 -k5,5 \ | sed 1q ) 2>/dev/null` if test -n "$ver" && test -f "$dir/$libname$shrext.$ver"; then found_dir="$dir" found_so="$dir/$libname$shrext.$ver" fi else eval library_names=\"$acl_library_names_spec\" for f in $library_names; do if test -f "$dir/$f"; then found_dir="$dir" found_so="$dir/$f" break fi done fi fi fi if test "X$found_dir" = "X"; then if test -f "$dir/$libname.$acl_libext"; then found_dir="$dir" found_a="$dir/$libname.$acl_libext" fi fi if test "X$found_dir" != "X"; then if test -f "$dir/$libname.la"; then found_la="$dir/$libname.la" fi fi ;; esac if test "X$found_dir" != "X"; then break fi done fi if test "X$found_dir" != "X"; then LTLIBICONV="${LTLIBICONV}${LTLIBICONV:+ }-L$found_dir -l$name" if test "X$found_so" != "X"; then if test "$enable_rpath" = no \ || test "X$found_dir" = "X/usr/$acl_libdirstem" \ || test "X$found_dir" = "X/usr/$acl_libdirstem2"; then LIBICONV="${LIBICONV}${LIBICONV:+ }$found_so" else haveit= for x in $ltrpathdirs; do if test "X$x" = "X$found_dir"; then haveit=yes break fi done if test -z "$haveit"; then ltrpathdirs="$ltrpathdirs $found_dir" fi if test "$acl_hardcode_direct" = yes; then LIBICONV="${LIBICONV}${LIBICONV:+ }$found_so" else if test -n "$acl_hardcode_libdir_flag_spec" && test "$acl_hardcode_minus_L" = no; then LIBICONV="${LIBICONV}${LIBICONV:+ }$found_so" haveit= for x in $rpathdirs; do if test "X$x" = "X$found_dir"; then haveit=yes break fi done if test -z "$haveit"; then rpathdirs="$rpathdirs $found_dir" fi else haveit= for x in $LDFLAGS $LIBICONV; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" if test "X$x" = "X-L$found_dir"; then haveit=yes break fi done if test -z "$haveit"; then LIBICONV="${LIBICONV}${LIBICONV:+ }-L$found_dir" fi if test "$acl_hardcode_minus_L" != no; then LIBICONV="${LIBICONV}${LIBICONV:+ }$found_so" else LIBICONV="${LIBICONV}${LIBICONV:+ }-l$name" fi fi fi fi else if test "X$found_a" != "X"; then LIBICONV="${LIBICONV}${LIBICONV:+ }$found_a" else LIBICONV="${LIBICONV}${LIBICONV:+ }-L$found_dir -l$name" fi fi additional_includedir= case "$found_dir" in */$acl_libdirstem | */$acl_libdirstem/) basedir=`echo "X$found_dir" | sed -e 's,^X,,' -e "s,/$acl_libdirstem/"'*$,,'` if test "$name" = 'iconv'; then LIBICONV_PREFIX="$basedir" fi additional_includedir="$basedir/include" ;; */$acl_libdirstem2 | */$acl_libdirstem2/) basedir=`echo "X$found_dir" | sed -e 's,^X,,' -e "s,/$acl_libdirstem2/"'*$,,'` if test "$name" = 'iconv'; then LIBICONV_PREFIX="$basedir" fi additional_includedir="$basedir/include" ;; esac if test "X$additional_includedir" != "X"; then if test "X$additional_includedir" != "X/usr/include"; then haveit= if test "X$additional_includedir" = "X/usr/local/include"; then if test -n "$GCC"; then case $host_os in linux* | gnu* | k*bsd*-gnu) haveit=yes;; esac fi fi if test -z "$haveit"; then for x in $CPPFLAGS $INCICONV; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" if test "X$x" = "X-I$additional_includedir"; then haveit=yes break fi done if test -z "$haveit"; then if test -d "$additional_includedir"; then INCICONV="${INCICONV}${INCICONV:+ }-I$additional_includedir" fi fi fi fi fi if test -n "$found_la"; then save_libdir="$libdir" case "$found_la" in */* | *\\*) . 
"$found_la" ;; *) . "./$found_la" ;; esac libdir="$save_libdir" for dep in $dependency_libs; do case "$dep" in -L*) additional_libdir=`echo "X$dep" | sed -e 's/^X-L//'` if test "X$additional_libdir" != "X/usr/$acl_libdirstem" \ && test "X$additional_libdir" != "X/usr/$acl_libdirstem2"; then haveit= if test "X$additional_libdir" = "X/usr/local/$acl_libdirstem" \ || test "X$additional_libdir" = "X/usr/local/$acl_libdirstem2"; then if test -n "$GCC"; then case $host_os in linux* | gnu* | k*bsd*-gnu) haveit=yes;; esac fi fi if test -z "$haveit"; then haveit= for x in $LDFLAGS $LIBICONV; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" if test "X$x" = "X-L$additional_libdir"; then haveit=yes break fi done if test -z "$haveit"; then if test -d "$additional_libdir"; then LIBICONV="${LIBICONV}${LIBICONV:+ }-L$additional_libdir" fi fi haveit= for x in $LDFLAGS $LTLIBICONV; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" if test "X$x" = "X-L$additional_libdir"; then haveit=yes break fi done if test -z "$haveit"; then if test -d "$additional_libdir"; then LTLIBICONV="${LTLIBICONV}${LTLIBICONV:+ }-L$additional_libdir" fi fi fi fi ;; -R*) dir=`echo "X$dep" | sed -e 's/^X-R//'` if test "$enable_rpath" != no; then haveit= for x in $rpathdirs; do if test "X$x" = "X$dir"; then haveit=yes break fi done if test -z "$haveit"; then rpathdirs="$rpathdirs $dir" fi haveit= for x in $ltrpathdirs; do if test "X$x" = "X$dir"; then haveit=yes break fi done if test -z "$haveit"; then ltrpathdirs="$ltrpathdirs $dir" fi fi ;; -l*) names_next_round="$names_next_round "`echo "X$dep" | sed -e 's/^X-l//'` ;; *.la) names_next_round="$names_next_round "`echo "X$dep" | sed -e 's,^X.*/,,' -e 's,^lib,,' -e 's,\.la$,,'` ;; *) LIBICONV="${LIBICONV}${LIBICONV:+ }$dep" LTLIBICONV="${LTLIBICONV}${LTLIBICONV:+ }$dep" ;; esac done fi else LIBICONV="${LIBICONV}${LIBICONV:+ }-l$name" LTLIBICONV="${LTLIBICONV}${LTLIBICONV:+ }-l$name" fi fi fi done done if test "X$rpathdirs" != "X"; then if test -n "$acl_hardcode_libdir_separator"; then alldirs= for found_dir in $rpathdirs; do alldirs="${alldirs}${alldirs:+$acl_hardcode_libdir_separator}$found_dir" done acl_save_libdir="$libdir" libdir="$alldirs" eval flag=\"$acl_hardcode_libdir_flag_spec\" libdir="$acl_save_libdir" LIBICONV="${LIBICONV}${LIBICONV:+ }$flag" else for found_dir in $rpathdirs; do acl_save_libdir="$libdir" libdir="$found_dir" eval flag=\"$acl_hardcode_libdir_flag_spec\" libdir="$acl_save_libdir" LIBICONV="${LIBICONV}${LIBICONV:+ }$flag" done fi fi if test "X$ltrpathdirs" != "X"; then for found_dir in $ltrpathdirs; do LTLIBICONV="${LTLIBICONV}${LTLIBICONV:+ }-R$found_dir" done fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for CFPreferencesCopyAppValue" >&5 $as_echo_n "checking for CFPreferencesCopyAppValue... " >&6; } if test "${gt_cv_func_CFPreferencesCopyAppValue+set}" = set; then : $as_echo_n "(cached) " >&6 else gt_save_LIBS="$LIBS" LIBS="$LIBS -Wl,-framework -Wl,CoreFoundation" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include <CoreFoundation/CFPreferences.h> int main () { CFPreferencesCopyAppValue(NULL, NULL) ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : gt_cv_func_CFPreferencesCopyAppValue=yes else gt_cv_func_CFPreferencesCopyAppValue=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS="$gt_save_LIBS" fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gt_cv_func_CFPreferencesCopyAppValue" >&5 $as_echo "$gt_cv_func_CFPreferencesCopyAppValue" >&6; } if test $gt_cv_func_CFPreferencesCopyAppValue = yes; then $as_echo "#define HAVE_CFPREFERENCESCOPYAPPVALUE 1" >>confdefs.h fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for CFLocaleCopyCurrent" >&5 $as_echo_n "checking for CFLocaleCopyCurrent... " >&6; } if test "${gt_cv_func_CFLocaleCopyCurrent+set}" = set; then : $as_echo_n "(cached) " >&6 else gt_save_LIBS="$LIBS" LIBS="$LIBS -Wl,-framework -Wl,CoreFoundation" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include <CoreFoundation/CFLocale.h> int main () { CFLocaleCopyCurrent(); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : gt_cv_func_CFLocaleCopyCurrent=yes else gt_cv_func_CFLocaleCopyCurrent=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS="$gt_save_LIBS" fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gt_cv_func_CFLocaleCopyCurrent" >&5 $as_echo "$gt_cv_func_CFLocaleCopyCurrent" >&6; } if test $gt_cv_func_CFLocaleCopyCurrent = yes; then $as_echo "#define HAVE_CFLOCALECOPYCURRENT 1" >>confdefs.h fi INTL_MACOSX_LIBS= if test $gt_cv_func_CFPreferencesCopyAppValue = yes || test $gt_cv_func_CFLocaleCopyCurrent = yes; then INTL_MACOSX_LIBS="-Wl,-framework -Wl,CoreFoundation" fi LIBINTL= LTLIBINTL= POSUB= case " $gt_needs " in *" need-formatstring-macros "*) gt_api_version=3 ;; *" need-ngettext "*) gt_api_version=2 ;; *) gt_api_version=1 ;; esac gt_func_gnugettext_libc="gt_cv_func_gnugettext${gt_api_version}_libc" gt_func_gnugettext_libintl="gt_cv_func_gnugettext${gt_api_version}_libintl" if test "$USE_NLS" = "yes"; then gt_use_preinstalled_gnugettext=no if test $gt_api_version -ge 3; then gt_revision_test_code=' #ifndef __GNU_GETTEXT_SUPPORTED_REVISION #define __GNU_GETTEXT_SUPPORTED_REVISION(major) ((major) == 0 ? 0 : -1) #endif typedef int array [2 * (__GNU_GETTEXT_SUPPORTED_REVISION(0) >= 1) - 1]; ' else gt_revision_test_code= fi if test $gt_api_version -ge 2; then gt_expression_test_code=' + * ngettext ("", "", 0)' else gt_expression_test_code= fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU gettext in libc" >&5 $as_echo_n "checking for GNU gettext in libc... " >&6; } if eval "test \"\${$gt_func_gnugettext_libc+set}\"" = set; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h.
*/ #include <libintl.h> $gt_revision_test_code extern int _nl_msg_cat_cntr; extern int *_nl_domain_bindings; int main () { bindtextdomain ("", ""); return * gettext ("")$gt_expression_test_code + _nl_msg_cat_cntr + *_nl_domain_bindings ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : eval "$gt_func_gnugettext_libc=yes" else eval "$gt_func_gnugettext_libc=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi eval ac_res=\$$gt_func_gnugettext_libc { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if { eval "gt_val=\$$gt_func_gnugettext_libc"; test "$gt_val" != "yes"; }; then am_save_CPPFLAGS="$CPPFLAGS" for element in $INCICONV; do haveit= for x in $CPPFLAGS; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" if test "X$x" = "X$element"; then haveit=yes break fi done if test -z "$haveit"; then CPPFLAGS="${CPPFLAGS}${CPPFLAGS:+ }$element" fi done { $as_echo "$as_me:${as_lineno-$LINENO}: checking for iconv" >&5 $as_echo_n "checking for iconv... " >&6; } if test "${am_cv_func_iconv+set}" = set; then : $as_echo_n "(cached) " >&6 else am_cv_func_iconv="no, consider installing GNU libiconv" am_cv_lib_iconv=no cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include <stdlib.h> #include <iconv.h> int main () { iconv_t cd = iconv_open("",""); iconv(cd,NULL,NULL,NULL,NULL); iconv_close(cd); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : am_cv_func_iconv=yes fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext if test "$am_cv_func_iconv" != yes; then am_save_LIBS="$LIBS" LIBS="$LIBS $LIBICONV" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include <stdlib.h> #include <iconv.h> int main () { iconv_t cd = iconv_open("",""); iconv(cd,NULL,NULL,NULL,NULL); iconv_close(cd); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : am_cv_lib_iconv=yes am_cv_func_iconv=yes fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS="$am_save_LIBS" fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_func_iconv" >&5 $as_echo "$am_cv_func_iconv" >&6; } if test "$am_cv_func_iconv" = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for working iconv" >&5 $as_echo_n "checking for working iconv... " >&6; } if test "${am_cv_func_iconv_works+set}" = set; then : $as_echo_n "(cached) " >&6 else am_save_LIBS="$LIBS" if test $am_cv_lib_iconv = yes; then LIBS="$LIBS $LIBICONV" fi if test "$cross_compiling" = yes; then : case "$host_os" in aix* | hpux*) am_cv_func_iconv_works="guessing no" ;; *) am_cv_func_iconv_works="guessing yes" ;; esac else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include <iconv.h> #include <string.h> int main () { /* Test against AIX 5.1 bug: Failures are not distinguishable from successful returns. */ { iconv_t cd_utf8_to_88591 = iconv_open ("ISO8859-1", "UTF-8"); if (cd_utf8_to_88591 != (iconv_t)(-1)) { static const char input[] = "\342\202\254"; /* EURO SIGN */ char buf[10]; const char *inptr = input; size_t inbytesleft = strlen (input); char *outptr = buf; size_t outbytesleft = sizeof (buf); size_t res = iconv (cd_utf8_to_88591, (char **) &inptr, &inbytesleft, &outptr, &outbytesleft); if (res == 0) return 1; } } /* Test against Solaris 10 bug: Failures are not distinguishable from successful returns.
*/ { iconv_t cd_ascii_to_88591 = iconv_open ("ISO8859-1", "646"); if (cd_ascii_to_88591 != (iconv_t)(-1)) { static const char input[] = "\263"; char buf[10]; const char *inptr = input; size_t inbytesleft = strlen (input); char *outptr = buf; size_t outbytesleft = sizeof (buf); size_t res = iconv (cd_ascii_to_88591, (char **) &inptr, &inbytesleft, &outptr, &outbytesleft); if (res == 0) return 1; } } #if 0 /* This bug could be worked around by the caller. */ /* Test against HP-UX 11.11 bug: Positive return value instead of 0. */ { iconv_t cd_88591_to_utf8 = iconv_open ("utf8", "iso88591"); if (cd_88591_to_utf8 != (iconv_t)(-1)) { static const char input[] = "\304rger mit b\366sen B\374bchen ohne Augenma\337"; char buf[50]; const char *inptr = input; size_t inbytesleft = strlen (input); char *outptr = buf; size_t outbytesleft = sizeof (buf); size_t res = iconv (cd_88591_to_utf8, (char **) &inptr, &inbytesleft, &outptr, &outbytesleft); if ((int)res > 0) return 1; } } #endif /* Test against HP-UX 11.11 bug: No converter from EUC-JP to UTF-8 is provided. */ if (/* Try standardized names. */ iconv_open ("UTF-8", "EUC-JP") == (iconv_t)(-1) /* Try IRIX, OSF/1 names. */ && iconv_open ("UTF-8", "eucJP") == (iconv_t)(-1) /* Try AIX names. */ && iconv_open ("UTF-8", "IBM-eucJP") == (iconv_t)(-1) /* Try HP-UX names. */ && iconv_open ("utf8", "eucJP") == (iconv_t)(-1)) return 1; return 0; } _ACEOF if ac_fn_cxx_try_run "$LINENO"; then : am_cv_func_iconv_works=yes else am_cv_func_iconv_works=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi LIBS="$am_save_LIBS" fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_func_iconv_works" >&5 $as_echo "$am_cv_func_iconv_works" >&6; } case "$am_cv_func_iconv_works" in *no) am_func_iconv=no am_cv_lib_iconv=no ;; *) am_func_iconv=yes ;; esac else am_func_iconv=no am_cv_lib_iconv=no fi if test "$am_func_iconv" = yes; then $as_echo "#define HAVE_ICONV 1" >>confdefs.h fi if test "$am_cv_lib_iconv" = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to link with libiconv" >&5 $as_echo_n "checking how to link with libiconv... " >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LIBICONV" >&5 $as_echo "$LIBICONV" >&6; } else CPPFLAGS="$am_save_CPPFLAGS" LIBICONV= LTLIBICONV= fi use_additional=yes acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval additional_includedir=\"$includedir\" eval additional_libdir=\"$libdir\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" # Check whether --with-libintl-prefix was given. if test "${with_libintl_prefix+set}" = set; then : withval=$with_libintl_prefix; if test "X$withval" = "Xno"; then use_additional=no else if test "X$withval" = "X"; then acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval additional_includedir=\"$includedir\" eval additional_libdir=\"$libdir\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" else additional_includedir="$withval/include" additional_libdir="$withval/$acl_libdirstem" if test "$acl_libdirstem2" != "$acl_libdirstem" \ && ! 
test -d "$withval/$acl_libdirstem"; then additional_libdir="$withval/$acl_libdirstem2" fi fi fi fi LIBINTL= LTLIBINTL= INCINTL= LIBINTL_PREFIX= HAVE_LIBINTL= rpathdirs= ltrpathdirs= names_already_handled= names_next_round='intl ' while test -n "$names_next_round"; do names_this_round="$names_next_round" names_next_round= for name in $names_this_round; do already_handled= for n in $names_already_handled; do if test "$n" = "$name"; then already_handled=yes break fi done if test -z "$already_handled"; then names_already_handled="$names_already_handled $name" uppername=`echo "$name" | sed -e 'y|abcdefghijklmnopqrstuvwxyz./-|ABCDEFGHIJKLMNOPQRSTUVWXYZ___|'` eval value=\"\$HAVE_LIB$uppername\" if test -n "$value"; then if test "$value" = yes; then eval value=\"\$LIB$uppername\" test -z "$value" || LIBINTL="${LIBINTL}${LIBINTL:+ }$value" eval value=\"\$LTLIB$uppername\" test -z "$value" || LTLIBINTL="${LTLIBINTL}${LTLIBINTL:+ }$value" else : fi else found_dir= found_la= found_so= found_a= eval libname=\"$acl_libname_spec\" # typically: libname=lib$name if test -n "$acl_shlibext"; then shrext=".$acl_shlibext" # typically: shrext=.so else shrext= fi if test $use_additional = yes; then dir="$additional_libdir" if test -n "$acl_shlibext"; then if test -f "$dir/$libname$shrext"; then found_dir="$dir" found_so="$dir/$libname$shrext" else if test "$acl_library_names_spec" = '$libname$shrext$versuffix'; then ver=`(cd "$dir" && \ for f in "$libname$shrext".*; do echo "$f"; done \ | sed -e "s,^$libname$shrext\\\\.,," \ | sort -t '.' -n -r -k1,1 -k2,2 -k3,3 -k4,4 -k5,5 \ | sed 1q ) 2>/dev/null` if test -n "$ver" && test -f "$dir/$libname$shrext.$ver"; then found_dir="$dir" found_so="$dir/$libname$shrext.$ver" fi else eval library_names=\"$acl_library_names_spec\" for f in $library_names; do if test -f "$dir/$f"; then found_dir="$dir" found_so="$dir/$f" break fi done fi fi fi if test "X$found_dir" = "X"; then if test -f "$dir/$libname.$acl_libext"; then found_dir="$dir" found_a="$dir/$libname.$acl_libext" fi fi if test "X$found_dir" != "X"; then if test -f "$dir/$libname.la"; then found_la="$dir/$libname.la" fi fi fi if test "X$found_dir" = "X"; then for x in $LDFLAGS $LTLIBINTL; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" case "$x" in -L*) dir=`echo "X$x" | sed -e 's/^X-L//'` if test -n "$acl_shlibext"; then if test -f "$dir/$libname$shrext"; then found_dir="$dir" found_so="$dir/$libname$shrext" else if test "$acl_library_names_spec" = '$libname$shrext$versuffix'; then ver=`(cd "$dir" && \ for f in "$libname$shrext".*; do echo "$f"; done \ | sed -e "s,^$libname$shrext\\\\.,," \ | sort -t '.' 
-n -r -k1,1 -k2,2 -k3,3 -k4,4 -k5,5 \ | sed 1q ) 2>/dev/null` if test -n "$ver" && test -f "$dir/$libname$shrext.$ver"; then found_dir="$dir" found_so="$dir/$libname$shrext.$ver" fi else eval library_names=\"$acl_library_names_spec\" for f in $library_names; do if test -f "$dir/$f"; then found_dir="$dir" found_so="$dir/$f" break fi done fi fi fi if test "X$found_dir" = "X"; then if test -f "$dir/$libname.$acl_libext"; then found_dir="$dir" found_a="$dir/$libname.$acl_libext" fi fi if test "X$found_dir" != "X"; then if test -f "$dir/$libname.la"; then found_la="$dir/$libname.la" fi fi ;; esac if test "X$found_dir" != "X"; then break fi done fi if test "X$found_dir" != "X"; then LTLIBINTL="${LTLIBINTL}${LTLIBINTL:+ }-L$found_dir -l$name" if test "X$found_so" != "X"; then if test "$enable_rpath" = no \ || test "X$found_dir" = "X/usr/$acl_libdirstem" \ || test "X$found_dir" = "X/usr/$acl_libdirstem2"; then LIBINTL="${LIBINTL}${LIBINTL:+ }$found_so" else haveit= for x in $ltrpathdirs; do if test "X$x" = "X$found_dir"; then haveit=yes break fi done if test -z "$haveit"; then ltrpathdirs="$ltrpathdirs $found_dir" fi if test "$acl_hardcode_direct" = yes; then LIBINTL="${LIBINTL}${LIBINTL:+ }$found_so" else if test -n "$acl_hardcode_libdir_flag_spec" && test "$acl_hardcode_minus_L" = no; then LIBINTL="${LIBINTL}${LIBINTL:+ }$found_so" haveit= for x in $rpathdirs; do if test "X$x" = "X$found_dir"; then haveit=yes break fi done if test -z "$haveit"; then rpathdirs="$rpathdirs $found_dir" fi else haveit= for x in $LDFLAGS $LIBINTL; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" if test "X$x" = "X-L$found_dir"; then haveit=yes break fi done if test -z "$haveit"; then LIBINTL="${LIBINTL}${LIBINTL:+ }-L$found_dir" fi if test "$acl_hardcode_minus_L" != no; then LIBINTL="${LIBINTL}${LIBINTL:+ }$found_so" else LIBINTL="${LIBINTL}${LIBINTL:+ }-l$name" fi fi fi fi else if test "X$found_a" != "X"; then LIBINTL="${LIBINTL}${LIBINTL:+ }$found_a" else LIBINTL="${LIBINTL}${LIBINTL:+ }-L$found_dir -l$name" fi fi additional_includedir= case "$found_dir" in */$acl_libdirstem | */$acl_libdirstem/) basedir=`echo "X$found_dir" | sed -e 's,^X,,' -e "s,/$acl_libdirstem/"'*$,,'` if test "$name" = 'intl'; then LIBINTL_PREFIX="$basedir" fi additional_includedir="$basedir/include" ;; */$acl_libdirstem2 | */$acl_libdirstem2/) basedir=`echo "X$found_dir" | sed -e 's,^X,,' -e "s,/$acl_libdirstem2/"'*$,,'` if test "$name" = 'intl'; then LIBINTL_PREFIX="$basedir" fi additional_includedir="$basedir/include" ;; esac if test "X$additional_includedir" != "X"; then if test "X$additional_includedir" != "X/usr/include"; then haveit= if test "X$additional_includedir" = "X/usr/local/include"; then if test -n "$GCC"; then case $host_os in linux* | gnu* | k*bsd*-gnu) haveit=yes;; esac fi fi if test -z "$haveit"; then for x in $CPPFLAGS $INCINTL; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" if test "X$x" = "X-I$additional_includedir"; then haveit=yes break fi done if test -z "$haveit"; then if test -d "$additional_includedir"; then INCINTL="${INCINTL}${INCINTL:+ }-I$additional_includedir" fi fi fi fi fi if test -n "$found_la"; then save_libdir="$libdir" case "$found_la" in */* | *\\*) . "$found_la" ;; *) . 
"./$found_la" ;; esac libdir="$save_libdir" for dep in $dependency_libs; do case "$dep" in -L*) additional_libdir=`echo "X$dep" | sed -e 's/^X-L//'` if test "X$additional_libdir" != "X/usr/$acl_libdirstem" \ && test "X$additional_libdir" != "X/usr/$acl_libdirstem2"; then haveit= if test "X$additional_libdir" = "X/usr/local/$acl_libdirstem" \ || test "X$additional_libdir" = "X/usr/local/$acl_libdirstem2"; then if test -n "$GCC"; then case $host_os in linux* | gnu* | k*bsd*-gnu) haveit=yes;; esac fi fi if test -z "$haveit"; then haveit= for x in $LDFLAGS $LIBINTL; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" if test "X$x" = "X-L$additional_libdir"; then haveit=yes break fi done if test -z "$haveit"; then if test -d "$additional_libdir"; then LIBINTL="${LIBINTL}${LIBINTL:+ }-L$additional_libdir" fi fi haveit= for x in $LDFLAGS $LTLIBINTL; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" if test "X$x" = "X-L$additional_libdir"; then haveit=yes break fi done if test -z "$haveit"; then if test -d "$additional_libdir"; then LTLIBINTL="${LTLIBINTL}${LTLIBINTL:+ }-L$additional_libdir" fi fi fi fi ;; -R*) dir=`echo "X$dep" | sed -e 's/^X-R//'` if test "$enable_rpath" != no; then haveit= for x in $rpathdirs; do if test "X$x" = "X$dir"; then haveit=yes break fi done if test -z "$haveit"; then rpathdirs="$rpathdirs $dir" fi haveit= for x in $ltrpathdirs; do if test "X$x" = "X$dir"; then haveit=yes break fi done if test -z "$haveit"; then ltrpathdirs="$ltrpathdirs $dir" fi fi ;; -l*) names_next_round="$names_next_round "`echo "X$dep" | sed -e 's/^X-l//'` ;; *.la) names_next_round="$names_next_round "`echo "X$dep" | sed -e 's,^X.*/,,' -e 's,^lib,,' -e 's,\.la$,,'` ;; *) LIBINTL="${LIBINTL}${LIBINTL:+ }$dep" LTLIBINTL="${LTLIBINTL}${LTLIBINTL:+ }$dep" ;; esac done fi else LIBINTL="${LIBINTL}${LIBINTL:+ }-l$name" LTLIBINTL="${LTLIBINTL}${LTLIBINTL:+ }-l$name" fi fi fi done done if test "X$rpathdirs" != "X"; then if test -n "$acl_hardcode_libdir_separator"; then alldirs= for found_dir in $rpathdirs; do alldirs="${alldirs}${alldirs:+$acl_hardcode_libdir_separator}$found_dir" done acl_save_libdir="$libdir" libdir="$alldirs" eval flag=\"$acl_hardcode_libdir_flag_spec\" libdir="$acl_save_libdir" LIBINTL="${LIBINTL}${LIBINTL:+ }$flag" else for found_dir in $rpathdirs; do acl_save_libdir="$libdir" libdir="$found_dir" eval flag=\"$acl_hardcode_libdir_flag_spec\" libdir="$acl_save_libdir" LIBINTL="${LIBINTL}${LIBINTL:+ }$flag" done fi fi if test "X$ltrpathdirs" != "X"; then for found_dir in $ltrpathdirs; do LTLIBINTL="${LTLIBINTL}${LTLIBINTL:+ }-R$found_dir" done fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU gettext in libintl" >&5 $as_echo_n "checking for GNU gettext in libintl... " >&6; } if eval "test \"\${$gt_func_gnugettext_libintl+set}\"" = set; then : $as_echo_n "(cached) " >&6 else gt_save_CPPFLAGS="$CPPFLAGS" CPPFLAGS="$CPPFLAGS $INCINTL" gt_save_LIBS="$LIBS" LIBS="$LIBS $LIBINTL" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include $gt_revision_test_code extern int _nl_msg_cat_cntr; extern #ifdef __cplusplus "C" #endif const char *_nl_expand_alias (const char *); int main () { bindtextdomain ("", ""); return * gettext ("")$gt_expression_test_code + _nl_msg_cat_cntr + *_nl_expand_alias ("") ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : eval "$gt_func_gnugettext_libintl=yes" else eval "$gt_func_gnugettext_libintl=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext if { eval "gt_val=\$$gt_func_gnugettext_libintl"; test "$gt_val" != yes; } && test -n "$LIBICONV"; then LIBS="$LIBS $LIBICONV" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include $gt_revision_test_code extern int _nl_msg_cat_cntr; extern #ifdef __cplusplus "C" #endif const char *_nl_expand_alias (const char *); int main () { bindtextdomain ("", ""); return * gettext ("")$gt_expression_test_code + _nl_msg_cat_cntr + *_nl_expand_alias ("") ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : LIBINTL="$LIBINTL $LIBICONV" LTLIBINTL="$LTLIBINTL $LTLIBICONV" eval "$gt_func_gnugettext_libintl=yes" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi CPPFLAGS="$gt_save_CPPFLAGS" LIBS="$gt_save_LIBS" fi eval ac_res=\$$gt_func_gnugettext_libintl { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } fi if { eval "gt_val=\$$gt_func_gnugettext_libc"; test "$gt_val" = "yes"; } \ || { { eval "gt_val=\$$gt_func_gnugettext_libintl"; test "$gt_val" = "yes"; } \ && test "$PACKAGE" != gettext-runtime \ && test "$PACKAGE" != gettext-tools; }; then gt_use_preinstalled_gnugettext=yes else LIBINTL= LTLIBINTL= INCINTL= fi if test -n "$INTL_MACOSX_LIBS"; then if test "$gt_use_preinstalled_gnugettext" = "yes" \ || test "$nls_cv_use_gnu_gettext" = "yes"; then LIBINTL="$LIBINTL $INTL_MACOSX_LIBS" LTLIBINTL="$LTLIBINTL $INTL_MACOSX_LIBS" fi fi if test "$gt_use_preinstalled_gnugettext" = "yes" \ || test "$nls_cv_use_gnu_gettext" = "yes"; then $as_echo "#define ENABLE_NLS 1" >>confdefs.h else USE_NLS=no fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to use NLS" >&5 $as_echo_n "checking whether to use NLS... " >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: $USE_NLS" >&5 $as_echo "$USE_NLS" >&6; } if test "$USE_NLS" = "yes"; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking where the gettext function comes from" >&5 $as_echo_n "checking where the gettext function comes from... " >&6; } if test "$gt_use_preinstalled_gnugettext" = "yes"; then if { eval "gt_val=\$$gt_func_gnugettext_libintl"; test "$gt_val" = "yes"; }; then gt_source="external libintl" else gt_source="libc" fi else gt_source="included intl directory" fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gt_source" >&5 $as_echo "$gt_source" >&6; } fi if test "$USE_NLS" = "yes"; then if test "$gt_use_preinstalled_gnugettext" = "yes"; then if { eval "gt_val=\$$gt_func_gnugettext_libintl"; test "$gt_val" = "yes"; }; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to link with libintl" >&5 $as_echo_n "checking how to link with libintl... 
" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LIBINTL" >&5 $as_echo "$LIBINTL" >&6; } for element in $INCINTL; do haveit= for x in $CPPFLAGS; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" if test "X$x" = "X$element"; then haveit=yes break fi done if test -z "$haveit"; then CPPFLAGS="${CPPFLAGS}${CPPFLAGS:+ }$element" fi done fi $as_echo "#define HAVE_GETTEXT 1" >>confdefs.h $as_echo "#define HAVE_DCGETTEXT 1" >>confdefs.h fi POSUB=po fi INTLLIBS="$LIBINTL" XGETTEXT_EXTRA_OPTIONS='--keyword=dar_gettext' am_save_CPPFLAGS="$CPPFLAGS" for element in $INCICONV; do haveit= for x in $CPPFLAGS; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" if test "X$x" = "X$element"; then haveit=yes break fi done if test -z "$haveit"; then CPPFLAGS="${CPPFLAGS}${CPPFLAGS:+ }$element" fi done { $as_echo "$as_me:${as_lineno-$LINENO}: checking for iconv" >&5 $as_echo_n "checking for iconv... " >&6; } if test "${am_cv_func_iconv+set}" = set; then : $as_echo_n "(cached) " >&6 else am_cv_func_iconv="no, consider installing GNU libiconv" am_cv_lib_iconv=no cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include int main () { iconv_t cd = iconv_open("",""); iconv(cd,NULL,NULL,NULL,NULL); iconv_close(cd); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : am_cv_func_iconv=yes fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext if test "$am_cv_func_iconv" != yes; then am_save_LIBS="$LIBS" LIBS="$LIBS $LIBICONV" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include int main () { iconv_t cd = iconv_open("",""); iconv(cd,NULL,NULL,NULL,NULL); iconv_close(cd); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : am_cv_lib_iconv=yes am_cv_func_iconv=yes fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS="$am_save_LIBS" fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_func_iconv" >&5 $as_echo "$am_cv_func_iconv" >&6; } if test "$am_cv_func_iconv" = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for working iconv" >&5 $as_echo_n "checking for working iconv... " >&6; } if test "${am_cv_func_iconv_works+set}" = set; then : $as_echo_n "(cached) " >&6 else am_save_LIBS="$LIBS" if test $am_cv_lib_iconv = yes; then LIBS="$LIBS $LIBICONV" fi if test "$cross_compiling" = yes; then : case "$host_os" in aix* | hpux*) am_cv_func_iconv_works="guessing no" ;; *) am_cv_func_iconv_works="guessing yes" ;; esac else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include int main () { /* Test against AIX 5.1 bug: Failures are not distinguishable from successful returns. */ { iconv_t cd_utf8_to_88591 = iconv_open ("ISO8859-1", "UTF-8"); if (cd_utf8_to_88591 != (iconv_t)(-1)) { static const char input[] = "\342\202\254"; /* EURO SIGN */ char buf[10]; const char *inptr = input; size_t inbytesleft = strlen (input); char *outptr = buf; size_t outbytesleft = sizeof (buf); size_t res = iconv (cd_utf8_to_88591, (char **) &inptr, &inbytesleft, &outptr, &outbytesleft); if (res == 0) return 1; } } /* Test against Solaris 10 bug: Failures are not distinguishable from successful returns. 
*/
{
  iconv_t cd_ascii_to_88591 = iconv_open ("ISO8859-1", "646");
  if (cd_ascii_to_88591 != (iconv_t)(-1))
    {
      static const char input[] = "\263";
      char buf[10];
      const char *inptr = input;
      size_t inbytesleft = strlen (input);
      char *outptr = buf;
      size_t outbytesleft = sizeof (buf);
      size_t res = iconv (cd_ascii_to_88591,
                          (char **) &inptr, &inbytesleft,
                          &outptr, &outbytesleft);
      if (res == 0)
        return 1;
    }
}
#if 0 /* This bug could be worked around by the caller.  */
  /* Test against HP-UX 11.11 bug: Positive return value instead of 0.  */
{
  iconv_t cd_88591_to_utf8 = iconv_open ("utf8", "iso88591");
  if (cd_88591_to_utf8 != (iconv_t)(-1))
    {
      static const char input[] = "\304rger mit b\366sen B\374bchen ohne Augenma\337";
      char buf[50];
      const char *inptr = input;
      size_t inbytesleft = strlen (input);
      char *outptr = buf;
      size_t outbytesleft = sizeof (buf);
      size_t res = iconv (cd_88591_to_utf8,
                          (char **) &inptr, &inbytesleft,
                          &outptr, &outbytesleft);
      if ((int)res > 0)
        return 1;
    }
}
#endif
  /* Test against HP-UX 11.11 bug: No converter from EUC-JP to UTF-8 is
     provided.  */
  if (/* Try standardized names.  */
      iconv_open ("UTF-8", "EUC-JP") == (iconv_t)(-1)
      /* Try IRIX, OSF/1 names.  */
      && iconv_open ("UTF-8", "eucJP") == (iconv_t)(-1)
      /* Try AIX names.  */
      && iconv_open ("UTF-8", "IBM-eucJP") == (iconv_t)(-1)
      /* Try HP-UX names.  */
      && iconv_open ("utf8", "eucJP") == (iconv_t)(-1))
    return 1;
  return 0;
}
_ACEOF
if ac_fn_cxx_try_run "$LINENO"; then :
  am_cv_func_iconv_works=yes
else
  am_cv_func_iconv_works=no
fi
rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \
  conftest.$ac_objext conftest.beam conftest.$ac_ext
fi
    LIBS="$am_save_LIBS"

fi
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_func_iconv_works" >&5
$as_echo "$am_cv_func_iconv_works" >&6; }
    case "$am_cv_func_iconv_works" in
      *no) am_func_iconv=no am_cv_lib_iconv=no ;;
      *)   am_func_iconv=yes ;;
    esac
  else
    am_func_iconv=no am_cv_lib_iconv=no
  fi
  if test "$am_func_iconv" = yes; then

$as_echo "#define HAVE_ICONV 1" >>confdefs.h

  fi
  if test "$am_cv_lib_iconv" = yes; then
    { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to link with libiconv" >&5
$as_echo_n "checking how to link with libiconv... " >&6; }
    { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LIBICONV" >&5
$as_echo "$LIBICONV" >&6; }
  else
    CPPFLAGS="$am_save_CPPFLAGS"
    LIBICONV=
    LTLIBICONV=
  fi

  if test "$am_cv_func_iconv" = yes; then
    { $as_echo "$as_me:${as_lineno-$LINENO}: checking for iconv declaration" >&5
$as_echo_n "checking for iconv declaration... " >&6; }
    if test "${am_cv_proto_iconv+set}" = set; then :
  $as_echo_n "(cached) " >&6
else
      cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h.  */
#include <stdlib.h>
#include <iconv.h>
extern
#ifdef __cplusplus
"C"
#endif
#if defined(__STDC__) || defined(__cplusplus)
size_t iconv (iconv_t cd, char * *inbuf, size_t *inbytesleft, char * *outbuf, size_t *outbytesleft);
#else
size_t iconv();
#endif
int
main ()
{
  ;
  return 0;
}
_ACEOF
if ac_fn_cxx_try_compile "$LINENO"; then :
  am_cv_proto_iconv_arg1=""
else
  am_cv_proto_iconv_arg1="const"
fi
rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext
      am_cv_proto_iconv="extern size_t iconv (iconv_t cd, $am_cv_proto_iconv_arg1 char * *inbuf, size_t *inbytesleft, char * *outbuf, size_t *outbytesleft);"
fi

    am_cv_proto_iconv=`echo "$am_cv_proto_iconv" | tr -s ' ' | sed -e 's/( /(/'`
    { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_proto_iconv" >&5
$as_echo " $am_cv_proto_iconv" >&6; }

cat >>confdefs.h <<_ACEOF
#define ICONV_CONST $am_cv_proto_iconv_arg1
_ACEOF

  fi

# Checks for programs.
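# The block that follows locates the C++ and C compilers and the auxiliary
# tools libtool needs (sed, fgrep, ld, nm), caching each answer in an
# ac_cv_*/lt_cv_* variable.  Every compiler probe follows the same pattern:
# write a minimal conftest source file, try to compile or link it, and keep
# the result.  As a rough illustration only (assuming g++ is the compiler
# under test), the "-g" probe below is roughly equivalent to running:
#
#   echo 'int main () { return 0; }' > conftest.cpp
#   if g++ -g -c conftest.cpp; then
#     echo "compiler accepts -g"
#   fi
#   rm -f conftest.cpp conftest.o
#
# The detected values can be overridden from the command line, for example:
#   ./configure CXX=clang++ CXXFLAGS="-O2"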
ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu if test -z "$CXX"; then if test -n "$CCC"; then CXX=$CCC else if test -n "$ac_tool_prefix"; then for ac_prog in g++ c++ gpp aCC CC cxx cc++ cl.exe FCC KCC RCC xlC_r xlC do # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. set dummy $ac_tool_prefix$ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_CXX+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$CXX"; then ac_cv_prog_CXX="$CXX" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_CXX="$ac_tool_prefix$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi CXX=$ac_cv_prog_CXX if test -n "$CXX"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CXX" >&5 $as_echo "$CXX" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$CXX" && break done fi if test -z "$CXX"; then ac_ct_CXX=$CXX for ac_prog in g++ c++ gpp aCC CC cxx cc++ cl.exe FCC KCC RCC xlC_r xlC do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_ac_ct_CXX+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_CXX"; then ac_cv_prog_ac_ct_CXX="$ac_ct_CXX" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_ac_ct_CXX="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_CXX=$ac_cv_prog_ac_ct_CXX if test -n "$ac_ct_CXX"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CXX" >&5 $as_echo "$ac_ct_CXX" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$ac_ct_CXX" && break done if test "x$ac_ct_CXX" = x; then CXX="g++" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac CXX=$ac_ct_CXX fi fi fi fi # Provide some information about the compiler. $as_echo "$as_me:${as_lineno-$LINENO}: checking for C++ compiler version" >&5 set X $ac_compile ac_compiler=$2 for ac_option in --version -v -V -qversion; do { { ac_try="$ac_compiler $ac_option >&5" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_compiler $ac_option >&5") 2>conftest.err ac_status=$? 
if test -s conftest.err; then sed '10a\ ... rest of stderr output deleted ... 10q' conftest.err >conftest.er1 cat conftest.er1 >&5 fi rm -f conftest.er1 conftest.err $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } done { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C++ compiler" >&5 $as_echo_n "checking whether we are using the GNU C++ compiler... " >&6; } if test "${ac_cv_cxx_compiler_gnu+set}" = set; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { #ifndef __GNUC__ choke me #endif ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_compiler_gnu=yes else ac_compiler_gnu=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_cv_cxx_compiler_gnu=$ac_compiler_gnu fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_cxx_compiler_gnu" >&5 $as_echo "$ac_cv_cxx_compiler_gnu" >&6; } if test $ac_compiler_gnu = yes; then GXX=yes else GXX= fi ac_test_CXXFLAGS=${CXXFLAGS+set} ac_save_CXXFLAGS=$CXXFLAGS { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CXX accepts -g" >&5 $as_echo_n "checking whether $CXX accepts -g... " >&6; } if test "${ac_cv_prog_cxx_g+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_save_cxx_werror_flag=$ac_cxx_werror_flag ac_cxx_werror_flag=yes ac_cv_prog_cxx_g=no CXXFLAGS="-g" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_prog_cxx_g=yes else CXXFLAGS="" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : else ac_cxx_werror_flag=$ac_save_cxx_werror_flag CXXFLAGS="-g" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_prog_cxx_g=yes fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_cxx_werror_flag=$ac_save_cxx_werror_flag fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cxx_g" >&5 $as_echo "$ac_cv_prog_cxx_g" >&6; } if test "$ac_test_CXXFLAGS" = set; then CXXFLAGS=$ac_save_CXXFLAGS elif test $ac_cv_prog_cxx_g = yes; then if test "$GXX" = yes; then CXXFLAGS="-g -O2" else CXXFLAGS="-g" fi else if test "$GXX" = yes; then CXXFLAGS="-O2" else CXXFLAGS= fi fi ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu depcc="$CXX" am_compiler_list= { $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5 $as_echo_n "checking dependency style of $depcc... " >&6; } if test "${am_cv_CXX_dependencies_compiler_type+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then # We make a subdir and do the tests there. Otherwise we can end up # making bogus files that we don't know about and never remove. For # instance it was reported that on HP-UX the gcc test will end up # making a dummy file named `D' -- because `-MD' means `put the output # in D'. mkdir conftest.dir # Copy depcomp to subdir because otherwise we won't find it if we're # using a relative directory. 
cp "$am_depcomp" conftest.dir cd conftest.dir # We will build objects and dependencies in a subdirectory because # it helps to detect inapplicable dependency modes. For instance # both Tru64's cc and ICC support -MD to output dependencies as a # side effect of compilation, but ICC will put the dependencies in # the current directory while Tru64 will put them in the object # directory. mkdir sub am_cv_CXX_dependencies_compiler_type=none if test "$am_compiler_list" = ""; then am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp` fi am__universal=false case " $depcc " in #( *\ -arch\ *\ -arch\ *) am__universal=true ;; esac for depmode in $am_compiler_list; do # Setup a source with many dependencies, because some compilers # like to wrap large dependency lists on column 80 (with \), and # we should not choose a depcomp mode which is confused by this. # # We need to recreate these files for each test, as the compiler may # overwrite some of them when testing with obscure command lines. # This happens at least with the AIX C compiler. : > sub/conftest.c for i in 1 2 3 4 5 6; do echo '#include "conftst'$i'.h"' >> sub/conftest.c # Using `: > sub/conftst$i.h' creates only sub/conftst1.h with # Solaris 8's {/usr,}/bin/sh. touch sub/conftst$i.h done echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf # We check with `-c' and `-o' for the sake of the "dashmstdout" # mode. It turns out that the SunPro C++ compiler does not properly # handle `-M -o', and we need to detect this. Also, some Intel # versions had trouble with output in subdirs am__obj=sub/conftest.${OBJEXT-o} am__minus_obj="-o $am__obj" case $depmode in gcc) # This depmode causes a compiler race in universal mode. test "$am__universal" = false || continue ;; nosideeffect) # after this tag, mechanisms are not by side-effect, so they'll # only be used when explicitly requested if test "x$enable_dependency_tracking" = xyes; then continue else break fi ;; msvisualcpp | msvcmsys) # This compiler won't grok `-c -o', but also, the minuso test has # not run yet. These depmodes are late enough in the game, and # so weak that their functioning should not be impacted. am__obj=conftest.${OBJEXT-o} am__minus_obj= ;; none) break ;; esac if depmode=$depmode \ source=sub/conftest.c object=$am__obj \ depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \ $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \ >/dev/null 2>conftest.err && grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 && grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 && grep $am__obj sub/conftest.Po > /dev/null 2>&1 && ${MAKE-make} -s -f confmf > /dev/null 2>&1; then # icc doesn't choke on unknown options, it will just issue warnings # or remarks (even with -Werror). So we grep stderr for any message # that says an option was ignored or not supported. # When given -MP, icc 7.0 and 7.1 complain thusly: # icc: Command line warning: ignoring option '-M'; no argument required # The diagnosis changed in icc 8.0: # icc: Command line remark: option '-MP' not supported if (grep 'ignoring option' conftest.err || grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else am_cv_CXX_dependencies_compiler_type=$depmode break fi fi done cd .. 
rm -rf conftest.dir else am_cv_CXX_dependencies_compiler_type=none fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_CXX_dependencies_compiler_type" >&5 $as_echo "$am_cv_CXX_dependencies_compiler_type" >&6; } CXXDEPMODE=depmode=$am_cv_CXX_dependencies_compiler_type if test "x$enable_dependency_tracking" != xno \ && test "$am_cv_CXX_dependencies_compiler_type" = gcc3; then am__fastdepCXX_TRUE= am__fastdepCXX_FALSE='#' else am__fastdepCXX_TRUE='#' am__fastdepCXX_FALSE= fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args. set dummy ${ac_tool_prefix}gcc; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_CC+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_CC="${ac_tool_prefix}gcc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 $as_echo "$CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_CC"; then ac_ct_CC=$CC # Extract the first word of "gcc", so it can be a program name with args. set dummy gcc; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_ac_ct_CC+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_CC"; then ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_ac_ct_CC="gcc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_CC=$ac_cv_prog_ac_ct_CC if test -n "$ac_ct_CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 $as_echo "$ac_ct_CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_CC" = x; then CC="" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac CC=$ac_ct_CC fi else CC="$ac_cv_prog_CC" fi if test -z "$CC"; then if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args. set dummy ${ac_tool_prefix}cc; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... 
" >&6; } if test "${ac_cv_prog_CC+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_CC="${ac_tool_prefix}cc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 $as_echo "$CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi fi if test -z "$CC"; then # Extract the first word of "cc", so it can be a program name with args. set dummy cc; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_CC+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else ac_prog_rejected=no as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then ac_prog_rejected=yes continue fi ac_cv_prog_CC="cc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS if test $ac_prog_rejected = yes; then # We found a bogon in the path, so make sure we never use it. set dummy $ac_cv_prog_CC shift if test $# != 0; then # We chose a different compiler from the bogus one. # However, it has the same basename, so the bogon will be chosen # first if we set CC to just the basename; use the full file name. shift ac_cv_prog_CC="$as_dir/$ac_word${1+' '}$@" fi fi fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 $as_echo "$CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$CC"; then if test -n "$ac_tool_prefix"; then for ac_prog in cl.exe do # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. set dummy $ac_tool_prefix$ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_CC+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_CC="$ac_tool_prefix$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 $as_echo "$CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$CC" && break done fi if test -z "$CC"; then ac_ct_CC=$CC for ac_prog in cl.exe do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_ac_ct_CC+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_CC"; then ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_ac_ct_CC="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_CC=$ac_cv_prog_ac_ct_CC if test -n "$ac_ct_CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 $as_echo "$ac_ct_CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$ac_ct_CC" && break done if test "x$ac_ct_CC" = x; then CC="" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac CC=$ac_ct_CC fi fi fi test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "no acceptable C compiler found in \$PATH See \`config.log' for more details" "$LINENO" 5 ; } # Provide some information about the compiler. $as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5 set X $ac_compile ac_compiler=$2 for ac_option in --version -v -V -qversion; do { { ac_try="$ac_compiler $ac_option >&5" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_compiler $ac_option >&5") 2>conftest.err ac_status=$? if test -s conftest.err; then sed '10a\ ... rest of stderr output deleted ... 10q' conftest.err >conftest.er1 cat conftest.er1 >&5 fi rm -f conftest.er1 conftest.err $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } done { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C compiler" >&5 $as_echo_n "checking whether we are using the GNU C compiler... " >&6; } if test "${ac_cv_c_compiler_gnu+set}" = set; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ int main () { #ifndef __GNUC__ choke me #endif ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_compiler_gnu=yes else ac_compiler_gnu=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_cv_c_compiler_gnu=$ac_compiler_gnu fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5 $as_echo "$ac_cv_c_compiler_gnu" >&6; } if test $ac_compiler_gnu = yes; then GCC=yes else GCC= fi ac_test_CFLAGS=${CFLAGS+set} ac_save_CFLAGS=$CFLAGS { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5 $as_echo_n "checking whether $CC accepts -g... " >&6; } if test "${ac_cv_prog_cc_g+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_save_c_werror_flag=$ac_c_werror_flag ac_c_werror_flag=yes ac_cv_prog_cc_g=no CFLAGS="-g" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_prog_cc_g=yes else CFLAGS="" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : else ac_c_werror_flag=$ac_save_c_werror_flag CFLAGS="-g" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_prog_cc_g=yes fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_c_werror_flag=$ac_save_c_werror_flag fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5 $as_echo "$ac_cv_prog_cc_g" >&6; } if test "$ac_test_CFLAGS" = set; then CFLAGS=$ac_save_CFLAGS elif test $ac_cv_prog_cc_g = yes; then if test "$GCC" = yes; then CFLAGS="-g -O2" else CFLAGS="-g" fi else if test "$GCC" = yes; then CFLAGS="-O2" else CFLAGS= fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5 $as_echo_n "checking for $CC option to accept ISO C89... " >&6; } if test "${ac_cv_prog_cc_c89+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_cv_prog_cc_c89=no ac_save_CC=$CC cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include #include #include /* Most of the following tests are stolen from RCS 5.7's src/conf.sh. */ struct buf { int x; }; FILE * (*rcsopen) (struct buf *, struct stat *, int); static char *e (p, i) char **p; int i; { return p[i]; } static char *f (char * (*g) (char **, int), char **p, ...) { char *s; va_list v; va_start (v,p); s = g (p, va_arg (v,int)); va_end (v); return s; } /* OSF 4.0 Compaq cc is some sort of almost-ANSI by default. It has function prototypes and stuff, but not '\xHH' hex character constants. These don't provoke an error unfortunately, instead are silently treated as 'x'. The following induces an error, until -std is added to get proper ANSI mode. Curiously '\x00'!='x' always comes out true, for an array size at least. It's necessary to write '\x00'==0 to get something that's true only with -std. */ int osf4_cc_array ['\x00' == 0 ? 1 : -1]; /* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters inside strings and character constants. */ #define FOO(x) 'x' int xlc6_cc_array[FOO(a) == 'x' ? 
1 : -1]; int test (int i, double x); struct s1 {int (*f) (int a);}; struct s2 {int (*f) (double a);}; int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int); int argc; char **argv; int main () { return f (e, argv, 0) != argv[0] || f (e, argv, 1) != argv[1]; ; return 0; } _ACEOF for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std \ -Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__" do CC="$ac_save_CC $ac_arg" if ac_fn_c_try_compile "$LINENO"; then : ac_cv_prog_cc_c89=$ac_arg fi rm -f core conftest.err conftest.$ac_objext test "x$ac_cv_prog_cc_c89" != "xno" && break done rm -f conftest.$ac_ext CC=$ac_save_CC fi # AC_CACHE_VAL case "x$ac_cv_prog_cc_c89" in x) { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5 $as_echo "none needed" >&6; } ;; xno) { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5 $as_echo "unsupported" >&6; } ;; *) CC="$CC $ac_cv_prog_cc_c89" { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5 $as_echo "$ac_cv_prog_cc_c89" >&6; } ;; esac if test "x$ac_cv_prog_cc_c89" != xno; then : fi ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu depcc="$CC" am_compiler_list= { $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5 $as_echo_n "checking dependency style of $depcc... " >&6; } if test "${am_cv_CC_dependencies_compiler_type+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then # We make a subdir and do the tests there. Otherwise we can end up # making bogus files that we don't know about and never remove. For # instance it was reported that on HP-UX the gcc test will end up # making a dummy file named `D' -- because `-MD' means `put the output # in D'. mkdir conftest.dir # Copy depcomp to subdir because otherwise we won't find it if we're # using a relative directory. cp "$am_depcomp" conftest.dir cd conftest.dir # We will build objects and dependencies in a subdirectory because # it helps to detect inapplicable dependency modes. For instance # both Tru64's cc and ICC support -MD to output dependencies as a # side effect of compilation, but ICC will put the dependencies in # the current directory while Tru64 will put them in the object # directory. mkdir sub am_cv_CC_dependencies_compiler_type=none if test "$am_compiler_list" = ""; then am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp` fi am__universal=false case " $depcc " in #( *\ -arch\ *\ -arch\ *) am__universal=true ;; esac for depmode in $am_compiler_list; do # Setup a source with many dependencies, because some compilers # like to wrap large dependency lists on column 80 (with \), and # we should not choose a depcomp mode which is confused by this. # # We need to recreate these files for each test, as the compiler may # overwrite some of them when testing with obscure command lines. # This happens at least with the AIX C compiler. : > sub/conftest.c for i in 1 2 3 4 5 6; do echo '#include "conftst'$i'.h"' >> sub/conftest.c # Using `: > sub/conftst$i.h' creates only sub/conftst1.h with # Solaris 8's {/usr,}/bin/sh. touch sub/conftst$i.h done echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf # We check with `-c' and `-o' for the sake of the "dashmstdout" # mode. 
It turns out that the SunPro C++ compiler does not properly # handle `-M -o', and we need to detect this. Also, some Intel # versions had trouble with output in subdirs am__obj=sub/conftest.${OBJEXT-o} am__minus_obj="-o $am__obj" case $depmode in gcc) # This depmode causes a compiler race in universal mode. test "$am__universal" = false || continue ;; nosideeffect) # after this tag, mechanisms are not by side-effect, so they'll # only be used when explicitly requested if test "x$enable_dependency_tracking" = xyes; then continue else break fi ;; msvisualcpp | msvcmsys) # This compiler won't grok `-c -o', but also, the minuso test has # not run yet. These depmodes are late enough in the game, and # so weak that their functioning should not be impacted. am__obj=conftest.${OBJEXT-o} am__minus_obj= ;; none) break ;; esac if depmode=$depmode \ source=sub/conftest.c object=$am__obj \ depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \ $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \ >/dev/null 2>conftest.err && grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 && grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 && grep $am__obj sub/conftest.Po > /dev/null 2>&1 && ${MAKE-make} -s -f confmf > /dev/null 2>&1; then # icc doesn't choke on unknown options, it will just issue warnings # or remarks (even with -Werror). So we grep stderr for any message # that says an option was ignored or not supported. # When given -MP, icc 7.0 and 7.1 complain thusly: # icc: Command line warning: ignoring option '-M'; no argument required # The diagnosis changed in icc 8.0: # icc: Command line remark: option '-MP' not supported if (grep 'ignoring option' conftest.err || grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else am_cv_CC_dependencies_compiler_type=$depmode break fi fi done cd .. rm -rf conftest.dir else am_cv_CC_dependencies_compiler_type=none fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_CC_dependencies_compiler_type" >&5 $as_echo "$am_cv_CC_dependencies_compiler_type" >&6; } CCDEPMODE=depmode=$am_cv_CC_dependencies_compiler_type if test "x$enable_dependency_tracking" != xno \ && test "$am_cv_CC_dependencies_compiler_type" = gcc3; then am__fastdepCC_TRUE= am__fastdepCC_FALSE='#' else am__fastdepCC_TRUE='#' am__fastdepCC_FALSE= fi case `pwd` in *\ * | *\ *) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Libtool does not cope well with whitespace in \`pwd\`" >&5 $as_echo "$as_me: WARNING: Libtool does not cope well with whitespace in \`pwd\`" >&2;} ;; esac macro_version='2.2.6b' macro_revision='1.3017' ltmain="$ac_aux_dir/ltmain.sh" { $as_echo "$as_me:${as_lineno-$LINENO}: checking for a sed that does not truncate output" >&5 $as_echo_n "checking for a sed that does not truncate output... " >&6; } if test "${ac_cv_path_SED+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_script=s/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb/ for ac_i in 1 2 3 4 5 6 7; do ac_script="$ac_script$as_nl$ac_script" done echo "$ac_script" 2>/dev/null | sed 99q >conftest.sed { ac_script=; unset ac_script;} if test -z "$SED"; then ac_path_SED_found=false # Loop through the user's path and test for each of PROGNAME-LIST as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_prog in sed gsed; do for ac_exec_ext in '' $ac_executable_extensions; do ac_path_SED="$as_dir/$ac_prog$ac_exec_ext" { test -f "$ac_path_SED" && $as_test_x "$ac_path_SED"; } || continue # Check for GNU ac_path_SED and select it if it is found. # Check for GNU $ac_path_SED case `"$ac_path_SED" --version 2>&1` in *GNU*) ac_cv_path_SED="$ac_path_SED" ac_path_SED_found=:;; *) ac_count=0 $as_echo_n 0123456789 >"conftest.in" while : do cat "conftest.in" "conftest.in" >"conftest.tmp" mv "conftest.tmp" "conftest.in" cp "conftest.in" "conftest.nl" $as_echo '' >> "conftest.nl" "$ac_path_SED" -f conftest.sed < "conftest.nl" >"conftest.out" 2>/dev/null || break diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break as_fn_arith $ac_count + 1 && ac_count=$as_val if test $ac_count -gt ${ac_path_SED_max-0}; then # Best one so far, save it but keep looking for a better one ac_cv_path_SED="$ac_path_SED" ac_path_SED_max=$ac_count fi # 10*(2^10) chars as input seems more than enough test $ac_count -gt 10 && break done rm -f conftest.in conftest.tmp conftest.nl conftest.out;; esac $ac_path_SED_found && break 3 done done done IFS=$as_save_IFS if test -z "$ac_cv_path_SED"; then as_fn_error $? "no acceptable sed could be found in \$PATH" "$LINENO" 5 fi else ac_cv_path_SED=$SED fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_SED" >&5 $as_echo "$ac_cv_path_SED" >&6; } SED="$ac_cv_path_SED" rm -f conftest.sed test -z "$SED" && SED=sed Xsed="$SED -e 1s/^X//" { $as_echo "$as_me:${as_lineno-$LINENO}: checking for fgrep" >&5 $as_echo_n "checking for fgrep... " >&6; } if test "${ac_cv_path_FGREP+set}" = set; then : $as_echo_n "(cached) " >&6 else if echo 'ab*c' | $GREP -F 'ab*c' >/dev/null 2>&1 then ac_cv_path_FGREP="$GREP -F" else if test -z "$FGREP"; then ac_path_FGREP_found=false # Loop through the user's path and test for each of PROGNAME-LIST as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_prog in fgrep; do for ac_exec_ext in '' $ac_executable_extensions; do ac_path_FGREP="$as_dir/$ac_prog$ac_exec_ext" { test -f "$ac_path_FGREP" && $as_test_x "$ac_path_FGREP"; } || continue # Check for GNU ac_path_FGREP and select it if it is found. # Check for GNU $ac_path_FGREP case `"$ac_path_FGREP" --version 2>&1` in *GNU*) ac_cv_path_FGREP="$ac_path_FGREP" ac_path_FGREP_found=:;; *) ac_count=0 $as_echo_n 0123456789 >"conftest.in" while : do cat "conftest.in" "conftest.in" >"conftest.tmp" mv "conftest.tmp" "conftest.in" cp "conftest.in" "conftest.nl" $as_echo 'FGREP' >> "conftest.nl" "$ac_path_FGREP" FGREP < "conftest.nl" >"conftest.out" 2>/dev/null || break diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break as_fn_arith $ac_count + 1 && ac_count=$as_val if test $ac_count -gt ${ac_path_FGREP_max-0}; then # Best one so far, save it but keep looking for a better one ac_cv_path_FGREP="$ac_path_FGREP" ac_path_FGREP_max=$ac_count fi # 10*(2^10) chars as input seems more than enough test $ac_count -gt 10 && break done rm -f conftest.in conftest.tmp conftest.nl conftest.out;; esac $ac_path_FGREP_found && break 3 done done done IFS=$as_save_IFS if test -z "$ac_cv_path_FGREP"; then as_fn_error $? 
"no acceptable fgrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 fi else ac_cv_path_FGREP=$FGREP fi fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_FGREP" >&5 $as_echo "$ac_cv_path_FGREP" >&6; } FGREP="$ac_cv_path_FGREP" test -z "$GREP" && GREP=grep # Check whether --with-gnu-ld was given. if test "${with_gnu_ld+set}" = set; then : withval=$with_gnu_ld; test "$withval" = no || with_gnu_ld=yes else with_gnu_ld=no fi ac_prog=ld if test "$GCC" = yes; then # Check if gcc -print-prog-name=ld gives a path. { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ld used by $CC" >&5 $as_echo_n "checking for ld used by $CC... " >&6; } case $host in *-*-mingw*) # gcc leaves a trailing carriage return which upsets mingw ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;; *) ac_prog=`($CC -print-prog-name=ld) 2>&5` ;; esac case $ac_prog in # Accept absolute paths. [\\/]* | ?:[\\/]*) re_direlt='/[^/][^/]*/\.\./' # Canonicalize the pathname of ld ac_prog=`$ECHO "$ac_prog"| $SED 's%\\\\%/%g'` while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"` done test -z "$LD" && LD="$ac_prog" ;; "") # If it fails, then pretend we aren't using GCC. ac_prog=ld ;; *) # If it is relative, then search for the first ld in PATH. with_gnu_ld=unknown ;; esac elif test "$with_gnu_ld" = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU ld" >&5 $as_echo_n "checking for GNU ld... " >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for non-GNU ld" >&5 $as_echo_n "checking for non-GNU ld... " >&6; } fi if test "${lt_cv_path_LD+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -z "$LD"; then lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR for ac_dir in $PATH; do IFS="$lt_save_ifs" test -z "$ac_dir" && ac_dir=. if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then lt_cv_path_LD="$ac_dir/$ac_prog" # Check to see if the program is GNU ld. I'd rather use --version, # but apparently some variants of GNU ld only accept -v. # Break only if it was the GNU/non-GNU ld that we prefer. case `"$lt_cv_path_LD" -v 2>&1 &5 $as_echo "$LD" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -z "$LD" && as_fn_error $? "no acceptable ld found in \$PATH" "$LINENO" 5 { $as_echo "$as_me:${as_lineno-$LINENO}: checking if the linker ($LD) is GNU ld" >&5 $as_echo_n "checking if the linker ($LD) is GNU ld... " >&6; } if test "${lt_cv_prog_gnu_ld+set}" = set; then : $as_echo_n "(cached) " >&6 else # I'd rather use --version here, but apparently some GNU lds only accept -v. case `$LD -v 2>&1 &5 $as_echo "$lt_cv_prog_gnu_ld" >&6; } with_gnu_ld=$lt_cv_prog_gnu_ld { $as_echo "$as_me:${as_lineno-$LINENO}: checking for BSD- or MS-compatible name lister (nm)" >&5 $as_echo_n "checking for BSD- or MS-compatible name lister (nm)... " >&6; } if test "${lt_cv_path_NM+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$NM"; then # Let the user override the test. lt_cv_path_NM="$NM" else lt_nm_to_check="${ac_tool_prefix}nm" if test -n "$ac_tool_prefix" && test "$build" = "$host"; then lt_nm_to_check="$lt_nm_to_check nm" fi for lt_tmp_nm in $lt_nm_to_check; do lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR for ac_dir in $PATH /usr/ccs/bin/elf /usr/ccs/bin /usr/ucb /bin; do IFS="$lt_save_ifs" test -z "$ac_dir" && ac_dir=. tmp_nm="$ac_dir/$lt_tmp_nm" if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext" ; then # Check to see if the nm accepts a BSD-compat flag. 
# Adding the `sed 1q' prevents false positives on HP-UX, which says: # nm: unknown option "B" ignored # Tru64's nm complains that /dev/null is an invalid object file case `"$tmp_nm" -B /dev/null 2>&1 | sed '1q'` in */dev/null* | *'Invalid file or object type'*) lt_cv_path_NM="$tmp_nm -B" break ;; *) case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in */dev/null*) lt_cv_path_NM="$tmp_nm -p" break ;; *) lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but continue # so that we can try to find one that supports BSD flags ;; esac ;; esac fi done IFS="$lt_save_ifs" done : ${lt_cv_path_NM=no} fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_NM" >&5 $as_echo "$lt_cv_path_NM" >&6; } if test "$lt_cv_path_NM" != "no"; then NM="$lt_cv_path_NM" else # Didn't find any BSD compatible name lister, look for dumpbin. if test -n "$ac_tool_prefix"; then for ac_prog in "dumpbin -symbols" "link -dump -symbols" do # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. set dummy $ac_tool_prefix$ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_DUMPBIN+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$DUMPBIN"; then ac_cv_prog_DUMPBIN="$DUMPBIN" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_DUMPBIN="$ac_tool_prefix$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi DUMPBIN=$ac_cv_prog_DUMPBIN if test -n "$DUMPBIN"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DUMPBIN" >&5 $as_echo "$DUMPBIN" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$DUMPBIN" && break done fi if test -z "$DUMPBIN"; then ac_ct_DUMPBIN=$DUMPBIN for ac_prog in "dumpbin -symbols" "link -dump -symbols" do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_ac_ct_DUMPBIN+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_DUMPBIN"; then ac_cv_prog_ac_ct_DUMPBIN="$ac_ct_DUMPBIN" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_ac_ct_DUMPBIN="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_DUMPBIN=$ac_cv_prog_ac_ct_DUMPBIN if test -n "$ac_ct_DUMPBIN"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DUMPBIN" >&5 $as_echo "$ac_ct_DUMPBIN" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$ac_ct_DUMPBIN" && break done if test "x$ac_ct_DUMPBIN" = x; then DUMPBIN=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac DUMPBIN=$ac_ct_DUMPBIN fi fi if test "$DUMPBIN" != ":"; then NM="$DUMPBIN" fi fi test -z "$NM" && NM=nm { $as_echo "$as_me:${as_lineno-$LINENO}: checking the name lister ($NM) interface" >&5 $as_echo_n "checking the name lister ($NM) interface... " >&6; } if test "${lt_cv_nm_interface+set}" = set; then : $as_echo_n "(cached) " >&6 else lt_cv_nm_interface="BSD nm" echo "int some_variable = 0;" > conftest.$ac_ext (eval echo "\"\$as_me:9099: $ac_compile\"" >&5) (eval "$ac_compile" 2>conftest.err) cat conftest.err >&5 (eval echo "\"\$as_me:9102: $NM \\\"conftest.$ac_objext\\\"\"" >&5) (eval "$NM \"conftest.$ac_objext\"" 2>conftest.err > conftest.out) cat conftest.err >&5 (eval echo "\"\$as_me:9105: output\"" >&5) cat conftest.out >&5 if $GREP 'External.*some_variable' conftest.out > /dev/null; then lt_cv_nm_interface="MS dumpbin" fi rm -f conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_nm_interface" >&5 $as_echo "$lt_cv_nm_interface" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ln -s works" >&5 $as_echo_n "checking whether ln -s works... " >&6; } LN_S=$as_ln_s if test "$LN_S" = "ln -s"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no, using $LN_S" >&5 $as_echo "no, using $LN_S" >&6; } fi # find the maximum length of command line arguments { $as_echo "$as_me:${as_lineno-$LINENO}: checking the maximum length of command line arguments" >&5 $as_echo_n "checking the maximum length of command line arguments... " >&6; } if test "${lt_cv_sys_max_cmd_len+set}" = set; then : $as_echo_n "(cached) " >&6 else i=0 teststring="ABCD" case $build_os in msdosdjgpp*) # On DJGPP, this test can blow up pretty badly due to problems in libc # (any single argument exceeding 2000 bytes causes a buffer overrun # during glob expansion). Even if it were fixed, the result of this # check would be larger than it should be. lt_cv_sys_max_cmd_len=12288; # 12K is about right ;; gnu*) # Under GNU Hurd, this test is not required because there is # no limit to the length of command line arguments. # Libtool will interpret -1 as no limit whatsoever lt_cv_sys_max_cmd_len=-1; ;; cygwin* | mingw* | cegcc*) # On Win9x/ME, this test blows up -- it succeeds, but takes # about 5 minutes as the teststring grows exponentially. # Worse, since 9x/ME are not pre-emptively multitasking, # you end up with a "frozen" computer, even though with patience # the test eventually succeeds (with a max line length of 256k). 
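# ----------------------------------------------------------------------------
# Illustrative aside, not emitted by autoconf: the kernel limit that the whole
# case statement around here approximates can usually be read directly on a
# modern system.  Either command may be absent, which is exactly why configure
# falls back to the string-doubling loop further down.  The name doc_argmax is
# hypothetical and the value is not used anywhere else.
doc_argmax=`getconf ARG_MAX 2>/dev/null || sysctl -n kern.argmax 2>/dev/null`
# ----------------------------------------------------------------------------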
# Instead, let's just punt: use the minimum linelength reported by # all of the supported platforms: 8192 (on NT/2K/XP). lt_cv_sys_max_cmd_len=8192; ;; amigaos*) # On AmigaOS with pdksh, this test takes hours, literally. # So we just punt and use a minimum line length of 8192. lt_cv_sys_max_cmd_len=8192; ;; netbsd* | freebsd* | openbsd* | darwin* | dragonfly*) # This has been around since 386BSD, at least. Likely further. if test -x /sbin/sysctl; then lt_cv_sys_max_cmd_len=`/sbin/sysctl -n kern.argmax` elif test -x /usr/sbin/sysctl; then lt_cv_sys_max_cmd_len=`/usr/sbin/sysctl -n kern.argmax` else lt_cv_sys_max_cmd_len=65536 # usable default for all BSDs fi # And add a safety zone lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4` lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3` ;; interix*) # We know the value 262144 and hardcode it with a safety zone (like BSD) lt_cv_sys_max_cmd_len=196608 ;; osf*) # Dr. Hans Ekkehard Plesser reports seeing a kernel panic running configure # due to this test when exec_disable_arg_limit is 1 on Tru64. It is not # nice to cause kernel panics so lets avoid the loop below. # First set a reasonable default. lt_cv_sys_max_cmd_len=16384 # if test -x /sbin/sysconfig; then case `/sbin/sysconfig -q proc exec_disable_arg_limit` in *1*) lt_cv_sys_max_cmd_len=-1 ;; esac fi ;; sco3.2v5*) lt_cv_sys_max_cmd_len=102400 ;; sysv5* | sco5v6* | sysv4.2uw2*) kargmax=`grep ARG_MAX /etc/conf/cf.d/stune 2>/dev/null` if test -n "$kargmax"; then lt_cv_sys_max_cmd_len=`echo $kargmax | sed 's/.*[ ]//'` else lt_cv_sys_max_cmd_len=32768 fi ;; *) lt_cv_sys_max_cmd_len=`(getconf ARG_MAX) 2> /dev/null` if test -n "$lt_cv_sys_max_cmd_len"; then lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4` lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3` else # Make teststring a little bigger before we do anything with it. # a 1K string should be a reasonable start. for i in 1 2 3 4 5 6 7 8 ; do teststring=$teststring$teststring done SHELL=${SHELL-${CONFIG_SHELL-/bin/sh}} # If test is not a shell built-in, we'll probably end up computing a # maximum length that is only half of the actual maximum length, but # we can't tell. while { test "X"`$SHELL $0 --fallback-echo "X$teststring$teststring" 2>/dev/null` \ = "XX$teststring$teststring"; } >/dev/null 2>&1 && test $i != 17 # 1/2 MB should be enough do i=`expr $i + 1` teststring=$teststring$teststring done # Only check the string length outside the loop. lt_cv_sys_max_cmd_len=`expr "X$teststring" : ".*" 2>&1` teststring= # Add a significant safety factor because C++ compilers can tack on # massive amounts of additional arguments before passing them to the # linker. It appears as though 1/2 is a usable value. lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 2` fi ;; esac fi if test -n $lt_cv_sys_max_cmd_len ; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sys_max_cmd_len" >&5 $as_echo "$lt_cv_sys_max_cmd_len" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: none" >&5 $as_echo "none" >&6; } fi max_cmd_len=$lt_cv_sys_max_cmd_len : ${CP="cp -f"} : ${MV="mv -f"} : ${RM="rm -f"} { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the shell understands some XSI constructs" >&5 $as_echo_n "checking whether the shell understands some XSI constructs... 
" >&6; } # Try some XSI features xsi_shell=no ( _lt_dummy="a/b/c" test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \ = c,a/b,, \ && eval 'test $(( 1 + 1 )) -eq 2 \ && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \ && xsi_shell=yes { $as_echo "$as_me:${as_lineno-$LINENO}: result: $xsi_shell" >&5 $as_echo "$xsi_shell" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the shell understands \"+=\"" >&5 $as_echo_n "checking whether the shell understands \"+=\"... " >&6; } lt_shell_append=no ( foo=bar; set foo baz; eval "$1+=\$2" && test "$foo" = barbaz ) \ >/dev/null 2>&1 \ && lt_shell_append=yes { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_shell_append" >&5 $as_echo "$lt_shell_append" >&6; } if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then lt_unset=unset else lt_unset=false fi # test EBCDIC or ASCII case `echo X|tr X '\101'` in A) # ASCII based system # \n is not interpreted correctly by Solaris 8 /usr/ucb/tr lt_SP2NL='tr \040 \012' lt_NL2SP='tr \015\012 \040\040' ;; *) # EBCDIC based system lt_SP2NL='tr \100 \n' lt_NL2SP='tr \r\n \100\100' ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5 $as_echo_n "checking for $LD option to reload object files... " >&6; } if test "${lt_cv_ld_reload_flag+set}" = set; then : $as_echo_n "(cached) " >&6 else lt_cv_ld_reload_flag='-r' fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_reload_flag" >&5 $as_echo "$lt_cv_ld_reload_flag" >&6; } reload_flag=$lt_cv_ld_reload_flag case $reload_flag in "" | " "*) ;; *) reload_flag=" $reload_flag" ;; esac reload_cmds='$LD$reload_flag -o $output$reload_objs' case $host_os in darwin*) if test "$GCC" = yes; then reload_cmds='$LTCC $LTCFLAGS -nostdlib ${wl}-r -o $output$reload_objs' else reload_cmds='$LD$reload_flag -o $output$reload_objs' fi ;; esac if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}objdump", so it can be a program name with args. set dummy ${ac_tool_prefix}objdump; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_OBJDUMP+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$OBJDUMP"; then ac_cv_prog_OBJDUMP="$OBJDUMP" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_OBJDUMP="${ac_tool_prefix}objdump" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi OBJDUMP=$ac_cv_prog_OBJDUMP if test -n "$OBJDUMP"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OBJDUMP" >&5 $as_echo "$OBJDUMP" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_OBJDUMP"; then ac_ct_OBJDUMP=$OBJDUMP # Extract the first word of "objdump", so it can be a program name with args. set dummy objdump; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_ac_ct_OBJDUMP+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_OBJDUMP"; then ac_cv_prog_ac_ct_OBJDUMP="$ac_ct_OBJDUMP" # Let the user override the test. 
else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_ac_ct_OBJDUMP="objdump" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_OBJDUMP=$ac_cv_prog_ac_ct_OBJDUMP if test -n "$ac_ct_OBJDUMP"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OBJDUMP" >&5 $as_echo "$ac_ct_OBJDUMP" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_OBJDUMP" = x; then OBJDUMP="false" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac OBJDUMP=$ac_ct_OBJDUMP fi else OBJDUMP="$ac_cv_prog_OBJDUMP" fi test -z "$OBJDUMP" && OBJDUMP=objdump { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to recognize dependent libraries" >&5 $as_echo_n "checking how to recognize dependent libraries... " >&6; } if test "${lt_cv_deplibs_check_method+set}" = set; then : $as_echo_n "(cached) " >&6 else lt_cv_file_magic_cmd='$MAGIC_CMD' lt_cv_file_magic_test_file= lt_cv_deplibs_check_method='unknown' # Need to set the preceding variable on all platforms that support # interlibrary dependencies. # 'none' -- dependencies not supported. # `unknown' -- same as none, but documents that we really don't know. # 'pass_all' -- all dependencies passed with no checks. # 'test_compile' -- check by making test program. # 'file_magic [[regex]]' -- check by looking for files in library path # which responds to the $file_magic_cmd with a given extended regex. # If you have `file' or equivalent on your system and you're not sure # whether `pass_all' will *always* work, you probably want this one. case $host_os in aix[4-9]*) lt_cv_deplibs_check_method=pass_all ;; beos*) lt_cv_deplibs_check_method=pass_all ;; bsdi[45]*) lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (shared object|dynamic lib)' lt_cv_file_magic_cmd='/usr/bin/file -L' lt_cv_file_magic_test_file=/shlib/libc.so ;; cygwin*) # func_win32_libid is a shell function defined in ltmain.sh lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' lt_cv_file_magic_cmd='func_win32_libid' ;; mingw* | pw32*) # Base MSYS/MinGW do not provide the 'file' command needed by # func_win32_libid shell function, so use a weaker test based on 'objdump', # unless we find 'file', for example because we are cross-compiling. if ( file / ) >/dev/null 2>&1; then lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' lt_cv_file_magic_cmd='func_win32_libid' else lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?' lt_cv_file_magic_cmd='$OBJDUMP -f' fi ;; cegcc) # use the weaker test based on 'objdump'. See mingw*. lt_cv_deplibs_check_method='file_magic file format pe-arm-.*little(.*architecture: arm)?' lt_cv_file_magic_cmd='$OBJDUMP -f' ;; darwin* | rhapsody*) lt_cv_deplibs_check_method=pass_all ;; freebsd* | dragonfly*) if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then case $host_cpu in i*86 ) # Not sure whether the presence of OpenBSD here was a mistake. # Let's accept both of them until this is cleared up. 
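# ----------------------------------------------------------------------------
# Illustrative aside, not emitted by autoconf: on 'file_magic' platforms the
# regex stored in lt_cv_deplibs_check_method is later matched against the
# output of file(1); 'pass_all' hosts skip that matching entirely.  Running
# file(1) by hand on a shared library shows the exact string the pattern has
# to hit.  The path /usr/lib/libc.so and the names doc_lib/doc_magic are
# hypothetical examples only.
doc_lib=/usr/lib/libc.so
if test -f "$doc_lib" && test -x /usr/bin/file; then
  doc_magic=`/usr/bin/file -L "$doc_lib" 2>/dev/null | sed 's/^[^:]*: //'`
fi
# ----------------------------------------------------------------------------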
lt_cv_deplibs_check_method='file_magic (FreeBSD|OpenBSD|DragonFly)/i[3-9]86 (compact )?demand paged shared library' lt_cv_file_magic_cmd=/usr/bin/file lt_cv_file_magic_test_file=`echo /usr/lib/libc.so.*` ;; esac else lt_cv_deplibs_check_method=pass_all fi ;; gnu*) lt_cv_deplibs_check_method=pass_all ;; hpux10.20* | hpux11*) lt_cv_file_magic_cmd=/usr/bin/file case $host_cpu in ia64*) lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF-[0-9][0-9]) shared object file - IA64' lt_cv_file_magic_test_file=/usr/lib/hpux32/libc.so ;; hppa*64*) lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF-[0-9][0-9]) shared object file - PA-RISC [0-9].[0-9]' lt_cv_file_magic_test_file=/usr/lib/pa20_64/libc.sl ;; *) lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|PA-RISC[0-9].[0-9]) shared library' lt_cv_file_magic_test_file=/usr/lib/libc.sl ;; esac ;; interix[3-9]*) # PIC code is broken on Interix 3.x, that's why |\.a not |_pic\.a here lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so|\.a)$' ;; irix5* | irix6* | nonstopux*) case $LD in *-32|*"-32 ") libmagic=32-bit;; *-n32|*"-n32 ") libmagic=N32;; *-64|*"-64 ") libmagic=64-bit;; *) libmagic=never-match;; esac lt_cv_deplibs_check_method=pass_all ;; # This must be Linux ELF. linux* | k*bsd*-gnu | kopensolaris*-gnu) lt_cv_deplibs_check_method=pass_all ;; netbsd* | netbsdelf*-gnu) if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$' else lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so|_pic\.a)$' fi ;; newos6*) lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (executable|dynamic lib)' lt_cv_file_magic_cmd=/usr/bin/file lt_cv_file_magic_test_file=/usr/lib/libnls.so ;; *nto* | *qnx*) lt_cv_deplibs_check_method=pass_all ;; openbsd*) if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|\.so|_pic\.a)$' else lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$' fi ;; osf3* | osf4* | osf5*) lt_cv_deplibs_check_method=pass_all ;; rdos*) lt_cv_deplibs_check_method=pass_all ;; solaris*) lt_cv_deplibs_check_method=pass_all ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) lt_cv_deplibs_check_method=pass_all ;; sysv4 | sysv4.3*) case $host_vendor in motorola) lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (shared object|dynamic lib) M[0-9][0-9]* Version [0-9]' lt_cv_file_magic_test_file=`echo /usr/lib/libc.so*` ;; ncr) lt_cv_deplibs_check_method=pass_all ;; sequent) lt_cv_file_magic_cmd='/bin/file' lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [LM]SB (shared object|dynamic lib )' ;; sni) lt_cv_file_magic_cmd='/bin/file' lt_cv_deplibs_check_method="file_magic ELF [0-9][0-9]*-bit [LM]SB dynamic lib" lt_cv_file_magic_test_file=/lib/libc.so ;; siemens) lt_cv_deplibs_check_method=pass_all ;; pc) lt_cv_deplibs_check_method=pass_all ;; esac ;; tpf*) lt_cv_deplibs_check_method=pass_all ;; esac fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5 $as_echo "$lt_cv_deplibs_check_method" >&6; } file_magic_cmd=$lt_cv_file_magic_cmd deplibs_check_method=$lt_cv_deplibs_check_method test -z "$deplibs_check_method" && deplibs_check_method=unknown if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}ar", so it can be a program name with args. 
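# ----------------------------------------------------------------------------
# Illustrative aside, not emitted by autoconf: the AR/RANLIB probes that start
# here feed the old_archive_cmds defined a little further down ('$AR $AR_FLAGS
# $oldlib$oldobjs' followed by a $RANLIB pass).  A hypothetical condensed form
# of that two-step idiom is sketched below; doc_build_archive and doc_out are
# not part of the real script, and nothing is archived at configure time.
doc_build_archive () {
  # $1 = static archive to create, remaining arguments = object files
  doc_out=$1; shift
  ${AR-ar} ${AR_FLAGS-cru} "$doc_out" "$@" && ${RANLIB-ranlib} "$doc_out"
}
# ----------------------------------------------------------------------------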
set dummy ${ac_tool_prefix}ar; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_AR+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$AR"; then ac_cv_prog_AR="$AR" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_AR="${ac_tool_prefix}ar" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi AR=$ac_cv_prog_AR if test -n "$AR"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AR" >&5 $as_echo "$AR" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_AR"; then ac_ct_AR=$AR # Extract the first word of "ar", so it can be a program name with args. set dummy ar; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_ac_ct_AR+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_AR"; then ac_cv_prog_ac_ct_AR="$ac_ct_AR" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_ac_ct_AR="ar" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_AR=$ac_cv_prog_ac_ct_AR if test -n "$ac_ct_AR"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_AR" >&5 $as_echo "$ac_ct_AR" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_AR" = x; then AR="false" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac AR=$ac_ct_AR fi else AR="$ac_cv_prog_AR" fi test -z "$AR" && AR=ar test -z "$AR_FLAGS" && AR_FLAGS=cru if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args. set dummy ${ac_tool_prefix}strip; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_STRIP+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$STRIP"; then ac_cv_prog_STRIP="$STRIP" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_STRIP="${ac_tool_prefix}strip" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi STRIP=$ac_cv_prog_STRIP if test -n "$STRIP"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5 $as_echo "$STRIP" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_STRIP"; then ac_ct_STRIP=$STRIP # Extract the first word of "strip", so it can be a program name with args. set dummy strip; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_ac_ct_STRIP+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_STRIP"; then ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_ac_ct_STRIP="strip" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP if test -n "$ac_ct_STRIP"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5 $as_echo "$ac_ct_STRIP" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_STRIP" = x; then STRIP=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac STRIP=$ac_ct_STRIP fi else STRIP="$ac_cv_prog_STRIP" fi test -z "$STRIP" && STRIP=: if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}ranlib", so it can be a program name with args. set dummy ${ac_tool_prefix}ranlib; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_RANLIB+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$RANLIB"; then ac_cv_prog_RANLIB="$RANLIB" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_RANLIB="${ac_tool_prefix}ranlib" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi RANLIB=$ac_cv_prog_RANLIB if test -n "$RANLIB"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $RANLIB" >&5 $as_echo "$RANLIB" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_RANLIB"; then ac_ct_RANLIB=$RANLIB # Extract the first word of "ranlib", so it can be a program name with args. set dummy ranlib; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... 
" >&6; } if test "${ac_cv_prog_ac_ct_RANLIB+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_RANLIB"; then ac_cv_prog_ac_ct_RANLIB="$ac_ct_RANLIB" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_ac_ct_RANLIB="ranlib" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_RANLIB=$ac_cv_prog_ac_ct_RANLIB if test -n "$ac_ct_RANLIB"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_RANLIB" >&5 $as_echo "$ac_ct_RANLIB" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_RANLIB" = x; then RANLIB=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac RANLIB=$ac_ct_RANLIB fi else RANLIB="$ac_cv_prog_RANLIB" fi test -z "$RANLIB" && RANLIB=: # Determine commands to create old-style static archives. old_archive_cmds='$AR $AR_FLAGS $oldlib$oldobjs' old_postinstall_cmds='chmod 644 $oldlib' old_postuninstall_cmds= if test -n "$RANLIB"; then case $host_os in openbsd*) old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB -t \$oldlib" ;; *) old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB \$oldlib" ;; esac old_archive_cmds="$old_archive_cmds~\$RANLIB \$oldlib" fi # If no C compiler was specified, use CC. LTCC=${LTCC-"$CC"} # If no C compiler flags were specified, use CFLAGS. LTCFLAGS=${LTCFLAGS-"$CFLAGS"} # Allow CC to be a program name with arguments. compiler=$CC # Check for command to grab the raw symbol name followed by C symbol from nm. { $as_echo "$as_me:${as_lineno-$LINENO}: checking command to parse $NM output from $compiler object" >&5 $as_echo_n "checking command to parse $NM output from $compiler object... " >&6; } if test "${lt_cv_sys_global_symbol_pipe+set}" = set; then : $as_echo_n "(cached) " >&6 else # These are sane defaults that work on at least a few old systems. # [They come from Ultrix. What could be older than Ultrix?!! ;)] # Character class describing NM global symbol codes. symcode='[BCDEGRST]' # Regexp to match symbols that can be accessed directly from C. sympat='\([_A-Za-z][_A-Za-z0-9]*\)' # Define system-specific variables. case $host_os in aix*) symcode='[BCDT]' ;; cygwin* | mingw* | pw32* | cegcc*) symcode='[ABCDGISTW]' ;; hpux*) if test "$host_cpu" = ia64; then symcode='[ABCDEGRST]' fi ;; irix* | nonstopux*) symcode='[BCDEGRST]' ;; osf*) symcode='[BCDEGQRST]' ;; solaris*) symcode='[BDRT]' ;; sco3.2v5*) symcode='[DT]' ;; sysv4.2uw2*) symcode='[DT]' ;; sysv5* | sco5v6* | unixware* | OpenUNIX*) symcode='[ABDT]' ;; sysv4) symcode='[DFNSTU]' ;; esac # If we're using GNU nm, then use its standard symbol codes. case `$NM -V 2>&1` in *GNU* | *'with BFD'*) symcode='[ABCDGIRSTW]' ;; esac # Transform an extracted symbol line into a proper C declaration. # Some systems (esp. on ia64) link data and code symbols differently, # so use this general approach. 
lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'" # Transform an extracted symbol line into symbol name and symbol address lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"\2\", (void *) \&\2},/p'" lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([^ ]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([^ ]*\) \(lib[^ ]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([^ ]*\) \([^ ]*\)$/ {\"lib\2\", (void *) \&\2},/p'" # Handle CRLF in mingw tool chain opt_cr= case $build_os in mingw*) opt_cr=`$ECHO 'x\{0,1\}' | tr x '\015'` # option cr in regexp ;; esac # Try without a prefix underscore, then with it. for ac_symprfx in "" "_"; do # Transform symcode, sympat, and symprfx into a raw symbol and a C symbol. symxfrm="\\1 $ac_symprfx\\2 \\2" # Write the raw and C identifiers. if test "$lt_cv_nm_interface" = "MS dumpbin"; then # Fake it for dumpbin and say T for any non-static function # and D for any global variable. # Also find C++ and __fastcall symbols from MSVC++, # which start with @ or ?. lt_cv_sys_global_symbol_pipe="$AWK '"\ " {last_section=section; section=\$ 3};"\ " /Section length .*#relocs.*(pick any)/{hide[last_section]=1};"\ " \$ 0!~/External *\|/{next};"\ " / 0+ UNDEF /{next}; / UNDEF \([^|]\)*()/{next};"\ " {if(hide[section]) next};"\ " {f=0}; \$ 0~/\(\).*\|/{f=1}; {printf f ? \"T \" : \"D \"};"\ " {split(\$ 0, a, /\||\r/); split(a[2], s)};"\ " s[1]~/^[@?]/{print s[1], s[1]; next};"\ " s[1]~prfx {split(s[1],t,\"@\"); print t[1], substr(t[1],length(prfx))}"\ " ' prfx=^$ac_symprfx" else lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" fi # Check to see that the pipe works correctly. pipe_works=no rm -f conftest* cat > conftest.$ac_ext <<_LT_EOF #ifdef __cplusplus extern "C" { #endif char nm_test_var; void nm_test_func(void); void nm_test_func(void){} #ifdef __cplusplus } #endif int main(){nm_test_var='a';nm_test_func();return(0);} _LT_EOF if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then # Now try to grab the symbols. nlist=conftest.nm if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$NM conftest.$ac_objext \| $lt_cv_sys_global_symbol_pipe \> $nlist\""; } >&5 (eval $NM conftest.$ac_objext \| $lt_cv_sys_global_symbol_pipe \> $nlist) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && test -s "$nlist"; then # Try sorting and uniquifying the output. if sort "$nlist" | uniq > "$nlist"T; then mv -f "$nlist"T "$nlist" else rm -f "$nlist"T fi # Make sure that we snagged all the symbols we need. if $GREP ' nm_test_var$' "$nlist" >/dev/null; then if $GREP ' nm_test_func$' "$nlist" >/dev/null; then cat <<_LT_EOF > conftest.$ac_ext #ifdef __cplusplus extern "C" { #endif _LT_EOF # Now generate the symbol file. eval "$lt_cv_sys_global_symbol_to_cdecl"' < "$nlist" | $GREP -v main >> conftest.$ac_ext' cat <<_LT_EOF >> conftest.$ac_ext /* The mapping between symbol names and symbols. 
*/ const struct { const char *name; void *address; } lt__PROGRAM__LTX_preloaded_symbols[] = { { "@PROGRAM@", (void *) 0 }, _LT_EOF $SED "s/^$symcode$symcode* \(.*\) \(.*\)$/ {\"\2\", (void *) \&\2},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext cat <<\_LT_EOF >> conftest.$ac_ext {0, (void *) 0} }; /* This works around a problem in FreeBSD linker */ #ifdef FREEBSD_WORKAROUND static const void *lt_preloaded_setup() { return lt__PROGRAM__LTX_preloaded_symbols; } #endif #ifdef __cplusplus } #endif _LT_EOF # Now try linking the two files. mv conftest.$ac_objext conftstm.$ac_objext lt_save_LIBS="$LIBS" lt_save_CFLAGS="$CFLAGS" LIBS="conftstm.$ac_objext" CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag" if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 (eval $ac_link) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && test -s conftest${ac_exeext}; then pipe_works=yes fi LIBS="$lt_save_LIBS" CFLAGS="$lt_save_CFLAGS" else echo "cannot find nm_test_func in $nlist" >&5 fi else echo "cannot find nm_test_var in $nlist" >&5 fi else echo "cannot run $lt_cv_sys_global_symbol_pipe" >&5 fi else echo "$progname: failed program was:" >&5 cat conftest.$ac_ext >&5 fi rm -rf conftest* conftst* # Do not use the global_symbol_pipe unless it works. if test "$pipe_works" = yes; then break else lt_cv_sys_global_symbol_pipe= fi done fi if test -z "$lt_cv_sys_global_symbol_pipe"; then lt_cv_sys_global_symbol_to_cdecl= fi if test -z "$lt_cv_sys_global_symbol_pipe$lt_cv_sys_global_symbol_to_cdecl"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: failed" >&5 $as_echo "failed" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: ok" >&5 $as_echo "ok" >&6; } fi # Check whether --enable-libtool-lock was given. if test "${enable_libtool_lock+set}" = set; then : enableval=$enable_libtool_lock; fi test "x$enable_libtool_lock" != xno && enable_libtool_lock=yes # Some flags need to be propagated to the compiler or linker for good # libtool support. case $host in ia64-*-hpux*) # Find out which ABI we are using. echo 'int i;' > conftest.$ac_ext if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then case `/usr/bin/file conftest.$ac_objext` in *ELF-32*) HPUX_IA64_MODE="32" ;; *ELF-64*) HPUX_IA64_MODE="64" ;; esac fi rm -rf conftest* ;; *-*-irix6*) # Find out which ABI we are using. echo '#line 10311 "configure"' > conftest.$ac_ext if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then if test "$lt_cv_prog_gnu_ld" = yes; then case `/usr/bin/file conftest.$ac_objext` in *32-bit*) LD="${LD-ld} -melf32bsmip" ;; *N32*) LD="${LD-ld} -melf32bmipn32" ;; *64-bit*) LD="${LD-ld} -melf64bmip" ;; esac else case `/usr/bin/file conftest.$ac_objext` in *32-bit*) LD="${LD-ld} -32" ;; *N32*) LD="${LD-ld} -n32" ;; *64-bit*) LD="${LD-ld} -64" ;; esac fi fi rm -rf conftest* ;; x86_64-*kfreebsd*-gnu|x86_64-*linux*|ppc*-*linux*|powerpc*-*linux*| \ s390*-*linux*|s390*-*tpf*|sparc*-*linux*) # Find out which ABI we are using. echo 'int i;' > conftest.$ac_ext if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 test $ac_status = 0; }; then case `/usr/bin/file conftest.o` in *32-bit*) case $host in x86_64-*kfreebsd*-gnu) LD="${LD-ld} -m elf_i386_fbsd" ;; x86_64-*linux*) LD="${LD-ld} -m elf_i386" ;; ppc64-*linux*|powerpc64-*linux*) LD="${LD-ld} -m elf32ppclinux" ;; s390x-*linux*) LD="${LD-ld} -m elf_s390" ;; sparc64-*linux*) LD="${LD-ld} -m elf32_sparc" ;; esac ;; *64-bit*) case $host in x86_64-*kfreebsd*-gnu) LD="${LD-ld} -m elf_x86_64_fbsd" ;; x86_64-*linux*) LD="${LD-ld} -m elf_x86_64" ;; ppc*-*linux*|powerpc*-*linux*) LD="${LD-ld} -m elf64ppc" ;; s390*-*linux*|s390*-*tpf*) LD="${LD-ld} -m elf64_s390" ;; sparc*-*linux*) LD="${LD-ld} -m elf64_sparc" ;; esac ;; esac fi rm -rf conftest* ;; *-*-sco3.2v5*) # On SCO OpenServer 5, we need -belf to get full-featured binaries. SAVE_CFLAGS="$CFLAGS" CFLAGS="$CFLAGS -belf" { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler needs -belf" >&5 $as_echo_n "checking whether the C compiler needs -belf... " >&6; } if test "${lt_cv_cc_needs_belf+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : lt_cv_cc_needs_belf=yes else lt_cv_cc_needs_belf=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_cc_needs_belf" >&5 $as_echo "$lt_cv_cc_needs_belf" >&6; } if test x"$lt_cv_cc_needs_belf" != x"yes"; then # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf CFLAGS="$SAVE_CFLAGS" fi ;; sparc*-*solaris*) # Find out which ABI we are using. echo 'int i;' > conftest.$ac_ext if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then case `/usr/bin/file conftest.o` in *64-bit*) case $lt_cv_prog_gnu_ld in yes*) LD="${LD-ld} -m elf64_sparc" ;; *) if ${LD-ld} -64 -r -o conftest2.o conftest.o >/dev/null 2>&1; then LD="${LD-ld} -64" fi ;; esac ;; esac fi rm -rf conftest* ;; esac need_locks="$enable_libtool_lock" case $host_os in rhapsody* | darwin*) if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}dsymutil", so it can be a program name with args. set dummy ${ac_tool_prefix}dsymutil; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_DSYMUTIL+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$DSYMUTIL"; then ac_cv_prog_DSYMUTIL="$DSYMUTIL" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_DSYMUTIL="${ac_tool_prefix}dsymutil" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi DSYMUTIL=$ac_cv_prog_DSYMUTIL if test -n "$DSYMUTIL"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DSYMUTIL" >&5 $as_echo "$DSYMUTIL" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_DSYMUTIL"; then ac_ct_DSYMUTIL=$DSYMUTIL # Extract the first word of "dsymutil", so it can be a program name with args. set dummy dsymutil; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_ac_ct_DSYMUTIL+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_DSYMUTIL"; then ac_cv_prog_ac_ct_DSYMUTIL="$ac_ct_DSYMUTIL" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_ac_ct_DSYMUTIL="dsymutil" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_DSYMUTIL=$ac_cv_prog_ac_ct_DSYMUTIL if test -n "$ac_ct_DSYMUTIL"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DSYMUTIL" >&5 $as_echo "$ac_ct_DSYMUTIL" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_DSYMUTIL" = x; then DSYMUTIL=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac DSYMUTIL=$ac_ct_DSYMUTIL fi else DSYMUTIL="$ac_cv_prog_DSYMUTIL" fi if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}nmedit", so it can be a program name with args. set dummy ${ac_tool_prefix}nmedit; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_NMEDIT+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$NMEDIT"; then ac_cv_prog_NMEDIT="$NMEDIT" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_NMEDIT="${ac_tool_prefix}nmedit" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi NMEDIT=$ac_cv_prog_NMEDIT if test -n "$NMEDIT"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $NMEDIT" >&5 $as_echo "$NMEDIT" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_NMEDIT"; then ac_ct_NMEDIT=$NMEDIT # Extract the first word of "nmedit", so it can be a program name with args. set dummy nmedit; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... 
" >&6; } if test "${ac_cv_prog_ac_ct_NMEDIT+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_NMEDIT"; then ac_cv_prog_ac_ct_NMEDIT="$ac_ct_NMEDIT" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_ac_ct_NMEDIT="nmedit" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_NMEDIT=$ac_cv_prog_ac_ct_NMEDIT if test -n "$ac_ct_NMEDIT"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_NMEDIT" >&5 $as_echo "$ac_ct_NMEDIT" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_NMEDIT" = x; then NMEDIT=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac NMEDIT=$ac_ct_NMEDIT fi else NMEDIT="$ac_cv_prog_NMEDIT" fi if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}lipo", so it can be a program name with args. set dummy ${ac_tool_prefix}lipo; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_LIPO+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$LIPO"; then ac_cv_prog_LIPO="$LIPO" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_LIPO="${ac_tool_prefix}lipo" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi LIPO=$ac_cv_prog_LIPO if test -n "$LIPO"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LIPO" >&5 $as_echo "$LIPO" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_LIPO"; then ac_ct_LIPO=$LIPO # Extract the first word of "lipo", so it can be a program name with args. set dummy lipo; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_ac_ct_LIPO+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_LIPO"; then ac_cv_prog_ac_ct_LIPO="$ac_ct_LIPO" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_ac_ct_LIPO="lipo" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_LIPO=$ac_cv_prog_ac_ct_LIPO if test -n "$ac_ct_LIPO"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_LIPO" >&5 $as_echo "$ac_ct_LIPO" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_LIPO" = x; then LIPO=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac LIPO=$ac_ct_LIPO fi else LIPO="$ac_cv_prog_LIPO" fi if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}otool", so it can be a program name with args. set dummy ${ac_tool_prefix}otool; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_OTOOL+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$OTOOL"; then ac_cv_prog_OTOOL="$OTOOL" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_OTOOL="${ac_tool_prefix}otool" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi OTOOL=$ac_cv_prog_OTOOL if test -n "$OTOOL"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OTOOL" >&5 $as_echo "$OTOOL" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_OTOOL"; then ac_ct_OTOOL=$OTOOL # Extract the first word of "otool", so it can be a program name with args. set dummy otool; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_ac_ct_OTOOL+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_OTOOL"; then ac_cv_prog_ac_ct_OTOOL="$ac_ct_OTOOL" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_ac_ct_OTOOL="otool" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_OTOOL=$ac_cv_prog_ac_ct_OTOOL if test -n "$ac_ct_OTOOL"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OTOOL" >&5 $as_echo "$ac_ct_OTOOL" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_OTOOL" = x; then OTOOL=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac OTOOL=$ac_ct_OTOOL fi else OTOOL="$ac_cv_prog_OTOOL" fi if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}otool64", so it can be a program name with args. set dummy ${ac_tool_prefix}otool64; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_OTOOL64+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$OTOOL64"; then ac_cv_prog_OTOOL64="$OTOOL64" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_OTOOL64="${ac_tool_prefix}otool64" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi OTOOL64=$ac_cv_prog_OTOOL64 if test -n "$OTOOL64"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OTOOL64" >&5 $as_echo "$OTOOL64" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_OTOOL64"; then ac_ct_OTOOL64=$OTOOL64 # Extract the first word of "otool64", so it can be a program name with args. set dummy otool64; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_ac_ct_OTOOL64+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_OTOOL64"; then ac_cv_prog_ac_ct_OTOOL64="$ac_ct_OTOOL64" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_ac_ct_OTOOL64="otool64" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_OTOOL64=$ac_cv_prog_ac_ct_OTOOL64 if test -n "$ac_ct_OTOOL64"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OTOOL64" >&5 $as_echo "$ac_ct_OTOOL64" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_OTOOL64" = x; then OTOOL64=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac OTOOL64=$ac_ct_OTOOL64 fi else OTOOL64="$ac_cv_prog_OTOOL64" fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -single_module linker flag" >&5 $as_echo_n "checking for -single_module linker flag... " >&6; } if test "${lt_cv_apple_cc_single_mod+set}" = set; then : $as_echo_n "(cached) " >&6 else lt_cv_apple_cc_single_mod=no if test -z "${LT_MULTI_MODULE}"; then # By default we will add the -single_module flag. You can override # by either setting the environment variable LT_MULTI_MODULE # non-empty at configure time, or by adding -multi_module to the # link flags. rm -rf libconftest.dylib* echo "int foo(void){return 1;}" > conftest.c echo "$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \ -dynamiclib -Wl,-single_module conftest.c" >&5 $LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \ -dynamiclib -Wl,-single_module conftest.c 2>conftest.err _lt_result=$? if test -f libconftest.dylib && test ! -s conftest.err && test $_lt_result = 0; then lt_cv_apple_cc_single_mod=yes else cat conftest.err >&5 fi rm -rf libconftest.dylib* rm -f conftest.* fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_apple_cc_single_mod" >&5 $as_echo "$lt_cv_apple_cc_single_mod" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -exported_symbols_list linker flag" >&5 $as_echo_n "checking for -exported_symbols_list linker flag... " >&6; } if test "${lt_cv_ld_exported_symbols_list+set}" = set; then : $as_echo_n "(cached) " >&6 else lt_cv_ld_exported_symbols_list=no save_LDFLAGS=$LDFLAGS echo "_main" > conftest.sym LDFLAGS="$LDFLAGS -Wl,-exported_symbols_list,conftest.sym" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : lt_cv_ld_exported_symbols_list=yes else lt_cv_ld_exported_symbols_list=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LDFLAGS="$save_LDFLAGS" fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_exported_symbols_list" >&5 $as_echo "$lt_cv_ld_exported_symbols_list" >&6; } case $host_os in rhapsody* | darwin1.[012]) _lt_dar_allow_undefined='${wl}-undefined ${wl}suppress' ;; darwin1.*) _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;; darwin*) # darwin 5.x on # if running on 10.5 or later, the deployment target defaults # to the OS version, if on x86, and 10.4, the deployment # target defaults to 10.4. Don't you love it? 
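# ----------------------------------------------------------------------------
# Illustrative aside, not emitted by autoconf: the case statement right below
# keys on MACOSX_DEPLOYMENT_TARGET (defaulting to 10.0) and on the host triple
# to choose between '-flat_namespace -undefined suppress' and '-undefined
# dynamic_lookup'.  On a Darwin build host the inputs can be inspected by
# hand; sw_vers is assumed to exist there, and doc_target/doc_osver are
# hypothetical names.
doc_target=${MACOSX_DEPLOYMENT_TARGET-10.0}
test "`uname -s 2>/dev/null`" = Darwin && doc_osver=`sw_vers -productVersion 2>/dev/null`
# ----------------------------------------------------------------------------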
case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in 10.0,*86*-darwin8*|10.0,*-darwin[91]*) _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;; 10.[012]*) _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;; 10.*) _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;; esac ;; esac if test "$lt_cv_apple_cc_single_mod" = "yes"; then _lt_dar_single_mod='$single_module' fi if test "$lt_cv_ld_exported_symbols_list" = "yes"; then _lt_dar_export_syms=' ${wl}-exported_symbols_list,$output_objdir/${libname}-symbols.expsym' else _lt_dar_export_syms='~$NMEDIT -s $output_objdir/${libname}-symbols.expsym ${lib}' fi if test "$DSYMUTIL" != ":"; then _lt_dsymutil='~$DSYMUTIL $lib || :' else _lt_dsymutil= fi ;; esac ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to run the C preprocessor" >&5 $as_echo_n "checking how to run the C preprocessor... " >&6; } # On Suns, sometimes $CPP names a directory. if test -n "$CPP" && test -d "$CPP"; then CPP= fi if test -z "$CPP"; then if test "${ac_cv_prog_CPP+set}" = set; then : $as_echo_n "(cached) " >&6 else # Double quotes because CPP needs to be expanded for CPP in "$CC -E" "$CC -E -traditional-cpp" "/lib/cpp" do ac_preproc_ok=false for ac_c_preproc_warn_flag in '' yes do # Use a header file that comes with gcc, so configuring glibc # with a fresh cross-compiler works. # Prefer to if __STDC__ is defined, since # exists even on freestanding compilers. # On the NeXT, cc -E runs the code through the compiler's parser, # not just through cpp. "Syntax error" is here to catch this case. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #ifdef __STDC__ # include #else # include #endif Syntax error _ACEOF if ac_fn_c_try_cpp "$LINENO"; then : else # Broken: fails on valid input. continue fi rm -f conftest.err conftest.i conftest.$ac_ext # OK, works on sane cases. Now check whether nonexistent headers # can be detected and how. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include _ACEOF if ac_fn_c_try_cpp "$LINENO"; then : # Broken: success on invalid input. continue else # Passes both tests. ac_preproc_ok=: break fi rm -f conftest.err conftest.i conftest.$ac_ext done # Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. rm -f conftest.i conftest.err conftest.$ac_ext if $ac_preproc_ok; then : break fi done ac_cv_prog_CPP=$CPP fi CPP=$ac_cv_prog_CPP else ac_cv_prog_CPP=$CPP fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CPP" >&5 $as_echo "$CPP" >&6; } ac_preproc_ok=false for ac_c_preproc_warn_flag in '' yes do # Use a header file that comes with gcc, so configuring glibc # with a fresh cross-compiler works. # Prefer to if __STDC__ is defined, since # exists even on freestanding compilers. # On the NeXT, cc -E runs the code through the compiler's parser, # not just through cpp. "Syntax error" is here to catch this case. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #ifdef __STDC__ # include #else # include #endif Syntax error _ACEOF if ac_fn_c_try_cpp "$LINENO"; then : else # Broken: fails on valid input. continue fi rm -f conftest.err conftest.i conftest.$ac_ext # OK, works on sane cases. Now check whether nonexistent headers # can be detected and how. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/
#include <ac_nonexistent.h>
_ACEOF
if ac_fn_c_try_cpp "$LINENO"; then :
  # Broken: success on invalid input.
continue
else
  # Passes both tests.
ac_preproc_ok=:
break
fi
rm -f conftest.err conftest.i conftest.$ac_ext

done
# Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped.
rm -f conftest.i conftest.err conftest.$ac_ext
if $ac_preproc_ok; then :

else
  { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5
$as_echo "$as_me: error: in \`$ac_pwd':" >&2;}
as_fn_error $? "C preprocessor \"$CPP\" fails sanity check
See \`config.log' for more details" "$LINENO" 5 ; }
fi

ac_ext=c
ac_cpp='$CPP $CPPFLAGS'
ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5'
ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5'
ac_compiler_gnu=$ac_cv_c_compiler_gnu

{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for ANSI C header files" >&5
$as_echo_n "checking for ANSI C header files... " >&6; }
if test "${ac_cv_header_stdc+set}" = set; then :
  $as_echo_n "(cached) " >&6
else
  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h.  */
#include <stdlib.h>
#include <stdarg.h>
#include <string.h>
#include <float.h>

int
main ()
{

  ;
  return 0;
}
_ACEOF
if ac_fn_c_try_compile "$LINENO"; then :
  ac_cv_header_stdc=yes
else
  ac_cv_header_stdc=no
fi
rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext

if test $ac_cv_header_stdc = yes; then
  # SunOS 4.x string.h does not declare mem*, contrary to ANSI.
  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h.  */
#include <string.h>

_ACEOF
if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
  $EGREP "memchr" >/dev/null 2>&1; then :

else
  ac_cv_header_stdc=no
fi
rm -f conftest*

fi

if test $ac_cv_header_stdc = yes; then
  # ISC 2.0.2 stdlib.h does not declare free, contrary to ANSI.
  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h.  */
#include <stdlib.h>

_ACEOF
if (eval "$ac_cpp conftest.$ac_ext") 2>&5 |
  $EGREP "free" >/dev/null 2>&1; then :

else
  ac_cv_header_stdc=no
fi
rm -f conftest*

fi

if test $ac_cv_header_stdc = yes; then
  # /bin/cc in Irix-4.0.5 gets non-ANSI ctype macros unless using -ansi.
  if test "$cross_compiling" = yes; then :
  :
else
  cat confdefs.h - <<_ACEOF >conftest.$ac_ext
/* end confdefs.h.  */
#include <ctype.h>
#include <stdlib.h>
#if ((' ' & 0x0FF) == 0x020)
# define ISLOWER(c) ('a' <= (c) && (c) <= 'z')
# define TOUPPER(c) (ISLOWER(c) ? 'A' + ((c) - 'a') : (c))
#else
# define ISLOWER(c) \
		   (('a' <= (c) && (c) <= 'i') \
		     || ('j' <= (c) && (c) <= 'r') \
		     || ('s' <= (c) && (c) <= 'z'))
# define TOUPPER(c) (ISLOWER(c) ? ((c) | 0x40) : (c))
#endif

#define XOR(e, f) (((e) && !(f)) || (!(e) && (f)))
int
main ()
{
  int i;
  for (i = 0; i < 256; i++)
    if (XOR (islower (i), ISLOWER (i))
	|| toupper (i) != TOUPPER (i))
      return 2;
  return 0;
}
_ACEOF
if ac_fn_c_try_run "$LINENO"; then :

else
  ac_cv_header_stdc=no
fi
rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \
  conftest.$ac_objext conftest.beam conftest.$ac_ext
fi

fi
fi
{ $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_stdc" >&5
$as_echo "$ac_cv_header_stdc" >&6; }
if test $ac_cv_header_stdc = yes; then

$as_echo "#define STDC_HEADERS 1" >>confdefs.h

fi

# On IRIX 5.3, sys/types and inttypes.h are conflicting.
for ac_header in sys/types.h sys/stat.h stdlib.h string.h memory.h strings.h \ inttypes.h stdint.h unistd.h do : as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` ac_fn_c_check_header_compile "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default " if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 _ACEOF fi done for ac_header in dlfcn.h do : ac_fn_c_check_header_compile "$LINENO" "dlfcn.h" "ac_cv_header_dlfcn_h" "$ac_includes_default " if test "x$ac_cv_header_dlfcn_h" = x""yes; then : cat >>confdefs.h <<_ACEOF #define HAVE_DLFCN_H 1 _ACEOF fi done ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu if test -z "$CXX"; then if test -n "$CCC"; then CXX=$CCC else if test -n "$ac_tool_prefix"; then for ac_prog in g++ c++ gpp aCC CC cxx cc++ cl.exe FCC KCC RCC xlC_r xlC do # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. set dummy $ac_tool_prefix$ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_CXX+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$CXX"; then ac_cv_prog_CXX="$CXX" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_CXX="$ac_tool_prefix$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi CXX=$ac_cv_prog_CXX if test -n "$CXX"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CXX" >&5 $as_echo "$CXX" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$CXX" && break done fi if test -z "$CXX"; then ac_ct_CXX=$CXX for ac_prog in g++ c++ gpp aCC CC cxx cc++ cl.exe FCC KCC RCC xlC_r xlC do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_ac_ct_CXX+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_CXX"; then ac_cv_prog_ac_ct_CXX="$ac_ct_CXX" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_ac_ct_CXX="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_CXX=$ac_cv_prog_ac_ct_CXX if test -n "$ac_ct_CXX"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CXX" >&5 $as_echo "$ac_ct_CXX" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$ac_ct_CXX" && break done if test "x$ac_ct_CXX" = x; then CXX="g++" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac CXX=$ac_ct_CXX fi fi fi fi # Provide some information about the compiler. $as_echo "$as_me:${as_lineno-$LINENO}: checking for C++ compiler version" >&5 set X $ac_compile ac_compiler=$2 for ac_option in --version -v -V -qversion; do { { ac_try="$ac_compiler $ac_option >&5" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_compiler $ac_option >&5") 2>conftest.err ac_status=$? if test -s conftest.err; then sed '10a\ ... rest of stderr output deleted ... 10q' conftest.err >conftest.er1 cat conftest.er1 >&5 fi rm -f conftest.er1 conftest.err $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } done { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C++ compiler" >&5 $as_echo_n "checking whether we are using the GNU C++ compiler... " >&6; } if test "${ac_cv_cxx_compiler_gnu+set}" = set; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { #ifndef __GNUC__ choke me #endif ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_compiler_gnu=yes else ac_compiler_gnu=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_cv_cxx_compiler_gnu=$ac_compiler_gnu fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_cxx_compiler_gnu" >&5 $as_echo "$ac_cv_cxx_compiler_gnu" >&6; } if test $ac_compiler_gnu = yes; then GXX=yes else GXX= fi ac_test_CXXFLAGS=${CXXFLAGS+set} ac_save_CXXFLAGS=$CXXFLAGS { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CXX accepts -g" >&5 $as_echo_n "checking whether $CXX accepts -g... " >&6; } if test "${ac_cv_prog_cxx_g+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_save_cxx_werror_flag=$ac_cxx_werror_flag ac_cxx_werror_flag=yes ac_cv_prog_cxx_g=no CXXFLAGS="-g" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_prog_cxx_g=yes else CXXFLAGS="" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : else ac_cxx_werror_flag=$ac_save_cxx_werror_flag CXXFLAGS="-g" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_prog_cxx_g=yes fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_cxx_werror_flag=$ac_save_cxx_werror_flag fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cxx_g" >&5 $as_echo "$ac_cv_prog_cxx_g" >&6; } if test "$ac_test_CXXFLAGS" = set; then CXXFLAGS=$ac_save_CXXFLAGS elif test $ac_cv_prog_cxx_g = yes; then if test "$GXX" = yes; then CXXFLAGS="-g -O2" else CXXFLAGS="-g" fi else if test "$GXX" = yes; then CXXFLAGS="-O2" else CXXFLAGS= fi fi ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu depcc="$CXX" am_compiler_list= { $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5 $as_echo_n "checking dependency style of $depcc... " >&6; } if test "${am_cv_CXX_dependencies_compiler_type+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then # We make a subdir and do the tests there. Otherwise we can end up # making bogus files that we don't know about and never remove. For # instance it was reported that on HP-UX the gcc test will end up # making a dummy file named `D' -- because `-MD' means `put the output # in D'. mkdir conftest.dir # Copy depcomp to subdir because otherwise we won't find it if we're # using a relative directory. cp "$am_depcomp" conftest.dir cd conftest.dir # We will build objects and dependencies in a subdirectory because # it helps to detect inapplicable dependency modes. For instance # both Tru64's cc and ICC support -MD to output dependencies as a # side effect of compilation, but ICC will put the dependencies in # the current directory while Tru64 will put them in the object # directory. mkdir sub am_cv_CXX_dependencies_compiler_type=none if test "$am_compiler_list" = ""; then am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp` fi am__universal=false case " $depcc " in #( *\ -arch\ *\ -arch\ *) am__universal=true ;; esac for depmode in $am_compiler_list; do # Setup a source with many dependencies, because some compilers # like to wrap large dependency lists on column 80 (with \), and # we should not choose a depcomp mode which is confused by this. # # We need to recreate these files for each test, as the compiler may # overwrite some of them when testing with obscure command lines. # This happens at least with the AIX C compiler. : > sub/conftest.c for i in 1 2 3 4 5 6; do echo '#include "conftst'$i'.h"' >> sub/conftest.c # Using `: > sub/conftst$i.h' creates only sub/conftst1.h with # Solaris 8's {/usr,}/bin/sh. touch sub/conftst$i.h done echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf # We check with `-c' and `-o' for the sake of the "dashmstdout" # mode. It turns out that the SunPro C++ compiler does not properly # handle `-M -o', and we need to detect this. Also, some Intel # versions had trouble with output in subdirs am__obj=sub/conftest.${OBJEXT-o} am__minus_obj="-o $am__obj" case $depmode in gcc) # This depmode causes a compiler race in universal mode. 
test "$am__universal" = false || continue ;; nosideeffect) # after this tag, mechanisms are not by side-effect, so they'll # only be used when explicitly requested if test "x$enable_dependency_tracking" = xyes; then continue else break fi ;; msvisualcpp | msvcmsys) # This compiler won't grok `-c -o', but also, the minuso test has # not run yet. These depmodes are late enough in the game, and # so weak that their functioning should not be impacted. am__obj=conftest.${OBJEXT-o} am__minus_obj= ;; none) break ;; esac if depmode=$depmode \ source=sub/conftest.c object=$am__obj \ depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \ $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \ >/dev/null 2>conftest.err && grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 && grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 && grep $am__obj sub/conftest.Po > /dev/null 2>&1 && ${MAKE-make} -s -f confmf > /dev/null 2>&1; then # icc doesn't choke on unknown options, it will just issue warnings # or remarks (even with -Werror). So we grep stderr for any message # that says an option was ignored or not supported. # When given -MP, icc 7.0 and 7.1 complain thusly: # icc: Command line warning: ignoring option '-M'; no argument required # The diagnosis changed in icc 8.0: # icc: Command line remark: option '-MP' not supported if (grep 'ignoring option' conftest.err || grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else am_cv_CXX_dependencies_compiler_type=$depmode break fi fi done cd .. rm -rf conftest.dir else am_cv_CXX_dependencies_compiler_type=none fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_CXX_dependencies_compiler_type" >&5 $as_echo "$am_cv_CXX_dependencies_compiler_type" >&6; } CXXDEPMODE=depmode=$am_cv_CXX_dependencies_compiler_type if test "x$enable_dependency_tracking" != xno \ && test "$am_cv_CXX_dependencies_compiler_type" = gcc3; then am__fastdepCXX_TRUE= am__fastdepCXX_FALSE='#' else am__fastdepCXX_TRUE='#' am__fastdepCXX_FALSE= fi if test -n "$CXX" && ( test "X$CXX" != "Xno" && ( (test "X$CXX" = "Xg++" && `g++ -v >/dev/null 2>&1` ) || (test "X$CXX" != "Xg++"))) ; then ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to run the C++ preprocessor" >&5 $as_echo_n "checking how to run the C++ preprocessor... " >&6; } if test -z "$CXXCPP"; then if test "${ac_cv_prog_CXXCPP+set}" = set; then : $as_echo_n "(cached) " >&6 else # Double quotes because CXXCPP needs to be expanded for CXXCPP in "$CXX -E" "/lib/cpp" do ac_preproc_ok=false for ac_cxx_preproc_warn_flag in '' yes do # Use a header file that comes with gcc, so configuring glibc # with a fresh cross-compiler works. # Prefer to if __STDC__ is defined, since # exists even on freestanding compilers. # On the NeXT, cc -E runs the code through the compiler's parser, # not just through cpp. "Syntax error" is here to catch this case. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #ifdef __STDC__ # include #else # include #endif Syntax error _ACEOF if ac_fn_cxx_try_cpp "$LINENO"; then : else # Broken: fails on valid input. continue fi rm -f conftest.err conftest.i conftest.$ac_ext # OK, works on sane cases. Now check whether nonexistent headers # can be detected and how. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include _ACEOF if ac_fn_cxx_try_cpp "$LINENO"; then : # Broken: success on invalid input. continue else # Passes both tests. ac_preproc_ok=: break fi rm -f conftest.err conftest.i conftest.$ac_ext done # Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. rm -f conftest.i conftest.err conftest.$ac_ext if $ac_preproc_ok; then : break fi done ac_cv_prog_CXXCPP=$CXXCPP fi CXXCPP=$ac_cv_prog_CXXCPP else ac_cv_prog_CXXCPP=$CXXCPP fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CXXCPP" >&5 $as_echo "$CXXCPP" >&6; } ac_preproc_ok=false for ac_cxx_preproc_warn_flag in '' yes do # Use a header file that comes with gcc, so configuring glibc # with a fresh cross-compiler works. # Prefer to if __STDC__ is defined, since # exists even on freestanding compilers. # On the NeXT, cc -E runs the code through the compiler's parser, # not just through cpp. "Syntax error" is here to catch this case. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #ifdef __STDC__ # include #else # include #endif Syntax error _ACEOF if ac_fn_cxx_try_cpp "$LINENO"; then : else # Broken: fails on valid input. continue fi rm -f conftest.err conftest.i conftest.$ac_ext # OK, works on sane cases. Now check whether nonexistent headers # can be detected and how. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include _ACEOF if ac_fn_cxx_try_cpp "$LINENO"; then : # Broken: success on invalid input. continue else # Passes both tests. ac_preproc_ok=: break fi rm -f conftest.err conftest.i conftest.$ac_ext done # Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. rm -f conftest.i conftest.err conftest.$ac_ext if $ac_preproc_ok; then : else { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} _lt_caught_CXX_error=yes; } fi ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu else _lt_caught_CXX_error=yes fi # Set options enable_dlopen=no enable_win32_dll=no # Check whether --enable-shared was given. if test "${enable_shared+set}" = set; then : enableval=$enable_shared; p=${PACKAGE-default} case $enableval in yes) enable_shared=yes ;; no) enable_shared=no ;; *) enable_shared=no # Look at the argument we got. We use all the common list separators. lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," for pkg in $enableval; do IFS="$lt_save_ifs" if test "X$pkg" = "X$p"; then enable_shared=yes fi done IFS="$lt_save_ifs" ;; esac else enable_shared=yes fi # Check whether --enable-static was given. if test "${enable_static+set}" = set; then : enableval=$enable_static; p=${PACKAGE-default} case $enableval in yes) enable_static=yes ;; no) enable_static=no ;; *) enable_static=no # Look at the argument we got. We use all the common list separators. lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," for pkg in $enableval; do IFS="$lt_save_ifs" if test "X$pkg" = "X$p"; then enable_static=yes fi done IFS="$lt_save_ifs" ;; esac else enable_static=yes fi # Check whether --with-pic was given. if test "${with_pic+set}" = set; then : withval=$with_pic; pic_mode="$withval" else pic_mode=default fi test -z "$pic_mode" && pic_mode=default # Check whether --enable-fast-install was given. 
if test "${enable_fast_install+set}" = set; then : enableval=$enable_fast_install; p=${PACKAGE-default} case $enableval in yes) enable_fast_install=yes ;; no) enable_fast_install=no ;; *) enable_fast_install=no # Look at the argument we got. We use all the common list separators. lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," for pkg in $enableval; do IFS="$lt_save_ifs" if test "X$pkg" = "X$p"; then enable_fast_install=yes fi done IFS="$lt_save_ifs" ;; esac else enable_fast_install=yes fi # This can be used to rebuild libtool when needed LIBTOOL_DEPS="$ltmain" # Always use our own libtool. LIBTOOL='$(SHELL) $(top_builddir)/libtool' test -z "$LN_S" && LN_S="ln -s" if test -n "${ZSH_VERSION+set}" ; then setopt NO_GLOB_SUBST fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for objdir" >&5 $as_echo_n "checking for objdir... " >&6; } if test "${lt_cv_objdir+set}" = set; then : $as_echo_n "(cached) " >&6 else rm -f .libs 2>/dev/null mkdir .libs 2>/dev/null if test -d .libs; then lt_cv_objdir=.libs else # MS-DOS does not allow filenames that begin with a dot. lt_cv_objdir=_libs fi rmdir .libs 2>/dev/null fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_objdir" >&5 $as_echo "$lt_cv_objdir" >&6; } objdir=$lt_cv_objdir cat >>confdefs.h <<_ACEOF #define LT_OBJDIR "$lt_cv_objdir/" _ACEOF case $host_os in aix3*) # AIX sometimes has problems with the GCC collect2 program. For some # reason, if we set the COLLECT_NAMES environment variable, the problems # vanish in a puff of smoke. if test "X${COLLECT_NAMES+set}" != Xset; then COLLECT_NAMES= export COLLECT_NAMES fi ;; esac # Sed substitution that helps us do robust quoting. It backslashifies # metacharacters that are still active within double-quoted strings. sed_quote_subst='s/\(["`$\\]\)/\\\1/g' # Same as above, but do not quote variable references. double_quote_subst='s/\(["`\\]\)/\\\1/g' # Sed substitution to delay expansion of an escaped shell variable in a # double_quote_subst'ed string. delay_variable_subst='s/\\\\\\\\\\\$/\\\\\\$/g' # Sed substitution to delay expansion of an escaped single quote. delay_single_quote_subst='s/'\''/'\'\\\\\\\'\''/g' # Sed substitution to avoid accidental globbing in evaled expressions no_glob_subst='s/\*/\\\*/g' # Global variables: ofile=libtool can_build_shared=yes # All known linkers require a `.a' archive for static linking (except MSVC, # which needs '.lib'). libext=a with_gnu_ld="$lt_cv_prog_gnu_ld" old_CC="$CC" old_CFLAGS="$CFLAGS" # Set sane defaults for various variables test -z "$CC" && CC=cc test -z "$LTCC" && LTCC=$CC test -z "$LTCFLAGS" && LTCFLAGS=$CFLAGS test -z "$LD" && LD=ld test -z "$ac_objext" && ac_objext=o for cc_temp in $compiler""; do case $cc_temp in compile | *[\\/]compile | ccache | *[\\/]ccache ) ;; distcc | *[\\/]distcc | purify | *[\\/]purify ) ;; \-*) ;; *) break;; esac done cc_basename=`$ECHO "X$cc_temp" | $Xsed -e 's%.*/%%' -e "s%^$host_alias-%%"` # Only perform the check for file, if the check method requires it test -z "$MAGIC_CMD" && MAGIC_CMD=file case $deplibs_check_method in file_magic*) if test "$file_magic_cmd" = '$MAGIC_CMD'; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ${ac_tool_prefix}file" >&5 $as_echo_n "checking for ${ac_tool_prefix}file... " >&6; } if test "${lt_cv_path_MAGIC_CMD+set}" = set; then : $as_echo_n "(cached) " >&6 else case $MAGIC_CMD in [\\/*] | ?:[\\/]*) lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path. 
;; *) lt_save_MAGIC_CMD="$MAGIC_CMD" lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR ac_dummy="/usr/bin$PATH_SEPARATOR$PATH" for ac_dir in $ac_dummy; do IFS="$lt_save_ifs" test -z "$ac_dir" && ac_dir=. if test -f $ac_dir/${ac_tool_prefix}file; then lt_cv_path_MAGIC_CMD="$ac_dir/${ac_tool_prefix}file" if test -n "$file_magic_test_file"; then case $deplibs_check_method in "file_magic "*) file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"` MAGIC_CMD="$lt_cv_path_MAGIC_CMD" if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null | $EGREP "$file_magic_regex" > /dev/null; then : else cat <<_LT_EOF 1>&2 *** Warning: the command libtool uses to detect shared libraries, *** $file_magic_cmd, produces output that libtool cannot recognize. *** The result is that libtool may fail to recognize shared libraries *** as such. This will affect the creation of libtool libraries that *** depend on shared libraries, but programs linked with such libtool *** libraries will work regardless of this problem. Nevertheless, you *** may want to report the problem to your system manager and/or to *** bug-libtool@gnu.org _LT_EOF fi ;; esac fi break fi done IFS="$lt_save_ifs" MAGIC_CMD="$lt_save_MAGIC_CMD" ;; esac fi MAGIC_CMD="$lt_cv_path_MAGIC_CMD" if test -n "$MAGIC_CMD"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5 $as_echo "$MAGIC_CMD" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test -z "$lt_cv_path_MAGIC_CMD"; then if test -n "$ac_tool_prefix"; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for file" >&5 $as_echo_n "checking for file... " >&6; } if test "${lt_cv_path_MAGIC_CMD+set}" = set; then : $as_echo_n "(cached) " >&6 else case $MAGIC_CMD in [\\/*] | ?:[\\/]*) lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path. ;; *) lt_save_MAGIC_CMD="$MAGIC_CMD" lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR ac_dummy="/usr/bin$PATH_SEPARATOR$PATH" for ac_dir in $ac_dummy; do IFS="$lt_save_ifs" test -z "$ac_dir" && ac_dir=. if test -f $ac_dir/file; then lt_cv_path_MAGIC_CMD="$ac_dir/file" if test -n "$file_magic_test_file"; then case $deplibs_check_method in "file_magic "*) file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"` MAGIC_CMD="$lt_cv_path_MAGIC_CMD" if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null | $EGREP "$file_magic_regex" > /dev/null; then : else cat <<_LT_EOF 1>&2 *** Warning: the command libtool uses to detect shared libraries, *** $file_magic_cmd, produces output that libtool cannot recognize. *** The result is that libtool may fail to recognize shared libraries *** as such. This will affect the creation of libtool libraries that *** depend on shared libraries, but programs linked with such libtool *** libraries will work regardless of this problem. 
Nevertheless, you *** may want to report the problem to your system manager and/or to *** bug-libtool@gnu.org _LT_EOF fi ;; esac fi break fi done IFS="$lt_save_ifs" MAGIC_CMD="$lt_save_MAGIC_CMD" ;; esac fi MAGIC_CMD="$lt_cv_path_MAGIC_CMD" if test -n "$MAGIC_CMD"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5 $as_echo "$MAGIC_CMD" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi else MAGIC_CMD=: fi fi fi ;; esac # Use C for the default configuration in the libtool script lt_save_CC="$CC" ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu # Source file extension for C test sources. ac_ext=c # Object file extension for compiled C test sources. objext=o objext=$objext # Code to be used in simple compile tests lt_simple_compile_test_code="int some_variable = 0;" # Code to be used in simple link tests lt_simple_link_test_code='int main(){return(0);}' # If no C compiler was specified, use CC. LTCC=${LTCC-"$CC"} # If no C compiler flags were specified, use CFLAGS. LTCFLAGS=${LTCFLAGS-"$CFLAGS"} # Allow CC to be a program name with arguments. compiler=$CC # Save the default compiler, since it gets overwritten when the other # tags are being tested, and _LT_TAGVAR(compiler, []) is a NOP. compiler_DEFAULT=$CC # save warnings/boilerplate of simple test code ac_outfile=conftest.$ac_objext echo "$lt_simple_compile_test_code" >conftest.$ac_ext eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err _lt_compiler_boilerplate=`cat conftest.err` $RM conftest* ac_outfile=conftest.$ac_objext echo "$lt_simple_link_test_code" >conftest.$ac_ext eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err _lt_linker_boilerplate=`cat conftest.err` $RM -r conftest* if test -n "$compiler"; then lt_prog_compiler_no_builtin_flag= if test "$GCC" = yes; then lt_prog_compiler_no_builtin_flag=' -fno-builtin' { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -fno-rtti -fno-exceptions" >&5 $as_echo_n "checking if $compiler supports -fno-rtti -fno-exceptions... " >&6; } if test "${lt_cv_prog_compiler_rtti_exceptions+set}" = set; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_rtti_exceptions=no ac_outfile=conftest.$ac_objext echo "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="-fno-rtti -fno-exceptions" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. # The option is referenced via a variable to avoid confusing sed. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:12362: $lt_compile\"" >&5) (eval "$lt_compile" 2>conftest.err) ac_status=$? cat conftest.err >&5 echo "$as_me:12366: \$? = $ac_status" >&5 if (exit $ac_status) && test -s "$ac_outfile"; then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings other than the usual output. $ECHO "X$_lt_compiler_boilerplate" | $Xsed -e '/^$/d' >conftest.exp $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 if test ! 
-s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then lt_cv_prog_compiler_rtti_exceptions=yes fi fi $RM conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_rtti_exceptions" >&5 $as_echo "$lt_cv_prog_compiler_rtti_exceptions" >&6; } if test x"$lt_cv_prog_compiler_rtti_exceptions" = xyes; then lt_prog_compiler_no_builtin_flag="$lt_prog_compiler_no_builtin_flag -fno-rtti -fno-exceptions" else : fi fi lt_prog_compiler_wl= lt_prog_compiler_pic= lt_prog_compiler_static= { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 $as_echo_n "checking for $compiler option to produce PIC... " >&6; } if test "$GCC" = yes; then lt_prog_compiler_wl='-Wl,' lt_prog_compiler_static='-static' case $host_os in aix*) # All AIX code is PIC. if test "$host_cpu" = ia64; then # AIX 5 now supports IA64 processor lt_prog_compiler_static='-Bstatic' fi ;; amigaos*) case $host_cpu in powerpc) # see comment about AmigaOS4 .so support lt_prog_compiler_pic='-fPIC' ;; m68k) # FIXME: we need at least 68020 code to build shared libraries, but # adding the `-m68020' flag to GCC prevents building anything better, # like `-m68040'. lt_prog_compiler_pic='-m68020 -resident32 -malways-restore-a4' ;; esac ;; beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*) # PIC is the default for these OSes. ;; mingw* | cygwin* | pw32* | os2* | cegcc*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). # Although the cygwin gcc ignores -fPIC, still need this for old-style # (--disable-auto-import) libraries lt_prog_compiler_pic='-DDLL_EXPORT' ;; darwin* | rhapsody*) # PIC is the default on this platform # Common symbols not allowed in MH_DYLIB files lt_prog_compiler_pic='-fno-common' ;; hpux*) # PIC is the default for 64-bit PA HP-UX, but not for 32-bit # PA HP-UX. On IA64 HP-UX, PIC is the default but the pic flag # sets the default TLS model and affects inlining. case $host_cpu in hppa*64*) # +Z the default ;; *) lt_prog_compiler_pic='-fPIC' ;; esac ;; interix[3-9]*) # Interix 3.x gcc -fpic/-fPIC options generate broken code. # Instead, we relocate shared libraries at runtime. ;; msdosdjgpp*) # Just because we use GCC doesn't mean we suddenly get shared libraries # on systems that don't support them. lt_prog_compiler_can_build_shared=no enable_shared=no ;; *nto* | *qnx*) # QNX uses GNU C++, but need to define -shared option too, otherwise # it will coredump. lt_prog_compiler_pic='-fPIC -shared' ;; sysv4*MP*) if test -d /usr/nec; then lt_prog_compiler_pic=-Kconform_pic fi ;; *) lt_prog_compiler_pic='-fPIC' ;; esac else # PORTME Check for flag to pass linker flags through the system compiler. case $host_os in aix*) lt_prog_compiler_wl='-Wl,' if test "$host_cpu" = ia64; then # AIX 5 now supports IA64 processor lt_prog_compiler_static='-Bstatic' else lt_prog_compiler_static='-bnso -bI:/lib/syscalls.exp' fi ;; mingw* | cygwin* | pw32* | os2* | cegcc*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). lt_prog_compiler_pic='-DDLL_EXPORT' ;; hpux9* | hpux10* | hpux11*) lt_prog_compiler_wl='-Wl,' # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but # not for PA HP-UX. case $host_cpu in hppa*64*|ia64*) # +Z the default ;; *) lt_prog_compiler_pic='+Z' ;; esac # Is there a better lt_prog_compiler_static that works with the bundled CC? 
lt_prog_compiler_static='${wl}-a ${wl}archive' ;; irix5* | irix6* | nonstopux*) lt_prog_compiler_wl='-Wl,' # PIC (with -KPIC) is the default. lt_prog_compiler_static='-non_shared' ;; linux* | k*bsd*-gnu | kopensolaris*-gnu) case $cc_basename in # old Intel for x86_64 which still supported -KPIC. ecc*) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='-KPIC' lt_prog_compiler_static='-static' ;; # icc used to be incompatible with GCC. # ICC 10 doesn't accept -KPIC any more. icc* | ifort*) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='-fPIC' lt_prog_compiler_static='-static' ;; # Lahey Fortran 8.1. lf95*) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='--shared' lt_prog_compiler_static='--static' ;; pgcc* | pgf77* | pgf90* | pgf95*) # Portland Group compilers (*not* the Pentium gcc compiler, # which looks to be a dead project) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='-fpic' lt_prog_compiler_static='-Bstatic' ;; ccc*) lt_prog_compiler_wl='-Wl,' # All Alpha code is PIC. lt_prog_compiler_static='-non_shared' ;; xl*) # IBM XL C 8.0/Fortran 10.1 on PPC lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='-qpic' lt_prog_compiler_static='-qstaticlink' ;; *) case `$CC -V 2>&1 | sed 5q` in *Sun\ C*) # Sun C 5.9 lt_prog_compiler_pic='-KPIC' lt_prog_compiler_static='-Bstatic' lt_prog_compiler_wl='-Wl,' ;; *Sun\ F*) # Sun Fortran 8.3 passes all unrecognized flags to the linker lt_prog_compiler_pic='-KPIC' lt_prog_compiler_static='-Bstatic' lt_prog_compiler_wl='' ;; esac ;; esac ;; newsos6) lt_prog_compiler_pic='-KPIC' lt_prog_compiler_static='-Bstatic' ;; *nto* | *qnx*) # QNX uses GNU C++, but need to define -shared option too, otherwise # it will coredump. lt_prog_compiler_pic='-fPIC -shared' ;; osf3* | osf4* | osf5*) lt_prog_compiler_wl='-Wl,' # All OSF/1 code is PIC. lt_prog_compiler_static='-non_shared' ;; rdos*) lt_prog_compiler_static='-non_shared' ;; solaris*) lt_prog_compiler_pic='-KPIC' lt_prog_compiler_static='-Bstatic' case $cc_basename in f77* | f90* | f95*) lt_prog_compiler_wl='-Qoption ld ';; *) lt_prog_compiler_wl='-Wl,';; esac ;; sunos4*) lt_prog_compiler_wl='-Qoption ld ' lt_prog_compiler_pic='-PIC' lt_prog_compiler_static='-Bstatic' ;; sysv4 | sysv4.2uw2* | sysv4.3*) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='-KPIC' lt_prog_compiler_static='-Bstatic' ;; sysv4*MP*) if test -d /usr/nec ;then lt_prog_compiler_pic='-Kconform_pic' lt_prog_compiler_static='-Bstatic' fi ;; sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='-KPIC' lt_prog_compiler_static='-Bstatic' ;; unicos*) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_can_build_shared=no ;; uts4*) lt_prog_compiler_pic='-pic' lt_prog_compiler_static='-Bstatic' ;; *) lt_prog_compiler_can_build_shared=no ;; esac fi case $host_os in # For platforms which do not support PIC, -DPIC is meaningless: *djgpp*) lt_prog_compiler_pic= ;; *) lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC" ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic" >&5 $as_echo "$lt_prog_compiler_pic" >&6; } # # Check to make sure the PIC flag actually works. # if test -n "$lt_prog_compiler_pic"; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler PIC flag $lt_prog_compiler_pic works" >&5 $as_echo_n "checking if $compiler PIC flag $lt_prog_compiler_pic works... 
" >&6; } if test "${lt_cv_prog_compiler_pic_works+set}" = set; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_pic_works=no ac_outfile=conftest.$ac_objext echo "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="$lt_prog_compiler_pic -DPIC" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. # The option is referenced via a variable to avoid confusing sed. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:12701: $lt_compile\"" >&5) (eval "$lt_compile" 2>conftest.err) ac_status=$? cat conftest.err >&5 echo "$as_me:12705: \$? = $ac_status" >&5 if (exit $ac_status) && test -s "$ac_outfile"; then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings other than the usual output. $ECHO "X$_lt_compiler_boilerplate" | $Xsed -e '/^$/d' >conftest.exp $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then lt_cv_prog_compiler_pic_works=yes fi fi $RM conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_works" >&5 $as_echo "$lt_cv_prog_compiler_pic_works" >&6; } if test x"$lt_cv_prog_compiler_pic_works" = xyes; then case $lt_prog_compiler_pic in "" | " "*) ;; *) lt_prog_compiler_pic=" $lt_prog_compiler_pic" ;; esac else lt_prog_compiler_pic= lt_prog_compiler_can_build_shared=no fi fi # # Check to make sure the static flag actually works. # wl=$lt_prog_compiler_wl eval lt_tmp_static_flag=\"$lt_prog_compiler_static\" { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler static flag $lt_tmp_static_flag works" >&5 $as_echo_n "checking if $compiler static flag $lt_tmp_static_flag works... " >&6; } if test "${lt_cv_prog_compiler_static_works+set}" = set; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_static_works=no save_LDFLAGS="$LDFLAGS" LDFLAGS="$LDFLAGS $lt_tmp_static_flag" echo "$lt_simple_link_test_code" > conftest.$ac_ext if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then # The linker can only warn and ignore the option if not recognized # So say no if there are warnings if test -s conftest.err; then # Append any errors to the config.log. cat conftest.err 1>&5 $ECHO "X$_lt_linker_boilerplate" | $Xsed -e '/^$/d' > conftest.exp $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 if diff conftest.exp conftest.er2 >/dev/null; then lt_cv_prog_compiler_static_works=yes fi else lt_cv_prog_compiler_static_works=yes fi fi $RM -r conftest* LDFLAGS="$save_LDFLAGS" fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_static_works" >&5 $as_echo "$lt_cv_prog_compiler_static_works" >&6; } if test x"$lt_cv_prog_compiler_static_works" = xyes; then : else lt_prog_compiler_static= fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5 $as_echo_n "checking if $compiler supports -c -o file.$ac_objext... 
" >&6; } if test "${lt_cv_prog_compiler_c_o+set}" = set; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_c_o=no $RM -r conftest 2>/dev/null mkdir conftest cd conftest mkdir out echo "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="-o out/conftest2.$ac_objext" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:12806: $lt_compile\"" >&5) (eval "$lt_compile" 2>out/conftest.err) ac_status=$? cat out/conftest.err >&5 echo "$as_me:12810: \$? = $ac_status" >&5 if (exit $ac_status) && test -s out/conftest2.$ac_objext then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings $ECHO "X$_lt_compiler_boilerplate" | $Xsed -e '/^$/d' > out/conftest.exp $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then lt_cv_prog_compiler_c_o=yes fi fi chmod u+w . 2>&5 $RM conftest* # SGI C++ compiler will create directory out/ii_files/ for # template instantiation test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files $RM out/* && rmdir out cd .. $RM -r conftest $RM conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o" >&5 $as_echo "$lt_cv_prog_compiler_c_o" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5 $as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; } if test "${lt_cv_prog_compiler_c_o+set}" = set; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_c_o=no $RM -r conftest 2>/dev/null mkdir conftest cd conftest mkdir out echo "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="-o out/conftest2.$ac_objext" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:12861: $lt_compile\"" >&5) (eval "$lt_compile" 2>out/conftest.err) ac_status=$? cat out/conftest.err >&5 echo "$as_me:12865: \$? = $ac_status" >&5 if (exit $ac_status) && test -s out/conftest2.$ac_objext then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings $ECHO "X$_lt_compiler_boilerplate" | $Xsed -e '/^$/d' > out/conftest.exp $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then lt_cv_prog_compiler_c_o=yes fi fi chmod u+w . 2>&5 $RM conftest* # SGI C++ compiler will create directory out/ii_files/ for # template instantiation test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files $RM out/* && rmdir out cd .. 
$RM -r conftest $RM conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o" >&5 $as_echo "$lt_cv_prog_compiler_c_o" >&6; } hard_links="nottested" if test "$lt_cv_prog_compiler_c_o" = no && test "$need_locks" != no; then # do not overwrite the value of need_locks provided by the user { $as_echo "$as_me:${as_lineno-$LINENO}: checking if we can lock with hard links" >&5 $as_echo_n "checking if we can lock with hard links... " >&6; } hard_links=yes $RM conftest* ln conftest.a conftest.b 2>/dev/null && hard_links=no touch conftest.a ln conftest.a conftest.b 2>&5 || hard_links=no ln conftest.a conftest.b 2>/dev/null && hard_links=no { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hard_links" >&5 $as_echo "$hard_links" >&6; } if test "$hard_links" = no; then { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&5 $as_echo "$as_me: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&2;} need_locks=warn fi else need_locks=no fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $compiler linker ($LD) supports shared libraries" >&5 $as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... " >&6; } runpath_var= allow_undefined_flag= always_export_symbols=no archive_cmds= archive_expsym_cmds= compiler_needs_object=no enable_shared_with_static_runtimes=no export_dynamic_flag_spec= export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' hardcode_automatic=no hardcode_direct=no hardcode_direct_absolute=no hardcode_libdir_flag_spec= hardcode_libdir_flag_spec_ld= hardcode_libdir_separator= hardcode_minus_L=no hardcode_shlibpath_var=unsupported inherit_rpath=no link_all_deplibs=unknown module_cmds= module_expsym_cmds= old_archive_from_new_cmds= old_archive_from_expsyms_cmds= thread_safe_flag_spec= whole_archive_flag_spec= # include_expsyms should be a list of space-separated symbols to be *always* # included in the symbol list include_expsyms= # exclude_expsyms can be an extended regexp of symbols to exclude # it will be wrapped by ` (' and `)$', so one must not match beginning or # end of line. Example: `a|bc|.*d.*' will exclude the symbols `a' and `bc', # as well as any symbol that contains `d'. exclude_expsyms='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*' # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out # platforms (ab)use it in PIC code, but their linkers get confused if # the symbol is explicitly referenced. Since portable code cannot # rely on this symbol name, it's probably fine to never include it in # preloaded symbol tables. # Exclude shared library initialization/finalization symbols. extract_expsyms_cmds= case $host_os in cygwin* | mingw* | pw32* | cegcc*) # FIXME: the MSVC++ port hasn't been tested in a loooong time # When not using gcc, we currently assume that we are using # Microsoft Visual C++. if test "$GCC" != yes; then with_gnu_ld=no fi ;; interix*) # we just hope/assume this is gcc and not c89 (= MSVC++) with_gnu_ld=yes ;; openbsd*) with_gnu_ld=no ;; linux* | k*bsd*-gnu) link_all_deplibs=no ;; esac ld_shlibs=yes if test "$with_gnu_ld" = yes; then # If archive_cmds runs LD, not CC, wlarc should be empty wlarc='${wl}' # Set some defaults for GNU ld with shared library support. These # are reset later if shared libraries are not supported. Putting them # here allows them to be overridden if necessary. 
runpath_var=LD_RUN_PATH hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' export_dynamic_flag_spec='${wl}--export-dynamic' # ancient GNU ld didn't support --whole-archive et. al. if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then whole_archive_flag_spec="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' else whole_archive_flag_spec= fi supports_anon_versioning=no case `$LD -v 2>&1` in *GNU\ gold*) supports_anon_versioning=yes ;; *\ [01].* | *\ 2.[0-9].* | *\ 2.10.*) ;; # catch versions < 2.11 *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ... *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ... *\ 2.11.*) ;; # other 2.11 versions *) supports_anon_versioning=yes ;; esac # See if GNU ld supports shared libraries. case $host_os in aix[3-9]*) # On AIX/PPC, the GNU linker is very broken if test "$host_cpu" != ia64; then ld_shlibs=no cat <<_LT_EOF 1>&2 *** Warning: the GNU linker, at least up to release 2.9.1, is reported *** to be unable to reliably create shared libraries on AIX. *** Therefore, libtool is disabling shared libraries support. If you *** really care for shared libraries, you may want to modify your PATH *** so that a non-GNU linker is found, and then restart. _LT_EOF fi ;; amigaos*) case $host_cpu in powerpc) # see comment about AmigaOS4 .so support archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' archive_expsym_cmds='' ;; m68k) archive_cmds='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' hardcode_libdir_flag_spec='-L$libdir' hardcode_minus_L=yes ;; esac ;; beos*) if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then allow_undefined_flag=unsupported # Joseph Beckenbach says some releases of gcc # support --undefined. This deserves some investigation. FIXME archive_cmds='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' else ld_shlibs=no fi ;; cygwin* | mingw* | pw32* | cegcc*) # _LT_TAGVAR(hardcode_libdir_flag_spec, ) is actually meaningless, # as there is no search path for DLLs. hardcode_libdir_flag_spec='-L$libdir' allow_undefined_flag=unsupported always_export_symbols=no enable_shared_with_static_runtimes=yes export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' # If the export-symbols file already is a .def file (1st line # is EXPORTS), use it as is; otherwise, prepend... 
archive_expsym_cmds='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then cp $export_symbols $output_objdir/$soname.def; else echo EXPORTS > $output_objdir/$soname.def; cat $export_symbols >> $output_objdir/$soname.def; fi~ $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' else ld_shlibs=no fi ;; interix[3-9]*) hardcode_direct=no hardcode_shlibpath_var=no hardcode_libdir_flag_spec='${wl}-rpath,$libdir' export_dynamic_flag_spec='${wl}-E' # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc. # Instead, shared libraries are loaded at an image base (0x10000000 by # default) and relocated if they conflict, which is a slow very memory # consuming and fragmenting process. To avoid this, we pick a random, # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link # time. Moving up from 0x10000000 also allows more sbrk(2) space. archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' archive_expsym_cmds='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' ;; gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu) tmp_diet=no if test "$host_os" = linux-dietlibc; then case $cc_basename in diet\ *) tmp_diet=yes;; # linux-dietlibc with static linking (!diet-dyn) esac fi if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \ && test "$tmp_diet" = no then tmp_addflag= tmp_sharedflag='-shared' case $cc_basename,$host_cpu in pgcc*) # Portland Group C compiler whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; $ECHO \"$new_convenience\"` ${wl}--no-whole-archive' tmp_addflag=' $pic_flag' ;; pgf77* | pgf90* | pgf95*) # Portland Group f77 and f90 compilers whole_archive_flag_spec='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; $ECHO \"$new_convenience\"` ${wl}--no-whole-archive' tmp_addflag=' $pic_flag -Mnomain' ;; ecc*,ia64* | icc*,ia64*) # Intel C compiler on ia64 tmp_addflag=' -i_dynamic' ;; efc*,ia64* | ifort*,ia64*) # Intel Fortran compiler on ia64 tmp_addflag=' -i_dynamic -nofor_main' ;; ifc* | ifort*) # Intel Fortran compiler tmp_addflag=' -nofor_main' ;; lf95*) # Lahey Fortran 8.1 whole_archive_flag_spec= tmp_sharedflag='--shared' ;; xl[cC]*) # IBM XL C 8.0 on PPC (deal with xlf below) tmp_sharedflag='-qmkshrobj' tmp_addflag= ;; esac case `$CC -V 2>&1 | sed 5q` in *Sun\ C*) # Sun C 5.9 whole_archive_flag_spec='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; $ECHO \"$new_convenience\"` ${wl}--no-whole-archive' compiler_needs_object=yes tmp_sharedflag='-G' ;; *Sun\ F*) # Sun Fortran 8.3 tmp_sharedflag='-G' ;; esac archive_cmds='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' if test "x$supports_anon_versioning" = xyes; then archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ echo "local: *; };" >> 
$output_objdir/$libname.ver~ $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib' fi case $cc_basename in xlf*) # IBM XL Fortran 10.1 on PPC cannot create shared libs itself whole_archive_flag_spec='--whole-archive$convenience --no-whole-archive' hardcode_libdir_flag_spec= hardcode_libdir_flag_spec_ld='-rpath $libdir' archive_cmds='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib' if test "x$supports_anon_versioning" = xyes; then archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ echo "local: *; };" >> $output_objdir/$libname.ver~ $LD -shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' fi ;; esac else ld_shlibs=no fi ;; netbsd* | netbsdelf*-gnu) if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' wlarc= else archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' fi ;; solaris*) if $LD -v 2>&1 | $GREP 'BFD 2\.8' > /dev/null; then ld_shlibs=no cat <<_LT_EOF 1>&2 *** Warning: The releases 2.8.* of the GNU linker cannot reliably *** create shared libraries on Solaris systems. Therefore, libtool *** is disabling shared libraries support. We urge you to upgrade GNU *** binutils to release 2.9.1 or newer. Another option is to modify *** your PATH or compiler configuration so that the native linker is *** used, and then restart. _LT_EOF elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' else ld_shlibs=no fi ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*) case `$LD -v 2>&1` in *\ [01].* | *\ 2.[0-9].* | *\ 2.1[0-5].*) ld_shlibs=no cat <<_LT_EOF 1>&2 *** Warning: Releases of the GNU linker prior to 2.16.91.0.3 can not *** reliably create shared libraries on SCO systems. Therefore, libtool *** is disabling shared libraries support. We urge you to upgrade GNU *** binutils to release 2.16.91.0.3 or newer. Another option is to modify *** your PATH or compiler configuration so that the native linker is *** used, and then restart. _LT_EOF ;; *) # For security reasons, it is highly recommended that you always # use absolute paths for naming shared libraries, and exclude the # DT_RUNPATH tag from executables and libraries. But doing so # requires that you compile everything twice, which is a pain. 
if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' else ld_shlibs=no fi ;; esac ;; sunos4*) archive_cmds='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags' wlarc= hardcode_direct=yes hardcode_shlibpath_var=no ;; *) if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' else ld_shlibs=no fi ;; esac if test "$ld_shlibs" = no; then runpath_var= hardcode_libdir_flag_spec= export_dynamic_flag_spec= whole_archive_flag_spec= fi else # PORTME fill in a description of your system's linker (not GNU ld) case $host_os in aix3*) allow_undefined_flag=unsupported always_export_symbols=yes archive_expsym_cmds='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib $output_objdir/$soname' # Note: this linker hardcodes the directories in LIBPATH if there # are no directories specified by -L. hardcode_minus_L=yes if test "$GCC" = yes && test -z "$lt_prog_compiler_static"; then # Neither direct hardcoding nor static linking is supported with a # broken collect2. hardcode_direct=unsupported fi ;; aix[4-9]*) if test "$host_cpu" = ia64; then # On IA64, the linker does run time linking by default, so we don't # have to do anything special. aix_use_runtimelinking=no exp_sym_flag='-Bexport' no_entry_flag="" else # If we're using GNU nm, then we don't want the "-C" option. # -C means demangle to AIX nm, but means don't demangle with GNU nm if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then export_symbols_cmds='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' else export_symbols_cmds='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' fi aix_use_runtimelinking=no # Test if we are trying to use run time linking or normal # AIX style linking. If -brtl is somewhere in LDFLAGS, we # need to do runtime linking. case $host_os in aix4.[23]|aix4.[23].*|aix[5-9]*) for ld_flag in $LDFLAGS; do if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then aix_use_runtimelinking=yes break fi done ;; esac exp_sym_flag='-bexport' no_entry_flag='-bnoentry' fi # When large executables or shared objects are built, AIX ld can # have problems creating the table of contents. If linking a library # or program results in "error TOC overflow" add -mminimal-toc to # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS. 
archive_cmds='' hardcode_direct=yes hardcode_direct_absolute=yes hardcode_libdir_separator=':' link_all_deplibs=yes file_list_spec='${wl}-f,' if test "$GCC" = yes; then case $host_os in aix4.[012]|aix4.[012].*) # We only want to do this on AIX 4.2 and lower, the check # below for broken collect2 doesn't work under 4.3+ collect2name=`${CC} -print-prog-name=collect2` if test -f "$collect2name" && strings "$collect2name" | $GREP resolve_lib_name >/dev/null then # We have reworked collect2 : else # We have old collect2 hardcode_direct=unsupported # It fails to find uninstalled libraries when the uninstalled # path is not listed in the libpath. Setting hardcode_minus_L # to unsupported forces relinking hardcode_minus_L=yes hardcode_libdir_flag_spec='-L$libdir' hardcode_libdir_separator= fi ;; esac shared_flag='-shared' if test "$aix_use_runtimelinking" = yes; then shared_flag="$shared_flag "'${wl}-G' fi link_all_deplibs=no else # not using gcc if test "$host_cpu" = ia64; then # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release # chokes on -Wl,-G. The following line is correct: shared_flag='-G' else if test "$aix_use_runtimelinking" = yes; then shared_flag='${wl}-G' else shared_flag='${wl}-bM:SRE' fi fi fi export_dynamic_flag_spec='${wl}-bexpall' # It seems that -bexpall does not export symbols beginning with # underscore (_), so it is better to generate a list of symbols to export. always_export_symbols=yes if test "$aix_use_runtimelinking" = yes; then # Warning - without using the other runtime loading flags (-brtl), # -berok will link without error, but may produce a broken library. allow_undefined_flag='-berok' # Determine the default libpath from the value encoded in an # empty executable. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : lt_aix_libpath_sed=' /Import File Strings/,/^$/ { /^0/ { s/^0 *\(.*\)$/\1/ p } }' aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` # Check for a 64-bit object if we didn't find anything. if test -z "$aix_libpath"; then aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` fi fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then $ECHO "X${wl}${allow_undefined_flag}" | $Xsed; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" else if test "$host_cpu" = ia64; then hardcode_libdir_flag_spec='${wl}-R $libdir:/usr/lib:/lib' allow_undefined_flag="-z nodefs" archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols" else # Determine the default libpath from the value encoded in an # empty executable. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : lt_aix_libpath_sed=' /Import File Strings/,/^$/ { /^0/ { s/^0 *\(.*\)$/\1/ p } }' aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` # Check for a 64-bit object if we didn't find anything. 
if test -z "$aix_libpath"; then aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` fi fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" # Warning - without using the other run time loading flags, # -berok will link without error, but may produce a broken library. no_undefined_flag=' ${wl}-bernotok' allow_undefined_flag=' ${wl}-berok' # Exported symbols can be pulled into shared objects from archives whole_archive_flag_spec='$convenience' archive_cmds_need_lc=yes # This is similar to how AIX traditionally builds its shared libraries. archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname' fi fi ;; amigaos*) case $host_cpu in powerpc) # see comment about AmigaOS4 .so support archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' archive_expsym_cmds='' ;; m68k) archive_cmds='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' hardcode_libdir_flag_spec='-L$libdir' hardcode_minus_L=yes ;; esac ;; bsdi[45]*) export_dynamic_flag_spec=-rdynamic ;; cygwin* | mingw* | pw32* | cegcc*) # When not using gcc, we currently assume that we are using # Microsoft Visual C++. # hardcode_libdir_flag_spec is actually meaningless, as there is # no search path for DLLs. hardcode_libdir_flag_spec=' ' allow_undefined_flag=unsupported # Tell ltmain to make .lib files, not .a files. libext=lib # Tell ltmain to make .dll files, not .so files. shrext_cmds=".dll" # FIXME: Setting linknames here is a bad hack. archive_cmds='$CC -o $lib $libobjs $compiler_flags `$ECHO "X$deplibs" | $Xsed -e '\''s/ -lc$//'\''` -link -dll~linknames=' # The linker will automatically build a .lib file if we build a DLL. old_archive_from_new_cmds='true' # FIXME: Should let the user specify the lib program. 
old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' fix_srcfile_path='`cygpath -w "$srcfile"`' enable_shared_with_static_runtimes=yes ;; darwin* | rhapsody*) archive_cmds_need_lc=no hardcode_direct=no hardcode_automatic=yes hardcode_shlibpath_var=unsupported whole_archive_flag_spec='' link_all_deplibs=yes allow_undefined_flag="$_lt_dar_allow_undefined" case $cc_basename in ifort*) _lt_dar_can_shared=yes ;; *) _lt_dar_can_shared=$GCC ;; esac if test "$_lt_dar_can_shared" = "yes"; then output_verbose_link_cmd=echo archive_cmds="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}" module_cmds="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}" archive_expsym_cmds="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}" module_expsym_cmds="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}" else ld_shlibs=no fi ;; dgux*) archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_libdir_flag_spec='-L$libdir' hardcode_shlibpath_var=no ;; freebsd1*) ld_shlibs=no ;; # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor # support. Future versions do this automatically, but an explicit c++rt0.o # does not break anything, and helps significantly (at the cost of a little # extra space). freebsd2.2*) archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o' hardcode_libdir_flag_spec='-R$libdir' hardcode_direct=yes hardcode_shlibpath_var=no ;; # Unfortunately, older versions of FreeBSD 2 do not have this feature. freebsd2*) archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' hardcode_direct=yes hardcode_minus_L=yes hardcode_shlibpath_var=no ;; # FreeBSD 3 and greater uses gcc -shared to do shared libraries. freebsd* | dragonfly*) archive_cmds='$CC -shared -o $lib $libobjs $deplibs $compiler_flags' hardcode_libdir_flag_spec='-R$libdir' hardcode_direct=yes hardcode_shlibpath_var=no ;; hpux9*) if test "$GCC" = yes; then archive_cmds='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' else archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' fi hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' hardcode_libdir_separator=: hardcode_direct=yes # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. 
hardcode_minus_L=yes export_dynamic_flag_spec='${wl}-E' ;; hpux10*) if test "$GCC" = yes -a "$with_gnu_ld" = no; then archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' else archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' fi if test "$with_gnu_ld" = no; then hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' hardcode_libdir_flag_spec_ld='+b $libdir' hardcode_libdir_separator=: hardcode_direct=yes hardcode_direct_absolute=yes export_dynamic_flag_spec='${wl}-E' # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. hardcode_minus_L=yes fi ;; hpux11*) if test "$GCC" = yes -a "$with_gnu_ld" = no; then case $host_cpu in hppa*64*) archive_cmds='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' ;; ia64*) archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' ;; *) archive_cmds='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' ;; esac else case $host_cpu in hppa*64*) archive_cmds='$CC -b ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' ;; ia64*) archive_cmds='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' ;; *) archive_cmds='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' ;; esac fi if test "$with_gnu_ld" = no; then hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' hardcode_libdir_separator=: case $host_cpu in hppa*64*|ia64*) hardcode_direct=no hardcode_shlibpath_var=no ;; *) hardcode_direct=yes hardcode_direct_absolute=yes export_dynamic_flag_spec='${wl}-E' # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. hardcode_minus_L=yes ;; esac fi ;; irix5* | irix6* | nonstopux*) if test "$GCC" = yes; then archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' # Try to use the -exported_symbol ld option, if it does not # work, assume that -exports_file does not work either and # implicitly export all symbols. save_LDFLAGS="$LDFLAGS" LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ int foo(void) {} _ACEOF if ac_fn_c_try_link "$LINENO"; then : archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LDFLAGS="$save_LDFLAGS" else archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib' archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' fi archive_cmds_need_lc='no' hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' hardcode_libdir_separator=: inherit_rpath=yes link_all_deplibs=yes ;; netbsd* | netbsdelf*-gnu) if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' # a.out else archive_cmds='$LD -shared -o $lib $libobjs $deplibs $linker_flags' # ELF fi hardcode_libdir_flag_spec='-R$libdir' hardcode_direct=yes hardcode_shlibpath_var=no ;; newsos6) archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_direct=yes hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' hardcode_libdir_separator=: hardcode_shlibpath_var=no ;; *nto* | *qnx*) ;; openbsd*) if test -f /usr/libexec/ld.so; then hardcode_direct=yes hardcode_shlibpath_var=no hardcode_direct_absolute=yes if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags ${wl}-retain-symbols-file,$export_symbols' hardcode_libdir_flag_spec='${wl}-rpath,$libdir' export_dynamic_flag_spec='${wl}-E' else case $host_os in openbsd[01].* | openbsd2.[0-7] | openbsd2.[0-7].*) archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' hardcode_libdir_flag_spec='-R$libdir' ;; *) archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' hardcode_libdir_flag_spec='${wl}-rpath,$libdir' ;; esac fi else ld_shlibs=no fi ;; os2*) hardcode_libdir_flag_spec='-L$libdir' hardcode_minus_L=yes allow_undefined_flag=unsupported archive_cmds='$ECHO "LIBRARY $libname INITINSTANCE" > $output_objdir/$libname.def~$ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~$ECHO DATA >> $output_objdir/$libname.def~$ECHO " SINGLE NONSHARED" >> $output_objdir/$libname.def~$ECHO EXPORTS >> $output_objdir/$libname.def~emxexp $libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def' old_archive_from_new_cmds='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def' ;; osf3*) if test "$GCC" = yes; then allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*' archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' else allow_undefined_flag=' -expect_unresolved \*' archive_cmds='$CC -shared${allow_undefined_flag} $libobjs 
$deplibs $compiler_flags -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib' fi archive_cmds_need_lc='no' hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' hardcode_libdir_separator=: ;; osf4* | osf5*) # as osf3* with the addition of -msym flag if test "$GCC" = yes; then allow_undefined_flag=' ${wl}-expect_unresolved ${wl}\*' archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' else allow_undefined_flag=' -expect_unresolved \*' archive_cmds='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib' archive_expsym_cmds='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~ $CC -shared${allow_undefined_flag} ${wl}-input ${wl}$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib~$RM $lib.exp' # Both c and cxx compiler support -rpath directly hardcode_libdir_flag_spec='-rpath $libdir' fi archive_cmds_need_lc='no' hardcode_libdir_separator=: ;; solaris*) no_undefined_flag=' -z defs' if test "$GCC" = yes; then wlarc='${wl}' archive_cmds='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' else case `$CC -V 2>&1` in *"Compilers 5.0"*) wlarc='' archive_cmds='$LD -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $linker_flags' archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp' ;; *) wlarc='${wl}' archive_cmds='$CC -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' ;; esac fi hardcode_libdir_flag_spec='-R$libdir' hardcode_shlibpath_var=no case $host_os in solaris2.[0-5] | solaris2.[0-5].*) ;; *) # The compiler driver will combine and reorder linker options, # but understands `-z linker_flag'. GCC discards it without `$wl', # but is careful enough not to reorder. # Supported since Solaris 2.6 (maybe 2.5.1?) if test "$GCC" = yes; then whole_archive_flag_spec='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract' else whole_archive_flag_spec='-z allextract$convenience -z defaultextract' fi ;; esac link_all_deplibs=yes ;; sunos4*) if test "x$host_vendor" = xsequent; then # Use $CC to link under sequent, because it throws in some extra .o # files that make .init and .fini sections work. 
archive_cmds='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags' else archive_cmds='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags' fi hardcode_libdir_flag_spec='-L$libdir' hardcode_direct=yes hardcode_minus_L=yes hardcode_shlibpath_var=no ;; sysv4) case $host_vendor in sni) archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_direct=yes # is this really true??? ;; siemens) ## LD is ld it makes a PLAMLIB ## CC just makes a GrossModule. archive_cmds='$LD -G -o $lib $libobjs $deplibs $linker_flags' reload_cmds='$CC -r -o $output$reload_objs' hardcode_direct=no ;; motorola) archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_direct=no #Motorola manual says yes, but my tests say they lie ;; esac runpath_var='LD_RUN_PATH' hardcode_shlibpath_var=no ;; sysv4.3*) archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_shlibpath_var=no export_dynamic_flag_spec='-Bexport' ;; sysv4*MP*) if test -d /usr/nec; then archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_shlibpath_var=no runpath_var=LD_RUN_PATH hardcode_runpath_var=yes ld_shlibs=yes fi ;; sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7* | sco3.2v5.0.[024]*) no_undefined_flag='${wl}-z,text' archive_cmds_need_lc=no hardcode_shlibpath_var=no runpath_var='LD_RUN_PATH' if test "$GCC" = yes; then archive_cmds='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' else archive_cmds='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' fi ;; sysv5* | sco3.2v5* | sco5v6*) # Note: We can NOT use -z defs as we might desire, because we do not # link with -lc, and that would cause any symbols used from libc to # always be unresolved, which means just about no library would # ever link correctly. If we're not using GNU ld we use -z text # though, which does catch some bad symbols but isn't as heavy-handed # as -z defs. no_undefined_flag='${wl}-z,text' allow_undefined_flag='${wl}-z,nodefs' archive_cmds_need_lc=no hardcode_shlibpath_var=no hardcode_libdir_flag_spec='${wl}-R,$libdir' hardcode_libdir_separator=':' link_all_deplibs=yes export_dynamic_flag_spec='${wl}-Bexport' runpath_var='LD_RUN_PATH' if test "$GCC" = yes; then archive_cmds='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' else archive_cmds='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' fi ;; uts4*) archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_libdir_flag_spec='-L$libdir' hardcode_shlibpath_var=no ;; *) ld_shlibs=no ;; esac if test x$host_vendor = xsni; then case $host in sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*) export_dynamic_flag_spec='${wl}-Blargedynsym' ;; esac fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs" >&5 $as_echo "$ld_shlibs" >&6; } test "$ld_shlibs" = no && can_build_shared=no with_gnu_ld=$with_gnu_ld # # Do we need to explicitly link libc? 
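# The check below automates roughly this manual probe (illustrative only;
# "gcc" and the file names are assumptions, not part of the test itself):
#   echo 'int f(void){return 0;}' > demo.c
#   gcc -fPIC -c demo.c
#   gcc -v -shared demo.o -o libdemo.so 2>&1 | grep ' -lc ' >/dev/null &&
#     echo "driver already passes -lc" || echo "-lc must be added explicitly"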
# case "x$archive_cmds_need_lc" in x|xyes) # Assume -lc should be added archive_cmds_need_lc=yes if test "$enable_shared" = yes && test "$GCC" = yes; then case $archive_cmds in *'~'*) # FIXME: we may have to deal with multi-command sequences. ;; '$CC '*) # Test whether the compiler implicitly links with -lc since on some # systems, -lgcc has to come before -lc. If gcc already passes -lc # to ld, don't add -lc before -lgcc. { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether -lc should be explicitly linked in" >&5 $as_echo_n "checking whether -lc should be explicitly linked in... " >&6; } $RM conftest* echo "$lt_simple_compile_test_code" > conftest.$ac_ext if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } 2>conftest.err; then soname=conftest lib=conftest libobjs=conftest.$ac_objext deplibs= wl=$lt_prog_compiler_wl pic_flag=$lt_prog_compiler_pic compiler_flags=-v linker_flags=-v verstring= output_objdir=. libname=conftest lt_save_allow_undefined_flag=$allow_undefined_flag allow_undefined_flag= if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$archive_cmds 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1\""; } >&5 (eval $archive_cmds 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } then archive_cmds_need_lc=no else archive_cmds_need_lc=yes fi allow_undefined_flag=$lt_save_allow_undefined_flag else cat conftest.err 1>&5 fi $RM conftest* { $as_echo "$as_me:${as_lineno-$LINENO}: result: $archive_cmds_need_lc" >&5 $as_echo "$archive_cmds_need_lc" >&6; } ;; esac fi ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: checking dynamic linker characteristics" >&5 $as_echo_n "checking dynamic linker characteristics... " >&6; } if test "$GCC" = yes; then case $host_os in darwin*) lt_awk_arg="/^libraries:/,/LR/" ;; *) lt_awk_arg="/^libraries:/" ;; esac lt_search_path_spec=`$CC -print-search-dirs | awk $lt_awk_arg | $SED -e "s/^libraries://" -e "s,=/,/,g"` if $ECHO "$lt_search_path_spec" | $GREP ';' >/dev/null ; then # if the path contains ";" then we assume it to be the separator # otherwise default to the standard path separator (i.e. ":") - it is # assumed that no part of a normal pathname contains ";" but that should # okay in the real world where ";" in dirpaths is itself problematic. lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED -e 's/;/ /g'` else lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` fi # Ok, now we have the path, separated by spaces, we can step through it # and add multilib dir if necessary. 
lt_tmp_lt_search_path_spec= lt_multi_os_dir=`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null` for lt_sys_path in $lt_search_path_spec; do if test -d "$lt_sys_path/$lt_multi_os_dir"; then lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path/$lt_multi_os_dir" else test -d "$lt_sys_path" && \ lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path" fi done lt_search_path_spec=`$ECHO $lt_tmp_lt_search_path_spec | awk ' BEGIN {RS=" "; FS="/|\n";} { lt_foo=""; lt_count=0; for (lt_i = NF; lt_i > 0; lt_i--) { if ($lt_i != "" && $lt_i != ".") { if ($lt_i == "..") { lt_count++; } else { if (lt_count == 0) { lt_foo="/" $lt_i lt_foo; } else { lt_count--; } } } } if (lt_foo != "") { lt_freq[lt_foo]++; } if (lt_freq[lt_foo] == 1) { print lt_foo; } }'` sys_lib_search_path_spec=`$ECHO $lt_search_path_spec` else sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib" fi library_names_spec= libname_spec='lib$name' soname_spec= shrext_cmds=".so" postinstall_cmds= postuninstall_cmds= finish_cmds= finish_eval= shlibpath_var= shlibpath_overrides_runpath=unknown version_type=none dynamic_linker="$host_os ld.so" sys_lib_dlsearch_path_spec="/lib /usr/lib" need_lib_prefix=unknown hardcode_into_libs=no # when you set need_version to no, make sure it does not cause -set_version # flags to be left without arguments need_version=unknown case $host_os in aix3*) version_type=linux library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a' shlibpath_var=LIBPATH # AIX 3 has no versioning support, so we append a major version to the name. soname_spec='${libname}${release}${shared_ext}$major' ;; aix[4-9]*) version_type=linux need_lib_prefix=no need_version=no hardcode_into_libs=yes if test "$host_cpu" = ia64; then # AIX 5 supports IA64 library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH else # With GCC up to 2.95.x, collect2 would create an import file # for dependence libraries. The import file would start with # the line `#! .'. This would cause the generated library to # depend on `.', always an invalid library. This was fixed in # development snapshots of GCC prior to 3.0. case $host_os in aix4 | aix4.[01] | aix4.[01].*) if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)' echo ' yes ' echo '#endif'; } | ${CC} -E - | $GREP yes > /dev/null; then : else can_build_shared=no fi ;; esac # AIX (on Power*) has no versioning support, so currently we can not hardcode correct # soname into executable. Probably we can add versioning support to # collect2, so additional links can be useful in future. if test "$aix_use_runtimelinking" = yes; then # If using run time linking (on AIX 4.2 or later) use lib.so # instead of lib.a to let people know that these are not # typical AIX shared libraries. library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' else # We preserve .a as extension for shared libraries through AIX4.2 # and later when we are not doing run time linking. library_names_spec='${libname}${release}.a $libname.a' soname_spec='${libname}${release}${shared_ext}$major' fi shlibpath_var=LIBPATH fi ;; amigaos*) case $host_cpu in powerpc) # Since July 2007 AmigaOS4 officially supports .so libraries. # When compiling the executable, add -use-dynld -Lsobjs: to the compileline. 
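# Illustrative compile line built from the note above (program and library
# names are assumed):
#   gcc main.o -use-dynld -Lsobjs: -lfoo -o myprog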
library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' ;; m68k) library_names_spec='$libname.ixlibrary $libname.a' # Create ${libname}_ixlibrary.a entries in /sys/libs. finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`$ECHO "X$lib" | $Xsed -e '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; test $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' ;; esac ;; beos*) library_names_spec='${libname}${shared_ext}' dynamic_linker="$host_os ld.so" shlibpath_var=LIBRARY_PATH ;; bsdi[45]*) version_type=linux need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir' shlibpath_var=LD_LIBRARY_PATH sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib" sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib" # the default ld.so.conf also contains /usr/contrib/lib and # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow # libtool to hard-code these into programs ;; cygwin* | mingw* | pw32* | cegcc*) version_type=windows shrext_cmds=".dll" need_version=no need_lib_prefix=no case $GCC,$host_os in yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*) library_names_spec='$libname.dll.a' # DLL is installed to $(libdir)/../bin by postinstall_cmds postinstall_cmds='base_file=`basename \${file}`~ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ dldir=$destdir/`dirname \$dlpath`~ test -d \$dldir || mkdir -p \$dldir~ $install_prog $dir/$dlname \$dldir/$dlname~ chmod a+x \$dldir/$dlname~ if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then eval '\''$striplib \$dldir/$dlname'\'' || exit \$?; fi' postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ dlpath=$dir/\$dldll~ $RM \$dlpath' shlibpath_overrides_runpath=yes case $host_os in cygwin*) # Cygwin DLLs use 'cyg' prefix rather than 'lib' soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' sys_lib_search_path_spec="/usr/lib /lib/w32api /lib /usr/local/lib" ;; mingw* | cegcc*) # MinGW DLLs use traditional 'lib' prefix soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' sys_lib_search_path_spec=`$CC -print-search-dirs | $GREP "^libraries:" | $SED -e "s/^libraries://" -e "s,=/,/,g"` if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then # It is most probably a Windows format PATH printed by # mingw gcc, but we are running on Cygwin. Gcc prints its search # path with ; separators, and with drive letters. We can handle the # drive letters (cygwin fileutils understands them), so leave them, # especially as we might pass files found there to a mingw objdump, # which wouldn't understand a cygwinified path. Ahh. 
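# Illustrative of the two forms handled here (the paths are assumed examples):
#   libraries: =c:/mingw/lib;c:/mingw/lib/w32api   (MinGW gcc on Cygwin: ";" and drive letters)
#   libraries: =/usr/lib/gcc:/usr/lib              (native gcc: ":"-separated POSIX paths)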
sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` else sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` fi ;; pw32*) # pw32 DLLs use 'pw' prefix rather than 'lib' library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' ;; esac ;; *) library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' ;; esac dynamic_linker='Win32 ld.exe' # FIXME: first we should search . and the directory the executable is in shlibpath_var=PATH ;; darwin* | rhapsody*) dynamic_linker="$host_os dyld" version_type=darwin need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext' soname_spec='${libname}${release}${major}$shared_ext' shlibpath_overrides_runpath=yes shlibpath_var=DYLD_LIBRARY_PATH shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`' sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/local/lib" sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib' ;; dgux*) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH ;; freebsd1*) dynamic_linker=no ;; freebsd* | dragonfly*) # DragonFly does not have aout. When/if they implement a new # versioning mechanism, adjust this. if test -x /usr/bin/objformat; then objformat=`/usr/bin/objformat` else case $host_os in freebsd[123]*) objformat=aout ;; *) objformat=elf ;; esac fi version_type=freebsd-$objformat case $version_type in freebsd-elf*) library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' need_version=no need_lib_prefix=no ;; freebsd-*) library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix' need_version=yes ;; esac shlibpath_var=LD_LIBRARY_PATH case $host_os in freebsd2*) shlibpath_overrides_runpath=yes ;; freebsd3.[01]* | freebsdelf3.[01]*) shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; freebsd3.[2-9]* | freebsdelf3.[2-9]* | \ freebsd4.[0-5] | freebsdelf4.[0-5] | freebsd4.1.1 | freebsdelf4.1.1) shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; *) # from 4.6 on, and DragonFly shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; esac ;; gnu*) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH hardcode_into_libs=yes ;; hpux9* | hpux10* | hpux11*) # Give a soname corresponding to the major version so that dld.sl refuses to # link against other versions. version_type=sunos need_lib_prefix=no need_version=no case $host_cpu in ia64*) shrext_cmds='.so' hardcode_into_libs=yes dynamic_linker="$host_os dld.so" shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. 
library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' if test "X$HPUX_IA64_MODE" = X32; then sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib" else sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64" fi sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec ;; hppa*64*) shrext_cmds='.sl' hardcode_into_libs=yes dynamic_linker="$host_os dld.sl" shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64" sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec ;; *) shrext_cmds='.sl' dynamic_linker="$host_os dld.sl" shlibpath_var=SHLIB_PATH shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' ;; esac # HP-UX runs *really* slowly unless shared libraries are mode 555. postinstall_cmds='chmod 555 $lib' ;; interix[3-9]*) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; irix5* | irix6* | nonstopux*) case $host_os in nonstopux*) version_type=nonstopux ;; *) if test "$lt_cv_prog_gnu_ld" = yes; then version_type=linux else version_type=irix fi ;; esac need_lib_prefix=no need_version=no soname_spec='${libname}${release}${shared_ext}$major' library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}' case $host_os in irix5* | nonstopux*) libsuff= shlibsuff= ;; *) case $LD in # libtool.m4 will add one of these switches to LD *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ") libsuff= shlibsuff= libmagic=32-bit;; *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ") libsuff=32 shlibsuff=N32 libmagic=N32;; *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ") libsuff=64 shlibsuff=64 libmagic=64-bit;; *) libsuff= shlibsuff= libmagic=never-match;; esac ;; esac shlibpath_var=LD_LIBRARY${shlibsuff}_PATH shlibpath_overrides_runpath=no sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}" sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}" hardcode_into_libs=yes ;; # No shared lib support for Linux oldld, aout, or coff. linux*oldld* | linux*aout* | linux*coff*) dynamic_linker=no ;; # This must be Linux ELF. 
linux* | k*bsd*-gnu | kopensolaris*-gnu) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no # Some binutils ld are patched to set DT_RUNPATH save_LDFLAGS=$LDFLAGS save_libdir=$libdir eval "libdir=/foo; wl=\"$lt_prog_compiler_wl\"; \ LDFLAGS=\"\$LDFLAGS $hardcode_libdir_flag_spec\"" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : if ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null; then : shlibpath_overrides_runpath=yes fi fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LDFLAGS=$save_LDFLAGS libdir=$save_libdir # This implies no fast_install, which is unacceptable. # Some rework will be needed to allow for fast_install # before this can be enabled. hardcode_into_libs=yes # Append ld.so.conf contents to the search path if test -f /etc/ld.so.conf; then lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \$2)); skip = 1; } { if (!skip) print \$0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;/^$/d' | tr '\n' ' '` sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra" fi # We used to test for /lib/ld.so.1 and disable shared libraries on # powerpc, because MkLinux only supported shared libraries with the # GNU dynamic linker. Since this was broken with cross compilers, # most powerpc-linux boxes support dynamic linking these days and # people can always --disable-shared, the test was removed, and we # assume the GNU/Linux dynamic linker is in use. 
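# For reference, whether a linked program ended up with a DT_RPATH or
# DT_RUNPATH entry (the distinction probed above) can be checked on any
# binary with (illustrative; objdump and the file name are assumed):
#   objdump -p ./myprog | grep -E 'RPATH|RUNPATH'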
dynamic_linker='GNU/Linux ld.so' ;; netbsdelf*-gnu) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes dynamic_linker='NetBSD ld.elf_so' ;; netbsd*) version_type=sunos need_lib_prefix=no need_version=no if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' dynamic_linker='NetBSD (a.out) ld.so' else library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' dynamic_linker='NetBSD ld.elf_so' fi shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; newsos6) version_type=linux library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes ;; *nto* | *qnx*) version_type=qnx need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes dynamic_linker='ldqnx.so' ;; openbsd*) version_type=sunos sys_lib_dlsearch_path_spec="/usr/lib" need_lib_prefix=no # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs. case $host_os in openbsd3.3 | openbsd3.3.*) need_version=yes ;; *) need_version=no ;; esac library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' shlibpath_var=LD_LIBRARY_PATH if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then case $host_os in openbsd2.[89] | openbsd2.[89].*) shlibpath_overrides_runpath=no ;; *) shlibpath_overrides_runpath=yes ;; esac else shlibpath_overrides_runpath=yes fi ;; os2*) libname_spec='$name' shrext_cmds=".dll" need_lib_prefix=no library_names_spec='$libname${shared_ext} $libname.a' dynamic_linker='OS/2 ld.exe' shlibpath_var=LIBPATH ;; osf3* | osf4* | osf5*) version_type=osf need_lib_prefix=no need_version=no soname_spec='${libname}${release}${shared_ext}$major' library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib" sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec" ;; rdos*) dynamic_linker=no ;; solaris*) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes # ldd complains unless libraries are executable postinstall_cmds='chmod +x $lib' ;; sunos4*) version_type=sunos library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' 
finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes if test "$with_gnu_ld" = yes; then need_lib_prefix=no fi need_version=yes ;; sysv4 | sysv4.3*) version_type=linux library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH case $host_vendor in sni) shlibpath_overrides_runpath=no need_lib_prefix=no runpath_var=LD_RUN_PATH ;; siemens) need_lib_prefix=no ;; motorola) need_lib_prefix=no need_version=no shlibpath_overrides_runpath=no sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib' ;; esac ;; sysv4*MP*) if test -d /usr/nec ;then version_type=linux library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}' soname_spec='$libname${shared_ext}.$major' shlibpath_var=LD_LIBRARY_PATH fi ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) version_type=freebsd-elf need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes if test "$with_gnu_ld" = yes; then sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib' else sys_lib_search_path_spec='/usr/ccs/lib /usr/lib' case $host_os in sco3.2v5*) sys_lib_search_path_spec="$sys_lib_search_path_spec /lib" ;; esac fi sys_lib_dlsearch_path_spec='/usr/lib' ;; tpf*) # TPF is a cross-target only. Preferred cross-host = GNU/Linux. version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; uts4*) version_type=linux library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH ;; *) dynamic_linker=no ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: result: $dynamic_linker" >&5 $as_echo "$dynamic_linker" >&6; } test "$dynamic_linker" = no && can_build_shared=no variables_saved_for_relink="PATH $shlibpath_var $runpath_var" if test "$GCC" = yes; then variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH" fi if test "${lt_cv_sys_lib_search_path_spec+set}" = set; then sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec" fi if test "${lt_cv_sys_lib_dlsearch_path_spec+set}" = set; then sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec" fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to hardcode library paths into programs" >&5 $as_echo_n "checking how to hardcode library paths into programs... " >&6; } hardcode_action= if test -n "$hardcode_libdir_flag_spec" || test -n "$runpath_var" || test "X$hardcode_automatic" = "Xyes" ; then # We can hardcode non-existent directories. 
if test "$hardcode_direct" != no && # If the only mechanism to avoid hardcoding is shlibpath_var, we # have to relink, otherwise we might link with an installed library # when we should be linking with a yet-to-be-installed one ## test "$_LT_TAGVAR(hardcode_shlibpath_var, )" != no && test "$hardcode_minus_L" != no; then # Linking always hardcodes the temporary library directory. hardcode_action=relink else # We can link without hardcoding, and we can hardcode nonexisting dirs. hardcode_action=immediate fi else # We cannot hardcode anything, or else we can only hardcode existing # directories. hardcode_action=unsupported fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hardcode_action" >&5 $as_echo "$hardcode_action" >&6; } if test "$hardcode_action" = relink || test "$inherit_rpath" = yes; then # Fast installation is not supported enable_fast_install=no elif test "$shlibpath_overrides_runpath" = yes || test "$enable_shared" = no; then # Fast installation is not necessary enable_fast_install=needless fi if test "x$enable_dlopen" != xyes; then enable_dlopen=unknown enable_dlopen_self=unknown enable_dlopen_self_static=unknown else lt_cv_dlopen=no lt_cv_dlopen_libs= case $host_os in beos*) lt_cv_dlopen="load_add_on" lt_cv_dlopen_libs= lt_cv_dlopen_self=yes ;; mingw* | pw32* | cegcc*) lt_cv_dlopen="LoadLibrary" lt_cv_dlopen_libs= ;; cygwin*) lt_cv_dlopen="dlopen" lt_cv_dlopen_libs= ;; darwin*) # if libdl is installed we need to link against it { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5 $as_echo_n "checking for dlopen in -ldl... " >&6; } if test "${ac_cv_lib_dl_dlopen+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldl $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char dlopen (); int main () { return dlopen (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_dl_dlopen=yes else ac_cv_lib_dl_dlopen=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5 $as_echo "$ac_cv_lib_dl_dlopen" >&6; } if test "x$ac_cv_lib_dl_dlopen" = x""yes; then : lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl" else lt_cv_dlopen="dyld" lt_cv_dlopen_libs= lt_cv_dlopen_self=yes fi ;; *) ac_fn_c_check_func "$LINENO" "shl_load" "ac_cv_func_shl_load" if test "x$ac_cv_func_shl_load" = x""yes; then : lt_cv_dlopen="shl_load" else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for shl_load in -ldld" >&5 $as_echo_n "checking for shl_load in -ldld... " >&6; } if test "${ac_cv_lib_dld_shl_load+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldld $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char shl_load (); int main () { return shl_load (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_dld_shl_load=yes else ac_cv_lib_dld_shl_load=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_shl_load" >&5 $as_echo "$ac_cv_lib_dld_shl_load" >&6; } if test "x$ac_cv_lib_dld_shl_load" = x""yes; then : lt_cv_dlopen="shl_load" lt_cv_dlopen_libs="-ldld" else ac_fn_c_check_func "$LINENO" "dlopen" "ac_cv_func_dlopen" if test "x$ac_cv_func_dlopen" = x""yes; then : lt_cv_dlopen="dlopen" else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5 $as_echo_n "checking for dlopen in -ldl... " >&6; } if test "${ac_cv_lib_dl_dlopen+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldl $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char dlopen (); int main () { return dlopen (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_dl_dlopen=yes else ac_cv_lib_dl_dlopen=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5 $as_echo "$ac_cv_lib_dl_dlopen" >&6; } if test "x$ac_cv_lib_dl_dlopen" = x""yes; then : lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl" else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -lsvld" >&5 $as_echo_n "checking for dlopen in -lsvld... " >&6; } if test "${ac_cv_lib_svld_dlopen+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lsvld $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char dlopen (); int main () { return dlopen (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_svld_dlopen=yes else ac_cv_lib_svld_dlopen=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_svld_dlopen" >&5 $as_echo "$ac_cv_lib_svld_dlopen" >&6; } if test "x$ac_cv_lib_svld_dlopen" = x""yes; then : lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-lsvld" else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dld_link in -ldld" >&5 $as_echo_n "checking for dld_link in -ldld... " >&6; } if test "${ac_cv_lib_dld_dld_link+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldld $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char dld_link (); int main () { return dld_link (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_dld_dld_link=yes else ac_cv_lib_dld_dld_link=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_dld_link" >&5 $as_echo "$ac_cv_lib_dld_dld_link" >&6; } if test "x$ac_cv_lib_dld_dld_link" = x""yes; then : lt_cv_dlopen="dld_link" lt_cv_dlopen_libs="-ldld" fi fi fi fi fi fi ;; esac if test "x$lt_cv_dlopen" != xno; then enable_dlopen=yes else enable_dlopen=no fi case $lt_cv_dlopen in dlopen) save_CPPFLAGS="$CPPFLAGS" test "x$ac_cv_header_dlfcn_h" = xyes && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H" save_LDFLAGS="$LDFLAGS" wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\" save_LIBS="$LIBS" LIBS="$lt_cv_dlopen_libs $LIBS" { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a program can dlopen itself" >&5 $as_echo_n "checking whether a program can dlopen itself... " >&6; } if test "${lt_cv_dlopen_self+set}" = set; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : lt_cv_dlopen_self=cross else lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 lt_status=$lt_dlunknown cat > conftest.$ac_ext <<_LT_EOF #line 15245 "configure" #include "confdefs.h" #if HAVE_DLFCN_H #include #endif #include #ifdef RTLD_GLOBAL # define LT_DLGLOBAL RTLD_GLOBAL #else # ifdef DL_GLOBAL # define LT_DLGLOBAL DL_GLOBAL # else # define LT_DLGLOBAL 0 # endif #endif /* We may have to define LT_DLLAZY_OR_NOW in the command line if we find out it does not work in some platform. */ #ifndef LT_DLLAZY_OR_NOW # ifdef RTLD_LAZY # define LT_DLLAZY_OR_NOW RTLD_LAZY # else # ifdef DL_LAZY # define LT_DLLAZY_OR_NOW DL_LAZY # else # ifdef RTLD_NOW # define LT_DLLAZY_OR_NOW RTLD_NOW # else # ifdef DL_NOW # define LT_DLLAZY_OR_NOW DL_NOW # else # define LT_DLLAZY_OR_NOW 0 # endif # endif # endif # endif #endif void fnord() { int i=42;} int main () { void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); int status = $lt_dlunknown; if (self) { if (dlsym (self,"fnord")) status = $lt_dlno_uscore; else if (dlsym( self,"_fnord")) status = $lt_dlneed_uscore; /* dlclose (self); */ } else puts (dlerror ()); return status; } _LT_EOF if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 (eval $ac_link) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && test -s conftest${ac_exeext} 2>/dev/null; then (./conftest; exit; ) >&5 2>/dev/null lt_status=$? case x$lt_status in x$lt_dlno_uscore) lt_cv_dlopen_self=yes ;; x$lt_dlneed_uscore) lt_cv_dlopen_self=yes ;; x$lt_dlunknown|x*) lt_cv_dlopen_self=no ;; esac else : # compilation failed lt_cv_dlopen_self=no fi fi rm -fr conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_dlopen_self" >&5 $as_echo "$lt_cv_dlopen_self" >&6; } if test "x$lt_cv_dlopen_self" = xyes; then wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $lt_prog_compiler_static\" { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a statically linked program can dlopen itself" >&5 $as_echo_n "checking whether a statically linked program can dlopen itself... 
" >&6; } if test "${lt_cv_dlopen_self_static+set}" = set; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : lt_cv_dlopen_self_static=cross else lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 lt_status=$lt_dlunknown cat > conftest.$ac_ext <<_LT_EOF #line 15341 "configure" #include "confdefs.h" #if HAVE_DLFCN_H #include #endif #include #ifdef RTLD_GLOBAL # define LT_DLGLOBAL RTLD_GLOBAL #else # ifdef DL_GLOBAL # define LT_DLGLOBAL DL_GLOBAL # else # define LT_DLGLOBAL 0 # endif #endif /* We may have to define LT_DLLAZY_OR_NOW in the command line if we find out it does not work in some platform. */ #ifndef LT_DLLAZY_OR_NOW # ifdef RTLD_LAZY # define LT_DLLAZY_OR_NOW RTLD_LAZY # else # ifdef DL_LAZY # define LT_DLLAZY_OR_NOW DL_LAZY # else # ifdef RTLD_NOW # define LT_DLLAZY_OR_NOW RTLD_NOW # else # ifdef DL_NOW # define LT_DLLAZY_OR_NOW DL_NOW # else # define LT_DLLAZY_OR_NOW 0 # endif # endif # endif # endif #endif void fnord() { int i=42;} int main () { void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); int status = $lt_dlunknown; if (self) { if (dlsym (self,"fnord")) status = $lt_dlno_uscore; else if (dlsym( self,"_fnord")) status = $lt_dlneed_uscore; /* dlclose (self); */ } else puts (dlerror ()); return status; } _LT_EOF if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 (eval $ac_link) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && test -s conftest${ac_exeext} 2>/dev/null; then (./conftest; exit; ) >&5 2>/dev/null lt_status=$? case x$lt_status in x$lt_dlno_uscore) lt_cv_dlopen_self_static=yes ;; x$lt_dlneed_uscore) lt_cv_dlopen_self_static=yes ;; x$lt_dlunknown|x*) lt_cv_dlopen_self_static=no ;; esac else : # compilation failed lt_cv_dlopen_self_static=no fi fi rm -fr conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_dlopen_self_static" >&5 $as_echo "$lt_cv_dlopen_self_static" >&6; } fi CPPFLAGS="$save_CPPFLAGS" LDFLAGS="$save_LDFLAGS" LIBS="$save_LIBS" ;; esac case $lt_cv_dlopen_self in yes|no) enable_dlopen_self=$lt_cv_dlopen_self ;; *) enable_dlopen_self=unknown ;; esac case $lt_cv_dlopen_self_static in yes|no) enable_dlopen_self_static=$lt_cv_dlopen_self_static ;; *) enable_dlopen_self_static=unknown ;; esac fi striplib= old_striplib= { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether stripping libraries is possible" >&5 $as_echo_n "checking whether stripping libraries is possible... " >&6; } if test -n "$STRIP" && $STRIP -V 2>&1 | $GREP "GNU strip" >/dev/null; then test -z "$old_striplib" && old_striplib="$STRIP --strip-debug" test -z "$striplib" && striplib="$STRIP --strip-unneeded" { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } else # FIXME - insert some real tests, host_os isn't really good enough case $host_os in darwin*) if test -n "$STRIP" ; then striplib="$STRIP -x" old_striplib="$STRIP -S" { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi ;; *) { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } ;; esac fi # Report which library types will actually be built { $as_echo "$as_me:${as_lineno-$LINENO}: checking if libtool supports shared libraries" >&5 $as_echo_n "checking if libtool supports shared libraries... 
" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: $can_build_shared" >&5 $as_echo "$can_build_shared" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build shared libraries" >&5 $as_echo_n "checking whether to build shared libraries... " >&6; } test "$can_build_shared" = "no" && enable_shared=no # On AIX, shared libraries and static libraries use the same namespace, and # are all built from PIC. case $host_os in aix3*) test "$enable_shared" = yes && enable_static=no if test -n "$RANLIB"; then archive_cmds="$archive_cmds~\$RANLIB \$lib" postinstall_cmds='$RANLIB $lib' fi ;; aix[4-9]*) if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then test "$enable_shared" = yes && enable_static=no fi ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_shared" >&5 $as_echo "$enable_shared" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build static libraries" >&5 $as_echo_n "checking whether to build static libraries... " >&6; } # Make sure either enable_shared or enable_static is yes. test "$enable_shared" = yes || enable_static=yes { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_static" >&5 $as_echo "$enable_static" >&6; } fi ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu CC="$lt_save_CC" ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu archive_cmds_need_lc_CXX=no allow_undefined_flag_CXX= always_export_symbols_CXX=no archive_expsym_cmds_CXX= compiler_needs_object_CXX=no export_dynamic_flag_spec_CXX= hardcode_direct_CXX=no hardcode_direct_absolute_CXX=no hardcode_libdir_flag_spec_CXX= hardcode_libdir_flag_spec_ld_CXX= hardcode_libdir_separator_CXX= hardcode_minus_L_CXX=no hardcode_shlibpath_var_CXX=unsupported hardcode_automatic_CXX=no inherit_rpath_CXX=no module_cmds_CXX= module_expsym_cmds_CXX= link_all_deplibs_CXX=unknown old_archive_cmds_CXX=$old_archive_cmds no_undefined_flag_CXX= whole_archive_flag_spec_CXX= enable_shared_with_static_runtimes_CXX=no # Source file extension for C++ test sources. ac_ext=cpp # Object file extension for compiled C++ test sources. objext=o objext_CXX=$objext # No sense in running all these tests if we already determined that # the CXX compiler isn't working. Some variables (like enable_shared) # are currently assumed to apply to all compilers on this platform, # and will be corrupted by setting them based on a non-working compiler. if test "$_lt_caught_CXX_error" != yes; then # Code to be used in simple compile tests lt_simple_compile_test_code="int some_variable = 0;" # Code to be used in simple link tests lt_simple_link_test_code='int main(int, char *[]) { return(0); }' # ltmain only uses $CC for tagged configurations so make sure $CC is set. # If no C compiler was specified, use CC. LTCC=${LTCC-"$CC"} # If no C compiler flags were specified, use CFLAGS. LTCFLAGS=${LTCFLAGS-"$CFLAGS"} # Allow CC to be a program name with arguments. 
compiler=$CC # save warnings/boilerplate of simple test code ac_outfile=conftest.$ac_objext echo "$lt_simple_compile_test_code" >conftest.$ac_ext eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err _lt_compiler_boilerplate=`cat conftest.err` $RM conftest* ac_outfile=conftest.$ac_objext echo "$lt_simple_link_test_code" >conftest.$ac_ext eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err _lt_linker_boilerplate=`cat conftest.err` $RM -r conftest* # Allow CC to be a program name with arguments. lt_save_CC=$CC lt_save_LD=$LD lt_save_GCC=$GCC GCC=$GXX lt_save_with_gnu_ld=$with_gnu_ld lt_save_path_LD=$lt_cv_path_LD if test -n "${lt_cv_prog_gnu_ldcxx+set}"; then lt_cv_prog_gnu_ld=$lt_cv_prog_gnu_ldcxx else $as_unset lt_cv_prog_gnu_ld fi if test -n "${lt_cv_path_LDCXX+set}"; then lt_cv_path_LD=$lt_cv_path_LDCXX else $as_unset lt_cv_path_LD fi test -z "${LDCXX+set}" || LD=$LDCXX CC=${CXX-"c++"} compiler=$CC compiler_CXX=$CC for cc_temp in $compiler""; do case $cc_temp in compile | *[\\/]compile | ccache | *[\\/]ccache ) ;; distcc | *[\\/]distcc | purify | *[\\/]purify ) ;; \-*) ;; *) break;; esac done cc_basename=`$ECHO "X$cc_temp" | $Xsed -e 's%.*/%%' -e "s%^$host_alias-%%"` if test -n "$compiler"; then # We don't want -fno-exception when compiling C++ code, so set the # no_builtin_flag separately if test "$GXX" = yes; then lt_prog_compiler_no_builtin_flag_CXX=' -fno-builtin' else lt_prog_compiler_no_builtin_flag_CXX= fi if test "$GXX" = yes; then # Set up default GNU C++ configuration # Check whether --with-gnu-ld was given. if test "${with_gnu_ld+set}" = set; then : withval=$with_gnu_ld; test "$withval" = no || with_gnu_ld=yes else with_gnu_ld=no fi ac_prog=ld if test "$GCC" = yes; then # Check if gcc -print-prog-name=ld gives a path. { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ld used by $CC" >&5 $as_echo_n "checking for ld used by $CC... " >&6; } case $host in *-*-mingw*) # gcc leaves a trailing carriage return which upsets mingw ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;; *) ac_prog=`($CC -print-prog-name=ld) 2>&5` ;; esac case $ac_prog in # Accept absolute paths. [\\/]* | ?:[\\/]*) re_direlt='/[^/][^/]*/\.\./' # Canonicalize the pathname of ld ac_prog=`$ECHO "$ac_prog"| $SED 's%\\\\%/%g'` while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"` done test -z "$LD" && LD="$ac_prog" ;; "") # If it fails, then pretend we aren't using GCC. ac_prog=ld ;; *) # If it is relative, then search for the first ld in PATH. with_gnu_ld=unknown ;; esac elif test "$with_gnu_ld" = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU ld" >&5 $as_echo_n "checking for GNU ld... " >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for non-GNU ld" >&5 $as_echo_n "checking for non-GNU ld... " >&6; } fi if test "${lt_cv_path_LD+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -z "$LD"; then lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR for ac_dir in $PATH; do IFS="$lt_save_ifs" test -z "$ac_dir" && ac_dir=. if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then lt_cv_path_LD="$ac_dir/$ac_prog" # Check to see if the program is GNU ld. I'd rather use --version, # but apparently some variants of GNU ld only accept -v. # Break only if it was the GNU/non-GNU ld that we prefer. case `"$lt_cv_path_LD" -v 2>&1 &5 $as_echo "$LD" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -z "$LD" && as_fn_error $? 
"no acceptable ld found in \$PATH" "$LINENO" 5 { $as_echo "$as_me:${as_lineno-$LINENO}: checking if the linker ($LD) is GNU ld" >&5 $as_echo_n "checking if the linker ($LD) is GNU ld... " >&6; } if test "${lt_cv_prog_gnu_ld+set}" = set; then : $as_echo_n "(cached) " >&6 else # I'd rather use --version here, but apparently some GNU lds only accept -v. case `$LD -v 2>&1 &5 $as_echo "$lt_cv_prog_gnu_ld" >&6; } with_gnu_ld=$lt_cv_prog_gnu_ld # Check if GNU C++ uses GNU ld as the underlying linker, since the # archiving commands below assume that GNU ld is being used. if test "$with_gnu_ld" = yes; then archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib' archive_expsym_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' hardcode_libdir_flag_spec_CXX='${wl}-rpath ${wl}$libdir' export_dynamic_flag_spec_CXX='${wl}--export-dynamic' # If archive_cmds runs LD, not CC, wlarc should be empty # XXX I think wlarc can be eliminated in ltcf-cxx, but I need to # investigate it a little bit more. (MM) wlarc='${wl}' # ancient GNU ld didn't support --whole-archive et. al. if eval "`$CC -print-prog-name=ld` --help 2>&1" | $GREP 'no-whole-archive' > /dev/null; then whole_archive_flag_spec_CXX="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' else whole_archive_flag_spec_CXX= fi else with_gnu_ld=no wlarc= # A generic and very simple default shared library creation # command for GNU C++ for the case where it uses the native # linker, instead of GNU ld. If possible, this setting should # overridden to take advantage of the native linker features on # the platform it is being used on. archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib' fi # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "\-L"' else GXX=no with_gnu_ld=no wlarc= fi # PORTME: fill in a description of your system's C++ link characteristics { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $compiler linker ($LD) supports shared libraries" >&5 $as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... " >&6; } ld_shlibs_CXX=yes case $host_os in aix3*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; aix[4-9]*) if test "$host_cpu" = ia64; then # On IA64, the linker does run time linking by default, so we don't # have to do anything special. aix_use_runtimelinking=no exp_sym_flag='-Bexport' no_entry_flag="" else aix_use_runtimelinking=no # Test if we are trying to use run time linking or normal # AIX style linking. If -brtl is somewhere in LDFLAGS, we # need to do runtime linking. case $host_os in aix4.[23]|aix4.[23].*|aix[5-9]*) for ld_flag in $LDFLAGS; do case $ld_flag in *-brtl*) aix_use_runtimelinking=yes break ;; esac done ;; esac exp_sym_flag='-bexport' no_entry_flag='-bnoentry' fi # When large executables or shared objects are built, AIX ld can # have problems creating the table of contents. If linking a library # or program results in "error TOC overflow" add -mminimal-toc to # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS. 
archive_cmds_CXX='' hardcode_direct_CXX=yes hardcode_direct_absolute_CXX=yes hardcode_libdir_separator_CXX=':' link_all_deplibs_CXX=yes file_list_spec_CXX='${wl}-f,' if test "$GXX" = yes; then case $host_os in aix4.[012]|aix4.[012].*) # We only want to do this on AIX 4.2 and lower, the check # below for broken collect2 doesn't work under 4.3+ collect2name=`${CC} -print-prog-name=collect2` if test -f "$collect2name" && strings "$collect2name" | $GREP resolve_lib_name >/dev/null then # We have reworked collect2 : else # We have old collect2 hardcode_direct_CXX=unsupported # It fails to find uninstalled libraries when the uninstalled # path is not listed in the libpath. Setting hardcode_minus_L # to unsupported forces relinking hardcode_minus_L_CXX=yes hardcode_libdir_flag_spec_CXX='-L$libdir' hardcode_libdir_separator_CXX= fi esac shared_flag='-shared' if test "$aix_use_runtimelinking" = yes; then shared_flag="$shared_flag "'${wl}-G' fi else # not using gcc if test "$host_cpu" = ia64; then # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release # chokes on -Wl,-G. The following line is correct: shared_flag='-G' else if test "$aix_use_runtimelinking" = yes; then shared_flag='${wl}-G' else shared_flag='${wl}-bM:SRE' fi fi fi export_dynamic_flag_spec_CXX='${wl}-bexpall' # It seems that -bexpall does not export symbols beginning with # underscore (_), so it is better to generate a list of symbols to # export. always_export_symbols_CXX=yes if test "$aix_use_runtimelinking" = yes; then # Warning - without using the other runtime loading flags (-brtl), # -berok will link without error, but may produce a broken library. allow_undefined_flag_CXX='-berok' # Determine the default libpath from the value encoded in an empty # executable. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : lt_aix_libpath_sed=' /Import File Strings/,/^$/ { /^0/ { s/^0 *\(.*\)$/\1/ p } }' aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` # Check for a 64-bit object if we didn't find anything. if test -z "$aix_libpath"; then aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` fi fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi hardcode_libdir_flag_spec_CXX='${wl}-blibpath:$libdir:'"$aix_libpath" archive_expsym_cmds_CXX='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then $ECHO "X${wl}${allow_undefined_flag}" | $Xsed; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" else if test "$host_cpu" = ia64; then hardcode_libdir_flag_spec_CXX='${wl}-R $libdir:/usr/lib:/lib' allow_undefined_flag_CXX="-z nodefs" archive_expsym_cmds_CXX="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols" else # Determine the default libpath from the value encoded in an # empty executable. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : lt_aix_libpath_sed=' /Import File Strings/,/^$/ { /^0/ { s/^0 *\(.*\)$/\1/ p } }' aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` # Check for a 64-bit object if we didn't find anything. 
if test -z "$aix_libpath"; then aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` fi fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi hardcode_libdir_flag_spec_CXX='${wl}-blibpath:$libdir:'"$aix_libpath" # Warning - without using the other run time loading flags, # -berok will link without error, but may produce a broken library. no_undefined_flag_CXX=' ${wl}-bernotok' allow_undefined_flag_CXX=' ${wl}-berok' # Exported symbols can be pulled into shared objects from archives whole_archive_flag_spec_CXX='$convenience' archive_cmds_need_lc_CXX=yes # This is similar to how AIX traditionally builds its shared # libraries. archive_expsym_cmds_CXX="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname' fi fi ;; beos*) if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then allow_undefined_flag_CXX=unsupported # Joseph Beckenbach says some releases of gcc # support --undefined. This deserves some investigation. FIXME archive_cmds_CXX='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' else ld_shlibs_CXX=no fi ;; chorus*) case $cc_basename in *) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; esac ;; cygwin* | mingw* | pw32* | cegcc*) # _LT_TAGVAR(hardcode_libdir_flag_spec, CXX) is actually meaningless, # as there is no search path for DLLs. hardcode_libdir_flag_spec_CXX='-L$libdir' allow_undefined_flag_CXX=unsupported always_export_symbols_CXX=no enable_shared_with_static_runtimes_CXX=yes if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' # If the export-symbols file already is a .def file (1st line # is EXPORTS), use it as is; otherwise, prepend... 
archive_expsym_cmds_CXX='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then cp $export_symbols $output_objdir/$soname.def; else echo EXPORTS > $output_objdir/$soname.def; cat $export_symbols >> $output_objdir/$soname.def; fi~ $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' else ld_shlibs_CXX=no fi ;; darwin* | rhapsody*) archive_cmds_need_lc_CXX=no hardcode_direct_CXX=no hardcode_automatic_CXX=yes hardcode_shlibpath_var_CXX=unsupported whole_archive_flag_spec_CXX='' link_all_deplibs_CXX=yes allow_undefined_flag_CXX="$_lt_dar_allow_undefined" case $cc_basename in ifort*) _lt_dar_can_shared=yes ;; *) _lt_dar_can_shared=$GCC ;; esac if test "$_lt_dar_can_shared" = "yes"; then output_verbose_link_cmd=echo archive_cmds_CXX="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}" module_cmds_CXX="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}" archive_expsym_cmds_CXX="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}" module_expsym_cmds_CXX="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}" if test "$lt_cv_apple_cc_single_mod" != "yes"; then archive_cmds_CXX="\$CC -r -keep_private_externs -nostdlib -o \${lib}-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \${lib}-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring${_lt_dsymutil}" archive_expsym_cmds_CXX="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -r -keep_private_externs -nostdlib -o \${lib}-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \${lib}-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring${_lt_dar_export_syms}${_lt_dsymutil}" fi else ld_shlibs_CXX=no fi ;; dgux*) case $cc_basename in ec++*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; ghcx*) # Green Hills C++ Compiler # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; *) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; esac ;; freebsd[12]*) # C++ shared libraries reported to be fairly broken before # switch to ELF ld_shlibs_CXX=no ;; freebsd-elf*) archive_cmds_need_lc_CXX=no ;; freebsd* | dragonfly*) # FreeBSD 3 and later use GNU C++ and GNU ld with standard ELF # conventions ld_shlibs_CXX=yes ;; gnu*) ;; hpux9*) hardcode_libdir_flag_spec_CXX='${wl}+b ${wl}$libdir' hardcode_libdir_separator_CXX=: export_dynamic_flag_spec_CXX='${wl}-E' hardcode_direct_CXX=yes hardcode_minus_L_CXX=yes # Not in the search PATH, # but as the default # location of the library. 
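# The case statement below picks the HP-UX 9 link commands per C++
# compiler: HP aCC builds the shared library with "$CC -b +b $install_libdir"
# and then renames it to its final name, g++ uses "-shared -nostdlib -fPIC"
# with the same +b run-path flag, and the bundled HP CC is left
# unsupported (ld_shlibs_CXX=no).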
case $cc_basename in CC*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; aCC*) archive_cmds_CXX='$RM $output_objdir/$soname~$CC -b ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $EGREP "\-L"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; $ECHO "X$list" | $Xsed' ;; *) if test "$GXX" = yes; then archive_cmds_CXX='$RM $output_objdir/$soname~$CC -shared -nostdlib -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' else # FIXME: insert proper C++ library support ld_shlibs_CXX=no fi ;; esac ;; hpux10*|hpux11*) if test $with_gnu_ld = no; then hardcode_libdir_flag_spec_CXX='${wl}+b ${wl}$libdir' hardcode_libdir_separator_CXX=: case $host_cpu in hppa*64*|ia64*) ;; *) export_dynamic_flag_spec_CXX='${wl}-E' ;; esac fi case $host_cpu in hppa*64*|ia64*) hardcode_direct_CXX=no hardcode_shlibpath_var_CXX=no ;; *) hardcode_direct_CXX=yes hardcode_direct_absolute_CXX=yes hardcode_minus_L_CXX=yes # Not in the search PATH, # but as the default # location of the library. ;; esac case $cc_basename in CC*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; aCC*) case $host_cpu in hppa*64*) archive_cmds_CXX='$CC -b ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; ia64*) archive_cmds_CXX='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; *) archive_cmds_CXX='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; esac # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. 
output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $GREP "\-L"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; $ECHO "X$list" | $Xsed' ;; *) if test "$GXX" = yes; then if test $with_gnu_ld = no; then case $host_cpu in hppa*64*) archive_cmds_CXX='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; ia64*) archive_cmds_CXX='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; *) archive_cmds_CXX='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; esac fi else # FIXME: insert proper C++ library support ld_shlibs_CXX=no fi ;; esac ;; interix[3-9]*) hardcode_direct_CXX=no hardcode_shlibpath_var_CXX=no hardcode_libdir_flag_spec_CXX='${wl}-rpath,$libdir' export_dynamic_flag_spec_CXX='${wl}-E' # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc. # Instead, shared libraries are loaded at an image base (0x10000000 by # default) and relocated if they conflict, which is a slow very memory # consuming and fragmenting process. To avoid this, we pick a random, # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link # time. Moving up from 0x10000000 also allows more sbrk(2) space. archive_cmds_CXX='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' archive_expsym_cmds_CXX='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' ;; irix5* | irix6*) case $cc_basename in CC*) # SGI C++ archive_cmds_CXX='$CC -shared -all -multigot $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib' # Archives containing C++ object files must be created using # "CC -ar", where "CC" is the IRIX C++ compiler. This is # necessary to make sure instantiated templates are included # in the archive. old_archive_cmds_CXX='$CC -ar -WR,-u -o $oldlib $oldobjs' ;; *) if test "$GXX" = yes; then if test "$with_gnu_ld" = no; then archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' else archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` -o $lib' fi fi link_all_deplibs_CXX=yes ;; esac hardcode_libdir_flag_spec_CXX='${wl}-rpath ${wl}$libdir' hardcode_libdir_separator_CXX=: inherit_rpath_CXX=yes ;; linux* | k*bsd*-gnu | kopensolaris*-gnu) case $cc_basename in KCC*) # Kuck and Associates, Inc. (KAI) C++ Compiler # KCC will only create a shared library if the output file # ends with ".so" (or ".sl" for HP-UX), so rename the library # to its proper name (with version) after linking. 
archive_cmds_CXX='tempext=`echo $shared_ext | $SED -e '\''s/\([^()0-9A-Za-z{}]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib' archive_expsym_cmds_CXX='tempext=`echo $shared_ext | $SED -e '\''s/\([^()0-9A-Za-z{}]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib ${wl}-retain-symbols-file,$export_symbols; mv \$templib $lib' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. output_verbose_link_cmd='templist=`$CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1 | $GREP "ld"`; rm -f libconftest$shared_ext; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; $ECHO "X$list" | $Xsed' hardcode_libdir_flag_spec_CXX='${wl}-rpath,$libdir' export_dynamic_flag_spec_CXX='${wl}--export-dynamic' # Archives containing C++ object files must be created using # "CC -Bstatic", where "CC" is the KAI C++ compiler. old_archive_cmds_CXX='$CC -Bstatic -o $oldlib $oldobjs' ;; icpc* | ecpc* ) # Intel C++ with_gnu_ld=yes # version 8.0 and above of icpc choke on multiply defined symbols # if we add $predep_objects and $postdep_objects, however 7.1 and # earlier do not add the objects themselves. case `$CC -V 2>&1` in *"Version 7."*) archive_cmds_CXX='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib' archive_expsym_cmds_CXX='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ;; *) # Version 8.0 or newer tmp_idyn= case $host_cpu in ia64*) tmp_idyn=' -i_dynamic';; esac archive_cmds_CXX='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' archive_expsym_cmds_CXX='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ;; esac archive_cmds_need_lc_CXX=no hardcode_libdir_flag_spec_CXX='${wl}-rpath,$libdir' export_dynamic_flag_spec_CXX='${wl}--export-dynamic' whole_archive_flag_spec_CXX='${wl}--whole-archive$convenience ${wl}--no-whole-archive' ;; pgCC* | pgcpp*) # Portland Group C++ compiler case `$CC -V` in *pgCC\ [1-5]* | *pgcpp\ [1-5]*) prelink_cmds_CXX='tpldir=Template.dir~ rm -rf $tpldir~ $CC --prelink_objects --instantiation_dir $tpldir $objs $libobjs $compile_deplibs~ compile_command="$compile_command `find $tpldir -name \*.o | $NL2SP`"' old_archive_cmds_CXX='tpldir=Template.dir~ rm -rf $tpldir~ $CC --prelink_objects --instantiation_dir $tpldir $oldobjs$old_deplibs~ $AR $AR_FLAGS $oldlib$oldobjs$old_deplibs `find $tpldir -name \*.o | $NL2SP`~ $RANLIB $oldlib' archive_cmds_CXX='tpldir=Template.dir~ rm -rf $tpldir~ $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o 
$lib' archive_expsym_cmds_CXX='tpldir=Template.dir~ rm -rf $tpldir~ $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib' ;; *) # Version 6 will use weak symbols archive_cmds_CXX='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib' archive_expsym_cmds_CXX='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib' ;; esac hardcode_libdir_flag_spec_CXX='${wl}--rpath ${wl}$libdir' export_dynamic_flag_spec_CXX='${wl}--export-dynamic' whole_archive_flag_spec_CXX='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; $ECHO \"$new_convenience\"` ${wl}--no-whole-archive' ;; cxx*) # Compaq C++ archive_cmds_CXX='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib' archive_expsym_cmds_CXX='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib ${wl}-retain-symbols-file $wl$export_symbols' runpath_var=LD_RUN_PATH hardcode_libdir_flag_spec_CXX='-rpath $libdir' hardcode_libdir_separator_CXX=: # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. 
output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld"`; templist=`$ECHO "X$templist" | $Xsed -e "s/\(^.*ld.*\)\( .*ld .*$\)/\1/"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; $ECHO "X$list" | $Xsed' ;; xl*) # IBM XL 8.0 on PPC, with GNU ld hardcode_libdir_flag_spec_CXX='${wl}-rpath ${wl}$libdir' export_dynamic_flag_spec_CXX='${wl}--export-dynamic' archive_cmds_CXX='$CC -qmkshrobj $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' if test "x$supports_anon_versioning" = xyes; then archive_expsym_cmds_CXX='echo "{ global:" > $output_objdir/$libname.ver~ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ echo "local: *; };" >> $output_objdir/$libname.ver~ $CC -qmkshrobj $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib' fi ;; *) case `$CC -V 2>&1 | sed 5q` in *Sun\ C*) # Sun C++ 5.9 no_undefined_flag_CXX=' -zdefs' archive_cmds_CXX='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' archive_expsym_cmds_CXX='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-retain-symbols-file ${wl}$export_symbols' hardcode_libdir_flag_spec_CXX='-R$libdir' whole_archive_flag_spec_CXX='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; $ECHO \"$new_convenience\"` ${wl}--no-whole-archive' compiler_needs_object_CXX=yes # Not sure whether something based on # $CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1 # would be better. output_verbose_link_cmd='echo' # Archives containing C++ object files must be created using # "CC -xar", where "CC" is the Sun C++ compiler. This is # necessary to make sure instantiated templates are included # in the archive. 
old_archive_cmds_CXX='$CC -xar -o $oldlib $oldobjs' ;; esac ;; esac ;; lynxos*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; m88k*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; mvs*) case $cc_basename in cxx*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; *) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; esac ;; netbsd*) if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then archive_cmds_CXX='$LD -Bshareable -o $lib $predep_objects $libobjs $deplibs $postdep_objects $linker_flags' wlarc= hardcode_libdir_flag_spec_CXX='-R$libdir' hardcode_direct_CXX=yes hardcode_shlibpath_var_CXX=no fi # Workaround some broken pre-1.5 toolchains output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP conftest.$objext | $SED -e "s:-lgcc -lc -lgcc::"' ;; *nto* | *qnx*) ld_shlibs_CXX=yes ;; openbsd2*) # C++ shared libraries are fairly broken ld_shlibs_CXX=no ;; openbsd*) if test -f /usr/libexec/ld.so; then hardcode_direct_CXX=yes hardcode_shlibpath_var_CXX=no hardcode_direct_absolute_CXX=yes archive_cmds_CXX='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib' hardcode_libdir_flag_spec_CXX='${wl}-rpath,$libdir' if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then archive_expsym_cmds_CXX='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-retain-symbols-file,$export_symbols -o $lib' export_dynamic_flag_spec_CXX='${wl}-E' whole_archive_flag_spec_CXX="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' fi output_verbose_link_cmd=echo else ld_shlibs_CXX=no fi ;; osf3* | osf4* | osf5*) case $cc_basename in KCC*) # Kuck and Associates, Inc. (KAI) C++ Compiler # KCC will only create a shared library if the output file # ends with ".so" (or ".sl" for HP-UX), so rename the library # to its proper name (with version) after linking. archive_cmds_CXX='tempext=`echo $shared_ext | $SED -e '\''s/\([^()0-9A-Za-z{}]\)/\\\\\1/g'\''`; templib=`echo "$lib" | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib' hardcode_libdir_flag_spec_CXX='${wl}-rpath,$libdir' hardcode_libdir_separator_CXX=: # Archives containing C++ object files must be created using # the KAI C++ compiler. 
case $host in osf3*) old_archive_cmds_CXX='$CC -Bstatic -o $oldlib $oldobjs' ;; *) old_archive_cmds_CXX='$CC -o $oldlib $oldobjs' ;; esac ;; RCC*) # Rational C++ 2.4.1 # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; cxx*) case $host in osf3*) allow_undefined_flag_CXX=' ${wl}-expect_unresolved ${wl}\*' archive_cmds_CXX='$CC -shared${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $soname `test -n "$verstring" && $ECHO "X${wl}-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib' hardcode_libdir_flag_spec_CXX='${wl}-rpath ${wl}$libdir' ;; *) allow_undefined_flag_CXX=' -expect_unresolved \*' archive_cmds_CXX='$CC -shared${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib' archive_expsym_cmds_CXX='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done~ echo "-hidden">> $lib.exp~ $CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname ${wl}-input ${wl}$lib.exp `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib~ $RM $lib.exp' hardcode_libdir_flag_spec_CXX='-rpath $libdir' ;; esac hardcode_libdir_separator_CXX=: # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld" | $GREP -v "ld:"`; templist=`$ECHO "X$templist" | $Xsed -e "s/\(^.*ld.*\)\( .*ld.*$\)/\1/"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; $ECHO "X$list" | $Xsed' ;; *) if test "$GXX" = yes && test "$with_gnu_ld" = no; then allow_undefined_flag_CXX=' ${wl}-expect_unresolved ${wl}\*' case $host in osf3*) archive_cmds_CXX='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' ;; *) archive_cmds_CXX='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' ;; esac hardcode_libdir_flag_spec_CXX='${wl}-rpath ${wl}$libdir' hardcode_libdir_separator_CXX=: # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. 
output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "\-L"' else # FIXME: insert proper C++ library support ld_shlibs_CXX=no fi ;; esac ;; psos*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; sunos4*) case $cc_basename in CC*) # Sun C++ 4.x # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; lcc*) # Lucid # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; *) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; esac ;; solaris*) case $cc_basename in CC*) # Sun C++ 4.2, 5.x and Centerline C++ archive_cmds_need_lc_CXX=yes no_undefined_flag_CXX=' -zdefs' archive_cmds_CXX='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' archive_expsym_cmds_CXX='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -G${allow_undefined_flag} ${wl}-M ${wl}$lib.exp -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' hardcode_libdir_flag_spec_CXX='-R$libdir' hardcode_shlibpath_var_CXX=no case $host_os in solaris2.[0-5] | solaris2.[0-5].*) ;; *) # The compiler driver will combine and reorder linker options, # but understands `-z linker_flag'. # Supported since Solaris 2.6 (maybe 2.5.1?) whole_archive_flag_spec_CXX='-z allextract$convenience -z defaultextract' ;; esac link_all_deplibs_CXX=yes output_verbose_link_cmd='echo' # Archives containing C++ object files must be created using # "CC -xar", where "CC" is the Sun C++ compiler. This is # necessary to make sure instantiated templates are included # in the archive. old_archive_cmds_CXX='$CC -xar -o $oldlib $oldobjs' ;; gcx*) # Green Hills C++ Compiler archive_cmds_CXX='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' # The C++ compiler must be used to create the archive. old_archive_cmds_CXX='$CC $LDFLAGS -archive -o $oldlib $oldobjs' ;; *) # GNU C++ compiler with Solaris linker if test "$GXX" = yes && test "$with_gnu_ld" = no; then no_undefined_flag_CXX=' ${wl}-z ${wl}defs' if $CC --version | $GREP -v '^2\.7' > /dev/null; then archive_cmds_CXX='$CC -shared -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' archive_expsym_cmds_CXX='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -shared -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "\-L"' else # g++ 2.7 appears to require `-G' NOT `-shared' on this # platform. archive_cmds_CXX='$CC -G -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' archive_expsym_cmds_CXX='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -G -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. 
output_verbose_link_cmd='$CC -G $CFLAGS -v conftest.$objext 2>&1 | $GREP "\-L"' fi hardcode_libdir_flag_spec_CXX='${wl}-R $wl$libdir' case $host_os in solaris2.[0-5] | solaris2.[0-5].*) ;; *) whole_archive_flag_spec_CXX='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract' ;; esac fi ;; esac ;; sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7* | sco3.2v5.0.[024]*) no_undefined_flag_CXX='${wl}-z,text' archive_cmds_need_lc_CXX=no hardcode_shlibpath_var_CXX=no runpath_var='LD_RUN_PATH' case $cc_basename in CC*) archive_cmds_CXX='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds_CXX='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' ;; *) archive_cmds_CXX='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds_CXX='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' ;; esac ;; sysv5* | sco3.2v5* | sco5v6*) # Note: We can NOT use -z defs as we might desire, because we do not # link with -lc, and that would cause any symbols used from libc to # always be unresolved, which means just about no library would # ever link correctly. If we're not using GNU ld we use -z text # though, which does catch some bad symbols but isn't as heavy-handed # as -z defs. no_undefined_flag_CXX='${wl}-z,text' allow_undefined_flag_CXX='${wl}-z,nodefs' archive_cmds_need_lc_CXX=no hardcode_shlibpath_var_CXX=no hardcode_libdir_flag_spec_CXX='${wl}-R,$libdir' hardcode_libdir_separator_CXX=':' link_all_deplibs_CXX=yes export_dynamic_flag_spec_CXX='${wl}-Bexport' runpath_var='LD_RUN_PATH' case $cc_basename in CC*) archive_cmds_CXX='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds_CXX='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' ;; *) archive_cmds_CXX='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds_CXX='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' ;; esac ;; tandem*) case $cc_basename in NCC*) # NonStop-UX NCC 3.20 # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; *) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; esac ;; vxworks*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; *) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs_CXX" >&5 $as_echo "$ld_shlibs_CXX" >&6; } test "$ld_shlibs_CXX" = no && can_build_shared=no GCC_CXX="$GXX" LD_CXX="$LD" ## CAVEAT EMPTOR: ## There is no encapsulation within the following macros, do not change ## the running order or otherwise move them around unless you know exactly ## what you are doing... # Dependencies to place before and after the object being linked: predep_objects_CXX= postdep_objects_CXX= predeps_CXX= postdeps_CXX= compiler_lib_search_path_CXX= cat > conftest.$ac_ext <<_LT_EOF class Foo { public: Foo (void) { a = 0; } private: int a; }; _LT_EOF if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then # Parse the compiler output and extract the necessary # objects, libraries and library flags. # Sentinel used to keep track of whether or not we are before # the conftest object file. 
pre_test_object_deps_done=no for p in `eval "$output_verbose_link_cmd"`; do case $p in -L* | -R* | -l*) # Some compilers place space between "-{L,R}" and the path. # Remove the space. if test $p = "-L" || test $p = "-R"; then prev=$p continue else prev= fi if test "$pre_test_object_deps_done" = no; then case $p in -L* | -R*) # Internal compiler library paths should come after those # provided the user. The postdeps already come after the # user supplied libs so there is no need to process them. if test -z "$compiler_lib_search_path_CXX"; then compiler_lib_search_path_CXX="${prev}${p}" else compiler_lib_search_path_CXX="${compiler_lib_search_path_CXX} ${prev}${p}" fi ;; # The "-l" case would never come before the object being # linked, so don't bother handling this case. esac else if test -z "$postdeps_CXX"; then postdeps_CXX="${prev}${p}" else postdeps_CXX="${postdeps_CXX} ${prev}${p}" fi fi ;; *.$objext) # This assumes that the test object file only shows up # once in the compiler output. if test "$p" = "conftest.$objext"; then pre_test_object_deps_done=yes continue fi if test "$pre_test_object_deps_done" = no; then if test -z "$predep_objects_CXX"; then predep_objects_CXX="$p" else predep_objects_CXX="$predep_objects_CXX $p" fi else if test -z "$postdep_objects_CXX"; then postdep_objects_CXX="$p" else postdep_objects_CXX="$postdep_objects_CXX $p" fi fi ;; *) ;; # Ignore the rest. esac done # Clean up. rm -f a.out a.exe else echo "libtool.m4: error: problem compiling CXX test program" fi $RM -f confest.$objext # PORTME: override above test on systems where it is broken case $host_os in interix[3-9]*) # Interix 3.5 installs completely hosed .la files for C++, so rather than # hack all around it, let's just trust "g++" to DTRT. predep_objects_CXX= postdep_objects_CXX= postdeps_CXX= ;; linux*) case `$CC -V 2>&1 | sed 5q` in *Sun\ C*) # Sun C++ 5.9 # The more standards-conforming stlport4 library is # incompatible with the Cstd library. Avoid specifying # it if it's in CXXFLAGS. Ignore libCrun as # -library=stlport4 depends on it. case " $CXX $CXXFLAGS " in *" -library=stlport4 "*) solaris_use_stlport4=yes ;; esac if test "$solaris_use_stlport4" != yes; then postdeps_CXX='-library=Cstd -library=Crun' fi ;; esac ;; solaris*) case $cc_basename in CC*) # The more standards-conforming stlport4 library is # incompatible with the Cstd library. Avoid specifying # it if it's in CXXFLAGS. Ignore libCrun as # -library=stlport4 depends on it. case " $CXX $CXXFLAGS " in *" -library=stlport4 "*) solaris_use_stlport4=yes ;; esac # Adding this requires a known-good setup of shared libraries for # Sun compiler versions before 5.6, else PIC objects from an old # archive will be linked into the output, leading to subtle bugs. if test "$solaris_use_stlport4" != yes; then postdeps_CXX='-library=Cstd -library=Crun' fi ;; esac ;; esac case " $postdeps_CXX " in *" -lc "*) archive_cmds_need_lc_CXX=no ;; esac compiler_lib_search_dirs_CXX= if test -n "${compiler_lib_search_path_CXX}"; then compiler_lib_search_dirs_CXX=`echo " ${compiler_lib_search_path_CXX}" | ${SED} -e 's! -L! !g' -e 's!^ !!'` fi lt_prog_compiler_wl_CXX= lt_prog_compiler_pic_CXX= lt_prog_compiler_static_CXX= { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 $as_echo_n "checking for $compiler option to produce PIC... " >&6; } # C++ specific cases for pic, static, wl, etc. if test "$GXX" = yes; then lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_static_CXX='-static' case $host_os in aix*) # All AIX code is PIC. 
if test "$host_cpu" = ia64; then # AIX 5 now supports IA64 processor lt_prog_compiler_static_CXX='-Bstatic' fi ;; amigaos*) case $host_cpu in powerpc) # see comment about AmigaOS4 .so support lt_prog_compiler_pic_CXX='-fPIC' ;; m68k) # FIXME: we need at least 68020 code to build shared libraries, but # adding the `-m68020' flag to GCC prevents building anything better, # like `-m68040'. lt_prog_compiler_pic_CXX='-m68020 -resident32 -malways-restore-a4' ;; esac ;; beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*) # PIC is the default for these OSes. ;; mingw* | cygwin* | os2* | pw32* | cegcc*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). # Although the cygwin gcc ignores -fPIC, still need this for old-style # (--disable-auto-import) libraries lt_prog_compiler_pic_CXX='-DDLL_EXPORT' ;; darwin* | rhapsody*) # PIC is the default on this platform # Common symbols not allowed in MH_DYLIB files lt_prog_compiler_pic_CXX='-fno-common' ;; *djgpp*) # DJGPP does not support shared libraries at all lt_prog_compiler_pic_CXX= ;; interix[3-9]*) # Interix 3.x gcc -fpic/-fPIC options generate broken code. # Instead, we relocate shared libraries at runtime. ;; sysv4*MP*) if test -d /usr/nec; then lt_prog_compiler_pic_CXX=-Kconform_pic fi ;; hpux*) # PIC is the default for 64-bit PA HP-UX, but not for 32-bit # PA HP-UX. On IA64 HP-UX, PIC is the default but the pic flag # sets the default TLS model and affects inlining. case $host_cpu in hppa*64*) ;; *) lt_prog_compiler_pic_CXX='-fPIC' ;; esac ;; *qnx* | *nto*) # QNX uses GNU C++, but need to define -shared option too, otherwise # it will coredump. lt_prog_compiler_pic_CXX='-fPIC -shared' ;; *) lt_prog_compiler_pic_CXX='-fPIC' ;; esac else case $host_os in aix[4-9]*) # All AIX code is PIC. if test "$host_cpu" = ia64; then # AIX 5 now supports IA64 processor lt_prog_compiler_static_CXX='-Bstatic' else lt_prog_compiler_static_CXX='-bnso -bI:/lib/syscalls.exp' fi ;; chorus*) case $cc_basename in cxch68*) # Green Hills C++ Compiler # _LT_TAGVAR(lt_prog_compiler_static, CXX)="--no_auto_instantiation -u __main -u __premain -u _abort -r $COOL_DIR/lib/libOrb.a $MVME_DIR/lib/CC/libC.a $MVME_DIR/lib/classix/libcx.s.a" ;; esac ;; dgux*) case $cc_basename in ec++*) lt_prog_compiler_pic_CXX='-KPIC' ;; ghcx*) # Green Hills C++ Compiler lt_prog_compiler_pic_CXX='-pic' ;; *) ;; esac ;; freebsd* | dragonfly*) # FreeBSD uses GNU C++ ;; hpux9* | hpux10* | hpux11*) case $cc_basename in CC*) lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_static_CXX='${wl}-a ${wl}archive' if test "$host_cpu" != ia64; then lt_prog_compiler_pic_CXX='+Z' fi ;; aCC*) lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_static_CXX='${wl}-a ${wl}archive' case $host_cpu in hppa*64*|ia64*) # +Z the default ;; *) lt_prog_compiler_pic_CXX='+Z' ;; esac ;; *) ;; esac ;; interix*) # This is c89, which is MS Visual C++ (no shared libs) # Anyone wants to do a port? ;; irix5* | irix6* | nonstopux*) case $cc_basename in CC*) lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_static_CXX='-non_shared' # CC pic flag -KPIC is the default. ;; *) ;; esac ;; linux* | k*bsd*-gnu | kopensolaris*-gnu) case $cc_basename in KCC*) # KAI C++ Compiler lt_prog_compiler_wl_CXX='--backend -Wl,' lt_prog_compiler_pic_CXX='-fPIC' ;; ecpc* ) # old Intel C++ for x86_64 which still supported -KPIC. 
lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_pic_CXX='-KPIC' lt_prog_compiler_static_CXX='-static' ;; icpc* ) # Intel C++, used to be incompatible with GCC. # ICC 10 doesn't accept -KPIC any more. lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_pic_CXX='-fPIC' lt_prog_compiler_static_CXX='-static' ;; pgCC* | pgcpp*) # Portland Group C++ compiler lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_pic_CXX='-fpic' lt_prog_compiler_static_CXX='-Bstatic' ;; cxx*) # Compaq C++ # Make sure the PIC flag is empty. It appears that all Alpha # Linux and Compaq Tru64 Unix objects are PIC. lt_prog_compiler_pic_CXX= lt_prog_compiler_static_CXX='-non_shared' ;; xlc* | xlC*) # IBM XL 8.0 on PPC lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_pic_CXX='-qpic' lt_prog_compiler_static_CXX='-qstaticlink' ;; *) case `$CC -V 2>&1 | sed 5q` in *Sun\ C*) # Sun C++ 5.9 lt_prog_compiler_pic_CXX='-KPIC' lt_prog_compiler_static_CXX='-Bstatic' lt_prog_compiler_wl_CXX='-Qoption ld ' ;; esac ;; esac ;; lynxos*) ;; m88k*) ;; mvs*) case $cc_basename in cxx*) lt_prog_compiler_pic_CXX='-W c,exportall' ;; *) ;; esac ;; netbsd* | netbsdelf*-gnu) ;; *qnx* | *nto*) # QNX uses GNU C++, but need to define -shared option too, otherwise # it will coredump. lt_prog_compiler_pic_CXX='-fPIC -shared' ;; osf3* | osf4* | osf5*) case $cc_basename in KCC*) lt_prog_compiler_wl_CXX='--backend -Wl,' ;; RCC*) # Rational C++ 2.4.1 lt_prog_compiler_pic_CXX='-pic' ;; cxx*) # Digital/Compaq C++ lt_prog_compiler_wl_CXX='-Wl,' # Make sure the PIC flag is empty. It appears that all Alpha # Linux and Compaq Tru64 Unix objects are PIC. lt_prog_compiler_pic_CXX= lt_prog_compiler_static_CXX='-non_shared' ;; *) ;; esac ;; psos*) ;; solaris*) case $cc_basename in CC*) # Sun C++ 4.2, 5.x and Centerline C++ lt_prog_compiler_pic_CXX='-KPIC' lt_prog_compiler_static_CXX='-Bstatic' lt_prog_compiler_wl_CXX='-Qoption ld ' ;; gcx*) # Green Hills C++ Compiler lt_prog_compiler_pic_CXX='-PIC' ;; *) ;; esac ;; sunos4*) case $cc_basename in CC*) # Sun C++ 4.x lt_prog_compiler_pic_CXX='-pic' lt_prog_compiler_static_CXX='-Bstatic' ;; lcc*) # Lucid lt_prog_compiler_pic_CXX='-pic' ;; *) ;; esac ;; sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*) case $cc_basename in CC*) lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_pic_CXX='-KPIC' lt_prog_compiler_static_CXX='-Bstatic' ;; esac ;; tandem*) case $cc_basename in NCC*) # NonStop-UX NCC 3.20 lt_prog_compiler_pic_CXX='-KPIC' ;; *) ;; esac ;; vxworks*) ;; *) lt_prog_compiler_can_build_shared_CXX=no ;; esac fi case $host_os in # For platforms which do not support PIC, -DPIC is meaningless: *djgpp*) lt_prog_compiler_pic_CXX= ;; *) lt_prog_compiler_pic_CXX="$lt_prog_compiler_pic_CXX -DPIC" ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_prog_compiler_pic_CXX" >&5 $as_echo "$lt_prog_compiler_pic_CXX" >&6; } # # Check to make sure the PIC flag actually works. # if test -n "$lt_prog_compiler_pic_CXX"; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler PIC flag $lt_prog_compiler_pic_CXX works" >&5 $as_echo_n "checking if $compiler PIC flag $lt_prog_compiler_pic_CXX works... " >&6; } if test "${lt_cv_prog_compiler_pic_works_CXX+set}" = set; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_pic_works_CXX=no ac_outfile=conftest.$ac_objext echo "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="$lt_prog_compiler_pic_CXX -DPIC" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. 
# Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. # The option is referenced via a variable to avoid confusing sed. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:17297: $lt_compile\"" >&5) (eval "$lt_compile" 2>conftest.err) ac_status=$? cat conftest.err >&5 echo "$as_me:17301: \$? = $ac_status" >&5 if (exit $ac_status) && test -s "$ac_outfile"; then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings other than the usual output. $ECHO "X$_lt_compiler_boilerplate" | $Xsed -e '/^$/d' >conftest.exp $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then lt_cv_prog_compiler_pic_works_CXX=yes fi fi $RM conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_works_CXX" >&5 $as_echo "$lt_cv_prog_compiler_pic_works_CXX" >&6; } if test x"$lt_cv_prog_compiler_pic_works_CXX" = xyes; then case $lt_prog_compiler_pic_CXX in "" | " "*) ;; *) lt_prog_compiler_pic_CXX=" $lt_prog_compiler_pic_CXX" ;; esac else lt_prog_compiler_pic_CXX= lt_prog_compiler_can_build_shared_CXX=no fi fi # # Check to make sure the static flag actually works. # wl=$lt_prog_compiler_wl_CXX eval lt_tmp_static_flag=\"$lt_prog_compiler_static_CXX\" { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler static flag $lt_tmp_static_flag works" >&5 $as_echo_n "checking if $compiler static flag $lt_tmp_static_flag works... " >&6; } if test "${lt_cv_prog_compiler_static_works_CXX+set}" = set; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_static_works_CXX=no save_LDFLAGS="$LDFLAGS" LDFLAGS="$LDFLAGS $lt_tmp_static_flag" echo "$lt_simple_link_test_code" > conftest.$ac_ext if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then # The linker can only warn and ignore the option if not recognized # So say no if there are warnings if test -s conftest.err; then # Append any errors to the config.log. cat conftest.err 1>&5 $ECHO "X$_lt_linker_boilerplate" | $Xsed -e '/^$/d' > conftest.exp $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 if diff conftest.exp conftest.er2 >/dev/null; then lt_cv_prog_compiler_static_works_CXX=yes fi else lt_cv_prog_compiler_static_works_CXX=yes fi fi $RM -r conftest* LDFLAGS="$save_LDFLAGS" fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_static_works_CXX" >&5 $as_echo "$lt_cv_prog_compiler_static_works_CXX" >&6; } if test x"$lt_cv_prog_compiler_static_works_CXX" = xyes; then : else lt_prog_compiler_static_CXX= fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5 $as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; } if test "${lt_cv_prog_compiler_c_o_CXX+set}" = set; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_c_o_CXX=no $RM -r conftest 2>/dev/null mkdir conftest cd conftest mkdir out echo "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="-o out/conftest2.$ac_objext" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. 
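# Illustration of the sed rewrite below (assuming the default C++ compile
# command '$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5'): the first
# expression inserts the option after the last *FLAGS word, producing
#   $CXX -c $CXXFLAGS $CPPFLAGS $lt_compiler_flag conftest.$ac_ext >&5
# and $lt_compiler_flag ("-o out/conftest2.$ac_objext" for this probe) is
# only expanded when the resulting command is eval'ed.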
lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:17396: $lt_compile\"" >&5) (eval "$lt_compile" 2>out/conftest.err) ac_status=$? cat out/conftest.err >&5 echo "$as_me:17400: \$? = $ac_status" >&5 if (exit $ac_status) && test -s out/conftest2.$ac_objext then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings $ECHO "X$_lt_compiler_boilerplate" | $Xsed -e '/^$/d' > out/conftest.exp $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then lt_cv_prog_compiler_c_o_CXX=yes fi fi chmod u+w . 2>&5 $RM conftest* # SGI C++ compiler will create directory out/ii_files/ for # template instantiation test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files $RM out/* && rmdir out cd .. $RM -r conftest $RM conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o_CXX" >&5 $as_echo "$lt_cv_prog_compiler_c_o_CXX" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5 $as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; } if test "${lt_cv_prog_compiler_c_o_CXX+set}" = set; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_c_o_CXX=no $RM -r conftest 2>/dev/null mkdir conftest cd conftest mkdir out echo "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="-o out/conftest2.$ac_objext" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:17448: $lt_compile\"" >&5) (eval "$lt_compile" 2>out/conftest.err) ac_status=$? cat out/conftest.err >&5 echo "$as_me:17452: \$? = $ac_status" >&5 if (exit $ac_status) && test -s out/conftest2.$ac_objext then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings $ECHO "X$_lt_compiler_boilerplate" | $Xsed -e '/^$/d' > out/conftest.exp $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then lt_cv_prog_compiler_c_o_CXX=yes fi fi chmod u+w . 2>&5 $RM conftest* # SGI C++ compiler will create directory out/ii_files/ for # template instantiation test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files $RM out/* && rmdir out cd .. $RM -r conftest $RM conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o_CXX" >&5 $as_echo "$lt_cv_prog_compiler_c_o_CXX" >&6; } hard_links="nottested" if test "$lt_cv_prog_compiler_c_o_CXX" = no && test "$need_locks" != no; then # do not overwrite the value of need_locks provided by the user { $as_echo "$as_me:${as_lineno-$LINENO}: checking if we can lock with hard links" >&5 $as_echo_n "checking if we can lock with hard links... 
" >&6; } hard_links=yes $RM conftest* ln conftest.a conftest.b 2>/dev/null && hard_links=no touch conftest.a ln conftest.a conftest.b 2>&5 || hard_links=no ln conftest.a conftest.b 2>/dev/null && hard_links=no { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hard_links" >&5 $as_echo "$hard_links" >&6; } if test "$hard_links" = no; then { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&5 $as_echo "$as_me: WARNING: \`$CC' does not support \`-c -o', so \`make -j' may be unsafe" >&2;} need_locks=warn fi else need_locks=no fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $compiler linker ($LD) supports shared libraries" >&5 $as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... " >&6; } export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' case $host_os in aix[4-9]*) # If we're using GNU nm, then we don't want the "-C" option. # -C means demangle to AIX nm, but means don't demangle with GNU nm if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then export_symbols_cmds_CXX='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' else export_symbols_cmds_CXX='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && (substr(\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' fi ;; pw32*) export_symbols_cmds_CXX="$ltdll_cmds" ;; cygwin* | mingw* | cegcc*) export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;/^.*[ ]__nm__/s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols' ;; linux* | k*bsd*-gnu) link_all_deplibs_CXX=no ;; *) export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' ;; esac exclude_expsyms_CXX='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*' { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs_CXX" >&5 $as_echo "$ld_shlibs_CXX" >&6; } test "$ld_shlibs_CXX" = no && can_build_shared=no with_gnu_ld_CXX=$with_gnu_ld # # Do we need to explicitly link libc? # case "x$archive_cmds_need_lc_CXX" in x|xyes) # Assume -lc should be added archive_cmds_need_lc_CXX=yes if test "$enable_shared" = yes && test "$GCC" = yes; then case $archive_cmds_CXX in *'~'*) # FIXME: we may have to deal with multi-command sequences. ;; '$CC '*) # Test whether the compiler implicitly links with -lc since on some # systems, -lgcc has to come before -lc. If gcc already passes -lc # to ld, don't add -lc before -lgcc. { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether -lc should be explicitly linked in" >&5 $as_echo_n "checking whether -lc should be explicitly linked in... " >&6; } $RM conftest* echo "$lt_simple_compile_test_code" > conftest.$ac_ext if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } 2>conftest.err; then soname=conftest lib=conftest libobjs=conftest.$ac_objext deplibs= wl=$lt_prog_compiler_wl_CXX pic_flag=$lt_prog_compiler_pic_CXX compiler_flags=-v linker_flags=-v verstring= output_objdir=. 
libname=conftest lt_save_allow_undefined_flag=$allow_undefined_flag_CXX allow_undefined_flag_CXX= if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$archive_cmds_CXX 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1\""; } >&5 (eval $archive_cmds_CXX 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } then archive_cmds_need_lc_CXX=no else archive_cmds_need_lc_CXX=yes fi allow_undefined_flag_CXX=$lt_save_allow_undefined_flag else cat conftest.err 1>&5 fi $RM conftest* { $as_echo "$as_me:${as_lineno-$LINENO}: result: $archive_cmds_need_lc_CXX" >&5 $as_echo "$archive_cmds_need_lc_CXX" >&6; } ;; esac fi ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: checking dynamic linker characteristics" >&5 $as_echo_n "checking dynamic linker characteristics... " >&6; } library_names_spec= libname_spec='lib$name' soname_spec= shrext_cmds=".so" postinstall_cmds= postuninstall_cmds= finish_cmds= finish_eval= shlibpath_var= shlibpath_overrides_runpath=unknown version_type=none dynamic_linker="$host_os ld.so" sys_lib_dlsearch_path_spec="/lib /usr/lib" need_lib_prefix=unknown hardcode_into_libs=no # when you set need_version to no, make sure it does not cause -set_version # flags to be left without arguments need_version=unknown case $host_os in aix3*) version_type=linux library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a' shlibpath_var=LIBPATH # AIX 3 has no versioning support, so we append a major version to the name. soname_spec='${libname}${release}${shared_ext}$major' ;; aix[4-9]*) version_type=linux need_lib_prefix=no need_version=no hardcode_into_libs=yes if test "$host_cpu" = ia64; then # AIX 5 supports IA64 library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH else # With GCC up to 2.95.x, collect2 would create an import file # for dependence libraries. The import file would start with # the line `#! .'. This would cause the generated library to # depend on `.', always an invalid library. This was fixed in # development snapshots of GCC prior to 3.0. case $host_os in aix4 | aix4.[01] | aix4.[01].*) if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)' echo ' yes ' echo '#endif'; } | ${CC} -E - | $GREP yes > /dev/null; then : else can_build_shared=no fi ;; esac # AIX (on Power*) has no versioning support, so currently we can not hardcode correct # soname into executable. Probably we can add versioning support to # collect2, so additional links can be useful in future. if test "$aix_use_runtimelinking" = yes; then # If using run time linking (on AIX 4.2 or later) use lib.so # instead of lib.a to let people know that these are not # typical AIX shared libraries. library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' else # We preserve .a as extension for shared libraries through AIX4.2 # and later when we are not doing run time linking. library_names_spec='${libname}${release}.a $libname.a' soname_spec='${libname}${release}${shared_ext}$major' fi shlibpath_var=LIBPATH fi ;; amigaos*) case $host_cpu in powerpc) # Since July 2007 AmigaOS4 officially supports .so libraries. # When compiling the executable, add -use-dynld -Lsobjs: to the compileline. 
library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' ;; m68k) library_names_spec='$libname.ixlibrary $libname.a' # Create ${libname}_ixlibrary.a entries in /sys/libs. finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`$ECHO "X$lib" | $Xsed -e '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; test $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' ;; esac ;; beos*) library_names_spec='${libname}${shared_ext}' dynamic_linker="$host_os ld.so" shlibpath_var=LIBRARY_PATH ;; bsdi[45]*) version_type=linux need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir' shlibpath_var=LD_LIBRARY_PATH sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib" sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib" # the default ld.so.conf also contains /usr/contrib/lib and # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow # libtool to hard-code these into programs ;; cygwin* | mingw* | pw32* | cegcc*) version_type=windows shrext_cmds=".dll" need_version=no need_lib_prefix=no case $GCC,$host_os in yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*) library_names_spec='$libname.dll.a' # DLL is installed to $(libdir)/../bin by postinstall_cmds postinstall_cmds='base_file=`basename \${file}`~ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ dldir=$destdir/`dirname \$dlpath`~ test -d \$dldir || mkdir -p \$dldir~ $install_prog $dir/$dlname \$dldir/$dlname~ chmod a+x \$dldir/$dlname~ if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then eval '\''$striplib \$dldir/$dlname'\'' || exit \$?; fi' postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ dlpath=$dir/\$dldll~ $RM \$dlpath' shlibpath_overrides_runpath=yes case $host_os in cygwin*) # Cygwin DLLs use 'cyg' prefix rather than 'lib' soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' sys_lib_search_path_spec="/usr/lib /lib/w32api /lib /usr/local/lib" ;; mingw* | cegcc*) # MinGW DLLs use traditional 'lib' prefix soname_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' sys_lib_search_path_spec=`$CC -print-search-dirs | $GREP "^libraries:" | $SED -e "s/^libraries://" -e "s,=/,/,g"` if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then # It is most probably a Windows format PATH printed by # mingw gcc, but we are running on Cygwin. Gcc prints its search # path with ; separators, and with drive letters. We can handle the # drive letters (cygwin fileutils understands them), so leave them, # especially as we might pass files found there to a mingw objdump, # which wouldn't understand a cygwinified path. Ahh. 
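# Illustration (hypothetical gcc output): a search path printed by a
# MinGW gcc as
#   C:/MinGW/lib;C:/MinGW/lib/gcc
# is split on the ';' separators by the sed command below, giving
#   C:/MinGW/lib C:/MinGW/lib/gcc
# while the drive letters themselves are left untouched.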
sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` else sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` fi ;; pw32*) # pw32 DLLs use 'pw' prefix rather than 'lib' library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext}' ;; esac ;; *) library_names_spec='${libname}`echo ${release} | $SED -e 's/[.]/-/g'`${versuffix}${shared_ext} $libname.lib' ;; esac dynamic_linker='Win32 ld.exe' # FIXME: first we should search . and the directory the executable is in shlibpath_var=PATH ;; darwin* | rhapsody*) dynamic_linker="$host_os dyld" version_type=darwin need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext' soname_spec='${libname}${release}${major}$shared_ext' shlibpath_overrides_runpath=yes shlibpath_var=DYLD_LIBRARY_PATH shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`' sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib' ;; dgux*) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH ;; freebsd1*) dynamic_linker=no ;; freebsd* | dragonfly*) # DragonFly does not have aout. When/if they implement a new # versioning mechanism, adjust this. if test -x /usr/bin/objformat; then objformat=`/usr/bin/objformat` else case $host_os in freebsd[123]*) objformat=aout ;; *) objformat=elf ;; esac fi version_type=freebsd-$objformat case $version_type in freebsd-elf*) library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' need_version=no need_lib_prefix=no ;; freebsd-*) library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix' need_version=yes ;; esac shlibpath_var=LD_LIBRARY_PATH case $host_os in freebsd2*) shlibpath_overrides_runpath=yes ;; freebsd3.[01]* | freebsdelf3.[01]*) shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; freebsd3.[2-9]* | freebsdelf3.[2-9]* | \ freebsd4.[0-5] | freebsdelf4.[0-5] | freebsd4.1.1 | freebsdelf4.1.1) shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; *) # from 4.6 on, and DragonFly shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; esac ;; gnu*) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH hardcode_into_libs=yes ;; hpux9* | hpux10* | hpux11*) # Give a soname corresponding to the major version so that dld.sl refuses to # link against other versions. version_type=sunos need_lib_prefix=no need_version=no case $host_cpu in ia64*) shrext_cmds='.so' hardcode_into_libs=yes dynamic_linker="$host_os dld.so" shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. 
library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' if test "X$HPUX_IA64_MODE" = X32; then sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib" else sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64" fi sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec ;; hppa*64*) shrext_cmds='.sl' hardcode_into_libs=yes dynamic_linker="$host_os dld.sl" shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64" sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec ;; *) shrext_cmds='.sl' dynamic_linker="$host_os dld.sl" shlibpath_var=SHLIB_PATH shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' ;; esac # HP-UX runs *really* slowly unless shared libraries are mode 555. postinstall_cmds='chmod 555 $lib' ;; interix[3-9]*) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; irix5* | irix6* | nonstopux*) case $host_os in nonstopux*) version_type=nonstopux ;; *) if test "$lt_cv_prog_gnu_ld" = yes; then version_type=linux else version_type=irix fi ;; esac need_lib_prefix=no need_version=no soname_spec='${libname}${release}${shared_ext}$major' library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}' case $host_os in irix5* | nonstopux*) libsuff= shlibsuff= ;; *) case $LD in # libtool.m4 will add one of these switches to LD *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ") libsuff= shlibsuff= libmagic=32-bit;; *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ") libsuff=32 shlibsuff=N32 libmagic=N32;; *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ") libsuff=64 shlibsuff=64 libmagic=64-bit;; *) libsuff= shlibsuff= libmagic=never-match;; esac ;; esac shlibpath_var=LD_LIBRARY${shlibsuff}_PATH shlibpath_overrides_runpath=no sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}" sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}" hardcode_into_libs=yes ;; # No shared lib support for Linux oldld, aout, or coff. linux*oldld* | linux*aout* | linux*coff*) dynamic_linker=no ;; # This must be Linux ELF. 
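# Illustration (hypothetical values, not computed by this script): for a
# library with libname=libfoo, an empty $release, shared_ext=.so,
# versuffix=.1.2.3 and major=.1, the specs set in the branch below expand to
#   library_names_spec -> "libfoo.so.1.2.3 libfoo.so.1 libfoo.so"
#   soname_spec        -> "libfoo.so.1"
# and finish_cmds runs "ldconfig -n $libdir", which (re)creates the
# libfoo.so.1 symlink from the library's soname.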
linux* | k*bsd*-gnu | kopensolaris*-gnu) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no # Some binutils ld are patched to set DT_RUNPATH save_LDFLAGS=$LDFLAGS save_libdir=$libdir eval "libdir=/foo; wl=\"$lt_prog_compiler_wl_CXX\"; \ LDFLAGS=\"\$LDFLAGS $hardcode_libdir_flag_spec_CXX\"" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : if ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null; then : shlibpath_overrides_runpath=yes fi fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LDFLAGS=$save_LDFLAGS libdir=$save_libdir # This implies no fast_install, which is unacceptable. # Some rework will be needed to allow for fast_install # before this can be enabled. hardcode_into_libs=yes # Append ld.so.conf contents to the search path if test -f /etc/ld.so.conf; then lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \$2)); skip = 1; } { if (!skip) print \$0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;/^$/d' | tr '\n' ' '` sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra" fi # We used to test for /lib/ld.so.1 and disable shared libraries on # powerpc, because MkLinux only supported shared libraries with the # GNU dynamic linker. Since this was broken with cross compilers, # most powerpc-linux boxes support dynamic linking these days and # people can always --disable-shared, the test was removed, and we # assume the GNU/Linux dynamic linker is in use. 
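# Illustration (hypothetical /etc/ld.so.conf contents): a configuration
# such as
#   include /etc/ld.so.conf.d/*.conf
#   /opt/foo/lib
# is expanded by the awk/sed pipeline above into the additional
# directories (here /opt/foo/lib plus whatever the included fragments
# list), which are appended to sys_lib_dlsearch_path_spec after the
# built-in /lib /usr/lib.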
dynamic_linker='GNU/Linux ld.so' ;; netbsdelf*-gnu) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes dynamic_linker='NetBSD ld.elf_so' ;; netbsd*) version_type=sunos need_lib_prefix=no need_version=no if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' dynamic_linker='NetBSD (a.out) ld.so' else library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' dynamic_linker='NetBSD ld.elf_so' fi shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; newsos6) version_type=linux library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes ;; *nto* | *qnx*) version_type=qnx need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes dynamic_linker='ldqnx.so' ;; openbsd*) version_type=sunos sys_lib_dlsearch_path_spec="/usr/lib" need_lib_prefix=no # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs. case $host_os in openbsd3.3 | openbsd3.3.*) need_version=yes ;; *) need_version=no ;; esac library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' shlibpath_var=LD_LIBRARY_PATH if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then case $host_os in openbsd2.[89] | openbsd2.[89].*) shlibpath_overrides_runpath=no ;; *) shlibpath_overrides_runpath=yes ;; esac else shlibpath_overrides_runpath=yes fi ;; os2*) libname_spec='$name' shrext_cmds=".dll" need_lib_prefix=no library_names_spec='$libname${shared_ext} $libname.a' dynamic_linker='OS/2 ld.exe' shlibpath_var=LIBPATH ;; osf3* | osf4* | osf5*) version_type=osf need_lib_prefix=no need_version=no soname_spec='${libname}${release}${shared_ext}$major' library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib" sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec" ;; rdos*) dynamic_linker=no ;; solaris*) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes # ldd complains unless libraries are executable postinstall_cmds='chmod +x $lib' ;; sunos4*) version_type=sunos library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' 
finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes if test "$with_gnu_ld" = yes; then need_lib_prefix=no fi need_version=yes ;; sysv4 | sysv4.3*) version_type=linux library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH case $host_vendor in sni) shlibpath_overrides_runpath=no need_lib_prefix=no runpath_var=LD_RUN_PATH ;; siemens) need_lib_prefix=no ;; motorola) need_lib_prefix=no need_version=no shlibpath_overrides_runpath=no sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib' ;; esac ;; sysv4*MP*) if test -d /usr/nec ;then version_type=linux library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}' soname_spec='$libname${shared_ext}.$major' shlibpath_var=LD_LIBRARY_PATH fi ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) version_type=freebsd-elf need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes if test "$with_gnu_ld" = yes; then sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib' else sys_lib_search_path_spec='/usr/ccs/lib /usr/lib' case $host_os in sco3.2v5*) sys_lib_search_path_spec="$sys_lib_search_path_spec /lib" ;; esac fi sys_lib_dlsearch_path_spec='/usr/lib' ;; tpf*) # TPF is a cross-target only. Preferred cross-host = GNU/Linux. version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; uts4*) version_type=linux library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH ;; *) dynamic_linker=no ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: result: $dynamic_linker" >&5 $as_echo "$dynamic_linker" >&6; } test "$dynamic_linker" = no && can_build_shared=no variables_saved_for_relink="PATH $shlibpath_var $runpath_var" if test "$GCC" = yes; then variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH" fi if test "${lt_cv_sys_lib_search_path_spec+set}" = set; then sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec" fi if test "${lt_cv_sys_lib_dlsearch_path_spec+set}" = set; then sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec" fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to hardcode library paths into programs" >&5 $as_echo_n "checking how to hardcode library paths into programs... " >&6; } hardcode_action_CXX= if test -n "$hardcode_libdir_flag_spec_CXX" || test -n "$runpath_var_CXX" || test "X$hardcode_automatic_CXX" = "Xyes" ; then # We can hardcode non-existent directories. 
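# Illustration (assuming a typical GNU/Linux toolchain rather than this
# exact build): with hardcode_libdir_flag_spec_CXX set to something like
# '${wl}-rpath ${wl}$libdir', hardcode_direct_CXX=no and
# hardcode_minus_L_CXX=no, the test below selects
# hardcode_action_CXX=immediate, i.e. run paths can be hardcoded at link
# time without requiring a relink step at install time.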
if test "$hardcode_direct_CXX" != no && # If the only mechanism to avoid hardcoding is shlibpath_var, we # have to relink, otherwise we might link with an installed library # when we should be linking with a yet-to-be-installed one ## test "$_LT_TAGVAR(hardcode_shlibpath_var, CXX)" != no && test "$hardcode_minus_L_CXX" != no; then # Linking always hardcodes the temporary library directory. hardcode_action_CXX=relink else # We can link without hardcoding, and we can hardcode nonexisting dirs. hardcode_action_CXX=immediate fi else # We cannot hardcode anything, or else we can only hardcode existing # directories. hardcode_action_CXX=unsupported fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hardcode_action_CXX" >&5 $as_echo "$hardcode_action_CXX" >&6; } if test "$hardcode_action_CXX" = relink || test "$inherit_rpath_CXX" = yes; then # Fast installation is not supported enable_fast_install=no elif test "$shlibpath_overrides_runpath" = yes || test "$enable_shared" = no; then # Fast installation is not necessary enable_fast_install=needless fi fi # test -n "$compiler" CC=$lt_save_CC LDCXX=$LD LD=$lt_save_LD GCC=$lt_save_GCC with_gnu_ld=$lt_save_with_gnu_ld lt_cv_path_LDCXX=$lt_cv_path_LD lt_cv_path_LD=$lt_save_path_LD lt_cv_prog_gnu_ldcxx=$lt_cv_prog_gnu_ld lt_cv_prog_gnu_ld=$lt_save_with_gnu_ld fi # test "$_lt_caught_CXX_error" != yes ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu ac_config_commands="$ac_config_commands libtool" # Only expand once: { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ${MAKE-make} sets \$(MAKE)" >&5 $as_echo_n "checking whether ${MAKE-make} sets \$(MAKE)... " >&6; } set x ${MAKE-make} ac_make=`$as_echo "$2" | sed 's/+/p/g; s/[^a-zA-Z0-9_]/_/g'` if eval "test \"\${ac_cv_prog_make_${ac_make}_set+set}\"" = set; then : $as_echo_n "(cached) " >&6 else cat >conftest.make <<\_ACEOF SHELL = /bin/sh all: @echo '@@@%%%=$(MAKE)=@@@%%%' _ACEOF # GNU make sometimes prints "make[1]: Entering ...", which would confuse us. case `${MAKE-make} -f conftest.make 2>/dev/null` in *@@@%%%=?*=@@@%%%*) eval ac_cv_prog_make_${ac_make}_set=yes;; *) eval ac_cv_prog_make_${ac_make}_set=no;; esac rm -f conftest.make fi if eval test \$ac_cv_prog_make_${ac_make}_set = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } SET_MAKE= else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } SET_MAKE="MAKE=${MAKE-make}" fi if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}ranlib", so it can be a program name with args. set dummy ${ac_tool_prefix}ranlib; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_RANLIB+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$RANLIB"; then ac_cv_prog_RANLIB="$RANLIB" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_RANLIB="${ac_tool_prefix}ranlib" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi RANLIB=$ac_cv_prog_RANLIB if test -n "$RANLIB"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $RANLIB" >&5 $as_echo "$RANLIB" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_RANLIB"; then ac_ct_RANLIB=$RANLIB # Extract the first word of "ranlib", so it can be a program name with args. set dummy ranlib; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_ac_ct_RANLIB+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_RANLIB"; then ac_cv_prog_ac_ct_RANLIB="$ac_ct_RANLIB" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_ac_ct_RANLIB="ranlib" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_RANLIB=$ac_cv_prog_ac_ct_RANLIB if test -n "$ac_ct_RANLIB"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_RANLIB" >&5 $as_echo "$ac_ct_RANLIB" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_RANLIB" = x; then RANLIB=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac RANLIB=$ac_ct_RANLIB fi else RANLIB="$ac_cv_prog_RANLIB" fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for C++ compiler usability" >&5 $as_echo_n "checking for C++ compiler usability... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { class test { public: int test; }; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: result: ok" >&5 $as_echo "ok" >&6; } else as_fn_error $? "No C++ compiler found" "$LINENO" 5 fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext # Checks for libraries. { $as_echo "$as_me:${as_lineno-$LINENO}: checking for socket in -lsocket" >&5 $as_echo_n "checking for socket in -lsocket... " >&6; } if test "${ac_cv_lib_socket_socket+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lsocket $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char socket (); int main () { return socket (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_lib_socket_socket=yes else ac_cv_lib_socket_socket=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_socket_socket" >&5 $as_echo "$ac_cv_lib_socket_socket" >&6; } if test "x$ac_cv_lib_socket_socket" = x""yes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBSOCKET 1 _ACEOF LIBS="-lsocket $LIBS" fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for endnetconfig in -lnsl" >&5 $as_echo_n "checking for endnetconfig in -lnsl... " >&6; } if test "${ac_cv_lib_nsl_endnetconfig+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lnsl $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char endnetconfig (); int main () { return endnetconfig (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_lib_nsl_endnetconfig=yes else ac_cv_lib_nsl_endnetconfig=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_nsl_endnetconfig" >&5 $as_echo "$ac_cv_lib_nsl_endnetconfig" >&6; } if test "x$ac_cv_lib_nsl_endnetconfig" = x""yes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBNSL 1 _ACEOF LIBS="-lnsl $LIBS" fi # Check whether --enable-libdl-linking was given. if test "${enable_libdl_linking+set}" = set; then : enableval=$enable_libdl_linking; else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlsym in -ldl" >&5 $as_echo_n "checking for dlsym in -ldl... " >&6; } if test "${ac_cv_lib_dl_dlsym+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldl $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char dlsym (); int main () { return dlsym (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_lib_dl_dlsym=yes else ac_cv_lib_dl_dlsym=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlsym" >&5 $as_echo "$ac_cv_lib_dl_dlsym" >&6; } if test "x$ac_cv_lib_dl_dlsym" = x""yes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBDL 1 _ACEOF LIBS="-ldl $LIBS" fi fi # Checks for header files. ac_header_dirent=no for ac_hdr in dirent.h sys/ndir.h sys/dir.h ndir.h; do as_ac_Header=`$as_echo "ac_cv_header_dirent_$ac_hdr" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_hdr that defines DIR" >&5 $as_echo_n "checking for $ac_hdr that defines DIR... " >&6; } if eval "test \"\${$as_ac_Header+set}\"" = set; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include #include <$ac_hdr> int main () { if ((DIR *) 0) return 0; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : eval "$as_ac_Header=yes" else eval "$as_ac_Header=no" fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi eval ac_res=\$$as_ac_Header { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_hdr" | $as_tr_cpp` 1 _ACEOF ac_header_dirent=$ac_hdr; break fi done # Two versions of opendir et al. are in -ldir and -lx on SCO Xenix. if test $ac_header_dirent = dirent.h; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing opendir" >&5 $as_echo_n "checking for library containing opendir... " >&6; } if test "${ac_cv_search_opendir+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_func_search_save_LIBS=$LIBS cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char opendir (); int main () { return opendir (); ; return 0; } _ACEOF for ac_lib in '' dir; do if test -z "$ac_lib"; then ac_res="none required" else ac_res=-l$ac_lib LIBS="-l$ac_lib $ac_func_search_save_LIBS" fi if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_search_opendir=$ac_res fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext if test "${ac_cv_search_opendir+set}" = set; then : break fi done if test "${ac_cv_search_opendir+set}" = set; then : else ac_cv_search_opendir=no fi rm conftest.$ac_ext LIBS=$ac_func_search_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_opendir" >&5 $as_echo "$ac_cv_search_opendir" >&6; } ac_res=$ac_cv_search_opendir if test "$ac_res" != no; then : test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" fi else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing opendir" >&5 $as_echo_n "checking for library containing opendir... " >&6; } if test "${ac_cv_search_opendir+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_func_search_save_LIBS=$LIBS cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char opendir (); int main () { return opendir (); ; return 0; } _ACEOF for ac_lib in '' x; do if test -z "$ac_lib"; then ac_res="none required" else ac_res=-l$ac_lib LIBS="-l$ac_lib $ac_func_search_save_LIBS" fi if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_search_opendir=$ac_res fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext if test "${ac_cv_search_opendir+set}" = set; then : break fi done if test "${ac_cv_search_opendir+set}" = set; then : else ac_cv_search_opendir=no fi rm conftest.$ac_ext LIBS=$ac_func_search_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_opendir" >&5 $as_echo "$ac_cv_search_opendir" >&6; } ac_res=$ac_cv_search_opendir if test "$ac_res" != no; then : test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ANSI C header files" >&5 $as_echo_n "checking for ANSI C header files... 
" >&6; } if test "${ac_cv_header_stdc+set}" = set; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include #include #include int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_header_stdc=yes else ac_cv_header_stdc=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext if test $ac_cv_header_stdc = yes; then # SunOS 4.x string.h does not declare mem*, contrary to ANSI. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include _ACEOF if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | $EGREP "memchr" >/dev/null 2>&1; then : else ac_cv_header_stdc=no fi rm -f conftest* fi if test $ac_cv_header_stdc = yes; then # ISC 2.0.2 stdlib.h does not declare free, contrary to ANSI. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include _ACEOF if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | $EGREP "free" >/dev/null 2>&1; then : else ac_cv_header_stdc=no fi rm -f conftest* fi if test $ac_cv_header_stdc = yes; then # /bin/cc in Irix-4.0.5 gets non-ANSI ctype macros unless using -ansi. if test "$cross_compiling" = yes; then : : else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include #if ((' ' & 0x0FF) == 0x020) # define ISLOWER(c) ('a' <= (c) && (c) <= 'z') # define TOUPPER(c) (ISLOWER(c) ? 'A' + ((c) - 'a') : (c)) #else # define ISLOWER(c) \ (('a' <= (c) && (c) <= 'i') \ || ('j' <= (c) && (c) <= 'r') \ || ('s' <= (c) && (c) <= 'z')) # define TOUPPER(c) (ISLOWER(c) ? ((c) | 0x40) : (c)) #endif #define XOR(e, f) (((e) && !(f)) || (!(e) && (f))) int main () { int i; for (i = 0; i < 256; i++) if (XOR (islower (i), ISLOWER (i)) || toupper (i) != TOUPPER (i)) return 2; return 0; } _ACEOF if ac_fn_cxx_try_run "$LINENO"; then : else ac_cv_header_stdc=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_stdc" >&5 $as_echo "$ac_cv_header_stdc" >&6; } if test $ac_cv_header_stdc = yes; then $as_echo "#define STDC_HEADERS 1" >>confdefs.h fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for sys/wait.h that is POSIX.1 compatible" >&5 $as_echo_n "checking for sys/wait.h that is POSIX.1 compatible... " >&6; } if test "${ac_cv_header_sys_wait_h+set}" = set; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include #ifndef WEXITSTATUS # define WEXITSTATUS(stat_val) ((unsigned int) (stat_val) >> 8) #endif #ifndef WIFEXITED # define WIFEXITED(stat_val) (((stat_val) & 255) == 0) #endif int main () { int s; wait (&s); s = WIFEXITED (s) ? 
WEXITSTATUS (s) : 1; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_header_sys_wait_h=yes else ac_cv_header_sys_wait_h=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_sys_wait_h" >&5 $as_echo "$ac_cv_header_sys_wait_h" >&6; } if test $ac_cv_header_sys_wait_h = yes; then $as_echo "#define HAVE_SYS_WAIT_H 1" >>confdefs.h fi for ac_header in fcntl.h netinet/in.h arpa/inet.h stdint.h stdlib.h string.h sys/ioctl.h sys/socket.h termios.h unistd.h utime.h sys/types.h signal.h errno.h sys/un.h sys/stat.h time.h fnmatch.h regex.h pwd.h grp.h stdio.h pthread.h ctype.h getopt.h limits.h stddef.h sys/utsname.h do : as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` ac_fn_cxx_check_header_mongrel "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default" if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 _ACEOF fi done # Check whether --enable-largefile was given. if test "${enable_largefile+set}" = set; then : enableval=$enable_largefile; fi if test "$enable_largefile" != no; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for special C compiler options needed for large files" >&5 $as_echo_n "checking for special C compiler options needed for large files... " >&6; } if test "${ac_cv_sys_largefile_CC+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_cv_sys_largefile_CC=no if test "$GCC" != yes; then ac_save_CC=$CC while :; do # IRIX 6.2 and later do not support large files by default, # so use the C compiler's -n32 option if that helps. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include /* Check that off_t can represent 2**63 - 1 correctly. We can't simply define LARGE_OFF_T to be 9223372036854775807, since some C++ compilers masquerading as C compilers incorrectly reject 9223372036854775807. */ #define LARGE_OFF_T (((off_t) 1 << 62) - 1 + ((off_t) 1 << 62)) int off_t_is_large[(LARGE_OFF_T % 2147483629 == 721 && LARGE_OFF_T % 2147483647 == 1) ? 1 : -1]; int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : break fi rm -f core conftest.err conftest.$ac_objext CC="$CC -n32" if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_sys_largefile_CC=' -n32'; break fi rm -f core conftest.err conftest.$ac_objext break done CC=$ac_save_CC rm -f conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_sys_largefile_CC" >&5 $as_echo "$ac_cv_sys_largefile_CC" >&6; } if test "$ac_cv_sys_largefile_CC" != no; then CC=$CC$ac_cv_sys_largefile_CC fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for _FILE_OFFSET_BITS value needed for large files" >&5 $as_echo_n "checking for _FILE_OFFSET_BITS value needed for large files... " >&6; } if test "${ac_cv_sys_file_offset_bits+set}" = set; then : $as_echo_n "(cached) " >&6 else while :; do cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include /* Check that off_t can represent 2**63 - 1 correctly. We can't simply define LARGE_OFF_T to be 9223372036854775807, since some C++ compilers masquerading as C compilers incorrectly reject 9223372036854775807. */ #define LARGE_OFF_T (((off_t) 1 << 62) - 1 + ((off_t) 1 << 62)) int off_t_is_large[(LARGE_OFF_T % 2147483629 == 721 && LARGE_OFF_T % 2147483647 == 1) ? 
1 : -1]; int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_sys_file_offset_bits=no; break fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #define _FILE_OFFSET_BITS 64 #include /* Check that off_t can represent 2**63 - 1 correctly. We can't simply define LARGE_OFF_T to be 9223372036854775807, since some C++ compilers masquerading as C compilers incorrectly reject 9223372036854775807. */ #define LARGE_OFF_T (((off_t) 1 << 62) - 1 + ((off_t) 1 << 62)) int off_t_is_large[(LARGE_OFF_T % 2147483629 == 721 && LARGE_OFF_T % 2147483647 == 1) ? 1 : -1]; int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_sys_file_offset_bits=64; break fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_cv_sys_file_offset_bits=unknown break done fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_sys_file_offset_bits" >&5 $as_echo "$ac_cv_sys_file_offset_bits" >&6; } case $ac_cv_sys_file_offset_bits in #( no | unknown) ;; *) cat >>confdefs.h <<_ACEOF #define _FILE_OFFSET_BITS $ac_cv_sys_file_offset_bits _ACEOF ;; esac rm -rf conftest* if test $ac_cv_sys_file_offset_bits = unknown; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for _LARGE_FILES value needed for large files" >&5 $as_echo_n "checking for _LARGE_FILES value needed for large files... " >&6; } if test "${ac_cv_sys_large_files+set}" = set; then : $as_echo_n "(cached) " >&6 else while :; do cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include /* Check that off_t can represent 2**63 - 1 correctly. We can't simply define LARGE_OFF_T to be 9223372036854775807, since some C++ compilers masquerading as C compilers incorrectly reject 9223372036854775807. */ #define LARGE_OFF_T (((off_t) 1 << 62) - 1 + ((off_t) 1 << 62)) int off_t_is_large[(LARGE_OFF_T % 2147483629 == 721 && LARGE_OFF_T % 2147483647 == 1) ? 1 : -1]; int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_sys_large_files=no; break fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #define _LARGE_FILES 1 #include /* Check that off_t can represent 2**63 - 1 correctly. We can't simply define LARGE_OFF_T to be 9223372036854775807, since some C++ compilers masquerading as C compilers incorrectly reject 9223372036854775807. */ #define LARGE_OFF_T (((off_t) 1 << 62) - 1 + ((off_t) 1 << 62)) int off_t_is_large[(LARGE_OFF_T % 2147483629 == 721 && LARGE_OFF_T % 2147483647 == 1) ? 1 : -1]; int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_sys_large_files=1; break fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_cv_sys_large_files=unknown break done fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_sys_large_files" >&5 $as_echo "$ac_cv_sys_large_files" >&6; } case $ac_cv_sys_large_files in #( no | unknown) ;; *) cat >>confdefs.h <<_ACEOF #define _LARGE_FILES $ac_cv_sys_large_files _ACEOF ;; esac rm -rf conftest* fi fi # Checks for typedefs, structures, and compiler characteristics. { $as_echo "$as_me:${as_lineno-$LINENO}: checking for an ANSI C-conforming const" >&5 $as_echo_n "checking for an ANSI C-conforming const... " >&6; } if test "${ac_cv_c_const+set}" = set; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { /* FIXME: Include the comments suggested by Paul. 
*/ #ifndef __cplusplus /* Ultrix mips cc rejects this. */ typedef int charset[2]; const charset cs; /* SunOS 4.1.1 cc rejects this. */ char const *const *pcpcc; char **ppc; /* NEC SVR4.0.2 mips cc rejects this. */ struct point {int x, y;}; static struct point const zero = {0,0}; /* AIX XL C 1.02.0.0 rejects this. It does not let you subtract one const X* pointer from another in an arm of an if-expression whose if-part is not a constant expression */ const char *g = "string"; pcpcc = &g + (g ? g-g : 0); /* HPUX 7.0 cc rejects these. */ ++pcpcc; ppc = (char**) pcpcc; pcpcc = (char const *const *) ppc; { /* SCO 3.2v4 cc rejects this. */ char *t; char const *s = 0 ? (char *) 0 : (char const *) 0; *t++ = 0; if (s) return 0; } { /* Someone thinks the Sun supposedly-ANSI compiler will reject this. */ int x[] = {25, 17}; const int *foo = &x[0]; ++foo; } { /* Sun SC1.0 ANSI compiler rejects this -- but not the above. */ typedef const int *iptr; iptr p = 0; ++p; } { /* AIX XL C 1.02.0.0 rejects this saying "k.c", line 2.27: 1506-025 (S) Operand must be a modifiable lvalue. */ struct s { int j; const int *ap[3]; }; struct s *b; b->j = 5; } { /* ULTRIX-32 V3.1 (Rev 9) vcc rejects this */ const int foo = 10; if (!foo) return 0; } return !cs[0] && !zero.x; #endif ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_c_const=yes else ac_cv_c_const=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_const" >&5 $as_echo "$ac_cv_c_const" >&6; } if test $ac_cv_c_const = no; then $as_echo "#define const /**/" >>confdefs.h fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for inline" >&5 $as_echo_n "checking for inline... " >&6; } if test "${ac_cv_c_inline+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_cv_c_inline=no for ac_kw in inline __inline__ __inline; do cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #ifndef __cplusplus typedef int foo_t; static $ac_kw foo_t static_foo () {return 0; } $ac_kw foo_t foo () {return 0; } #endif _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_c_inline=$ac_kw fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext test "$ac_cv_c_inline" != no && break done fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_inline" >&5 $as_echo "$ac_cv_c_inline" >&6; } case $ac_cv_c_inline in inline | yes) ;; *) case $ac_cv_c_inline in no) ac_val=;; *) ac_val=$ac_cv_c_inline;; esac cat >>confdefs.h <<_ACEOF #ifndef __cplusplus #define inline $ac_val #endif _ACEOF ;; esac ac_fn_cxx_check_type "$LINENO" "off_t" "ac_cv_type_off_t" "$ac_includes_default" if test "x$ac_cv_type_off_t" = x""yes; then : else cat >>confdefs.h <<_ACEOF #define off_t long int _ACEOF fi ac_fn_cxx_check_type "$LINENO" "pid_t" "ac_cv_type_pid_t" "$ac_includes_default" if test "x$ac_cv_type_pid_t" = x""yes; then : else cat >>confdefs.h <<_ACEOF #define pid_t int _ACEOF fi ac_fn_cxx_check_type "$LINENO" "size_t" "ac_cv_type_size_t" "$ac_includes_default" if test "x$ac_cv_type_size_t" = x""yes; then : else cat >>confdefs.h <<_ACEOF #define size_t unsigned int _ACEOF fi ac_fn_cxx_check_member "$LINENO" "struct stat" "st_rdev" "ac_cv_member_struct_stat_st_rdev" "$ac_includes_default" if test "x$ac_cv_member_struct_stat_st_rdev" = x""yes; then : cat >>confdefs.h <<_ACEOF #define HAVE_STRUCT_STAT_ST_RDEV 1 _ACEOF fi ac_fn_cxx_check_decl "$LINENO" "sys_siglist" "ac_cv_have_decl_sys_siglist" "#include /* NetBSD declares sys_siglist in unistd.h. 
*/ #ifdef HAVE_UNISTD_H # include #endif " if test "x$ac_cv_have_decl_sys_siglist" = x""yes; then : ac_have_decl=1 else ac_have_decl=0 fi cat >>confdefs.h <<_ACEOF #define HAVE_DECL_SYS_SIGLIST $ac_have_decl _ACEOF ac_fn_cxx_check_type "$LINENO" "size_t" "ac_cv_type_size_t" "$ac_includes_default" if test "x$ac_cv_type_size_t" = x""yes; then : # The cast to long int works around a bug in the HP C Compiler # version HP92453-01 B.11.11.23709.GP, which incorrectly rejects # declarations like `int a3[[(sizeof (unsigned char)) >= 0]];'. # This bug is HP SR number 8606223364. { $as_echo "$as_me:${as_lineno-$LINENO}: checking size of size_t" >&5 $as_echo_n "checking size of size_t... " >&6; } if test "${ac_cv_sizeof_size_t+set}" = set; then : $as_echo_n "(cached) " >&6 else if ac_fn_cxx_compute_int "$LINENO" "(long int) (sizeof (size_t))" "ac_cv_sizeof_size_t" "$ac_includes_default"; then : else if test "$ac_cv_type_size_t" = yes; then { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error 77 "cannot compute sizeof (size_t) See \`config.log' for more details" "$LINENO" 5 ; } else ac_cv_sizeof_size_t=0 fi fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_sizeof_size_t" >&5 $as_echo "$ac_cv_sizeof_size_t" >&6; } cat >>confdefs.h <<_ACEOF #define SIZEOF_SIZE_T $ac_cv_sizeof_size_t _ACEOF else as_fn_error $? "Cannot find size_t type" "$LINENO" 5 fi ac_fn_cxx_check_type "$LINENO" "time_t" "ac_cv_type_time_t" "$ac_includes_default" if test "x$ac_cv_type_time_t" = x""yes; then : # The cast to long int works around a bug in the HP C Compiler # version HP92453-01 B.11.11.23709.GP, which incorrectly rejects # declarations like `int a3[[(sizeof (unsigned char)) >= 0]];'. # This bug is HP SR number 8606223364. { $as_echo "$as_me:${as_lineno-$LINENO}: checking size of time_t" >&5 $as_echo_n "checking size of time_t... " >&6; } if test "${ac_cv_sizeof_time_t+set}" = set; then : $as_echo_n "(cached) " >&6 else if ac_fn_cxx_compute_int "$LINENO" "(long int) (sizeof (time_t))" "ac_cv_sizeof_time_t" "$ac_includes_default"; then : else if test "$ac_cv_type_time_t" = yes; then { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error 77 "cannot compute sizeof (time_t) See \`config.log' for more details" "$LINENO" 5 ; } else ac_cv_sizeof_time_t=0 fi fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_sizeof_time_t" >&5 $as_echo "$ac_cv_sizeof_time_t" >&6; } cat >>confdefs.h <<_ACEOF #define SIZEOF_TIME_T $ac_cv_sizeof_time_t _ACEOF else as_fn_error $? "Cannot find time_t type" "$LINENO" 5 fi ac_fn_cxx_check_type "$LINENO" "off_t" "ac_cv_type_off_t" "$ac_includes_default" if test "x$ac_cv_type_off_t" = x""yes; then : # The cast to long int works around a bug in the HP C Compiler # version HP92453-01 B.11.11.23709.GP, which incorrectly rejects # declarations like `int a3[[(sizeof (unsigned char)) >= 0]];'. # This bug is HP SR number 8606223364. { $as_echo "$as_me:${as_lineno-$LINENO}: checking size of off_t" >&5 $as_echo_n "checking size of off_t... 
" >&6; } if test "${ac_cv_sizeof_off_t+set}" = set; then : $as_echo_n "(cached) " >&6 else if ac_fn_cxx_compute_int "$LINENO" "(long int) (sizeof (off_t))" "ac_cv_sizeof_off_t" "$ac_includes_default"; then : else if test "$ac_cv_type_off_t" = yes; then { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error 77 "cannot compute sizeof (off_t) See \`config.log' for more details" "$LINENO" 5 ; } else ac_cv_sizeof_off_t=0 fi fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_sizeof_off_t" >&5 $as_echo "$ac_cv_sizeof_off_t" >&6; } cat >>confdefs.h <<_ACEOF #define SIZEOF_OFF_T $ac_cv_sizeof_off_t _ACEOF else as_fn_error $? "Cannot find off_t type" "$LINENO" 5 fi # Checks for library functions. { $as_echo "$as_me:${as_lineno-$LINENO}: checking for working POSIX fnmatch" >&5 $as_echo_n "checking for working POSIX fnmatch... " >&6; } if test "${ac_cv_func_fnmatch_works+set}" = set; then : $as_echo_n "(cached) " >&6 else # Some versions of Solaris, SCO, and the GNU C Library # have a broken or incompatible fnmatch. # So we run a test program. If we are cross-compiling, take no chance. # Thanks to John Oleynick, Franc,ois Pinard, and Paul Eggert for this test. if test "$cross_compiling" = yes; then : ac_cv_func_fnmatch_works=cross else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include # define y(a, b, c) (fnmatch (a, b, c) == 0) # define n(a, b, c) (fnmatch (a, b, c) == FNM_NOMATCH) int main () { return (!(y ("a*", "abc", 0) && n ("d*/*1", "d/s/1", FNM_PATHNAME) && y ("a\\\\bc", "abc", 0) && n ("a\\\\bc", "abc", FNM_NOESCAPE) && y ("*x", ".x", 0) && n ("*x", ".x", FNM_PERIOD) && 1)); ; return 0; } _ACEOF if ac_fn_cxx_try_run "$LINENO"; then : ac_cv_func_fnmatch_works=yes else ac_cv_func_fnmatch_works=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_fnmatch_works" >&5 $as_echo "$ac_cv_func_fnmatch_works" >&6; } if test $ac_cv_func_fnmatch_works = yes; then : $as_echo "#define HAVE_FNMATCH 1" >>confdefs.h fi for ac_header in vfork.h do : ac_fn_cxx_check_header_mongrel "$LINENO" "vfork.h" "ac_cv_header_vfork_h" "$ac_includes_default" if test "x$ac_cv_header_vfork_h" = x""yes; then : cat >>confdefs.h <<_ACEOF #define HAVE_VFORK_H 1 _ACEOF fi done for ac_func in fork vfork do : as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh` ac_fn_cxx_check_func "$LINENO" "$ac_func" "$as_ac_var" if eval test \"x\$"$as_ac_var"\" = x"yes"; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1 _ACEOF fi done if test "x$ac_cv_func_fork" = xyes; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for working fork" >&5 $as_echo_n "checking for working fork... " >&6; } if test "${ac_cv_func_fork_works+set}" = set; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : ac_cv_func_fork_works=cross else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $ac_includes_default int main () { /* By Ruediger Kuhlmann. 
*/ return fork () < 0; ; return 0; } _ACEOF if ac_fn_cxx_try_run "$LINENO"; then : ac_cv_func_fork_works=yes else ac_cv_func_fork_works=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_fork_works" >&5 $as_echo "$ac_cv_func_fork_works" >&6; } else ac_cv_func_fork_works=$ac_cv_func_fork fi if test "x$ac_cv_func_fork_works" = xcross; then case $host in *-*-amigaos* | *-*-msdosdjgpp*) # Override, as these systems have only a dummy fork() stub ac_cv_func_fork_works=no ;; *) ac_cv_func_fork_works=yes ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: result $ac_cv_func_fork_works guessed because of cross compilation" >&5 $as_echo "$as_me: WARNING: result $ac_cv_func_fork_works guessed because of cross compilation" >&2;} fi ac_cv_func_vfork_works=$ac_cv_func_vfork if test "x$ac_cv_func_vfork" = xyes; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for working vfork" >&5 $as_echo_n "checking for working vfork... " >&6; } if test "${ac_cv_func_vfork_works+set}" = set; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : ac_cv_func_vfork_works=cross else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Thanks to Paul Eggert for this test. */ $ac_includes_default #include #ifdef HAVE_VFORK_H # include #endif /* On some sparc systems, changes by the child to local and incoming argument registers are propagated back to the parent. The compiler is told about this with #include , but some compilers (e.g. gcc -O) don't grok . Test for this by using a static variable whose address is put into a register that is clobbered by the vfork. */ static void #ifdef __cplusplus sparc_address_test (int arg) # else sparc_address_test (arg) int arg; #endif { static pid_t child; if (!child) { child = vfork (); if (child < 0) { perror ("vfork"); _exit(2); } if (!child) { arg = getpid(); write(-1, "", 0); _exit (arg); } } } int main () { pid_t parent = getpid (); pid_t child; sparc_address_test (0); child = vfork (); if (child == 0) { /* Here is another test for sparc vfork register problems. This test uses lots of local variables, at least as many local variables as main has allocated so far including compiler temporaries. 4 locals are enough for gcc 1.40.3 on a Solaris 4.1.3 sparc, but we use 8 to be safe. A buggy compiler should reuse the register of parent for one of the local variables, since it will think that parent can't possibly be used any more in this routine. Assigning to the local variable will thus munge parent in the parent process. */ pid_t p = getpid(), p1 = getpid(), p2 = getpid(), p3 = getpid(), p4 = getpid(), p5 = getpid(), p6 = getpid(), p7 = getpid(); /* Convince the compiler that p..p7 are live; otherwise, it might use the same hardware register for all 8 local variables. */ if (p != p1 || p != p2 || p != p3 || p != p4 || p != p5 || p != p6 || p != p7) _exit(1); /* On some systems (e.g. IRIX 3.3), vfork doesn't separate parent from child file descriptors. If the child closes a descriptor before it execs or exits, this munges the parent's descriptor as well. Test for this by closing stdout in the child. */ _exit(close(fileno(stdout)) != 0); } else { int status; struct stat st; while (wait(&status) != child) ; return ( /* Was there some problem with vforking? */ child < 0 /* Did the child fail? (This shouldn't happen.) */ || status /* Did the vfork/compiler bug occur? 
*/ || parent != getpid() /* Did the file descriptor bug occur? */ || fstat(fileno(stdout), &st) != 0 ); } } _ACEOF if ac_fn_cxx_try_run "$LINENO"; then : ac_cv_func_vfork_works=yes else ac_cv_func_vfork_works=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_vfork_works" >&5 $as_echo "$ac_cv_func_vfork_works" >&6; } fi; if test "x$ac_cv_func_fork_works" = xcross; then ac_cv_func_vfork_works=$ac_cv_func_vfork { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: result $ac_cv_func_vfork_works guessed because of cross compilation" >&5 $as_echo "$as_me: WARNING: result $ac_cv_func_vfork_works guessed because of cross compilation" >&2;} fi if test "x$ac_cv_func_vfork_works" = xyes; then $as_echo "#define HAVE_WORKING_VFORK 1" >>confdefs.h else $as_echo "#define vfork fork" >>confdefs.h fi if test "x$ac_cv_func_fork_works" = xyes; then $as_echo "#define HAVE_WORKING_FORK 1" >>confdefs.h fi if test $ac_cv_c_compiler_gnu = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC needs -traditional" >&5 $as_echo_n "checking whether $CC needs -traditional... " >&6; } if test "${ac_cv_prog_gcc_traditional+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_pattern="Autoconf.*'x'" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include Autoconf TIOCGETP _ACEOF if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | $EGREP "$ac_pattern" >/dev/null 2>&1; then : ac_cv_prog_gcc_traditional=yes else ac_cv_prog_gcc_traditional=no fi rm -f conftest* if test $ac_cv_prog_gcc_traditional = no; then cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include Autoconf TCGETA _ACEOF if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | $EGREP "$ac_pattern" >/dev/null 2>&1; then : ac_cv_prog_gcc_traditional=yes fi rm -f conftest* fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_gcc_traditional" >&5 $as_echo "$ac_cv_prog_gcc_traditional" >&6; } if test $ac_cv_prog_gcc_traditional = yes; then CC="$CC -traditional" fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether lstat correctly handles trailing slash" >&5 $as_echo_n "checking whether lstat correctly handles trailing slash... " >&6; } if test "${ac_cv_func_lstat_dereferences_slashed_symlink+set}" = set; then : $as_echo_n "(cached) " >&6 else rm -f conftest.sym conftest.file echo >conftest.file if test "$as_ln_s" = "ln -s" && ln -s conftest.file conftest.sym; then if test "$cross_compiling" = yes; then : ac_cv_func_lstat_dereferences_slashed_symlink=no else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $ac_includes_default int main () { struct stat sbuf; /* Linux will dereference the symlink and fail, as required by POSIX. That is better in the sense that it means we will not have to compile and use the lstat wrapper. */ return lstat ("conftest.sym/", &sbuf) == 0; ; return 0; } _ACEOF if ac_fn_cxx_try_run "$LINENO"; then : ac_cv_func_lstat_dereferences_slashed_symlink=yes else ac_cv_func_lstat_dereferences_slashed_symlink=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi else # If the `ln -s' command failed, then we probably don't even # have an lstat function. 
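# Illustrative sketch (kept as a comment, not part of the generated checks; the file names and
# the cc invocation below are only an example): the trailing-slash probe above can be
# reproduced by hand as follows.
#   echo > conftest.file && ln -s conftest.file conftest.sym
#   cat > conftest.c <<'EOF'
#   #include <sys/stat.h>
#   int main () { struct stat sb; return lstat ("conftest.sym/", &sb) == 0; }
#   EOF
#   cc conftest.c -o conftest
#   ./conftest && echo "lstat follows the trailing slash (POSIX behaviour)" \
#              || echo "lstat ignores the trailing slash (lstat wrapper needed)"
#   rm -f conftest conftest.c conftest.sym conftest.file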
ac_cv_func_lstat_dereferences_slashed_symlink=no fi rm -f conftest.sym conftest.file fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_lstat_dereferences_slashed_symlink" >&5 $as_echo "$ac_cv_func_lstat_dereferences_slashed_symlink" >&6; } test $ac_cv_func_lstat_dereferences_slashed_symlink = yes && cat >>confdefs.h <<_ACEOF #define LSTAT_FOLLOWS_SLASHED_SYMLINK 1 _ACEOF if test "x$ac_cv_func_lstat_dereferences_slashed_symlink" = xno; then case " $LIBOBJS " in *" lstat.$ac_objext "* ) ;; *) LIBOBJS="$LIBOBJS lstat.$ac_objext" ;; esac fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether lstat accepts an empty string" >&5 $as_echo_n "checking whether lstat accepts an empty string... " >&6; } if test "${ac_cv_func_lstat_empty_string_bug+set}" = set; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : ac_cv_func_lstat_empty_string_bug=yes else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $ac_includes_default int main () { struct stat sbuf; return lstat ("", &sbuf) == 0; ; return 0; } _ACEOF if ac_fn_cxx_try_run "$LINENO"; then : ac_cv_func_lstat_empty_string_bug=no else ac_cv_func_lstat_empty_string_bug=yes fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_lstat_empty_string_bug" >&5 $as_echo "$ac_cv_func_lstat_empty_string_bug" >&6; } if test $ac_cv_func_lstat_empty_string_bug = yes; then case " $LIBOBJS " in *" lstat.$ac_objext "* ) ;; *) LIBOBJS="$LIBOBJS lstat.$ac_objext" ;; esac cat >>confdefs.h <<_ACEOF #define HAVE_LSTAT_EMPTY_STRING_BUG 1 _ACEOF fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether sys/types.h defines makedev" >&5 $as_echo_n "checking whether sys/types.h defines makedev... " >&6; } if test "${ac_cv_header_sys_types_h_makedev+set}" = set; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include int main () { return makedev(0, 0); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_header_sys_types_h_makedev=yes else ac_cv_header_sys_types_h_makedev=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_sys_types_h_makedev" >&5 $as_echo "$ac_cv_header_sys_types_h_makedev" >&6; } if test $ac_cv_header_sys_types_h_makedev = no; then ac_fn_cxx_check_header_mongrel "$LINENO" "sys/mkdev.h" "ac_cv_header_sys_mkdev_h" "$ac_includes_default" if test "x$ac_cv_header_sys_mkdev_h" = x""yes; then : $as_echo "#define MAJOR_IN_MKDEV 1" >>confdefs.h fi if test $ac_cv_header_sys_mkdev_h = no; then ac_fn_cxx_check_header_mongrel "$LINENO" "sys/sysmacros.h" "ac_cv_header_sys_sysmacros_h" "$ac_includes_default" if test "x$ac_cv_header_sys_sysmacros_h" = x""yes; then : $as_echo "#define MAJOR_IN_SYSMACROS 1" >>confdefs.h fi fi fi for ac_header in stdlib.h do : ac_fn_cxx_check_header_mongrel "$LINENO" "stdlib.h" "ac_cv_header_stdlib_h" "$ac_includes_default" if test "x$ac_cv_header_stdlib_h" = x""yes; then : cat >>confdefs.h <<_ACEOF #define HAVE_STDLIB_H 1 _ACEOF fi done { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU libc compatible malloc" >&5 $as_echo_n "checking for GNU libc compatible malloc... 
" >&6; } if test "${ac_cv_func_malloc_0_nonnull+set}" = set; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : ac_cv_func_malloc_0_nonnull=no else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #if defined STDC_HEADERS || defined HAVE_STDLIB_H # include #else char *malloc (); #endif int main () { return ! malloc (0); ; return 0; } _ACEOF if ac_fn_cxx_try_run "$LINENO"; then : ac_cv_func_malloc_0_nonnull=yes else ac_cv_func_malloc_0_nonnull=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_malloc_0_nonnull" >&5 $as_echo "$ac_cv_func_malloc_0_nonnull" >&6; } if test $ac_cv_func_malloc_0_nonnull = yes; then : $as_echo "#define HAVE_MALLOC 1" >>confdefs.h else $as_echo "#define HAVE_MALLOC 0" >>confdefs.h case " $LIBOBJS " in *" malloc.$ac_objext "* ) ;; *) LIBOBJS="$LIBOBJS malloc.$ac_objext" ;; esac $as_echo "#define malloc rpl_malloc" >>confdefs.h fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking return type of signal handlers" >&5 $as_echo_n "checking return type of signal handlers... " >&6; } if test "${ac_cv_type_signal+set}" = set; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include int main () { return *(signal (0, 0)) (0) == 1; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_type_signal=int else ac_cv_type_signal=void fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_type_signal" >&5 $as_echo "$ac_cv_type_signal" >&6; } cat >>confdefs.h <<_ACEOF #define RETSIGTYPE $ac_cv_type_signal _ACEOF { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether stat accepts an empty string" >&5 $as_echo_n "checking whether stat accepts an empty string... " >&6; } if test "${ac_cv_func_stat_empty_string_bug+set}" = set; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : ac_cv_func_stat_empty_string_bug=yes else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $ac_includes_default int main () { struct stat sbuf; return stat ("", &sbuf) == 0; ; return 0; } _ACEOF if ac_fn_cxx_try_run "$LINENO"; then : ac_cv_func_stat_empty_string_bug=no else ac_cv_func_stat_empty_string_bug=yes fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_stat_empty_string_bug" >&5 $as_echo "$ac_cv_func_stat_empty_string_bug" >&6; } if test $ac_cv_func_stat_empty_string_bug = yes; then case " $LIBOBJS " in *" stat.$ac_objext "* ) ;; *) LIBOBJS="$LIBOBJS stat.$ac_objext" ;; esac cat >>confdefs.h <<_ACEOF #define HAVE_STAT_EMPTY_STRING_BUG 1 _ACEOF fi for ac_header in $ac_header_list do : as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` ac_fn_cxx_check_header_compile "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default " if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 _ACEOF fi done { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether utime accepts a null argument" >&5 $as_echo_n "checking whether utime accepts a null argument... 
" >&6; } if test "${ac_cv_func_utime_null+set}" = set; then : $as_echo_n "(cached) " >&6 else rm -f conftest.data; >conftest.data # Sequent interprets utime(file, 0) to mean use start of epoch. Wrong. if test "$cross_compiling" = yes; then : ac_cv_func_utime_null='guessing yes' else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $ac_includes_default #ifdef HAVE_UTIME_H # include #endif int main () { struct stat s, t; return ! (stat ("conftest.data", &s) == 0 && utime ("conftest.data", 0) == 0 && stat ("conftest.data", &t) == 0 && t.st_mtime >= s.st_mtime && t.st_mtime - s.st_mtime < 120); ; return 0; } _ACEOF if ac_fn_cxx_try_run "$LINENO"; then : ac_cv_func_utime_null=yes else ac_cv_func_utime_null=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_utime_null" >&5 $as_echo "$ac_cv_func_utime_null" >&6; } if test "x$ac_cv_func_utime_null" != xno; then ac_cv_func_utime_null=yes $as_echo "#define HAVE_UTIME_NULL 1" >>confdefs.h fi rm -f conftest.data { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether time.h and sys/time.h may both be included" >&5 $as_echo_n "checking whether time.h and sys/time.h may both be included... " >&6; } if test "${ac_cv_header_time+set}" = set; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include #include int main () { if ((struct tm *) 0) return 0; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_header_time=yes else ac_cv_header_time=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_time" >&5 $as_echo "$ac_cv_header_time" >&6; } if test $ac_cv_header_time = yes; then $as_echo "#define TIME_WITH_SYS_TIME 1" >>confdefs.h fi for ac_func in lchown mkdir regcomp rmdir strerror utime fdopendir do : as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh` ac_fn_cxx_check_func "$LINENO" "$ac_func" "$as_ac_var" if eval test \"x\$"$as_ac_var"\" = x"yes"; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1 _ACEOF fi done { $as_echo "$as_me:${as_lineno-$LINENO}: checking for getopt() in " >&5 $as_echo_n "checking for getopt() in ... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #if HAVE_UNISTD_H #include #endif } int main () { getopt(0, 0, 0); ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : $as_echo "#define HAVE_GETOPT_IN_UNISTD_H 1" >>confdefs.h { $as_echo "$as_me:${as_lineno-$LINENO}: result: present" >&5 $as_echo "present" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: absent" >&5 $as_echo "absent" >&6; } fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext { $as_echo "$as_me:${as_lineno-$LINENO}: checking for getopt_long() in " >&5 $as_echo_n "checking for getopt_long() in ... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ extern "C" { #if HAVE_UNISTD_H #include #endif } int main () { getopt_long(0, 0, 0, 0, 0); ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : $as_echo "#define HAVE_GETOPT_LONG_IN_UNISTD_H 1" >>confdefs.h { $as_echo "$as_me:${as_lineno-$LINENO}: result: present" >&5 $as_echo "present" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: absent" >&5 $as_echo "absent" >&6; } fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext { $as_echo "$as_me:${as_lineno-$LINENO}: checking for optreset presence" >&5 $as_echo_n "checking for optreset presence... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #if HAVE_STDIO_H #include #endif #if HAVE_GETOPT_H #include #else #if HAVE_UNISTD_H #include #endif #endif } int main () { int x = optreset; return 0; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : $as_echo "#define HAVE_OPTRESET 1" >>confdefs.h { $as_echo "$as_me:${as_lineno-$LINENO}: result: available" >&5 $as_echo "available" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: not available" >&5 $as_echo "not available" >&6; } fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Door file support" >&5 $as_echo_n "checking for Door file support... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #if HAVE_SYS_STAT_H #include #endif #if HAVE_UNISTD_H #include #endif } int main () { struct stat buf; if(S_ISDOOR(buf.st_mode)) return 0; else return 1; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : $as_echo "#define HAVE_DOOR 1" >>confdefs.h { $as_echo "$as_me:${as_lineno-$LINENO}: result: available" >&5 $as_echo "available" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: not available" >&5 $as_echo "not available" >&6; } fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext # DAR's features # Check whether --enable-libz-linking was given. if test "${enable_libz_linking+set}" = set; then : enableval=$enable_libz_linking; { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: libz compression support has been disabled by user" >&5 $as_echo "$as_me: WARNING: libz compression support has been disabled by user" >&2;} local_libz="no" else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for deflate in -lz" >&5 $as_echo_n "checking for deflate in -lz... " >&6; } if test "${ac_cv_lib_z_deflate+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lz $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char deflate (); int main () { return deflate (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_lib_z_deflate=yes else ac_cv_lib_z_deflate=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_z_deflate" >&5 $as_echo "$ac_cv_lib_z_deflate" >&6; } if test "x$ac_cv_lib_z_deflate" = x""yes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBZ 1 _ACEOF LIBS="-lz $LIBS" else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: library zlib not found" >&5 $as_echo "$as_me: WARNING: library zlib not found" >&2;} fi ac_fn_cxx_check_header_mongrel "$LINENO" "zlib.h" "ac_cv_header_zlib_h" "$ac_includes_default" if test "x$ac_cv_header_zlib_h" = x""yes; then : local_libz="yes" $as_echo "#define HAVE_ZLIB_H 1" >>confdefs.h else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Cannot find zlib.h header file" >&5 $as_echo "$as_me: WARNING: Cannot find zlib.h header file" >&2;} local_libz="no" fi if test "$local_libz" = "yes" ; then cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #if HAVE_ZLIB_H #include #endif } int main () { z_stream *ptr = (z_stream *)0; deflate(ptr, 0); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : $as_echo "#define LIBZ_AVAILABLE 1" >>confdefs.h else local_libz="no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: libz compression support not available" >&5 $as_echo "$as_me: WARNING: libz compression support not available" >&2;} fi fi # Check whether --enable-libbz2-linking was given. if test "${enable_libbz2_linking+set}" = set; then : enableval=$enable_libbz2_linking; { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: libbz2 compression support has been disabled by user" >&5 $as_echo "$as_me: WARNING: libbz2 compression support has been disabled by user" >&2;} local_libbz2="no" else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for BZ2_bzCompress in -lbz2" >&5 $as_echo_n "checking for BZ2_bzCompress in -lbz2... " >&6; } if test "${ac_cv_lib_bz2_BZ2_bzCompress+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lbz2 $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char BZ2_bzCompress (); int main () { return BZ2_bzCompress (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_lib_bz2_BZ2_bzCompress=yes else ac_cv_lib_bz2_BZ2_bzCompress=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_bz2_BZ2_bzCompress" >&5 $as_echo "$ac_cv_lib_bz2_BZ2_bzCompress" >&6; } if test "x$ac_cv_lib_bz2_BZ2_bzCompress" = x""yes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBBZ2 1 _ACEOF LIBS="-lbz2 $LIBS" else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: library libbz2 not found" >&5 $as_echo "$as_me: WARNING: library libbz2 not found" >&2;} fi ac_fn_cxx_check_header_mongrel "$LINENO" "bzlib.h" "ac_cv_header_bzlib_h" "$ac_includes_default" if test "x$ac_cv_header_bzlib_h" = x""yes; then : local_libbz2="yes" $as_echo "#define HAVE_BZLIB_H 1" >>confdefs.h else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Cannot find bzlib.h header file" >&5 $as_echo "$as_me: WARNING: Cannot find bzlib.h header file" >&2;} local_libbz2="no" fi if test "$local_libbz2" = "yes" ; then cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #if HAVE_BZLIB_H #include #endif } int main () { bz_stream *ptr = (bz_stream *)0; BZ2_bzCompress(ptr, 0); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : $as_echo "#define LIBBZ2_AVAILABLE 1" >>confdefs.h else local_libbz2="no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: libbz2 compression support not available" >&5 $as_echo "$as_me: WARNING: libbz2 compression support not available" >&2;} fi fi # Check whether --enable-liblzo2-linking was given. if test "${enable_liblzo2_linking+set}" = set; then : enableval=$enable_liblzo2_linking; { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: lzo compression support has been disabled by user" >&5 $as_echo "$as_me: WARNING: lzo compression support has been disabled by user" >&2;} local_liblzo2="no" else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for lzo1x_1_compress in -llzo2" >&5 $as_echo_n "checking for lzo1x_1_compress in -llzo2... " >&6; } if test "${ac_cv_lib_lzo2_lzo1x_1_compress+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-llzo2 $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char lzo1x_1_compress (); int main () { return lzo1x_1_compress (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_lib_lzo2_lzo1x_1_compress=yes else ac_cv_lib_lzo2_lzo1x_1_compress=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_lzo2_lzo1x_1_compress" >&5 $as_echo "$ac_cv_lib_lzo2_lzo1x_1_compress" >&6; } if test "x$ac_cv_lib_lzo2_lzo1x_1_compress" = x""yes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBLZO2 1 _ACEOF LIBS="-llzo2 $LIBS" else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: library liblzo2 not found" >&5 $as_echo "$as_me: WARNING: library liblzo2 not found" >&2;} fi ac_fn_cxx_check_header_mongrel "$LINENO" "lzo/lzo1x.h" "ac_cv_header_lzo_lzo1x_h" "$ac_includes_default" if test "x$ac_cv_header_lzo_lzo1x_h" = x""yes; then : local_liblzo2="yes" $as_echo "#define HAVE_LZO_LZO1X_H 1" >>confdefs.h else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Cannot find lzo/lzo1x.h header file" >&5 $as_echo "$as_me: WARNING: Cannot find lzo/lzo1x.h header file" >&2;} local_liblzo2="no" fi if test "$local_liblzo2" = "yes" ; then cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #if HAVE_LZO_LZO1X_H #include #endif } int main () { int x = lzo1x_1_compress(0, 0, 0, 0, 0); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : $as_echo "#define LIBLZO2_AVAILABLE 1" >>confdefs.h else local_liblzo2="no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: lzo compression support not available" >&5 $as_echo "$as_me: WARNING: lzo compression support not available" >&2;} fi fi # Check whether --enable-libgcrypt-linking was given. if test "${enable_libgcrypt_linking+set}" = set; then : enableval=$enable_libgcrypt_linking; { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: strong encryption support has been disabled by user" >&5 $as_echo "$as_me: WARNING: strong encryption support has been disabled by user" >&2;} local_crypto="no" else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for gpg_err_init in -lgpg-error" >&5 $as_echo_n "checking for gpg_err_init in -lgpg-error... " >&6; } if test "${ac_cv_lib_gpg_error_gpg_err_init+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lgpg-error $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char gpg_err_init (); int main () { return gpg_err_init (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_lib_gpg_error_gpg_err_init=yes else ac_cv_lib_gpg_error_gpg_err_init=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_gpg_error_gpg_err_init" >&5 $as_echo "$ac_cv_lib_gpg_error_gpg_err_init" >&6; } if test "x$ac_cv_lib_gpg_error_gpg_err_init" = x""yes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBGPG_ERROR 1 _ACEOF LIBS="-lgpg-error $LIBS" fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for gcry_check_version in -lgcrypt" >&5 $as_echo_n "checking for gcry_check_version in -lgcrypt... 
" >&6; } if test "${ac_cv_lib_gcrypt_gcry_check_version+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lgcrypt $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char gcry_check_version (); int main () { return gcry_check_version (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_lib_gcrypt_gcry_check_version=yes else ac_cv_lib_gcrypt_gcry_check_version=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_gcrypt_gcry_check_version" >&5 $as_echo "$ac_cv_lib_gcrypt_gcry_check_version" >&6; } if test "x$ac_cv_lib_gcrypt_gcry_check_version" = x""yes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBGCRYPT 1 _ACEOF LIBS="-lgcrypt $LIBS" fi ac_fn_cxx_check_header_mongrel "$LINENO" "gcrypt.h" "ac_cv_header_gcrypt_h" "$ac_includes_default" if test "x$ac_cv_header_gcrypt_h" = x""yes; then : local_crypto="yes" $as_echo "#define HAVE_GCRYPT_H 1" >>confdefs.h else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Cannt find gcrypt.h header file" >&5 $as_echo "$as_me: WARNING: Cannt find gcrypt.h header file" >&2;} local_crypto="no" fi if test "$local_crypto" = "yes" ; then $as_echo "#define MIN_VERSION_GCRYPT \"1.4.0\"" >>confdefs.h { $as_echo "$as_me:${as_lineno-$LINENO}: checking for libgcrypt usability" >&5 $as_echo_n "checking for libgcrypt usability... " >&6; } if test "$cross_compiling" = yes; then : { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "cannot run test program while cross compiling See \`config.log' for more details" "$LINENO" 5 ; } else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #if HAVE_GCRYPT_H #include #endif } #include using namespace std; int main () { if(!gcry_check_version(MIN_VERSION_GCRYPT)) { cout << "ligcrypt version too low, minimum version is " << MIN_VERSION_GCRYPT << endl; exit(1); } else exit(0); ; return 0; } _ACEOF if ac_fn_cxx_try_run "$LINENO"; then : $as_echo "#define CRYPTO_AVAILABLE 1" >>confdefs.h { $as_echo "$as_me:${as_lineno-$LINENO}: result: ok" >&5 $as_echo "ok" >&6; } else if test "$?" = "1" ; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: failed: need libgcypt >= MIN_VERSION_GCRYPT, disabling strong encryption support" >&5 $as_echo "failed: need libgcypt >= MIN_VERSION_GCRYPT, disabling strong encryption support" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: failed: libgcrypt is unusable, cannot even call gcry_check_version(). Disabling strong encryption support" >&5 $as_echo "failed: libgcrypt is unusable, cannot even call gcry_check_version(). Disabling strong encryption support" >&6; } fi local_crypto="yes" fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: strong encryption support not available" >&5 $as_echo "$as_me: WARNING: strong encryption support not available" >&2;} fi fi # Check whether --enable-ea-support was given. 
if test "${enable_ea_support+set}" = set; then : enableval=$enable_ea_support; { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Extended Attribute support" >&5 $as_echo_n "checking for Extended Attribute support... " >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: disabled" >&5 $as_echo "disabled" >&6; } else for ac_header in attr/xattr.h do : ac_fn_cxx_check_header_mongrel "$LINENO" "attr/xattr.h" "ac_cv_header_attr_xattr_h" "$ac_includes_default" if test "x$ac_cv_header_attr_xattr_h" = x""yes; then : cat >>confdefs.h <<_ACEOF #define HAVE_ATTR_XATTR_H 1 _ACEOF fi done { $as_echo "$as_me:${as_lineno-$LINENO}: checking for lgetxattr in -lattr" >&5 $as_echo_n "checking for lgetxattr in -lattr... " >&6; } if test "${ac_cv_lib_attr_lgetxattr+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lattr $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char lgetxattr (); int main () { return lgetxattr (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_lib_attr_lgetxattr=yes else ac_cv_lib_attr_lgetxattr=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_attr_lgetxattr" >&5 $as_echo "$ac_cv_lib_attr_lgetxattr" >&6; } if test "x$ac_cv_lib_attr_lgetxattr" = x""yes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBATTR 1 _ACEOF LIBS="-lattr $LIBS" fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Unix Extended Attribute support" >&5 $as_echo_n "checking for Unix Extended Attribute support... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #if HAVE_SYS_TYPES_H #include #endif #if HAVE_ATTR_XATTR_H #include #endif } int main () { lgetxattr((char *)0, (char *)0, (void *)0, 0); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : $as_echo "#define EA_SUPPORT /**/" >>confdefs.h local_ea_support="yes" { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } for ac_header in sys/xattr.h do : ac_fn_cxx_check_header_mongrel "$LINENO" "sys/xattr.h" "ac_cv_header_sys_xattr_h" "$ac_includes_default" if test "x$ac_cv_header_sys_xattr_h" = x""yes; then : cat >>confdefs.h <<_ACEOF #define HAVE_SYS_XATTR_H 1 _ACEOF fi done { $as_echo "$as_me:${as_lineno-$LINENO}: checking for fgetxattr in -lc" >&5 $as_echo_n "checking for fgetxattr in -lc... " >&6; } if test "${ac_cv_lib_c_fgetxattr+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lc $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char fgetxattr (); int main () { return fgetxattr (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_lib_c_fgetxattr=yes else ac_cv_lib_c_fgetxattr=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_c_fgetxattr" >&5 $as_echo "$ac_cv_lib_c_fgetxattr" >&6; } if test "x$ac_cv_lib_c_fgetxattr" = x""yes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBC 1 _ACEOF LIBS="-lc $LIBS" fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Mac OS X Extended Attribute support" >&5 $as_echo_n "checking for Mac OS X Extended Attribute support... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #if HAVE_SYS_XATTR_H #include #endif } int main () { getxattr((char *)0, (char *)0, (void *)0, 0, 0, XATTR_NOFOLLOW); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : $as_echo "#define EA_SUPPORT /**/" >>confdefs.h $as_echo "#define OSX_EA_SUPPORT /**/" >>confdefs.h local_ea_support="yes" { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking ext2fs.h availability" >&5 $as_echo_n "checking ext2fs.h availability... " >&6; } # Check whether --enable-nodump-flag was given. if test "${enable_nodump_flag+set}" = set; then : enableval=$enable_nodump_flag; { $as_echo "$as_me:${as_lineno-$LINENO}: result: nodump flag disabled" >&5 $as_echo "nodump flag disabled" >&6; } else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #include #if HAVE_SYS_IOCTL_H #include #endif } int main () { int fd, f; ioctl(fd, EXT2_IOC_GETFLAGS, &f); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : $as_echo "#define LIBDAR_NODUMP_FEATURE NODUMP_EXT2FS" >>confdefs.h local_nodump_feature="yes" { $as_echo "$as_me:${as_lineno-$LINENO}: result: found " >&5 $as_echo "found " >&6; } else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #include #if HAVE_SYS_IOCTL_H #include #endif } int main () { int fd, f; ioctl(fd, EXT2_IOC_GETFLAGS, &f); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : $as_echo "#define LIBDAR_NODUMP_FEATURE NODUMP_LINUX" >>confdefs.h local_nodump_feature="yes" { $as_echo "$as_me:${as_lineno-$LINENO}: result: found " >&5 $as_echo "found " >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: NOT FOUND" >&5 $as_echo "NOT FOUND" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cannot find ext2_fs.h header file, nodump-flag feature will not be activated" >&5 $as_echo "$as_me: WARNING: cannot find ext2_fs.h header file, nodump-flag feature will not be activated" >&2;} fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi # Check whether --enable-gnugetopt was given. if test "${enable_gnugetopt+set}" = set; then : enableval=$enable_gnugetopt; else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for getopt_long in -lgnugetopt" >&5 $as_echo_n "checking for getopt_long in -lgnugetopt... 
" >&6; } if test "${ac_cv_lib_gnugetopt_getopt_long+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lgnugetopt $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char getopt_long (); int main () { return getopt_long (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_lib_gnugetopt_getopt_long=yes else ac_cv_lib_gnugetopt_getopt_long=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_gnugetopt_getopt_long" >&5 $as_echo "$ac_cv_lib_gnugetopt_getopt_long" >&6; } if test "x$ac_cv_lib_gnugetopt_getopt_long" = x""yes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBGNUGETOPT 1 _ACEOF LIBS="-lgnugetopt $LIBS" fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for getopt() availability" >&5 $as_echo_n "checking for getopt() availability... " >&6; }; cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #if HAVE_STDIO_H #include #endif #if HAVE_GETOPT_H #include #else #if HAVE_UNISTD_H #include #endif #endif } int main () { getopt(0, 0, 0); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: result: ok" >&5 $as_echo "ok" >&6; } else as_fn_error $? "absent but required" "$LINENO" 5 fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext { $as_echo "$as_me:${as_lineno-$LINENO}: checking for getopt_long() availability" >&5 $as_echo_n "checking for getopt_long() availability... " >&6; }; cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #if HAVE_STDIO_H #include #endif #if HAVE_GETOPT_H #include #else #if HAVE_UNISTD_H #include #endif #endif } int main () { getopt_long(0, 0, 0, 0, 0); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : local_have_getopt_long="yes" $as_echo "#define HAVE_GETOPT_LONG 1" >>confdefs.h { $as_echo "$as_me:${as_lineno-$LINENO}: result: available" >&5 $as_echo "available" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: NOT AVAILABLE" >&5 $as_echo "NOT AVAILABLE" >&6; } fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext # Check whether --enable-examples was given. if test "${enable_examples+set}" = set; then : enableval=$enable_examples; examples="yes" else examples="false" fi # Check whether --enable-os-bits was given. if test "${enable_os_bits+set}" = set; then : enableval=$enable_os_bits; cat >>confdefs.h <<_ACEOF #define OS_BITS $enableval _ACEOF else ac_fn_cxx_check_header_mongrel "$LINENO" "inttypes.h" "ac_cv_header_inttypes_h" "$ac_includes_default" if test "x$ac_cv_header_inttypes_h" = x""yes; then : cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #include } int main () { uint16_t a = 0; uint32_t b = 0; uint64_t c = 0; int16_t d = 0; int32_t e = 0; int64_t f = 0; return a+b+c+d+e+f; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : else as_fn_error $? "Cannot find *int*_t type declarations in headerfile, --enable-os-bits=... option must be used" "$LINENO" 5 fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext else as_fn_error $? "Cannot find inttypes.h headerfile, --enable-os-bits=... 
option must be used" "$LINENO" 5 fi fi # Check whether --enable-mode was given. if test "${enable_mode+set}" = set; then : enableval=$enable_mode; build_mode=$enableval fi # Check whether --enable-furtive-read was given. if test "${enable_furtive_read+set}" = set; then : enableval=$enable_furtive_read; local_furtive_read_mode="no" { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Furtive read mode disabled" >&5 $as_echo "$as_me: WARNING: Furtive read mode disabled" >&2;} else { $as_echo "$as_me:${as_lineno-$LINENO}: checking furtive read mode availability" >&5 $as_echo_n "checking furtive read mode availability... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #if HAVE_SYS_TYPE_H #include #endif #if HAVE_SYS_STAT_H #include #endif #if HAVE_FCNTL_H #include #endif #if HAVE_DIRENT_H #include #endif } int main () { int x = O_NOATIME; int fd = open("/",O_RDONLY|O_NOATIME); #if HAVE_FDOPENDIR (void)fdopendir(fd); #else syntaxically incorrect statement here to force compilation to fail! #endif ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : $as_echo "#define FURTIVE_READ_MODE_AVAILABLE 1" >>confdefs.h { $as_echo "$as_me:${as_lineno-$LINENO}: result: available" >&5 $as_echo "available" >&6; } local_furtive_read_mode="yes" else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no available" >&5 $as_echo "no available" >&6; } local_furtive_read_mode="no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi # Check whether --enable-debug was given. if test "${enable_debug+set}" = set; then : enableval=$enable_debug; CXXFLAGS="-ggdb -Wall" CFLAGS="-ggdb -Wall" debug_static="yes" $as_echo "#define LIBDAR_NO_OPTIMIZATION 1" >>confdefs.h else debug_static="no" fi # Check whether --enable-pedantic was given. if test "${enable_pedantic+set}" = set; then : enableval=$enable_pedantic; CXXFLAGS="$CXXFLAGS -pedantic -Wno-long-long" fi # Check whether --enable-build-usage was given. if test "${enable_build_usage+set}" = set; then : enableval=$enable_build_usage; export CPPFLAGS="$CPPFLAGS -I/usr/include/libxml2" ac_fn_cxx_check_header_mongrel "$LINENO" "libxml/tree.h" "ac_cv_header_libxml_tree_h" "$ac_includes_default" if test "x$ac_cv_header_libxml_tree_h" = x""yes; then : $as_echo "#define HAVE_LIBXML_TREE_H 1" >>confdefs.h else as_fn_error $? "Cannot find libxml/tree.h header file" "$LINENO" 5 fi ac_fn_cxx_check_header_mongrel "$LINENO" "libxml/parser.h" "ac_cv_header_libxml_parser_h" "$ac_includes_default" if test "x$ac_cv_header_libxml_parser_h" = x""yes; then : $as_echo "#define HAVE_LIBXML_PARSER_H 1" >>confdefs.h else as_fn_error $? "Cannot find libxml/parser.h header file" "$LINENO" 5 fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for reference in -lxml2" >&5 $as_echo_n "checking for reference in -lxml2... " >&6; } if test "${ac_cv_lib_xml2_reference+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lxml2 $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char reference (); int main () { return reference (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_lib_xml2_reference=yes else ac_cv_lib_xml2_reference=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_xml2_reference" >&5 $as_echo "$ac_cv_lib_xml2_reference" >&6; } if test "x$ac_cv_lib_xml2_reference" = x""yes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBXML2 1 _ACEOF LIBS="-lxml2 $LIBS" else as_fn_error $? "library xml2 not found" "$LINENO" 5 fi build_usage="yes" else build_usage="no" fi # Check whether --enable-build-html was given. if test "${enable_build_html+set}" = set; then : enableval=$enable_build_html; doxygen="no" groff="no" else # Extract the first word of "doxygen", so it can be a program name with args. set dummy doxygen; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_doxygen+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$doxygen"; then ac_cv_prog_doxygen="$doxygen" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_doxygen="yes" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS test -z "$ac_cv_prog_doxygen" && ac_cv_prog_doxygen="no" fi fi doxygen=$ac_cv_prog_doxygen if test -n "$doxygen"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $doxygen" >&5 $as_echo "$doxygen" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for doxygen version" >&5 $as_echo_n "checking for doxygen version... " >&6; } if test "$doxygen" = "yes" ; then n1=`doxygen --version | cut -d '.' -f 1` n2=`doxygen --version | cut -d '.' -f 2` if test $n1 -gt 1 -o $n2 -ge 3 ; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: >= 1.3" >&5 $as_echo " >= 1.3" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: too old (< 1.3) ignoring doxygen" >&5 $as_echo " too old (< 1.3) ignoring doxygen" >&6; } doxygen="no" fi fi # Extract the first word of "man", so it can be a program name with args. set dummy man; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_tmp+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$tmp"; then ac_cv_prog_tmp="$tmp" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_tmp="yes" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS test -z "$ac_cv_prog_tmp" && ac_cv_prog_tmp="no" fi fi tmp=$ac_cv_prog_tmp if test -n "$tmp"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $tmp" >&5 $as_echo "$tmp" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "$tmp" = "yes" ; then # Extract the first word of "groff", so it can be a program name with args. set dummy groff; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_groff+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$groff"; then ac_cv_prog_groff="$groff" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_groff="yes" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS test -z "$ac_cv_prog_groff" && ac_cv_prog_groff="no" fi fi groff=$ac_cv_prog_groff if test -n "$groff"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $groff" >&5 $as_echo "$groff" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi else groff="no"; fi fi # Check whether --enable-special-alloc was given. if test "${enable_special_alloc+set}" = set; then : enableval=$enable_special_alloc; else $as_echo "#define LIBDAR_SPECIAL_ALLOC /**/" >>confdefs.h local_special_alloc="yes" fi # Check whether --enable-upx was given. if test "${enable_upx+set}" = set; then : enableval=$enable_upx; { $as_echo "$as_me:${as_lineno-$LINENO}: ignoring UPX" >&5 $as_echo "$as_me: ignoring UPX" >&6;} upx="no" else # Extract the first word of "upx", so it can be a program name with args. set dummy upx; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if test "${ac_cv_prog_upx+set}" = set; then : $as_echo_n "(cached) " >&6 else if test -n "$upx"; then ac_cv_prog_upx="$upx" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if { test -f "$as_dir/$ac_word$ac_exec_ext" && $as_test_x "$as_dir/$ac_word$ac_exec_ext"; }; then ac_cv_prog_upx="yes" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS test -z "$ac_cv_prog_upx" && ac_cv_prog_upx="no" fi fi upx=$ac_cv_prog_upx if test -n "$upx"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $upx" >&5 $as_echo "$upx" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi # Check whether --enable-fast-dir was given. if test "${enable_fast_dir+set}" = set; then : enableval=$enable_fast_dir; else $as_echo "#define LIBDAR_FAST_DIR 1" >>confdefs.h local_fast_dir="yes" fi # Check whether --enable-thread-safe was given. 
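# Usage sketch (kept as a comment, not autoconf output; assumes the standard --disable-FEATURE
# mapping of the switches handled above):
#   ./configure --disable-special-alloc --disable-upx --disable-fast-dir
#     # keep the default memory allocation scheme, do not compress the resulting
#     # binaries with upx, and leave out the LIBDAR_FAST_DIR optimization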
if test "${enable_thread_safe+set}" = set; then : enableval=$enable_thread_safe; { $as_echo "$as_me:${as_lineno-$LINENO}: thread-safe support disabled" >&5 $as_echo "$as_me: thread-safe support disabled" >&6;} else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for pthread_mutex_init in -lpthread" >&5 $as_echo_n "checking for pthread_mutex_init in -lpthread... " >&6; } if test "${ac_cv_lib_pthread_pthread_mutex_init+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpthread $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char pthread_mutex_init (); int main () { return pthread_mutex_init (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_lib_pthread_pthread_mutex_init=yes else ac_cv_lib_pthread_pthread_mutex_init=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_pthread_pthread_mutex_init" >&5 $as_echo "$ac_cv_lib_pthread_pthread_mutex_init" >&6; } if test "x$ac_cv_lib_pthread_pthread_mutex_init" = x""yes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBPTHREAD 1 _ACEOF LIBS="-lpthread $LIBS" fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for POSIX mutex" >&5 $as_echo_n "checking for POSIX mutex... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #if HAVE_PTHREAD_H #include #endif } int main () { pthread_mutex_t mutex; pthread_mutex_init(&mutex, (const pthread_mutexattr_t*)0); pthread_mutex_lock(&mutex); pthread_mutex_unlock(&mutex); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : $as_echo "#define MUTEX_WORKS 1" >>confdefs.h local_mutex_works="yes" { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi # Check whether --enable-execinfo was given. if test "${enable_execinfo+set}" = set; then : enableval=$enable_execinfo; { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: ignoring execinfo even if available" >&5 $as_echo "$as_me: WARNING: ignoring execinfo even if available" >&2;} else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for backtrace in -lexecinfo" >&5 $as_echo_n "checking for backtrace in -lexecinfo... " >&6; } if test "${ac_cv_lib_execinfo_backtrace+set}" = set; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lexecinfo $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char backtrace (); int main () { return backtrace (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_lib_execinfo_backtrace=yes else ac_cv_lib_execinfo_backtrace=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_execinfo_backtrace" >&5 $as_echo "$ac_cv_lib_execinfo_backtrace" >&6; } if test "x$ac_cv_lib_execinfo_backtrace" = x""yes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBEXECINFO 1 _ACEOF LIBS="-lexecinfo $LIBS" fi , for ac_header in execinfo.h do : ac_fn_cxx_check_header_mongrel "$LINENO" "execinfo.h" "ac_cv_header_execinfo_h" "$ac_includes_default" if test "x$ac_cv_header_execinfo_h" = x""yes; then : cat >>confdefs.h <<_ACEOF #define HAVE_EXECINFO_H 1 _ACEOF fi done fi # Check whether --enable-profiling was given. if test "${enable_profiling+set}" = set; then : enableval=$enable_profiling; profiling="yes" fi # Check whether --enable-debug-memory was given. if test "${enable_debug_memory+set}" = set; then : enableval=$enable_debug_memory; $as_echo "#define LIBDAR_DEBUG_MEMORY 1" >>confdefs.h fi # Check whether --enable-dar-static was given. if test "${enable_dar_static+set}" = set; then : enableval=$enable_dar_static; build_static="no" else build_static="yes" fi MEM_LDFLAGS=$LDFLAGS LDFLAGS="-static" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #include } int main () { printf("Hello World!"); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : static_pb="no" else static_pb="yes" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LDFLAGS=$MEM_LDFLAGS if test $examples = "yes"; then MAKE_ALL_DIR_TRUE= MAKE_ALL_DIR_FALSE='#' else MAKE_ALL_DIR_TRUE='#' MAKE_ALL_DIR_FALSE= fi if test $build_usage = "yes"; then MAKE_USAGE_TRUE= MAKE_USAGE_FALSE='#' else MAKE_USAGE_TRUE='#' MAKE_USAGE_FALSE= fi if test $build_static = "yes" -a $static_pb = "no"; then BUILD_DAR_STATIC_TRUE= BUILD_DAR_STATIC_FALSE='#' else BUILD_DAR_STATIC_TRUE='#' BUILD_DAR_STATIC_FALSE= fi if test $debug_static = "yes" -a $static_pb = "no"; then DEBUG_STATIC_TRUE= DEBUG_STATIC_FALSE='#' else DEBUG_STATIC_TRUE='#' DEBUG_STATIC_FALSE= fi if test "$build_mode" = "32"; then BUILD_MODE32_TRUE= BUILD_MODE32_FALSE='#' else BUILD_MODE32_TRUE='#' BUILD_MODE32_FALSE= fi if test "$build_mode" = "64"; then BUILD_MODE64_TRUE= BUILD_MODE64_FALSE='#' else BUILD_MODE64_TRUE='#' BUILD_MODE64_FALSE= fi if test "$upx" = "yes"; then USE_UPX_TRUE= USE_UPX_FALSE='#' else USE_UPX_TRUE='#' USE_UPX_FALSE= fi if test "$doxygen" = "yes"; then USE_DOXYGEN_TRUE= USE_DOXYGEN_FALSE='#' else USE_DOXYGEN_TRUE='#' USE_DOXYGEN_FALSE= fi if test "$groff" = "yes"; then USE_GROFF_TRUE= USE_GROFF_FALSE='#' else USE_GROFF_TRUE='#' USE_GROFF_FALSE= fi if test "$profiling" = "yes"; then PROFILING_TRUE= PROFILING_FALSE='#' else PROFILING_TRUE='#' PROFILING_FALSE= fi UPX_PROG=upx DOXYGEN_PROG=doxygen # defaults # hack from litool mailing-list to know from source point of view whether we are compiling for dynamic or static way ac_config_commands="$ac_config_commands hack-libtool" ac_config_files="$ac_config_files Makefile man/Makefile src/Makefile src/libdar/Makefile src/dar_suite/Makefile src/testing/Makefile src/examples/Makefile doc/Makefile doc/samples/Makefile misc/Makefile doc/mini-howto/Makefile src/libdar/libdar.pc.tmpl doc/man/Makefile src/check/Makefile po/Makefile.in" cat >confcache 
<<\_ACEOF # This file is a shell script that caches the results of configure # tests run on this system so they can be shared between configure # scripts and configure runs, see configure's option --config-cache. # It is not useful on other systems. If it contains results you don't # want to keep, you may remove or edit it. # # config.status only pays attention to the cache file if you give it # the --recheck option to rerun configure. # # `ac_cv_env_foo' variables (set or unset) will be overridden when # loading this file, other *unset* `ac_cv_foo' will be assigned the # following values. _ACEOF # The following way of writing the cache mishandles newlines in values, # but we know of no workaround that is simple, portable, and efficient. # So, we kill variables containing newlines. # Ultrix sh set writes to stderr and can't be redirected directly, # and sets the high bit in the cache file unless we assign to the vars. ( for ac_var in `(set) 2>&1 | sed -n 's/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'`; do eval ac_val=\$$ac_var case $ac_val in #( *${as_nl}*) case $ac_var in #( *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 $as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; esac case $ac_var in #( _ | IFS | as_nl) ;; #( BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( *) { eval $ac_var=; unset $ac_var;} ;; esac ;; esac done (set) 2>&1 | case $as_nl`(ac_space=' '; set) 2>&1` in #( *${as_nl}ac_space=\ *) # `set' does not quote correctly, so add quotes: double-quote # substitution turns \\\\ into \\, and sed turns \\ into \. sed -n \ "s/'/'\\\\''/g; s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p" ;; #( *) # `set' quotes correctly as required by POSIX, so do not add quotes. sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" ;; esac | sort ) | sed ' /^ac_cv_env_/b end t clear :clear s/^\([^=]*\)=\(.*[{}].*\)$/test "${\1+set}" = set || &/ t end s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/ :end' >>confcache if diff "$cache_file" confcache >/dev/null 2>&1; then :; else if test -w "$cache_file"; then test "x$cache_file" != "x/dev/null" && { $as_echo "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5 $as_echo "$as_me: updating cache $cache_file" >&6;} cat confcache >$cache_file else { $as_echo "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5 $as_echo "$as_me: not updating unwritable cache $cache_file" >&6;} fi fi rm -f confcache test "x$prefix" = xNONE && prefix=$ac_default_prefix # Let make expand exec_prefix. test "x$exec_prefix" = xNONE && exec_prefix='${prefix}' DEFS=-DHAVE_CONFIG_H ac_libobjs= ac_ltlibobjs= U= for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue # 1. Remove the extension, and $U if already installed. ac_script='s/\$U\././;s/\.o$//;s/\.obj$//' ac_i=`$as_echo "$ac_i" | sed "$ac_script"` # 2. Prepend LIBOBJDIR. When used with automake>=1.10 LIBOBJDIR # will be set to the directory where LIBOBJS objects are built. as_fn_append ac_libobjs " \${LIBOBJDIR}$ac_i\$U.$ac_objext" as_fn_append ac_ltlibobjs " \${LIBOBJDIR}$ac_i"'$U.lo' done LIBOBJS=$ac_libobjs LTLIBOBJS=$ac_ltlibobjs if test -n "$EXEEXT"; then am__EXEEXT_TRUE= am__EXEEXT_FALSE='#' else am__EXEEXT_TRUE='#' am__EXEEXT_FALSE= fi if test -z "${AMDEP_TRUE}" && test -z "${AMDEP_FALSE}"; then as_fn_error $? "conditional \"AMDEP\" was never defined. Usually this means the macro was only invoked conditionally." 
"$LINENO" 5 fi if test -z "${am__fastdepCC_TRUE}" && test -z "${am__fastdepCC_FALSE}"; then as_fn_error $? "conditional \"am__fastdepCC\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${am__fastdepCXX_TRUE}" && test -z "${am__fastdepCXX_FALSE}"; then as_fn_error $? "conditional \"am__fastdepCXX\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${am__fastdepCXX_TRUE}" && test -z "${am__fastdepCXX_FALSE}"; then as_fn_error $? "conditional \"am__fastdepCXX\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${am__fastdepCC_TRUE}" && test -z "${am__fastdepCC_FALSE}"; then as_fn_error $? "conditional \"am__fastdepCC\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${am__fastdepCXX_TRUE}" && test -z "${am__fastdepCXX_FALSE}"; then as_fn_error $? "conditional \"am__fastdepCXX\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${MAKE_ALL_DIR_TRUE}" && test -z "${MAKE_ALL_DIR_FALSE}"; then as_fn_error $? "conditional \"MAKE_ALL_DIR\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${MAKE_USAGE_TRUE}" && test -z "${MAKE_USAGE_FALSE}"; then as_fn_error $? "conditional \"MAKE_USAGE\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${BUILD_DAR_STATIC_TRUE}" && test -z "${BUILD_DAR_STATIC_FALSE}"; then as_fn_error $? "conditional \"BUILD_DAR_STATIC\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${DEBUG_STATIC_TRUE}" && test -z "${DEBUG_STATIC_FALSE}"; then as_fn_error $? "conditional \"DEBUG_STATIC\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${BUILD_MODE32_TRUE}" && test -z "${BUILD_MODE32_FALSE}"; then as_fn_error $? "conditional \"BUILD_MODE32\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${BUILD_MODE64_TRUE}" && test -z "${BUILD_MODE64_FALSE}"; then as_fn_error $? "conditional \"BUILD_MODE64\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${USE_UPX_TRUE}" && test -z "${USE_UPX_FALSE}"; then as_fn_error $? "conditional \"USE_UPX\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${USE_DOXYGEN_TRUE}" && test -z "${USE_DOXYGEN_FALSE}"; then as_fn_error $? "conditional \"USE_DOXYGEN\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${USE_GROFF_TRUE}" && test -z "${USE_GROFF_FALSE}"; then as_fn_error $? "conditional \"USE_GROFF\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${PROFILING_TRUE}" && test -z "${PROFILING_FALSE}"; then as_fn_error $? "conditional \"PROFILING\" was never defined. Usually this means the macro was only invoked conditionally." 
"$LINENO" 5 fi : ${CONFIG_STATUS=./config.status} ac_write_fail=0 ac_clean_files_save=$ac_clean_files ac_clean_files="$ac_clean_files $CONFIG_STATUS" { $as_echo "$as_me:${as_lineno-$LINENO}: creating $CONFIG_STATUS" >&5 $as_echo "$as_me: creating $CONFIG_STATUS" >&6;} as_write_fail=0 cat >$CONFIG_STATUS <<_ASEOF || as_write_fail=1 #! $SHELL # Generated by $as_me. # Run this file to recreate the current configuration. # Compiler output produced by configure, useful for debugging # configure, is in config.log if it exists. debug=false ac_cs_recheck=false ac_cs_silent=false SHELL=\${CONFIG_SHELL-$SHELL} export SHELL _ASEOF cat >>$CONFIG_STATUS <<\_ASEOF || as_write_fail=1 ## -------------------- ## ## M4sh Initialization. ## ## -------------------- ## # Be more Bourne compatible DUALCASE=1; export DUALCASE # for MKS sh if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : emulate sh NULLCMD=: # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which # is contrary to our usage. Disable this feature. alias -g '${1+"$@"}'='"$@"' setopt NO_GLOB_SUBST else case `(set -o) 2>/dev/null` in #( *posix*) : set -o posix ;; #( *) : ;; esac fi as_nl=' ' export as_nl # Printing a long string crashes Solaris 7 /usr/bin/printf. as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo # Prefer a ksh shell builtin over an external printf program on Solaris, # but without wasting forks for bash or zsh. if test -z "$BASH_VERSION$ZSH_VERSION" \ && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then as_echo='print -r --' as_echo_n='print -rn --' elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then as_echo='printf %s\n' as_echo_n='printf %s' else if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' as_echo_n='/usr/ucb/echo -n' else as_echo_body='eval expr "X$1" : "X\\(.*\\)"' as_echo_n_body='eval arg=$1; case $arg in #( *"$as_nl"*) expr "X$arg" : "X\\(.*\\)$as_nl"; arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; esac; expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" ' export as_echo_n_body as_echo_n='sh -c $as_echo_n_body as_echo' fi export as_echo_body as_echo='sh -c $as_echo_body as_echo' fi # The user is always right. if test "${PATH_SEPARATOR+set}" != set; then PATH_SEPARATOR=: (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || PATH_SEPARATOR=';' } fi # IFS # We need space, tab and new line, in precisely that order. Quoting is # there to prevent editors from complaining about space-tab. # (If _AS_PATH_WALK were called with IFS unset, it would disable word # splitting by setting IFS to empty value.) IFS=" "" $as_nl" # Find who we are. Look in the path if we contain no directory separator. case $0 in #(( *[\\/]* ) as_myself=$0 ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break done IFS=$as_save_IFS ;; esac # We did not find ourselves, most probably we were run as `sh COMMAND' # in which case we are not to be found in the path. if test "x$as_myself" = x; then as_myself=$0 fi if test ! 
-f "$as_myself"; then $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 exit 1 fi # Unset variables that we do not need and which cause bugs (e.g. in # pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" # suppresses any "Segmentation fault" message there. '((' could # trigger a bug in pdksh 5.2.14. for as_var in BASH_ENV ENV MAIL MAILPATH do eval test x\${$as_var+set} = xset \ && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : done PS1='$ ' PS2='> ' PS4='+ ' # NLS nuisances. LC_ALL=C export LC_ALL LANGUAGE=C export LANGUAGE # CDPATH. (unset CDPATH) >/dev/null 2>&1 && unset CDPATH # as_fn_error STATUS ERROR [LINENO LOG_FD] # ---------------------------------------- # Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are # provided, also output the error to LOG_FD, referencing LINENO. Then exit the # script with STATUS, using 1 if that was 0. as_fn_error () { as_status=$1; test $as_status -eq 0 && as_status=1 if test "$4"; then as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 fi $as_echo "$as_me: error: $2" >&2 as_fn_exit $as_status } # as_fn_error # as_fn_set_status STATUS # ----------------------- # Set $? to STATUS, without forking. as_fn_set_status () { return $1 } # as_fn_set_status # as_fn_exit STATUS # ----------------- # Exit the shell with STATUS, even in a "trap 0" or "set -e" context. as_fn_exit () { set +e as_fn_set_status $1 exit $1 } # as_fn_exit # as_fn_unset VAR # --------------- # Portably unset VAR. as_fn_unset () { { eval $1=; unset $1;} } as_unset=as_fn_unset # as_fn_append VAR VALUE # ---------------------- # Append the text in VALUE to the end of the definition contained in VAR. Take # advantage of any shell optimizations that allow amortized linear growth over # repeated appends, instead of the typical quadratic growth present in naive # implementations. if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : eval 'as_fn_append () { eval $1+=\$2 }' else as_fn_append () { eval $1=\$$1\$2 } fi # as_fn_append # as_fn_arith ARG... # ------------------ # Perform arithmetic evaluation on the ARGs, and store the result in the # global $as_val. Take advantage of shells that can avoid forks. The arguments # must be portable across $(()) and expr. if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : eval 'as_fn_arith () { as_val=$(( $* )) }' else as_fn_arith () { as_val=`expr "$@" || test $? -eq 1` } fi # as_fn_arith if expr a : '\(a\)' >/dev/null 2>&1 && test "X`expr 00001 : '.*\(...\)'`" = X001; then as_expr=expr else as_expr=false fi if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then as_basename=basename else as_basename=false fi if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then as_dirname=dirname else as_dirname=false fi as_me=`$as_basename -- "$0" || $as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ X"$0" : 'X\(//\)$' \| \ X"$0" : 'X\(/\)' \| . 2>/dev/null || $as_echo X/"$0" | sed '/^.*\/\([^/][^/]*\)\/*$/{ s//\1/ q } /^X\/\(\/\/\)$/{ s//\1/ q } /^X\/\(\/\).*/{ s//\1/ q } s/.*/./; q'` # Avoid depending upon Character Ranges. 
as_cr_letters='abcdefghijklmnopqrstuvwxyz' as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' as_cr_Letters=$as_cr_letters$as_cr_LETTERS as_cr_digits='0123456789' as_cr_alnum=$as_cr_Letters$as_cr_digits ECHO_C= ECHO_N= ECHO_T= case `echo -n x` in #((((( -n*) case `echo 'xy\c'` in *c*) ECHO_T=' ';; # ECHO_T is single tab character. xy) ECHO_C='\c';; *) echo `echo ksh88 bug on AIX 6.1` > /dev/null ECHO_T=' ';; esac;; *) ECHO_N='-n';; esac rm -f conf$$ conf$$.exe conf$$.file if test -d conf$$.dir; then rm -f conf$$.dir/conf$$.file else rm -f conf$$.dir mkdir conf$$.dir 2>/dev/null fi if (echo >conf$$.file) 2>/dev/null; then if ln -s conf$$.file conf$$ 2>/dev/null; then as_ln_s='ln -s' # ... but there are two gotchas: # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. # In both cases, we have to default to `cp -p'. ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || as_ln_s='cp -p' elif ln conf$$.file conf$$ 2>/dev/null; then as_ln_s=ln else as_ln_s='cp -p' fi else as_ln_s='cp -p' fi rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file rmdir conf$$.dir 2>/dev/null # as_fn_mkdir_p # ------------- # Create "$as_dir" as a directory, including parents if necessary. as_fn_mkdir_p () { case $as_dir in #( -*) as_dir=./$as_dir;; esac test -d "$as_dir" || eval $as_mkdir_p || { as_dirs= while :; do case $as_dir in #( *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( *) as_qdir=$as_dir;; esac as_dirs="'$as_qdir' $as_dirs" as_dir=`$as_dirname -- "$as_dir" || $as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_dir" : 'X\(//\)[^/]' \| \ X"$as_dir" : 'X\(//\)$' \| \ X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$as_dir" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` test -d "$as_dir" && break done test -z "$as_dirs" || eval "mkdir $as_dirs" } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" } # as_fn_mkdir_p if mkdir -p . 2>/dev/null; then as_mkdir_p='mkdir -p "$as_dir"' else test -d ./-p && rmdir ./-p as_mkdir_p=false fi if test -x / >/dev/null 2>&1; then as_test_x='test -x' else if ls -dL / >/dev/null 2>&1; then as_ls_L_option=L else as_ls_L_option= fi as_test_x=' eval sh -c '\'' if test -d "$1"; then test -d "$1/."; else case $1 in #( -*)set "./$1";; esac; case `ls -ld'$as_ls_L_option' "$1" 2>/dev/null` in #(( ???[sx]*):;;*)false;;esac;fi '\'' sh ' fi as_executable_p=$as_test_x # Sed expression to map a string onto a valid CPP name. as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" # Sed expression to map a string onto a valid variable name. as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" exec 6>&1 ## ----------------------------------- ## ## Main body of $CONFIG_STATUS script. ## ## ----------------------------------- ## _ASEOF test $as_write_fail = 0 && chmod +x $CONFIG_STATUS || ac_write_fail=1 cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # Save the log message, to keep $0 and so on meaningful, and to # report actual input values of CONFIG_FILES etc. instead of their # values after options handling. ac_log=" This file was extended by DAR $as_me 2.4.8, which was generated by GNU Autoconf 2.67. 
Invocation command line was CONFIG_FILES = $CONFIG_FILES CONFIG_HEADERS = $CONFIG_HEADERS CONFIG_LINKS = $CONFIG_LINKS CONFIG_COMMANDS = $CONFIG_COMMANDS $ $0 $@ on `(hostname || uname -n) 2>/dev/null | sed 1q` " _ACEOF case $ac_config_files in *" "*) set x $ac_config_files; shift; ac_config_files=$*;; esac case $ac_config_headers in *" "*) set x $ac_config_headers; shift; ac_config_headers=$*;; esac cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 # Files that config.status was made for. config_files="$ac_config_files" config_headers="$ac_config_headers" config_commands="$ac_config_commands" _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 ac_cs_usage="\ \`$as_me' instantiates files and other configuration actions from templates according to the current configuration. Unless the files and actions are specified as TAGs, all are instantiated by default. Usage: $0 [OPTION]... [TAG]... -h, --help print this help, then exit -V, --version print version number and configuration settings, then exit --config print configuration, then exit -q, --quiet, --silent do not print progress messages -d, --debug don't remove temporary files --recheck update $as_me by reconfiguring in the same conditions --file=FILE[:TEMPLATE] instantiate the configuration file FILE --header=FILE[:TEMPLATE] instantiate the configuration header FILE Configuration files: $config_files Configuration headers: $config_headers Configuration commands: $config_commands Report bugs to ." _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`" ac_cs_version="\\ DAR config.status 2.4.8 configured by $0, generated by GNU Autoconf 2.67, with options \\"\$ac_cs_config\\" Copyright (C) 2010 Free Software Foundation, Inc. This config.status script is free software; the Free Software Foundation gives unlimited permission to copy, distribute and modify it." ac_pwd='$ac_pwd' srcdir='$srcdir' INSTALL='$INSTALL' MKDIR_P='$MKDIR_P' AWK='$AWK' test -n "\$AWK" || AWK=awk _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # The default lists apply if the user does not specify any file. ac_need_defaults=: while test $# != 0 do case $1 in --*=?*) ac_option=`expr "X$1" : 'X\([^=]*\)='` ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'` ac_shift=: ;; --*=) ac_option=`expr "X$1" : 'X\([^=]*\)='` ac_optarg= ac_shift=: ;; *) ac_option=$1 ac_optarg=$2 ac_shift=shift ;; esac case $ac_option in # Handling of the options. -recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r) ac_cs_recheck=: ;; --version | --versio | --versi | --vers | --ver | --ve | --v | -V ) $as_echo "$ac_cs_version"; exit ;; --config | --confi | --conf | --con | --co | --c ) $as_echo "$ac_cs_config"; exit ;; --debug | --debu | --deb | --de | --d | -d ) debug=: ;; --file | --fil | --fi | --f ) $ac_shift case $ac_optarg in *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; '') as_fn_error $? "missing file argument" ;; esac as_fn_append CONFIG_FILES " '$ac_optarg'" ac_need_defaults=false;; --header | --heade | --head | --hea ) $ac_shift case $ac_optarg in *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; esac as_fn_append CONFIG_HEADERS " '$ac_optarg'" ac_need_defaults=false;; --he | --h) # Conflict between --help and --header as_fn_error $? 
"ambiguous option: \`$1' Try \`$0 --help' for more information.";; --help | --hel | -h ) $as_echo "$ac_cs_usage"; exit ;; -q | -quiet | --quiet | --quie | --qui | --qu | --q \ | -silent | --silent | --silen | --sile | --sil | --si | --s) ac_cs_silent=: ;; # This is an error. -*) as_fn_error $? "unrecognized option: \`$1' Try \`$0 --help' for more information." ;; *) as_fn_append ac_config_targets " $1" ac_need_defaults=false ;; esac shift done ac_configure_extra_args= if $ac_cs_silent; then exec 6>/dev/null ac_configure_extra_args="$ac_configure_extra_args --silent" fi _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 if \$ac_cs_recheck; then set X '$SHELL' '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion shift \$as_echo "running CONFIG_SHELL=$SHELL \$*" >&6 CONFIG_SHELL='$SHELL' export CONFIG_SHELL exec "\$@" fi _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 exec 5>>config.log { echo sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX ## Running $as_me. ## _ASBOX $as_echo "$ac_log" } >&5 _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 # # INIT-COMMANDS # # Capture the value of obsolete ALL_LINGUAS because we need it to compute # POFILES, UPDATEPOFILES, DUMMYPOFILES, GMOFILES, CATALOGS. But hide it # from automake < 1.5. eval 'OBSOLETE_ALL_LINGUAS''="$ALL_LINGUAS"' # Capture the value of LINGUAS because we need it to compute CATALOGS. LINGUAS="${LINGUAS-%UNSET%}" AMDEP_TRUE="$AMDEP_TRUE" ac_aux_dir="$ac_aux_dir" # The HP-UX ksh and POSIX shell print the target directory to stdout # if CDPATH is set. (unset CDPATH) >/dev/null 2>&1 && unset CDPATH sed_quote_subst='$sed_quote_subst' double_quote_subst='$double_quote_subst' delay_variable_subst='$delay_variable_subst' macro_version='`$ECHO "X$macro_version" | $Xsed -e "$delay_single_quote_subst"`' macro_revision='`$ECHO "X$macro_revision" | $Xsed -e "$delay_single_quote_subst"`' enable_shared='`$ECHO "X$enable_shared" | $Xsed -e "$delay_single_quote_subst"`' enable_static='`$ECHO "X$enable_static" | $Xsed -e "$delay_single_quote_subst"`' pic_mode='`$ECHO "X$pic_mode" | $Xsed -e "$delay_single_quote_subst"`' enable_fast_install='`$ECHO "X$enable_fast_install" | $Xsed -e "$delay_single_quote_subst"`' host_alias='`$ECHO "X$host_alias" | $Xsed -e "$delay_single_quote_subst"`' host='`$ECHO "X$host" | $Xsed -e "$delay_single_quote_subst"`' host_os='`$ECHO "X$host_os" | $Xsed -e "$delay_single_quote_subst"`' build_alias='`$ECHO "X$build_alias" | $Xsed -e "$delay_single_quote_subst"`' build='`$ECHO "X$build" | $Xsed -e "$delay_single_quote_subst"`' build_os='`$ECHO "X$build_os" | $Xsed -e "$delay_single_quote_subst"`' SED='`$ECHO "X$SED" | $Xsed -e "$delay_single_quote_subst"`' Xsed='`$ECHO "X$Xsed" | $Xsed -e "$delay_single_quote_subst"`' GREP='`$ECHO "X$GREP" | $Xsed -e "$delay_single_quote_subst"`' EGREP='`$ECHO "X$EGREP" | $Xsed -e "$delay_single_quote_subst"`' FGREP='`$ECHO "X$FGREP" | $Xsed -e "$delay_single_quote_subst"`' LD='`$ECHO "X$LD" | $Xsed -e "$delay_single_quote_subst"`' NM='`$ECHO "X$NM" | $Xsed -e "$delay_single_quote_subst"`' LN_S='`$ECHO "X$LN_S" | $Xsed -e "$delay_single_quote_subst"`' max_cmd_len='`$ECHO "X$max_cmd_len" | $Xsed -e "$delay_single_quote_subst"`' ac_objext='`$ECHO "X$ac_objext" | $Xsed -e "$delay_single_quote_subst"`' exeext='`$ECHO "X$exeext" | $Xsed -e "$delay_single_quote_subst"`' lt_unset='`$ECHO "X$lt_unset" | $Xsed -e "$delay_single_quote_subst"`' lt_SP2NL='`$ECHO "X$lt_SP2NL" | $Xsed -e "$delay_single_quote_subst"`' lt_NL2SP='`$ECHO "X$lt_NL2SP" | $Xsed 
-e "$delay_single_quote_subst"`' reload_flag='`$ECHO "X$reload_flag" | $Xsed -e "$delay_single_quote_subst"`' reload_cmds='`$ECHO "X$reload_cmds" | $Xsed -e "$delay_single_quote_subst"`' OBJDUMP='`$ECHO "X$OBJDUMP" | $Xsed -e "$delay_single_quote_subst"`' deplibs_check_method='`$ECHO "X$deplibs_check_method" | $Xsed -e "$delay_single_quote_subst"`' file_magic_cmd='`$ECHO "X$file_magic_cmd" | $Xsed -e "$delay_single_quote_subst"`' AR='`$ECHO "X$AR" | $Xsed -e "$delay_single_quote_subst"`' AR_FLAGS='`$ECHO "X$AR_FLAGS" | $Xsed -e "$delay_single_quote_subst"`' STRIP='`$ECHO "X$STRIP" | $Xsed -e "$delay_single_quote_subst"`' RANLIB='`$ECHO "X$RANLIB" | $Xsed -e "$delay_single_quote_subst"`' old_postinstall_cmds='`$ECHO "X$old_postinstall_cmds" | $Xsed -e "$delay_single_quote_subst"`' old_postuninstall_cmds='`$ECHO "X$old_postuninstall_cmds" | $Xsed -e "$delay_single_quote_subst"`' old_archive_cmds='`$ECHO "X$old_archive_cmds" | $Xsed -e "$delay_single_quote_subst"`' CC='`$ECHO "X$CC" | $Xsed -e "$delay_single_quote_subst"`' CFLAGS='`$ECHO "X$CFLAGS" | $Xsed -e "$delay_single_quote_subst"`' compiler='`$ECHO "X$compiler" | $Xsed -e "$delay_single_quote_subst"`' GCC='`$ECHO "X$GCC" | $Xsed -e "$delay_single_quote_subst"`' lt_cv_sys_global_symbol_pipe='`$ECHO "X$lt_cv_sys_global_symbol_pipe" | $Xsed -e "$delay_single_quote_subst"`' lt_cv_sys_global_symbol_to_cdecl='`$ECHO "X$lt_cv_sys_global_symbol_to_cdecl" | $Xsed -e "$delay_single_quote_subst"`' lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "X$lt_cv_sys_global_symbol_to_c_name_address" | $Xsed -e "$delay_single_quote_subst"`' lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "X$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $Xsed -e "$delay_single_quote_subst"`' objdir='`$ECHO "X$objdir" | $Xsed -e "$delay_single_quote_subst"`' SHELL='`$ECHO "X$SHELL" | $Xsed -e "$delay_single_quote_subst"`' ECHO='`$ECHO "X$ECHO" | $Xsed -e "$delay_single_quote_subst"`' MAGIC_CMD='`$ECHO "X$MAGIC_CMD" | $Xsed -e "$delay_single_quote_subst"`' lt_prog_compiler_no_builtin_flag='`$ECHO "X$lt_prog_compiler_no_builtin_flag" | $Xsed -e "$delay_single_quote_subst"`' lt_prog_compiler_wl='`$ECHO "X$lt_prog_compiler_wl" | $Xsed -e "$delay_single_quote_subst"`' lt_prog_compiler_pic='`$ECHO "X$lt_prog_compiler_pic" | $Xsed -e "$delay_single_quote_subst"`' lt_prog_compiler_static='`$ECHO "X$lt_prog_compiler_static" | $Xsed -e "$delay_single_quote_subst"`' lt_cv_prog_compiler_c_o='`$ECHO "X$lt_cv_prog_compiler_c_o" | $Xsed -e "$delay_single_quote_subst"`' need_locks='`$ECHO "X$need_locks" | $Xsed -e "$delay_single_quote_subst"`' DSYMUTIL='`$ECHO "X$DSYMUTIL" | $Xsed -e "$delay_single_quote_subst"`' NMEDIT='`$ECHO "X$NMEDIT" | $Xsed -e "$delay_single_quote_subst"`' LIPO='`$ECHO "X$LIPO" | $Xsed -e "$delay_single_quote_subst"`' OTOOL='`$ECHO "X$OTOOL" | $Xsed -e "$delay_single_quote_subst"`' OTOOL64='`$ECHO "X$OTOOL64" | $Xsed -e "$delay_single_quote_subst"`' libext='`$ECHO "X$libext" | $Xsed -e "$delay_single_quote_subst"`' shrext_cmds='`$ECHO "X$shrext_cmds" | $Xsed -e "$delay_single_quote_subst"`' extract_expsyms_cmds='`$ECHO "X$extract_expsyms_cmds" | $Xsed -e "$delay_single_quote_subst"`' archive_cmds_need_lc='`$ECHO "X$archive_cmds_need_lc" | $Xsed -e "$delay_single_quote_subst"`' enable_shared_with_static_runtimes='`$ECHO "X$enable_shared_with_static_runtimes" | $Xsed -e "$delay_single_quote_subst"`' export_dynamic_flag_spec='`$ECHO "X$export_dynamic_flag_spec" | $Xsed -e "$delay_single_quote_subst"`' whole_archive_flag_spec='`$ECHO 
"X$whole_archive_flag_spec" | $Xsed -e "$delay_single_quote_subst"`' compiler_needs_object='`$ECHO "X$compiler_needs_object" | $Xsed -e "$delay_single_quote_subst"`' old_archive_from_new_cmds='`$ECHO "X$old_archive_from_new_cmds" | $Xsed -e "$delay_single_quote_subst"`' old_archive_from_expsyms_cmds='`$ECHO "X$old_archive_from_expsyms_cmds" | $Xsed -e "$delay_single_quote_subst"`' archive_cmds='`$ECHO "X$archive_cmds" | $Xsed -e "$delay_single_quote_subst"`' archive_expsym_cmds='`$ECHO "X$archive_expsym_cmds" | $Xsed -e "$delay_single_quote_subst"`' module_cmds='`$ECHO "X$module_cmds" | $Xsed -e "$delay_single_quote_subst"`' module_expsym_cmds='`$ECHO "X$module_expsym_cmds" | $Xsed -e "$delay_single_quote_subst"`' with_gnu_ld='`$ECHO "X$with_gnu_ld" | $Xsed -e "$delay_single_quote_subst"`' allow_undefined_flag='`$ECHO "X$allow_undefined_flag" | $Xsed -e "$delay_single_quote_subst"`' no_undefined_flag='`$ECHO "X$no_undefined_flag" | $Xsed -e "$delay_single_quote_subst"`' hardcode_libdir_flag_spec='`$ECHO "X$hardcode_libdir_flag_spec" | $Xsed -e "$delay_single_quote_subst"`' hardcode_libdir_flag_spec_ld='`$ECHO "X$hardcode_libdir_flag_spec_ld" | $Xsed -e "$delay_single_quote_subst"`' hardcode_libdir_separator='`$ECHO "X$hardcode_libdir_separator" | $Xsed -e "$delay_single_quote_subst"`' hardcode_direct='`$ECHO "X$hardcode_direct" | $Xsed -e "$delay_single_quote_subst"`' hardcode_direct_absolute='`$ECHO "X$hardcode_direct_absolute" | $Xsed -e "$delay_single_quote_subst"`' hardcode_minus_L='`$ECHO "X$hardcode_minus_L" | $Xsed -e "$delay_single_quote_subst"`' hardcode_shlibpath_var='`$ECHO "X$hardcode_shlibpath_var" | $Xsed -e "$delay_single_quote_subst"`' hardcode_automatic='`$ECHO "X$hardcode_automatic" | $Xsed -e "$delay_single_quote_subst"`' inherit_rpath='`$ECHO "X$inherit_rpath" | $Xsed -e "$delay_single_quote_subst"`' link_all_deplibs='`$ECHO "X$link_all_deplibs" | $Xsed -e "$delay_single_quote_subst"`' fix_srcfile_path='`$ECHO "X$fix_srcfile_path" | $Xsed -e "$delay_single_quote_subst"`' always_export_symbols='`$ECHO "X$always_export_symbols" | $Xsed -e "$delay_single_quote_subst"`' export_symbols_cmds='`$ECHO "X$export_symbols_cmds" | $Xsed -e "$delay_single_quote_subst"`' exclude_expsyms='`$ECHO "X$exclude_expsyms" | $Xsed -e "$delay_single_quote_subst"`' include_expsyms='`$ECHO "X$include_expsyms" | $Xsed -e "$delay_single_quote_subst"`' prelink_cmds='`$ECHO "X$prelink_cmds" | $Xsed -e "$delay_single_quote_subst"`' file_list_spec='`$ECHO "X$file_list_spec" | $Xsed -e "$delay_single_quote_subst"`' variables_saved_for_relink='`$ECHO "X$variables_saved_for_relink" | $Xsed -e "$delay_single_quote_subst"`' need_lib_prefix='`$ECHO "X$need_lib_prefix" | $Xsed -e "$delay_single_quote_subst"`' need_version='`$ECHO "X$need_version" | $Xsed -e "$delay_single_quote_subst"`' version_type='`$ECHO "X$version_type" | $Xsed -e "$delay_single_quote_subst"`' runpath_var='`$ECHO "X$runpath_var" | $Xsed -e "$delay_single_quote_subst"`' shlibpath_var='`$ECHO "X$shlibpath_var" | $Xsed -e "$delay_single_quote_subst"`' shlibpath_overrides_runpath='`$ECHO "X$shlibpath_overrides_runpath" | $Xsed -e "$delay_single_quote_subst"`' libname_spec='`$ECHO "X$libname_spec" | $Xsed -e "$delay_single_quote_subst"`' library_names_spec='`$ECHO "X$library_names_spec" | $Xsed -e "$delay_single_quote_subst"`' soname_spec='`$ECHO "X$soname_spec" | $Xsed -e "$delay_single_quote_subst"`' postinstall_cmds='`$ECHO "X$postinstall_cmds" | $Xsed -e "$delay_single_quote_subst"`' postuninstall_cmds='`$ECHO "X$postuninstall_cmds" | 
$Xsed -e "$delay_single_quote_subst"`' finish_cmds='`$ECHO "X$finish_cmds" | $Xsed -e "$delay_single_quote_subst"`' finish_eval='`$ECHO "X$finish_eval" | $Xsed -e "$delay_single_quote_subst"`' hardcode_into_libs='`$ECHO "X$hardcode_into_libs" | $Xsed -e "$delay_single_quote_subst"`' sys_lib_search_path_spec='`$ECHO "X$sys_lib_search_path_spec" | $Xsed -e "$delay_single_quote_subst"`' sys_lib_dlsearch_path_spec='`$ECHO "X$sys_lib_dlsearch_path_spec" | $Xsed -e "$delay_single_quote_subst"`' hardcode_action='`$ECHO "X$hardcode_action" | $Xsed -e "$delay_single_quote_subst"`' enable_dlopen='`$ECHO "X$enable_dlopen" | $Xsed -e "$delay_single_quote_subst"`' enable_dlopen_self='`$ECHO "X$enable_dlopen_self" | $Xsed -e "$delay_single_quote_subst"`' enable_dlopen_self_static='`$ECHO "X$enable_dlopen_self_static" | $Xsed -e "$delay_single_quote_subst"`' old_striplib='`$ECHO "X$old_striplib" | $Xsed -e "$delay_single_quote_subst"`' striplib='`$ECHO "X$striplib" | $Xsed -e "$delay_single_quote_subst"`' compiler_lib_search_dirs='`$ECHO "X$compiler_lib_search_dirs" | $Xsed -e "$delay_single_quote_subst"`' predep_objects='`$ECHO "X$predep_objects" | $Xsed -e "$delay_single_quote_subst"`' postdep_objects='`$ECHO "X$postdep_objects" | $Xsed -e "$delay_single_quote_subst"`' predeps='`$ECHO "X$predeps" | $Xsed -e "$delay_single_quote_subst"`' postdeps='`$ECHO "X$postdeps" | $Xsed -e "$delay_single_quote_subst"`' compiler_lib_search_path='`$ECHO "X$compiler_lib_search_path" | $Xsed -e "$delay_single_quote_subst"`' LD_CXX='`$ECHO "X$LD_CXX" | $Xsed -e "$delay_single_quote_subst"`' old_archive_cmds_CXX='`$ECHO "X$old_archive_cmds_CXX" | $Xsed -e "$delay_single_quote_subst"`' compiler_CXX='`$ECHO "X$compiler_CXX" | $Xsed -e "$delay_single_quote_subst"`' GCC_CXX='`$ECHO "X$GCC_CXX" | $Xsed -e "$delay_single_quote_subst"`' lt_prog_compiler_no_builtin_flag_CXX='`$ECHO "X$lt_prog_compiler_no_builtin_flag_CXX" | $Xsed -e "$delay_single_quote_subst"`' lt_prog_compiler_wl_CXX='`$ECHO "X$lt_prog_compiler_wl_CXX" | $Xsed -e "$delay_single_quote_subst"`' lt_prog_compiler_pic_CXX='`$ECHO "X$lt_prog_compiler_pic_CXX" | $Xsed -e "$delay_single_quote_subst"`' lt_prog_compiler_static_CXX='`$ECHO "X$lt_prog_compiler_static_CXX" | $Xsed -e "$delay_single_quote_subst"`' lt_cv_prog_compiler_c_o_CXX='`$ECHO "X$lt_cv_prog_compiler_c_o_CXX" | $Xsed -e "$delay_single_quote_subst"`' archive_cmds_need_lc_CXX='`$ECHO "X$archive_cmds_need_lc_CXX" | $Xsed -e "$delay_single_quote_subst"`' enable_shared_with_static_runtimes_CXX='`$ECHO "X$enable_shared_with_static_runtimes_CXX" | $Xsed -e "$delay_single_quote_subst"`' export_dynamic_flag_spec_CXX='`$ECHO "X$export_dynamic_flag_spec_CXX" | $Xsed -e "$delay_single_quote_subst"`' whole_archive_flag_spec_CXX='`$ECHO "X$whole_archive_flag_spec_CXX" | $Xsed -e "$delay_single_quote_subst"`' compiler_needs_object_CXX='`$ECHO "X$compiler_needs_object_CXX" | $Xsed -e "$delay_single_quote_subst"`' old_archive_from_new_cmds_CXX='`$ECHO "X$old_archive_from_new_cmds_CXX" | $Xsed -e "$delay_single_quote_subst"`' old_archive_from_expsyms_cmds_CXX='`$ECHO "X$old_archive_from_expsyms_cmds_CXX" | $Xsed -e "$delay_single_quote_subst"`' archive_cmds_CXX='`$ECHO "X$archive_cmds_CXX" | $Xsed -e "$delay_single_quote_subst"`' archive_expsym_cmds_CXX='`$ECHO "X$archive_expsym_cmds_CXX" | $Xsed -e "$delay_single_quote_subst"`' module_cmds_CXX='`$ECHO "X$module_cmds_CXX" | $Xsed -e "$delay_single_quote_subst"`' module_expsym_cmds_CXX='`$ECHO "X$module_expsym_cmds_CXX" | $Xsed -e "$delay_single_quote_subst"`' 
with_gnu_ld_CXX='`$ECHO "X$with_gnu_ld_CXX" | $Xsed -e "$delay_single_quote_subst"`' allow_undefined_flag_CXX='`$ECHO "X$allow_undefined_flag_CXX" | $Xsed -e "$delay_single_quote_subst"`' no_undefined_flag_CXX='`$ECHO "X$no_undefined_flag_CXX" | $Xsed -e "$delay_single_quote_subst"`' hardcode_libdir_flag_spec_CXX='`$ECHO "X$hardcode_libdir_flag_spec_CXX" | $Xsed -e "$delay_single_quote_subst"`' hardcode_libdir_flag_spec_ld_CXX='`$ECHO "X$hardcode_libdir_flag_spec_ld_CXX" | $Xsed -e "$delay_single_quote_subst"`' hardcode_libdir_separator_CXX='`$ECHO "X$hardcode_libdir_separator_CXX" | $Xsed -e "$delay_single_quote_subst"`' hardcode_direct_CXX='`$ECHO "X$hardcode_direct_CXX" | $Xsed -e "$delay_single_quote_subst"`' hardcode_direct_absolute_CXX='`$ECHO "X$hardcode_direct_absolute_CXX" | $Xsed -e "$delay_single_quote_subst"`' hardcode_minus_L_CXX='`$ECHO "X$hardcode_minus_L_CXX" | $Xsed -e "$delay_single_quote_subst"`' hardcode_shlibpath_var_CXX='`$ECHO "X$hardcode_shlibpath_var_CXX" | $Xsed -e "$delay_single_quote_subst"`' hardcode_automatic_CXX='`$ECHO "X$hardcode_automatic_CXX" | $Xsed -e "$delay_single_quote_subst"`' inherit_rpath_CXX='`$ECHO "X$inherit_rpath_CXX" | $Xsed -e "$delay_single_quote_subst"`' link_all_deplibs_CXX='`$ECHO "X$link_all_deplibs_CXX" | $Xsed -e "$delay_single_quote_subst"`' fix_srcfile_path_CXX='`$ECHO "X$fix_srcfile_path_CXX" | $Xsed -e "$delay_single_quote_subst"`' always_export_symbols_CXX='`$ECHO "X$always_export_symbols_CXX" | $Xsed -e "$delay_single_quote_subst"`' export_symbols_cmds_CXX='`$ECHO "X$export_symbols_cmds_CXX" | $Xsed -e "$delay_single_quote_subst"`' exclude_expsyms_CXX='`$ECHO "X$exclude_expsyms_CXX" | $Xsed -e "$delay_single_quote_subst"`' include_expsyms_CXX='`$ECHO "X$include_expsyms_CXX" | $Xsed -e "$delay_single_quote_subst"`' prelink_cmds_CXX='`$ECHO "X$prelink_cmds_CXX" | $Xsed -e "$delay_single_quote_subst"`' file_list_spec_CXX='`$ECHO "X$file_list_spec_CXX" | $Xsed -e "$delay_single_quote_subst"`' hardcode_action_CXX='`$ECHO "X$hardcode_action_CXX" | $Xsed -e "$delay_single_quote_subst"`' compiler_lib_search_dirs_CXX='`$ECHO "X$compiler_lib_search_dirs_CXX" | $Xsed -e "$delay_single_quote_subst"`' predep_objects_CXX='`$ECHO "X$predep_objects_CXX" | $Xsed -e "$delay_single_quote_subst"`' postdep_objects_CXX='`$ECHO "X$postdep_objects_CXX" | $Xsed -e "$delay_single_quote_subst"`' predeps_CXX='`$ECHO "X$predeps_CXX" | $Xsed -e "$delay_single_quote_subst"`' postdeps_CXX='`$ECHO "X$postdeps_CXX" | $Xsed -e "$delay_single_quote_subst"`' compiler_lib_search_path_CXX='`$ECHO "X$compiler_lib_search_path_CXX" | $Xsed -e "$delay_single_quote_subst"`' LTCC='$LTCC' LTCFLAGS='$LTCFLAGS' compiler='$compiler_DEFAULT' # Quote evaled strings. 
for var in SED \ GREP \ EGREP \ FGREP \ LD \ NM \ LN_S \ lt_SP2NL \ lt_NL2SP \ reload_flag \ OBJDUMP \ deplibs_check_method \ file_magic_cmd \ AR \ AR_FLAGS \ STRIP \ RANLIB \ CC \ CFLAGS \ compiler \ lt_cv_sys_global_symbol_pipe \ lt_cv_sys_global_symbol_to_cdecl \ lt_cv_sys_global_symbol_to_c_name_address \ lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \ SHELL \ ECHO \ lt_prog_compiler_no_builtin_flag \ lt_prog_compiler_wl \ lt_prog_compiler_pic \ lt_prog_compiler_static \ lt_cv_prog_compiler_c_o \ need_locks \ DSYMUTIL \ NMEDIT \ LIPO \ OTOOL \ OTOOL64 \ shrext_cmds \ export_dynamic_flag_spec \ whole_archive_flag_spec \ compiler_needs_object \ with_gnu_ld \ allow_undefined_flag \ no_undefined_flag \ hardcode_libdir_flag_spec \ hardcode_libdir_flag_spec_ld \ hardcode_libdir_separator \ fix_srcfile_path \ exclude_expsyms \ include_expsyms \ file_list_spec \ variables_saved_for_relink \ libname_spec \ library_names_spec \ soname_spec \ finish_eval \ old_striplib \ striplib \ compiler_lib_search_dirs \ predep_objects \ postdep_objects \ predeps \ postdeps \ compiler_lib_search_path \ LD_CXX \ compiler_CXX \ lt_prog_compiler_no_builtin_flag_CXX \ lt_prog_compiler_wl_CXX \ lt_prog_compiler_pic_CXX \ lt_prog_compiler_static_CXX \ lt_cv_prog_compiler_c_o_CXX \ export_dynamic_flag_spec_CXX \ whole_archive_flag_spec_CXX \ compiler_needs_object_CXX \ with_gnu_ld_CXX \ allow_undefined_flag_CXX \ no_undefined_flag_CXX \ hardcode_libdir_flag_spec_CXX \ hardcode_libdir_flag_spec_ld_CXX \ hardcode_libdir_separator_CXX \ fix_srcfile_path_CXX \ exclude_expsyms_CXX \ include_expsyms_CXX \ file_list_spec_CXX \ compiler_lib_search_dirs_CXX \ predep_objects_CXX \ postdep_objects_CXX \ predeps_CXX \ postdeps_CXX \ compiler_lib_search_path_CXX; do case \`eval \\\\\$ECHO "X\\\\\$\$var"\` in *[\\\\\\\`\\"\\\$]*) eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"X\\\$\$var\\" | \\\$Xsed -e \\"\\\$sed_quote_subst\\"\\\`\\\\\\"" ;; *) eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" ;; esac done # Double-quote double-evaled strings. for var in reload_cmds \ old_postinstall_cmds \ old_postuninstall_cmds \ old_archive_cmds \ extract_expsyms_cmds \ old_archive_from_new_cmds \ old_archive_from_expsyms_cmds \ archive_cmds \ archive_expsym_cmds \ module_cmds \ module_expsym_cmds \ export_symbols_cmds \ prelink_cmds \ postinstall_cmds \ postuninstall_cmds \ finish_cmds \ sys_lib_search_path_spec \ sys_lib_dlsearch_path_spec \ old_archive_cmds_CXX \ old_archive_from_new_cmds_CXX \ old_archive_from_expsyms_cmds_CXX \ archive_cmds_CXX \ archive_expsym_cmds_CXX \ module_cmds_CXX \ module_expsym_cmds_CXX \ export_symbols_cmds_CXX \ prelink_cmds_CXX; do case \`eval \\\\\$ECHO "X\\\\\$\$var"\` in *[\\\\\\\`\\"\\\$]*) eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"X\\\$\$var\\" | \\\$Xsed -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\"" ;; *) eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" ;; esac done # Fix-up fallback echo if it was mangled by the above quoting rules. case \$lt_ECHO in *'\\\$0 --fallback-echo"') lt_ECHO=\`\$ECHO "X\$lt_ECHO" | \$Xsed -e 's/\\\\\\\\\\\\\\\$0 --fallback-echo"\$/\$0 --fallback-echo"/'\` ;; esac ac_aux_dir='$ac_aux_dir' xsi_shell='$xsi_shell' lt_shell_append='$lt_shell_append' # See if we are running on zsh, and set the options which allow our # commands through without removal of \ escapes INIT. 
if test -n "\${ZSH_VERSION+set}" ; then setopt NO_GLOB_SUBST fi PACKAGE='$PACKAGE' VERSION='$VERSION' TIMESTAMP='$TIMESTAMP' RM='$RM' ofile='$ofile' _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # Handling of arguments. for ac_config_target in $ac_config_targets do case $ac_config_target in "config.h") CONFIG_HEADERS="$CONFIG_HEADERS config.h" ;; "po-directories") CONFIG_COMMANDS="$CONFIG_COMMANDS po-directories" ;; "depfiles") CONFIG_COMMANDS="$CONFIG_COMMANDS depfiles" ;; "libtool") CONFIG_COMMANDS="$CONFIG_COMMANDS libtool" ;; "hack-libtool") CONFIG_COMMANDS="$CONFIG_COMMANDS hack-libtool" ;; "Makefile") CONFIG_FILES="$CONFIG_FILES Makefile" ;; "man/Makefile") CONFIG_FILES="$CONFIG_FILES man/Makefile" ;; "src/Makefile") CONFIG_FILES="$CONFIG_FILES src/Makefile" ;; "src/libdar/Makefile") CONFIG_FILES="$CONFIG_FILES src/libdar/Makefile" ;; "src/dar_suite/Makefile") CONFIG_FILES="$CONFIG_FILES src/dar_suite/Makefile" ;; "src/testing/Makefile") CONFIG_FILES="$CONFIG_FILES src/testing/Makefile" ;; "src/examples/Makefile") CONFIG_FILES="$CONFIG_FILES src/examples/Makefile" ;; "doc/Makefile") CONFIG_FILES="$CONFIG_FILES doc/Makefile" ;; "doc/samples/Makefile") CONFIG_FILES="$CONFIG_FILES doc/samples/Makefile" ;; "misc/Makefile") CONFIG_FILES="$CONFIG_FILES misc/Makefile" ;; "doc/mini-howto/Makefile") CONFIG_FILES="$CONFIG_FILES doc/mini-howto/Makefile" ;; "src/libdar/libdar.pc.tmpl") CONFIG_FILES="$CONFIG_FILES src/libdar/libdar.pc.tmpl" ;; "doc/man/Makefile") CONFIG_FILES="$CONFIG_FILES doc/man/Makefile" ;; "src/check/Makefile") CONFIG_FILES="$CONFIG_FILES src/check/Makefile" ;; "po/Makefile.in") CONFIG_FILES="$CONFIG_FILES po/Makefile.in" ;; *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5 ;; esac done # If the user did not use the arguments to specify the items to instantiate, # then the envvar interface is used. Set only those that are not. # We use the long form for the default assignment because of an extremely # bizarre bug on SunOS 4.1.3. if $ac_need_defaults; then test "${CONFIG_FILES+set}" = set || CONFIG_FILES=$config_files test "${CONFIG_HEADERS+set}" = set || CONFIG_HEADERS=$config_headers test "${CONFIG_COMMANDS+set}" = set || CONFIG_COMMANDS=$config_commands fi # Have a temporary directory for convenience. Make it in the build tree # simply because there is no reason against having it here, and in addition, # creating and moving files from /tmp can sometimes cause problems. # Hook for its removal unless debugging. # Note that there is a small window in which the directory will not be cleaned: # after its creation but before its name has been assigned to `$tmp'. $debug || { tmp= trap 'exit_status=$? { test -z "$tmp" || test ! -d "$tmp" || rm -fr "$tmp"; } && exit $exit_status ' 0 trap 'as_fn_exit 1' 1 2 13 15 } # Create a (secure) tmp directory for tmp files. { tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` && test -n "$tmp" && test -d "$tmp" } || { tmp=./conf$$-$RANDOM (umask 077 && mkdir "$tmp") } || as_fn_error $? "cannot create a temporary directory in ." "$LINENO" 5 # Set up the scripts for CONFIG_FILES section. # No need to generate them if there are no CONFIG_FILES. # This happens for instance with `./config.status config.h'. if test -n "$CONFIG_FILES"; then ac_cr=`echo X | tr X '\015'` # On cygwin, bash can eat \r inside `` if the user requested igncr. # But we know of no other shell where ac_cr would be empty at this # point, so we can use a bashism as a fallback. 
if test "x$ac_cr" = x; then eval ac_cr=\$\'\\r\' fi ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' /dev/null` if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then ac_cs_awk_cr='\\r' else ac_cs_awk_cr=$ac_cr fi echo 'BEGIN {' >"$tmp/subs1.awk" && _ACEOF { echo "cat >conf$$subs.awk <<_ACEOF" && echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' && echo "_ACEOF" } >conf$$subs.sh || as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 ac_delim_num=`echo "$ac_subst_vars" | grep -c '^'` ac_delim='%!_!# ' for ac_last_try in false false false false false :; do . ./conf$$subs.sh || as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X` if test $ac_delim_n = $ac_delim_num; then break elif $ac_last_try; then as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 else ac_delim="$ac_delim!$ac_delim _$ac_delim!! " fi done rm -f conf$$subs.sh cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 cat >>"\$tmp/subs1.awk" <<\\_ACAWK && _ACEOF sed -n ' h s/^/S["/; s/!.*/"]=/ p g s/^[^!]*!// :repl t repl s/'"$ac_delim"'$// t delim :nl h s/\(.\{148\}\)..*/\1/ t more1 s/["\\]/\\&/g; s/^/"/; s/$/\\n"\\/ p n b repl :more1 s/["\\]/\\&/g; s/^/"/; s/$/"\\/ p g s/.\{148\}// t nl :delim h s/\(.\{148\}\)..*/\1/ t more2 s/["\\]/\\&/g; s/^/"/; s/$/"/ p b :more2 s/["\\]/\\&/g; s/^/"/; s/$/"\\/ p g s/.\{148\}// t delim ' >$CONFIG_STATUS || ac_write_fail=1 rm -f conf$$subs.awk cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 _ACAWK cat >>"\$tmp/subs1.awk" <<_ACAWK && for (key in S) S_is_set[key] = 1 FS = "" } { line = $ 0 nfields = split(line, field, "@") substed = 0 len = length(field[1]) for (i = 2; i < nfields; i++) { key = field[i] keylen = length(key) if (S_is_set[key]) { value = S[key] line = substr(line, 1, len) "" value "" substr(line, len + keylen + 3) len += length(value) + length(field[++i]) substed = 1 } else len += 1 + keylen } print line } _ACAWK _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g" else cat fi < "$tmp/subs1.awk" > "$tmp/subs.awk" \ || as_fn_error $? "could not setup config files machinery" "$LINENO" 5 _ACEOF # VPATH may cause trouble with some makes, so we remove sole $(srcdir), # ${srcdir} and @srcdir@ entries from VPATH if srcdir is ".", strip leading and # trailing colons and then remove the whole line if VPATH becomes empty # (actually we leave an empty line to preserve line numbers). if test "x$srcdir" = x.; then ac_vpsub='/^[ ]*VPATH[ ]*=[ ]*/{ h s/// s/^/:/ s/[ ]*$/:/ s/:\$(srcdir):/:/g s/:\${srcdir}:/:/g s/:@srcdir@:/:/g s/^:*// s/:*$// x s/\(=[ ]*\).*/\1/ G s/\n// s/^[^=]*=[ ]*$// }' fi cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 fi # test -n "$CONFIG_FILES" # Set up the scripts for CONFIG_HEADERS section. # No need to generate them if there are no CONFIG_HEADERS. # This happens for instance with `./config.status Makefile'. if test -n "$CONFIG_HEADERS"; then cat >"$tmp/defines.awk" <<\_ACAWK || BEGIN { _ACEOF # Transform confdefs.h into an awk script `defines.awk', embedded as # here-document in config.status, that substitutes the proper values into # config.h.in to produce config.h. # Create a delimiter string that does not exist in confdefs.h, to ease # handling of long lines. ac_delim='%!_!# ' for ac_last_try in false false :; do ac_t=`sed -n "/$ac_delim/p" confdefs.h` if test -z "$ac_t"; then break elif $ac_last_try; then as_fn_error $? 
"could not make $CONFIG_HEADERS" "$LINENO" 5 else ac_delim="$ac_delim!$ac_delim _$ac_delim!! " fi done # For the awk script, D is an array of macro values keyed by name, # likewise P contains macro parameters if any. Preserve backslash # newline sequences. ac_word_re=[_$as_cr_Letters][_$as_cr_alnum]* sed -n ' s/.\{148\}/&'"$ac_delim"'/g t rset :rset s/^[ ]*#[ ]*define[ ][ ]*/ / t def d :def s/\\$// t bsnl s/["\\]/\\&/g s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\ D["\1"]=" \3"/p s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2"/p d :bsnl s/["\\]/\\&/g s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\ D["\1"]=" \3\\\\\\n"\\/p t cont s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2\\\\\\n"\\/p t cont d :cont n s/.\{148\}/&'"$ac_delim"'/g t clear :clear s/\\$// t bsnlc s/["\\]/\\&/g; s/^/"/; s/$/"/p d :bsnlc s/["\\]/\\&/g; s/^/"/; s/$/\\\\\\n"\\/p b cont ' >$CONFIG_STATUS || ac_write_fail=1 cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 for (key in D) D_is_set[key] = 1 FS = "" } /^[\t ]*#[\t ]*(define|undef)[\t ]+$ac_word_re([\t (]|\$)/ { line = \$ 0 split(line, arg, " ") if (arg[1] == "#") { defundef = arg[2] mac1 = arg[3] } else { defundef = substr(arg[1], 2) mac1 = arg[2] } split(mac1, mac2, "(") #) macro = mac2[1] prefix = substr(line, 1, index(line, defundef) - 1) if (D_is_set[macro]) { # Preserve the white space surrounding the "#". print prefix "define", macro P[macro] D[macro] next } else { # Replace #undef with comments. This is necessary, for example, # in the case of _POSIX_SOURCE, which is predefined and required # on some systems where configure will not decide to define it. if (defundef == "undef") { print "/*", prefix defundef, macro, "*/" next } } } { print } _ACAWK _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 as_fn_error $? "could not setup config headers machinery" "$LINENO" 5 fi # test -n "$CONFIG_HEADERS" eval set X " :F $CONFIG_FILES :H $CONFIG_HEADERS :C $CONFIG_COMMANDS" shift for ac_tag do case $ac_tag in :[FHLC]) ac_mode=$ac_tag; continue;; esac case $ac_mode$ac_tag in :[FHL]*:*);; :L* | :C*:*) as_fn_error $? "invalid tag \`$ac_tag'" "$LINENO" 5 ;; :[FH]-) ac_tag=-:-;; :[FH]*) ac_tag=$ac_tag:$ac_tag.in;; esac ac_save_IFS=$IFS IFS=: set x $ac_tag IFS=$ac_save_IFS shift ac_file=$1 shift case $ac_mode in :L) ac_source=$1;; :[FH]) ac_file_inputs= for ac_f do case $ac_f in -) ac_f="$tmp/stdin";; *) # Look for the file first in the build tree, then in the source tree # (if the path is not absolute). The absolute path cannot be DOS-style, # because $ac_f cannot contain `:'. test -f "$ac_f" || case $ac_f in [\\/$]*) false;; *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";; esac || as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5 ;; esac case $ac_f in *\'*) ac_f=`$as_echo "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac as_fn_append ac_file_inputs " '$ac_f'" done # Let's still pretend it is `configure' which instantiates (i.e., don't # use $as_me), people would be surprised to read: # /* config.h. Generated by config.status. */ configure_input='Generated from '` $as_echo "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g' `' by configure.' if test x"$ac_file" != x-; then configure_input="$ac_file. $configure_input" { $as_echo "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5 $as_echo "$as_me: creating $ac_file" >&6;} fi # Neutralize special characters interpreted by sed in replacement strings. 
case $configure_input in #( *\&* | *\|* | *\\* ) ac_sed_conf_input=`$as_echo "$configure_input" | sed 's/[\\\\&|]/\\\\&/g'`;; #( *) ac_sed_conf_input=$configure_input;; esac case $ac_tag in *:-:* | *:-) cat >"$tmp/stdin" \ || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;; esac ;; esac ac_dir=`$as_dirname -- "$ac_file" || $as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$ac_file" : 'X\(//\)[^/]' \| \ X"$ac_file" : 'X\(//\)$' \| \ X"$ac_file" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$ac_file" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` as_dir="$ac_dir"; as_fn_mkdir_p ac_builddir=. case "$ac_dir" in .) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; *) ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` # A ".." for each directory in $ac_dir_suffix. ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` case $ac_top_builddir_sub in "") ac_top_builddir_sub=. ac_top_build_prefix= ;; *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; esac ;; esac ac_abs_top_builddir=$ac_pwd ac_abs_builddir=$ac_pwd$ac_dir_suffix # for backward compatibility: ac_top_builddir=$ac_top_build_prefix case $srcdir in .) # We are building in place. ac_srcdir=. ac_top_srcdir=$ac_top_builddir_sub ac_abs_top_srcdir=$ac_pwd ;; [\\/]* | ?:[\\/]* ) # Absolute name. ac_srcdir=$srcdir$ac_dir_suffix; ac_top_srcdir=$srcdir ac_abs_top_srcdir=$srcdir ;; *) # Relative name. ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix ac_top_srcdir=$ac_top_build_prefix$srcdir ac_abs_top_srcdir=$ac_pwd/$srcdir ;; esac ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix case $ac_mode in :F) # # CONFIG_FILE # case $INSTALL in [\\/$]* | ?:[\\/]* ) ac_INSTALL=$INSTALL ;; *) ac_INSTALL=$ac_top_build_prefix$INSTALL ;; esac ac_MKDIR_P=$MKDIR_P case $MKDIR_P in [\\/$]* | ?:[\\/]* ) ;; */*) ac_MKDIR_P=$ac_top_build_prefix$MKDIR_P ;; esac _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # If the template does not know about datarootdir, expand it. # FIXME: This hack should be removed a few years after 2.60. ac_datarootdir_hack=; ac_datarootdir_seen= ac_sed_dataroot=' /datarootdir/ { p q } /@datadir@/p /@docdir@/p /@infodir@/p /@localedir@/p /@mandir@/p' case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in *datarootdir*) ac_datarootdir_seen=yes;; *@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5 $as_echo "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;} _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ac_datarootdir_hack=' s&@datadir@&$datadir&g s&@docdir@&$docdir&g s&@infodir@&$infodir&g s&@localedir@&$localedir&g s&@mandir@&$mandir&g s&\\\${datarootdir}&$datarootdir&g' ;; esac _ACEOF # Neutralize VPATH when `$srcdir' = `.'. # Shell code in configure.ac might set extrasub. # FIXME: do we really want to maintain this feature? 
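# A minimal illustration of the substitution that the awk script prepared
# above performs, assuming a template line such as
#     datadir = @datadir@
# in a Makefile.in: config.status rewrites it to the configured value, by
# default
#     datadir = ${datarootdir}
# while the ac_datarootdir_hack above covers old templates that still use
# @datadir@ and friends without defining datarootdir themselves.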
cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ac_sed_extra="$ac_vpsub $extrasub _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 :t /@[a-zA-Z_][a-zA-Z_0-9]*@/!b s|@configure_input@|$ac_sed_conf_input|;t t s&@top_builddir@&$ac_top_builddir_sub&;t t s&@top_build_prefix@&$ac_top_build_prefix&;t t s&@srcdir@&$ac_srcdir&;t t s&@abs_srcdir@&$ac_abs_srcdir&;t t s&@top_srcdir@&$ac_top_srcdir&;t t s&@abs_top_srcdir@&$ac_abs_top_srcdir&;t t s&@builddir@&$ac_builddir&;t t s&@abs_builddir@&$ac_abs_builddir&;t t s&@abs_top_builddir@&$ac_abs_top_builddir&;t t s&@INSTALL@&$ac_INSTALL&;t t s&@MKDIR_P@&$ac_MKDIR_P&;t t $ac_datarootdir_hack " eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$tmp/subs.awk" >$tmp/out \ || as_fn_error $? "could not create $ac_file" "$LINENO" 5 test -z "$ac_datarootdir_hack$ac_datarootdir_seen" && { ac_out=`sed -n '/\${datarootdir}/p' "$tmp/out"`; test -n "$ac_out"; } && { ac_out=`sed -n '/^[ ]*datarootdir[ ]*:*=/p' "$tmp/out"`; test -z "$ac_out"; } && { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir' which seems to be undefined. Please make sure it is defined" >&5 $as_echo "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir' which seems to be undefined. Please make sure it is defined" >&2;} rm -f "$tmp/stdin" case $ac_file in -) cat "$tmp/out" && rm -f "$tmp/out";; *) rm -f "$ac_file" && mv "$tmp/out" "$ac_file";; esac \ || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;; :H) # # CONFIG_HEADER # if test x"$ac_file" != x-; then { $as_echo "/* $configure_input */" \ && eval '$AWK -f "$tmp/defines.awk"' "$ac_file_inputs" } >"$tmp/config.h" \ || as_fn_error $? "could not create $ac_file" "$LINENO" 5 if diff "$ac_file" "$tmp/config.h" >/dev/null 2>&1; then { $as_echo "$as_me:${as_lineno-$LINENO}: $ac_file is unchanged" >&5 $as_echo "$as_me: $ac_file is unchanged" >&6;} else rm -f "$ac_file" mv "$tmp/config.h" "$ac_file" \ || as_fn_error $? "could not create $ac_file" "$LINENO" 5 fi else $as_echo "/* $configure_input */" \ && eval '$AWK -f "$tmp/defines.awk"' "$ac_file_inputs" \ || as_fn_error $? "could not create -" "$LINENO" 5 fi # Compute "$ac_file"'s index in $config_headers. _am_arg="$ac_file" _am_stamp_count=1 for _am_header in $config_headers :; do case $_am_header in $_am_arg | $_am_arg:* ) break ;; * ) _am_stamp_count=`expr $_am_stamp_count + 1` ;; esac done echo "timestamp for $_am_arg" >`$as_dirname -- "$_am_arg" || $as_expr X"$_am_arg" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$_am_arg" : 'X\(//\)[^/]' \| \ X"$_am_arg" : 'X\(//\)$' \| \ X"$_am_arg" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$_am_arg" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'`/stamp-h$_am_stamp_count ;; :C) { $as_echo "$as_me:${as_lineno-$LINENO}: executing $ac_file commands" >&5 $as_echo "$as_me: executing $ac_file commands" >&6;} ;; esac case $ac_file$ac_mode in "po-directories":C) for ac_file in $CONFIG_FILES; do # Support "outfile[:infile[:infile...]]" case "$ac_file" in *:*) ac_file=`echo "$ac_file"|sed 's%:.*%%'` ;; esac # PO directories have a Makefile.in generated from Makefile.in.in. case "$ac_file" in */Makefile.in) # Adjust a relative srcdir. ac_dir=`echo "$ac_file"|sed 's%/[^/][^/]*$%%'` ac_dir_suffix="/`echo "$ac_dir"|sed 's%^\./%%'`" ac_dots=`echo "$ac_dir_suffix"|sed 's%/[^/]*%../%g'` # In autoconf-2.13 it is called $ac_given_srcdir. # In autoconf-2.50 it is called $srcdir. 
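# Worked example of the relative-srcdir adjustment above for the usual po/
# directory (paths are illustrative; the real values depend on the build
# layout):
#
#   ac_file        = po/Makefile.in
#   ac_dir         : sed 's%/[^/][^/]*$%%'  ->  po
#   ac_dir_suffix  :                            /po
#   ac_dots        : sed 's%/[^/]*%../%g'   ->  ../
#
# so a relative srcdir gets one "../" prepended per subdirectory level when
# top_srcdir is computed below.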
test -n "$ac_given_srcdir" || ac_given_srcdir="$srcdir" case "$ac_given_srcdir" in .) top_srcdir=`echo $ac_dots|sed 's%/$%%'` ;; /*) top_srcdir="$ac_given_srcdir" ;; *) top_srcdir="$ac_dots$ac_given_srcdir" ;; esac # Treat a directory as a PO directory if and only if it has a # POTFILES.in file. This allows packages to have multiple PO # directories under different names or in different locations. if test -f "$ac_given_srcdir/$ac_dir/POTFILES.in"; then rm -f "$ac_dir/POTFILES" test -n "$as_me" && echo "$as_me: creating $ac_dir/POTFILES" || echo "creating $ac_dir/POTFILES" cat "$ac_given_srcdir/$ac_dir/POTFILES.in" | sed -e "/^#/d" -e "/^[ ]*\$/d" -e "s,.*, $top_srcdir/& \\\\," | sed -e "\$s/\(.*\) \\\\/\1/" > "$ac_dir/POTFILES" POMAKEFILEDEPS="POTFILES.in" # ALL_LINGUAS, POFILES, UPDATEPOFILES, DUMMYPOFILES, GMOFILES depend # on $ac_dir but don't depend on user-specified configuration # parameters. if test -f "$ac_given_srcdir/$ac_dir/LINGUAS"; then # The LINGUAS file contains the set of available languages. if test -n "$OBSOLETE_ALL_LINGUAS"; then test -n "$as_me" && echo "$as_me: setting ALL_LINGUAS in configure.in is obsolete" || echo "setting ALL_LINGUAS in configure.in is obsolete" fi ALL_LINGUAS_=`sed -e "/^#/d" -e "s/#.*//" "$ac_given_srcdir/$ac_dir/LINGUAS"` # Hide the ALL_LINGUAS assigment from automake < 1.5. eval 'ALL_LINGUAS''=$ALL_LINGUAS_' POMAKEFILEDEPS="$POMAKEFILEDEPS LINGUAS" else # The set of available languages was given in configure.in. # Hide the ALL_LINGUAS assigment from automake < 1.5. eval 'ALL_LINGUAS''=$OBSOLETE_ALL_LINGUAS' fi # Compute POFILES # as $(foreach lang, $(ALL_LINGUAS), $(srcdir)/$(lang).po) # Compute UPDATEPOFILES # as $(foreach lang, $(ALL_LINGUAS), $(lang).po-update) # Compute DUMMYPOFILES # as $(foreach lang, $(ALL_LINGUAS), $(lang).nop) # Compute GMOFILES # as $(foreach lang, $(ALL_LINGUAS), $(srcdir)/$(lang).gmo) case "$ac_given_srcdir" in .) srcdirpre= ;; *) srcdirpre='$(srcdir)/' ;; esac POFILES= UPDATEPOFILES= DUMMYPOFILES= GMOFILES= for lang in $ALL_LINGUAS; do POFILES="$POFILES $srcdirpre$lang.po" UPDATEPOFILES="$UPDATEPOFILES $lang.po-update" DUMMYPOFILES="$DUMMYPOFILES $lang.nop" GMOFILES="$GMOFILES $srcdirpre$lang.gmo" done # CATALOGS depends on both $ac_dir and the user's LINGUAS # environment variable. INST_LINGUAS= if test -n "$ALL_LINGUAS"; then for presentlang in $ALL_LINGUAS; do useit=no if test "%UNSET%" != "$LINGUAS"; then desiredlanguages="$LINGUAS" else desiredlanguages="$ALL_LINGUAS" fi for desiredlang in $desiredlanguages; do # Use the presentlang catalog if desiredlang is # a. equal to presentlang, or # b. a variant of presentlang (because in this case, # presentlang can be used as a fallback for messages # which are not translated in the desiredlang catalog). 
case "$desiredlang" in "$presentlang"*) useit=yes;; esac done if test $useit = yes; then INST_LINGUAS="$INST_LINGUAS $presentlang" fi done fi CATALOGS= if test -n "$INST_LINGUAS"; then for lang in $INST_LINGUAS; do CATALOGS="$CATALOGS $lang.gmo" done fi test -n "$as_me" && echo "$as_me: creating $ac_dir/Makefile" || echo "creating $ac_dir/Makefile" sed -e "/^POTFILES =/r $ac_dir/POTFILES" -e "/^# Makevars/r $ac_given_srcdir/$ac_dir/Makevars" -e "s|@POFILES@|$POFILES|g" -e "s|@UPDATEPOFILES@|$UPDATEPOFILES|g" -e "s|@DUMMYPOFILES@|$DUMMYPOFILES|g" -e "s|@GMOFILES@|$GMOFILES|g" -e "s|@CATALOGS@|$CATALOGS|g" -e "s|@POMAKEFILEDEPS@|$POMAKEFILEDEPS|g" "$ac_dir/Makefile.in" > "$ac_dir/Makefile" for f in "$ac_given_srcdir/$ac_dir"/Rules-*; do if test -f "$f"; then case "$f" in *.orig | *.bak | *~) ;; *) cat "$f" >> "$ac_dir/Makefile" ;; esac fi done fi ;; esac done ;; "depfiles":C) test x"$AMDEP_TRUE" != x"" || { # Autoconf 2.62 quotes --file arguments for eval, but not when files # are listed without --file. Let's play safe and only enable the eval # if we detect the quoting. case $CONFIG_FILES in *\'*) eval set x "$CONFIG_FILES" ;; *) set x $CONFIG_FILES ;; esac shift for mf do # Strip MF so we end up with the name of the file. mf=`echo "$mf" | sed -e 's/:.*$//'` # Check whether this is an Automake generated Makefile or not. # We used to match only the files named `Makefile.in', but # some people rename them; so instead we look at the file content. # Grep'ing the first line is not enough: some people post-process # each Makefile.in and add a new line on top of each file to say so. # Grep'ing the whole file is not good either: AIX grep has a line # limit of 2048, but all sed's we know have understand at least 4000. if sed -n 's,^#.*generated by automake.*,X,p' "$mf" | grep X >/dev/null 2>&1; then dirpart=`$as_dirname -- "$mf" || $as_expr X"$mf" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$mf" : 'X\(//\)[^/]' \| \ X"$mf" : 'X\(//\)$' \| \ X"$mf" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$mf" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` else continue fi # Extract the definition of DEPDIR, am__include, and am__quote # from the Makefile without running `make'. DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"` test -z "$DEPDIR" && continue am__include=`sed -n 's/^am__include = //p' < "$mf"` test -z "am__include" && continue am__quote=`sed -n 's/^am__quote = //p' < "$mf"` # When using ansi2knr, U may be empty or an underscore; expand it U=`sed -n 's/^U = //p' < "$mf"` # Find all dependency output files, they are included files with # $(DEPDIR) in their names. We invoke sed twice because it is the # simplest approach to changing $(DEPDIR) to its actual value in the # expansion. for file in `sed -n " s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \ sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g' -e 's/\$U/'"$U"'/g'`; do # Make sure the directory exists. test -f "$dirpart/$file" && continue fdir=`$as_dirname -- "$file" || $as_expr X"$file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$file" : 'X\(//\)[^/]' \| \ X"$file" : 'X\(//\)$' \| \ X"$file" : 'X\(/\)' \| . 
2>/dev/null || $as_echo X"$file" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` as_dir=$dirpart/$fdir; as_fn_mkdir_p # echo "creating $dirpart/$file" echo '# dummy' > "$dirpart/$file" done done } ;; "libtool":C) # See if we are running on zsh, and set the options which allow our # commands through without removal of \ escapes. if test -n "${ZSH_VERSION+set}" ; then setopt NO_GLOB_SUBST fi cfgfile="${ofile}T" trap "$RM \"$cfgfile\"; exit 1" 1 2 15 $RM "$cfgfile" cat <<_LT_EOF >> "$cfgfile" #! $SHELL # `$ECHO "$ofile" | sed 's%^.*/%%'` - Provide generalized library-building support services. # Generated automatically by $as_me ($PACKAGE$TIMESTAMP) $VERSION # Libtool was configured on host `(hostname || uname -n) 2>/dev/null | sed 1q`: # NOTE: Changes made to this file will be lost: look at ltmain.sh. # # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005, # 2006, 2007, 2008 Free Software Foundation, Inc. # Written by Gordon Matzigkeit, 1996 # # This file is part of GNU Libtool. # # GNU Libtool is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License as # published by the Free Software Foundation; either version 2 of # the License, or (at your option) any later version. # # As a special exception to the GNU General Public License, # if you distribute this file as part of a program or library that # is built using GNU Libtool, you may include this file under the # same distribution terms that you use for the rest of that program. # # GNU Libtool is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with GNU Libtool; see the file COPYING. If not, a copy # can be downloaded from http://www.gnu.org/licenses/gpl.html, or # obtained by writing to the Free Software Foundation, Inc., # 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA. # The names of the tagged configurations supported by this script. available_tags="CXX " # ### BEGIN LIBTOOL CONFIG # Which release of libtool.m4 was used? macro_version=$macro_version macro_revision=$macro_revision # Whether or not to build shared libraries. build_libtool_libs=$enable_shared # Whether or not to build static libraries. build_old_libs=$enable_static # What type of objects to build. pic_mode=$pic_mode # Whether or not to optimize for fast installation. fast_install=$enable_fast_install # The host system. host_alias=$host_alias host=$host host_os=$host_os # The build system. build_alias=$build_alias build=$build build_os=$build_os # A sed program that does not truncate output. SED=$lt_SED # Sed that helps us avoid accidentally triggering echo(1) options like -n. Xsed="\$SED -e 1s/^X//" # A grep program that handles long lines. GREP=$lt_GREP # An ERE matcher. EGREP=$lt_EGREP # A literal string matcher. FGREP=$lt_FGREP # A BSD- or MS-compatible name lister. NM=$lt_NM # Whether we need soft or hard links. LN_S=$lt_LN_S # What is the maximum length of a command? max_cmd_len=$max_cmd_len # Object file suffix (normally "o"). objext=$ac_objext # Executable file suffix (normally ""). exeext=$exeext # whether the shell understands "unset". lt_unset=$lt_unset # turn spaces into newlines. SP2NL=$lt_lt_SP2NL # turn newlines into spaces. 
NL2SP=$lt_lt_NL2SP # How to create reloadable object files. reload_flag=$lt_reload_flag reload_cmds=$lt_reload_cmds # An object symbol dumper. OBJDUMP=$lt_OBJDUMP # Method to check whether dependent libraries are shared objects. deplibs_check_method=$lt_deplibs_check_method # Command to use when deplibs_check_method == "file_magic". file_magic_cmd=$lt_file_magic_cmd # The archiver. AR=$lt_AR AR_FLAGS=$lt_AR_FLAGS # A symbol stripping program. STRIP=$lt_STRIP # Commands used to install an old-style archive. RANLIB=$lt_RANLIB old_postinstall_cmds=$lt_old_postinstall_cmds old_postuninstall_cmds=$lt_old_postuninstall_cmds # A C compiler. LTCC=$lt_CC # LTCC compiler flags. LTCFLAGS=$lt_CFLAGS # Take the output of nm and produce a listing of raw symbols and C names. global_symbol_pipe=$lt_lt_cv_sys_global_symbol_pipe # Transform the output of nm in a proper C declaration. global_symbol_to_cdecl=$lt_lt_cv_sys_global_symbol_to_cdecl # Transform the output of nm in a C name address pair. global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address # Transform the output of nm in a C name address pair when lib prefix is needed. global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix # The name of the directory that contains temporary libtool files. objdir=$objdir # Shell to use when invoking shell scripts. SHELL=$lt_SHELL # An echo program that does not interpret backslashes. ECHO=$lt_ECHO # Used to examine libraries when file_magic_cmd begins with "file". MAGIC_CMD=$MAGIC_CMD # Must we lock files when doing compilation? need_locks=$lt_need_locks # Tool to manipulate archived DWARF debug symbol files on Mac OS X. DSYMUTIL=$lt_DSYMUTIL # Tool to change global to local symbols on Mac OS X. NMEDIT=$lt_NMEDIT # Tool to manipulate fat objects and archives on Mac OS X. LIPO=$lt_LIPO # ldd/readelf like tool for Mach-O binaries on Mac OS X. OTOOL=$lt_OTOOL # ldd/readelf like tool for 64 bit Mach-O binaries on Mac OS X 10.4. OTOOL64=$lt_OTOOL64 # Old archive suffix (normally "a"). libext=$libext # Shared library suffix (normally ".so"). shrext_cmds=$lt_shrext_cmds # The commands to extract the exported symbol list from a shared archive. extract_expsyms_cmds=$lt_extract_expsyms_cmds # Variables whose values should be saved in libtool wrapper scripts and # restored at link time. variables_saved_for_relink=$lt_variables_saved_for_relink # Do we need the "lib" prefix for modules? need_lib_prefix=$need_lib_prefix # Do we need a version for libraries? need_version=$need_version # Library versioning type. version_type=$version_type # Shared library runtime path variable. runpath_var=$runpath_var # Shared library path variable. shlibpath_var=$shlibpath_var # Is shlibpath searched before the hard-coded library search path? shlibpath_overrides_runpath=$shlibpath_overrides_runpath # Format of library name prefix. libname_spec=$lt_libname_spec # List of archive names. First name is the real one, the rest are links. # The last name is the one that the linker finds with -lNAME library_names_spec=$lt_library_names_spec # The coded name of the library, if different from the real name. soname_spec=$lt_soname_spec # Command to use after installation of a shared archive. postinstall_cmds=$lt_postinstall_cmds # Command to use after uninstallation of a shared archive. postuninstall_cmds=$lt_postuninstall_cmds # Commands used to finish a libtool library installation in a directory. 
finish_cmds=$lt_finish_cmds # As "finish_cmds", except a single script fragment to be evaled but # not shown. finish_eval=$lt_finish_eval # Whether we should hardcode library paths into libraries. hardcode_into_libs=$hardcode_into_libs # Compile-time system search path for libraries. sys_lib_search_path_spec=$lt_sys_lib_search_path_spec # Run-time system search path for libraries. sys_lib_dlsearch_path_spec=$lt_sys_lib_dlsearch_path_spec # Whether dlopen is supported. dlopen_support=$enable_dlopen # Whether dlopen of programs is supported. dlopen_self=$enable_dlopen_self # Whether dlopen of statically linked programs is supported. dlopen_self_static=$enable_dlopen_self_static # Commands to strip libraries. old_striplib=$lt_old_striplib striplib=$lt_striplib # The linker used to build libraries. LD=$lt_LD # Commands used to build an old-style archive. old_archive_cmds=$lt_old_archive_cmds # A language specific compiler. CC=$lt_compiler # Is the compiler the GNU compiler? with_gcc=$GCC # Compiler flag to turn off builtin functions. no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag # How to pass a linker flag through the compiler. wl=$lt_lt_prog_compiler_wl # Additional compiler flags for building library objects. pic_flag=$lt_lt_prog_compiler_pic # Compiler flag to prevent dynamic linking. link_static_flag=$lt_lt_prog_compiler_static # Does compiler simultaneously support -c and -o options? compiler_c_o=$lt_lt_cv_prog_compiler_c_o # Whether or not to add -lc for building shared libraries. build_libtool_need_lc=$archive_cmds_need_lc # Whether or not to disallow shared libs when runtime libs are static. allow_libtool_libs_with_static_runtimes=$enable_shared_with_static_runtimes # Compiler flag to allow reflexive dlopens. export_dynamic_flag_spec=$lt_export_dynamic_flag_spec # Compiler flag to generate shared objects directly from archives. whole_archive_flag_spec=$lt_whole_archive_flag_spec # Whether the compiler copes with passing no objects directly. compiler_needs_object=$lt_compiler_needs_object # Create an old-style archive from a shared archive. old_archive_from_new_cmds=$lt_old_archive_from_new_cmds # Create a temporary old-style archive to link instead of a shared archive. old_archive_from_expsyms_cmds=$lt_old_archive_from_expsyms_cmds # Commands used to build a shared archive. archive_cmds=$lt_archive_cmds archive_expsym_cmds=$lt_archive_expsym_cmds # Commands used to build a loadable module if different from building # a shared archive. module_cmds=$lt_module_cmds module_expsym_cmds=$lt_module_expsym_cmds # Whether we are building with GNU ld or not. with_gnu_ld=$lt_with_gnu_ld # Flag that allows shared libraries with undefined symbols to be built. allow_undefined_flag=$lt_allow_undefined_flag # Flag that enforces no undefined symbols. no_undefined_flag=$lt_no_undefined_flag # Flag to hardcode \$libdir into a binary during linking. # This must work even if \$libdir does not exist hardcode_libdir_flag_spec=$lt_hardcode_libdir_flag_spec # If ld is used when linking, flag to hardcode \$libdir into a binary # during linking. This must work even if \$libdir does not exist. hardcode_libdir_flag_spec_ld=$lt_hardcode_libdir_flag_spec_ld # Whether we need a single "-rpath" flag with a separated argument. hardcode_libdir_separator=$lt_hardcode_libdir_separator # Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes # DIR into the resulting binary. 
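# For orientation, on a typical GNU ld/gcc host (illustrative values only,
# the real ones are computed at configure time) the flag produced from
# hardcode_libdir_flag_spec for libdir=/usr/local/lib comes out as
#
#   -Wl,-rpath -Wl,/usr/local/lib
#
# where the wl prefix makes the compiler driver forward each word to the
# linker, which then hardcodes the run-time search path into the binary.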
hardcode_direct=$hardcode_direct # Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes # DIR into the resulting binary and the resulting library dependency is # "absolute",i.e impossible to change by setting \${shlibpath_var} if the # library is relocated. hardcode_direct_absolute=$hardcode_direct_absolute # Set to "yes" if using the -LDIR flag during linking hardcodes DIR # into the resulting binary. hardcode_minus_L=$hardcode_minus_L # Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR # into the resulting binary. hardcode_shlibpath_var=$hardcode_shlibpath_var # Set to "yes" if building a shared library automatically hardcodes DIR # into the library and all subsequent libraries and executables linked # against it. hardcode_automatic=$hardcode_automatic # Set to yes if linker adds runtime paths of dependent libraries # to runtime path list. inherit_rpath=$inherit_rpath # Whether libtool must link a program against all its dependency libraries. link_all_deplibs=$link_all_deplibs # Fix the shell variable \$srcfile for the compiler. fix_srcfile_path=$lt_fix_srcfile_path # Set to "yes" if exported symbols are required. always_export_symbols=$always_export_symbols # The commands to list exported symbols. export_symbols_cmds=$lt_export_symbols_cmds # Symbols that should not be listed in the preloaded symbols. exclude_expsyms=$lt_exclude_expsyms # Symbols that must always be exported. include_expsyms=$lt_include_expsyms # Commands necessary for linking programs (against libraries) with templates. prelink_cmds=$lt_prelink_cmds # Specify filename containing input files. file_list_spec=$lt_file_list_spec # How to hardcode a shared library path into an executable. hardcode_action=$hardcode_action # The directories searched by this compiler when creating a shared library. compiler_lib_search_dirs=$lt_compiler_lib_search_dirs # Dependencies to place before and after the objects being linked to # create a shared library. predep_objects=$lt_predep_objects postdep_objects=$lt_postdep_objects predeps=$lt_predeps postdeps=$lt_postdeps # The library search path used internally by the compiler when linking # a shared library. compiler_lib_search_path=$lt_compiler_lib_search_path # ### END LIBTOOL CONFIG _LT_EOF case $host_os in aix3*) cat <<\_LT_EOF >> "$cfgfile" # AIX sometimes has problems with the GCC collect2 program. For some # reason, if we set the COLLECT_NAMES environment variable, the problems # vanish in a puff of smoke. if test "X${COLLECT_NAMES+set}" != Xset; then COLLECT_NAMES= export COLLECT_NAMES fi _LT_EOF ;; esac ltmain="$ac_aux_dir/ltmain.sh" # We use sed instead of cat because bash on DJGPP gets confused if # if finds mixed CR/LF and LF-only lines. Since sed operates in # text mode, it properly converts lines to CR/LF. This bash problem # is reportedly fixed, but why not run on old versions too? sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \ || (rm -f "$cfgfile"; exit 1) case $xsi_shell in yes) cat << \_LT_EOF >> "$cfgfile" # func_dirname file append nondir_replacement # Compute the dirname of FILE. If nonempty, add APPEND to the result, # otherwise set result to NONDIR_REPLACEMENT. 
func_dirname () { case ${1} in */*) func_dirname_result="${1%/*}${2}" ;; * ) func_dirname_result="${3}" ;; esac } # func_basename file func_basename () { func_basename_result="${1##*/}" } # func_dirname_and_basename file append nondir_replacement # perform func_basename and func_dirname in a single function # call: # dirname: Compute the dirname of FILE. If nonempty, # add APPEND to the result, otherwise set result # to NONDIR_REPLACEMENT. # value returned in "$func_dirname_result" # basename: Compute filename of FILE. # value retuned in "$func_basename_result" # Implementation must be kept synchronized with func_dirname # and func_basename. For efficiency, we do not delegate to # those functions but instead duplicate the functionality here. func_dirname_and_basename () { case ${1} in */*) func_dirname_result="${1%/*}${2}" ;; * ) func_dirname_result="${3}" ;; esac func_basename_result="${1##*/}" } # func_stripname prefix suffix name # strip PREFIX and SUFFIX off of NAME. # PREFIX and SUFFIX must not contain globbing or regex special # characters, hashes, percent signs, but SUFFIX may contain a leading # dot (in which case that matches only a dot). func_stripname () { # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are # positional parameters, so assign one to ordinary parameter first. func_stripname_result=${3} func_stripname_result=${func_stripname_result#"${1}"} func_stripname_result=${func_stripname_result%"${2}"} } # func_opt_split func_opt_split () { func_opt_split_opt=${1%%=*} func_opt_split_arg=${1#*=} } # func_lo2o object func_lo2o () { case ${1} in *.lo) func_lo2o_result=${1%.lo}.${objext} ;; *) func_lo2o_result=${1} ;; esac } # func_xform libobj-or-source func_xform () { func_xform_result=${1%.*}.lo } # func_arith arithmetic-term... func_arith () { func_arith_result=$(( $* )) } # func_len string # STRING may not start with a hyphen. func_len () { func_len_result=${#1} } _LT_EOF ;; *) # Bourne compatible functions. cat << \_LT_EOF >> "$cfgfile" # func_dirname file append nondir_replacement # Compute the dirname of FILE. If nonempty, add APPEND to the result, # otherwise set result to NONDIR_REPLACEMENT. func_dirname () { # Extract subdirectory from the argument. func_dirname_result=`$ECHO "X${1}" | $Xsed -e "$dirname"` if test "X$func_dirname_result" = "X${1}"; then func_dirname_result="${3}" else func_dirname_result="$func_dirname_result${2}" fi } # func_basename file func_basename () { func_basename_result=`$ECHO "X${1}" | $Xsed -e "$basename"` } # func_stripname prefix suffix name # strip PREFIX and SUFFIX off of NAME. # PREFIX and SUFFIX must not contain globbing or regex special # characters, hashes, percent signs, but SUFFIX may contain a leading # dot (in which case that matches only a dot). # func_strip_suffix prefix name func_stripname () { case ${2} in .*) func_stripname_result=`$ECHO "X${3}" \ | $Xsed -e "s%^${1}%%" -e "s%\\\\${2}\$%%"`;; *) func_stripname_result=`$ECHO "X${3}" \ | $Xsed -e "s%^${1}%%" -e "s%${2}\$%%"`;; esac } # sed scripts: my_sed_long_opt='1s/^\(-[^=]*\)=.*/\1/;q' my_sed_long_arg='1s/^-[^=]*=//' # func_opt_split func_opt_split () { func_opt_split_opt=`$ECHO "X${1}" | $Xsed -e "$my_sed_long_opt"` func_opt_split_arg=`$ECHO "X${1}" | $Xsed -e "$my_sed_long_arg"` } # func_lo2o object func_lo2o () { func_lo2o_result=`$ECHO "X${1}" | $Xsed -e "$lo2o"` } # func_xform libobj-or-source func_xform () { func_xform_result=`$ECHO "X${1}" | $Xsed -e 's/\.[^.]*$/.lo/'` } # func_arith arithmetic-term... 
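# Quick sketch of func_stripname defined above (the file name is only an
# example):
#
#   func_stripname 'lib' '.la' 'libdar.la'
#   # -> func_stripname_result=dar
#
# PREFIX and SUFFIX are stripped literally, which is why they must not
# contain globbing or regex special characters.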
func_arith () { func_arith_result=`expr "$@"` } # func_len string # STRING may not start with a hyphen. func_len () { func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len` } _LT_EOF esac case $lt_shell_append in yes) cat << \_LT_EOF >> "$cfgfile" # func_append var value # Append VALUE to the end of shell variable VAR. func_append () { eval "$1+=\$2" } _LT_EOF ;; *) cat << \_LT_EOF >> "$cfgfile" # func_append var value # Append VALUE to the end of shell variable VAR. func_append () { eval "$1=\$$1\$2" } _LT_EOF ;; esac sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \ || (rm -f "$cfgfile"; exit 1) mv -f "$cfgfile" "$ofile" || (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") chmod +x "$ofile" cat <<_LT_EOF >> "$ofile" # ### BEGIN LIBTOOL TAG CONFIG: CXX # The linker used to build libraries. LD=$lt_LD_CXX # Commands used to build an old-style archive. old_archive_cmds=$lt_old_archive_cmds_CXX # A language specific compiler. CC=$lt_compiler_CXX # Is the compiler the GNU compiler? with_gcc=$GCC_CXX # Compiler flag to turn off builtin functions. no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag_CXX # How to pass a linker flag through the compiler. wl=$lt_lt_prog_compiler_wl_CXX # Additional compiler flags for building library objects. pic_flag=$lt_lt_prog_compiler_pic_CXX # Compiler flag to prevent dynamic linking. link_static_flag=$lt_lt_prog_compiler_static_CXX # Does compiler simultaneously support -c and -o options? compiler_c_o=$lt_lt_cv_prog_compiler_c_o_CXX # Whether or not to add -lc for building shared libraries. build_libtool_need_lc=$archive_cmds_need_lc_CXX # Whether or not to disallow shared libs when runtime libs are static. allow_libtool_libs_with_static_runtimes=$enable_shared_with_static_runtimes_CXX # Compiler flag to allow reflexive dlopens. export_dynamic_flag_spec=$lt_export_dynamic_flag_spec_CXX # Compiler flag to generate shared objects directly from archives. whole_archive_flag_spec=$lt_whole_archive_flag_spec_CXX # Whether the compiler copes with passing no objects directly. compiler_needs_object=$lt_compiler_needs_object_CXX # Create an old-style archive from a shared archive. old_archive_from_new_cmds=$lt_old_archive_from_new_cmds_CXX # Create a temporary old-style archive to link instead of a shared archive. old_archive_from_expsyms_cmds=$lt_old_archive_from_expsyms_cmds_CXX # Commands used to build a shared archive. archive_cmds=$lt_archive_cmds_CXX archive_expsym_cmds=$lt_archive_expsym_cmds_CXX # Commands used to build a loadable module if different from building # a shared archive. module_cmds=$lt_module_cmds_CXX module_expsym_cmds=$lt_module_expsym_cmds_CXX # Whether we are building with GNU ld or not. with_gnu_ld=$lt_with_gnu_ld_CXX # Flag that allows shared libraries with undefined symbols to be built. allow_undefined_flag=$lt_allow_undefined_flag_CXX # Flag that enforces no undefined symbols. no_undefined_flag=$lt_no_undefined_flag_CXX # Flag to hardcode \$libdir into a binary during linking. # This must work even if \$libdir does not exist hardcode_libdir_flag_spec=$lt_hardcode_libdir_flag_spec_CXX # If ld is used when linking, flag to hardcode \$libdir into a binary # during linking. This must work even if \$libdir does not exist. hardcode_libdir_flag_spec_ld=$lt_hardcode_libdir_flag_spec_ld_CXX # Whether we need a single "-rpath" flag with a separated argument. 
hardcode_libdir_separator=$lt_hardcode_libdir_separator_CXX # Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes # DIR into the resulting binary. hardcode_direct=$hardcode_direct_CXX # Set to "yes" if using DIR/libNAME\${shared_ext} during linking hardcodes # DIR into the resulting binary and the resulting library dependency is # "absolute",i.e impossible to change by setting \${shlibpath_var} if the # library is relocated. hardcode_direct_absolute=$hardcode_direct_absolute_CXX # Set to "yes" if using the -LDIR flag during linking hardcodes DIR # into the resulting binary. hardcode_minus_L=$hardcode_minus_L_CXX # Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR # into the resulting binary. hardcode_shlibpath_var=$hardcode_shlibpath_var_CXX # Set to "yes" if building a shared library automatically hardcodes DIR # into the library and all subsequent libraries and executables linked # against it. hardcode_automatic=$hardcode_automatic_CXX # Set to yes if linker adds runtime paths of dependent libraries # to runtime path list. inherit_rpath=$inherit_rpath_CXX # Whether libtool must link a program against all its dependency libraries. link_all_deplibs=$link_all_deplibs_CXX # Fix the shell variable \$srcfile for the compiler. fix_srcfile_path=$lt_fix_srcfile_path_CXX # Set to "yes" if exported symbols are required. always_export_symbols=$always_export_symbols_CXX # The commands to list exported symbols. export_symbols_cmds=$lt_export_symbols_cmds_CXX # Symbols that should not be listed in the preloaded symbols. exclude_expsyms=$lt_exclude_expsyms_CXX # Symbols that must always be exported. include_expsyms=$lt_include_expsyms_CXX # Commands necessary for linking programs (against libraries) with templates. prelink_cmds=$lt_prelink_cmds_CXX # Specify filename containing input files. file_list_spec=$lt_file_list_spec_CXX # How to hardcode a shared library path into an executable. hardcode_action=$hardcode_action_CXX # The directories searched by this compiler when creating a shared library. compiler_lib_search_dirs=$lt_compiler_lib_search_dirs_CXX # Dependencies to place before and after the objects being linked to # create a shared library. predep_objects=$lt_predep_objects_CXX postdep_objects=$lt_postdep_objects_CXX predeps=$lt_predeps_CXX postdeps=$lt_postdeps_CXX # The library search path used internally by the compiler when linking # a shared library. compiler_lib_search_path=$lt_compiler_lib_search_path_CXX # ### END LIBTOOL TAG CONFIG: CXX _LT_EOF ;; "hack-libtool":C) sed 's,^pic_flag=,pic_flag=" -D__DYNAMIC__ ",' libtool > libtoolT \ && mv -f libtoolT libtool && chmod 755 libtool ;; esac done # for ac_tag as_fn_exit 0 _ACEOF ac_clean_files=$ac_clean_files_save test $ac_write_fail = 0 || as_fn_error $? "write failure creating $CONFIG_STATUS" "$LINENO" 5 # configure is writing to config.log, and then calls config.status. # config.status does its own redirection, appending to config.log. # Unfortunately, on DOS this fails, as config.log is still kept open # by configure, so config.status won't be able to write to it; its # output is simply discarded. So we exec the FD to /dev/null, # effectively closing config.log, so it can be properly (re)opened and # appended to by config.status. When coming back to configure, we # need to make the FD available again. 
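# In shell terms the dance described above boils down to (fd 5 is the
# descriptor configure keeps on config.log):
#
#   exec 5>/dev/null          # release config.log before running config.status
#   $SHELL $CONFIG_STATUS ...
#   exec 5>>config.log        # reattach and keep appending afterwards
#
# which is what the block below does, guarded by $no_create.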
if test "$no_create" != yes; then ac_cs_success=: ac_config_status_args= test "$silent" = yes && ac_config_status_args="$ac_config_status_args --quiet" exec 5>/dev/null $SHELL $CONFIG_STATUS $ac_config_status_args || ac_cs_success=false exec 5>>config.log # Use ||, not &&, to avoid exiting from the if with $? = 1, which # would make configure fail if this is the last instruction. $ac_cs_success || as_fn_exit 1 fi if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5 $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;} fi echo "" echo "--" echo "dar and libdar have been successfully configured with the following parameters:" echo "" echo " LIBDAR parameters:" echo -n " Zlib compression (gzip) : " if [ "$local_libz" = "yes" ] ; then echo "YES" else echo "NO" fi echo -n " Libbz2 compression (bzip2) : " if [ "$local_libbz2" = "yes" ] ; then echo "YES" else echo "NO" fi echo -n " Liblzo2 compression (lzo) : " if [ "$local_liblzo2" = "yes" ] ; then echo "YES" else echo "NO" fi echo -n " Strong encryption support : " if [ "$local_crypto" = "yes" ] ; then echo "YES" else echo "NO" fi echo -n " Extended Attributes support: " if [ "$local_ea_support" = "yes" ] ; then echo "YES" else echo "NO" fi echo -n " Large files support (> 2GB): " if [ ! -z "$ac_cv_sys_file_offset_bits" -o ! -z "$ac_cv_sys_large_files" ] ; then echo "YES" else echo "NO" fi echo -n " ext2fs NODUMP flag support : " if [ "$local_nodump_feature" = "yes" ] ; then echo "YES" else echo "NO" fi echo -n " Special allocation scheme : " if [ "$local_special_alloc" = "yes" ] ; then echo "YES" else echo "NO" fi echo -n " Integer size used : " if [ -z "$build_mode" -o \( ! "$build_mode" = "32" -a ! "$build_mode" = "64" \) ] ; then echo "infinint" else echo "$build_mode" fi echo -n " Thread safe support : " if [ \( "$local_mutex_works" = "yes" -o ! "$local_special_alloc" = "yes" \) -a -z "$local_test_memory" ] ; then echo "YES" else echo "NO" fi echo -n " Furtive read mode : " if [ "$local_furtive_read_mode" = "yes" ]; then echo "YES" else echo "NO" fi echo -n " Large directory optim. : " if [ "$local_fast_dir" = "yes" ] ; then echo "YES" else echo "NO" fi echo "" echo " DAR SUITE command line programs:" echo -n " Long options available : " if [ "$local_have_getopt_long" = "yes" ] ; then echo "YES" else echo "NO" fi echo -n " Building examples : " if [ "$examples" = "yes" ] ; then echo "YES" else echo "NO" fi echo -n " Building dar_static : " if [ "$build_static" = "yes" ]; then if [ "$static_pb" = "yes" ]; then echo "NO (system does not support static linking, see note below)" else echo "YES" fi else echo "NO" fi echo -n " using upx at install : " if [ "$upx" = "yes" ] ; then echo "YES" else echo "NO" fi echo -n " building documentation : " if [ "$doxygen" = "yes" ] ; then echo "YES" else echo "NO" fi if [ "$static_pb" = "yes" -a "$build_static" = "yes" ]; then echo "" echo " Note:" echo "" echo " If you want to know which libraries are not available as static" echo " libraries check the logs in the config.log generated file. the command" echo "" echo " 'grep -e -static -A 2 config.log'" echo "" echo " should bring you to the essentials." 
echo "" echo "You also might want to speed up the compilation process running ./configure" echo "with the --disable-static option" echo "" fi echo "" dar-2.4.8/aclocal.m40000644000175000017520000116013712023053147011064 00000000000000# generated automatically by aclocal 1.11.1 -*- Autoconf -*- # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, # 2005, 2006, 2007, 2008, 2009 Free Software Foundation, Inc. # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. m4_ifndef([AC_AUTOCONF_VERSION], [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl m4_if(m4_defn([AC_AUTOCONF_VERSION]), [2.67],, [m4_warning([this file was generated for autoconf 2.67. You have another version of autoconf. It may work, but is not guaranteed to. If you have problems, you may need to regenerate the build system entirely. To do so, use the procedure documented by the package, typically `autoreconf'.])]) # intlmacosx.m4 serial 3 (gettext-0.18) dnl Copyright (C) 2004-2010 Free Software Foundation, Inc. dnl This file is free software; the Free Software Foundation dnl gives unlimited permission to copy and/or distribute it, dnl with or without modifications, as long as this notice is preserved. dnl dnl This file can can be used in projects which are not available under dnl the GNU General Public License or the GNU Library General Public dnl License but which still want to provide support for the GNU gettext dnl functionality. dnl Please note that the actual code of the GNU gettext library is covered dnl by the GNU Library General Public License, and the rest of the GNU dnl gettext package package is covered by the GNU General Public License. dnl They are *not* in the public domain. dnl Checks for special options needed on MacOS X. dnl Defines INTL_MACOSX_LIBS. AC_DEFUN([gt_INTL_MACOSX], [ dnl Check for API introduced in MacOS X 10.2. AC_CACHE_CHECK([for CFPreferencesCopyAppValue], [gt_cv_func_CFPreferencesCopyAppValue], [gt_save_LIBS="$LIBS" LIBS="$LIBS -Wl,-framework -Wl,CoreFoundation" AC_TRY_LINK([#include ], [CFPreferencesCopyAppValue(NULL, NULL)], [gt_cv_func_CFPreferencesCopyAppValue=yes], [gt_cv_func_CFPreferencesCopyAppValue=no]) LIBS="$gt_save_LIBS"]) if test $gt_cv_func_CFPreferencesCopyAppValue = yes; then AC_DEFINE([HAVE_CFPREFERENCESCOPYAPPVALUE], [1], [Define to 1 if you have the MacOS X function CFPreferencesCopyAppValue in the CoreFoundation framework.]) fi dnl Check for API introduced in MacOS X 10.3. 
  AC_CACHE_CHECK([for CFLocaleCopyCurrent], [gt_cv_func_CFLocaleCopyCurrent],
    [gt_save_LIBS="$LIBS"
     LIBS="$LIBS -Wl,-framework -Wl,CoreFoundation"
     AC_TRY_LINK([#include <CoreFoundation/CFLocale.h>],
       [CFLocaleCopyCurrent();],
       [gt_cv_func_CFLocaleCopyCurrent=yes],
       [gt_cv_func_CFLocaleCopyCurrent=no])
     LIBS="$gt_save_LIBS"])
  if test $gt_cv_func_CFLocaleCopyCurrent = yes; then
    AC_DEFINE([HAVE_CFLOCALECOPYCURRENT], [1],
      [Define to 1 if you have the MacOS X function CFLocaleCopyCurrent in the CoreFoundation framework.])
  fi
  INTL_MACOSX_LIBS=
  if test $gt_cv_func_CFPreferencesCopyAppValue = yes || test $gt_cv_func_CFLocaleCopyCurrent = yes; then
    INTL_MACOSX_LIBS="-Wl,-framework -Wl,CoreFoundation"
  fi
  AC_SUBST([INTL_MACOSX_LIBS])
])

# libtool.m4 - Configure libtool for the host system. -*-Autoconf-*-
#
# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
# 2006, 2007, 2008 Free Software Foundation, Inc.
# Written by Gordon Matzigkeit, 1996
#
# This file is free software; the Free Software Foundation gives
# unlimited permission to copy and/or distribute it, with or without
# modifications, as long as this notice is preserved.

m4_define([_LT_COPYING], [dnl
# Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2003, 2004, 2005,
# 2006, 2007, 2008 Free Software Foundation, Inc.
# Written by Gordon Matzigkeit, 1996
#
# This file is part of GNU Libtool.
#
# GNU Libtool is free software; you can redistribute it and/or
# modify it under the terms of the GNU General Public License as
# published by the Free Software Foundation; either version 2 of
# the License, or (at your option) any later version.
#
# As a special exception to the GNU General Public License,
# if you distribute this file as part of a program or library that
# is built using GNU Libtool, you may include this file under the
# same distribution terms that you use for the rest of that program.
#
# GNU Libtool is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with GNU Libtool; see the file COPYING. If not, a copy
# can be downloaded from http://www.gnu.org/licenses/gpl.html, or
# obtained by writing to the Free Software Foundation, Inc.,
# 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301, USA.
])

# serial 56 LT_INIT

# LT_PREREQ(VERSION)
# ------------------
# Complain and exit if this libtool version is less than VERSION.
m4_defun([LT_PREREQ], [m4_if(m4_version_compare(m4_defn([LT_PACKAGE_VERSION]), [$1]), -1, [m4_default([$3], [m4_fatal([Libtool version $1 or higher is required], 63)])], [$2])]) # _LT_CHECK_BUILDDIR # ------------------ # Complain if the absolute build directory name contains unusual characters m4_defun([_LT_CHECK_BUILDDIR], [case `pwd` in *\ * | *\ *) AC_MSG_WARN([Libtool does not cope well with whitespace in `pwd`]) ;; esac ]) # LT_INIT([OPTIONS]) # ------------------ AC_DEFUN([LT_INIT], [AC_PREREQ([2.58])dnl We use AC_INCLUDES_DEFAULT AC_BEFORE([$0], [LT_LANG])dnl AC_BEFORE([$0], [LT_OUTPUT])dnl AC_BEFORE([$0], [LTDL_INIT])dnl m4_require([_LT_CHECK_BUILDDIR])dnl dnl Autoconf doesn't catch unexpanded LT_ macros by default: m4_pattern_forbid([^_?LT_[A-Z_]+$])dnl m4_pattern_allow([^(_LT_EOF|LT_DLGLOBAL|LT_DLLAZY_OR_NOW|LT_MULTI_MODULE)$])dnl dnl aclocal doesn't pull ltoptions.m4, ltsugar.m4, or ltversion.m4 dnl unless we require an AC_DEFUNed macro: AC_REQUIRE([LTOPTIONS_VERSION])dnl AC_REQUIRE([LTSUGAR_VERSION])dnl AC_REQUIRE([LTVERSION_VERSION])dnl AC_REQUIRE([LTOBSOLETE_VERSION])dnl m4_require([_LT_PROG_LTMAIN])dnl dnl Parse OPTIONS _LT_SET_OPTIONS([$0], [$1]) # This can be used to rebuild libtool when needed LIBTOOL_DEPS="$ltmain" # Always use our own libtool. LIBTOOL='$(SHELL) $(top_builddir)/libtool' AC_SUBST(LIBTOOL)dnl _LT_SETUP # Only expand once: m4_define([LT_INIT]) ])# LT_INIT # Old names: AU_ALIAS([AC_PROG_LIBTOOL], [LT_INIT]) AU_ALIAS([AM_PROG_LIBTOOL], [LT_INIT]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_PROG_LIBTOOL], []) dnl AC_DEFUN([AM_PROG_LIBTOOL], []) # _LT_CC_BASENAME(CC) # ------------------- # Calculate cc_basename. Skip known compiler wrappers and cross-prefix. m4_defun([_LT_CC_BASENAME], [for cc_temp in $1""; do case $cc_temp in compile | *[[\\/]]compile | ccache | *[[\\/]]ccache ) ;; distcc | *[[\\/]]distcc | purify | *[[\\/]]purify ) ;; \-*) ;; *) break;; esac done cc_basename=`$ECHO "X$cc_temp" | $Xsed -e 's%.*/%%' -e "s%^$host_alias-%%"` ]) # _LT_FILEUTILS_DEFAULTS # ---------------------- # It is okay to use these file commands and assume they have been set # sensibly after `m4_require([_LT_FILEUTILS_DEFAULTS])'. m4_defun([_LT_FILEUTILS_DEFAULTS], [: ${CP="cp -f"} : ${MV="mv -f"} : ${RM="rm -f"} ])# _LT_FILEUTILS_DEFAULTS # _LT_SETUP # --------- m4_defun([_LT_SETUP], [AC_REQUIRE([AC_CANONICAL_HOST])dnl AC_REQUIRE([AC_CANONICAL_BUILD])dnl _LT_DECL([], [host_alias], [0], [The host system])dnl _LT_DECL([], [host], [0])dnl _LT_DECL([], [host_os], [0])dnl dnl _LT_DECL([], [build_alias], [0], [The build system])dnl _LT_DECL([], [build], [0])dnl _LT_DECL([], [build_os], [0])dnl dnl AC_REQUIRE([AC_PROG_CC])dnl AC_REQUIRE([LT_PATH_LD])dnl AC_REQUIRE([LT_PATH_NM])dnl dnl AC_REQUIRE([AC_PROG_LN_S])dnl test -z "$LN_S" && LN_S="ln -s" _LT_DECL([], [LN_S], [1], [Whether we need soft or hard links])dnl dnl AC_REQUIRE([LT_CMD_MAX_LEN])dnl _LT_DECL([objext], [ac_objext], [0], [Object file suffix (normally "o")])dnl _LT_DECL([], [exeext], [0], [Executable file suffix (normally "")])dnl dnl m4_require([_LT_FILEUTILS_DEFAULTS])dnl m4_require([_LT_CHECK_SHELL_FEATURES])dnl m4_require([_LT_CMD_RELOAD])dnl m4_require([_LT_CHECK_MAGIC_METHOD])dnl m4_require([_LT_CMD_OLD_ARCHIVE])dnl m4_require([_LT_CMD_GLOBAL_SYMBOLS])dnl _LT_CONFIG_LIBTOOL_INIT([ # See if we are running on zsh, and set the options which allow our # commands through without removal of \ escapes INIT. 
if test -n "\${ZSH_VERSION+set}" ; then setopt NO_GLOB_SUBST fi ]) if test -n "${ZSH_VERSION+set}" ; then setopt NO_GLOB_SUBST fi _LT_CHECK_OBJDIR m4_require([_LT_TAG_COMPILER])dnl _LT_PROG_ECHO_BACKSLASH case $host_os in aix3*) # AIX sometimes has problems with the GCC collect2 program. For some # reason, if we set the COLLECT_NAMES environment variable, the problems # vanish in a puff of smoke. if test "X${COLLECT_NAMES+set}" != Xset; then COLLECT_NAMES= export COLLECT_NAMES fi ;; esac # Sed substitution that helps us do robust quoting. It backslashifies # metacharacters that are still active within double-quoted strings. sed_quote_subst='s/\([["`$\\]]\)/\\\1/g' # Same as above, but do not quote variable references. double_quote_subst='s/\([["`\\]]\)/\\\1/g' # Sed substitution to delay expansion of an escaped shell variable in a # double_quote_subst'ed string. delay_variable_subst='s/\\\\\\\\\\\$/\\\\\\$/g' # Sed substitution to delay expansion of an escaped single quote. delay_single_quote_subst='s/'\''/'\'\\\\\\\'\''/g' # Sed substitution to avoid accidental globbing in evaled expressions no_glob_subst='s/\*/\\\*/g' # Global variables: ofile=libtool can_build_shared=yes # All known linkers require a `.a' archive for static linking (except MSVC, # which needs '.lib'). libext=a with_gnu_ld="$lt_cv_prog_gnu_ld" old_CC="$CC" old_CFLAGS="$CFLAGS" # Set sane defaults for various variables test -z "$CC" && CC=cc test -z "$LTCC" && LTCC=$CC test -z "$LTCFLAGS" && LTCFLAGS=$CFLAGS test -z "$LD" && LD=ld test -z "$ac_objext" && ac_objext=o _LT_CC_BASENAME([$compiler]) # Only perform the check for file, if the check method requires it test -z "$MAGIC_CMD" && MAGIC_CMD=file case $deplibs_check_method in file_magic*) if test "$file_magic_cmd" = '$MAGIC_CMD'; then _LT_PATH_MAGIC fi ;; esac # Use C for the default configuration in the libtool script LT_SUPPORTED_TAG([CC]) _LT_LANG_C_CONFIG _LT_LANG_DEFAULT_CONFIG _LT_CONFIG_COMMANDS ])# _LT_SETUP # _LT_PROG_LTMAIN # --------------- # Note that this code is called both from `configure', and `config.status' # now that we use AC_CONFIG_COMMANDS to generate libtool. Notably, # `config.status' has no value for ac_aux_dir unless we are using Automake, # so we pass a copy along to make sure it has a sensible value anyway. m4_defun([_LT_PROG_LTMAIN], [m4_ifdef([AC_REQUIRE_AUX_FILE], [AC_REQUIRE_AUX_FILE([ltmain.sh])])dnl _LT_CONFIG_LIBTOOL_INIT([ac_aux_dir='$ac_aux_dir']) ltmain="$ac_aux_dir/ltmain.sh" ])# _LT_PROG_LTMAIN # So that we can recreate a full libtool script including additional # tags, we accumulate the chunks of code to send to AC_CONFIG_COMMANDS # in macros and then make a single call at the end using the `libtool' # label. # _LT_CONFIG_LIBTOOL_INIT([INIT-COMMANDS]) # ---------------------------------------- # Register INIT-COMMANDS to be passed to AC_CONFIG_COMMANDS later. m4_define([_LT_CONFIG_LIBTOOL_INIT], [m4_ifval([$1], [m4_append([_LT_OUTPUT_LIBTOOL_INIT], [$1 ])])]) # Initialize. m4_define([_LT_OUTPUT_LIBTOOL_INIT]) # _LT_CONFIG_LIBTOOL([COMMANDS]) # ------------------------------ # Register COMMANDS to be passed to AC_CONFIG_COMMANDS later. m4_define([_LT_CONFIG_LIBTOOL], [m4_ifval([$1], [m4_append([_LT_OUTPUT_LIBTOOL_COMMANDS], [$1 ])])]) # Initialize. 
m4_define([_LT_OUTPUT_LIBTOOL_COMMANDS]) # _LT_CONFIG_SAVE_COMMANDS([COMMANDS], [INIT_COMMANDS]) # ----------------------------------------------------- m4_defun([_LT_CONFIG_SAVE_COMMANDS], [_LT_CONFIG_LIBTOOL([$1]) _LT_CONFIG_LIBTOOL_INIT([$2]) ]) # _LT_FORMAT_COMMENT([COMMENT]) # ----------------------------- # Add leading comment marks to the start of each line, and a trailing # full-stop to the whole comment if one is not present already. m4_define([_LT_FORMAT_COMMENT], [m4_ifval([$1], [ m4_bpatsubst([m4_bpatsubst([$1], [^ *], [# ])], [['`$\]], [\\\&])]m4_bmatch([$1], [[!?.]$], [], [.]) )]) # _LT_DECL([CONFIGNAME], VARNAME, VALUE, [DESCRIPTION], [IS-TAGGED?]) # ------------------------------------------------------------------- # CONFIGNAME is the name given to the value in the libtool script. # VARNAME is the (base) name used in the configure script. # VALUE may be 0, 1 or 2 for a computed quote escaped value based on # VARNAME. Any other value will be used directly. m4_define([_LT_DECL], [lt_if_append_uniq([lt_decl_varnames], [$2], [, ], [lt_dict_add_subkey([lt_decl_dict], [$2], [libtool_name], [m4_ifval([$1], [$1], [$2])]) lt_dict_add_subkey([lt_decl_dict], [$2], [value], [$3]) m4_ifval([$4], [lt_dict_add_subkey([lt_decl_dict], [$2], [description], [$4])]) lt_dict_add_subkey([lt_decl_dict], [$2], [tagged?], [m4_ifval([$5], [yes], [no])])]) ]) # _LT_TAGDECL([CONFIGNAME], VARNAME, VALUE, [DESCRIPTION]) # -------------------------------------------------------- m4_define([_LT_TAGDECL], [_LT_DECL([$1], [$2], [$3], [$4], [yes])]) # lt_decl_tag_varnames([SEPARATOR], [VARNAME1...]) # ------------------------------------------------ m4_define([lt_decl_tag_varnames], [_lt_decl_filter([tagged?], [yes], $@)]) # _lt_decl_filter(SUBKEY, VALUE, [SEPARATOR], [VARNAME1..]) # --------------------------------------------------------- m4_define([_lt_decl_filter], [m4_case([$#], [0], [m4_fatal([$0: too few arguments: $#])], [1], [m4_fatal([$0: too few arguments: $#: $1])], [2], [lt_dict_filter([lt_decl_dict], [$1], [$2], [], lt_decl_varnames)], [3], [lt_dict_filter([lt_decl_dict], [$1], [$2], [$3], lt_decl_varnames)], [lt_dict_filter([lt_decl_dict], $@)])[]dnl ]) # lt_decl_quote_varnames([SEPARATOR], [VARNAME1...]) # -------------------------------------------------- m4_define([lt_decl_quote_varnames], [_lt_decl_filter([value], [1], $@)]) # lt_decl_dquote_varnames([SEPARATOR], [VARNAME1...]) # --------------------------------------------------- m4_define([lt_decl_dquote_varnames], [_lt_decl_filter([value], [2], $@)]) # lt_decl_varnames_tagged([SEPARATOR], [VARNAME1...]) # --------------------------------------------------- m4_define([lt_decl_varnames_tagged], [m4_assert([$# <= 2])dnl _$0(m4_quote(m4_default([$1], [[, ]])), m4_ifval([$2], [[$2]], [m4_dquote(lt_decl_tag_varnames)]), m4_split(m4_normalize(m4_quote(_LT_TAGS)), [ ]))]) m4_define([_lt_decl_varnames_tagged], [m4_ifval([$3], [lt_combine([$1], [$2], [_], $3)])]) # lt_decl_all_varnames([SEPARATOR], [VARNAME1...]) # ------------------------------------------------ m4_define([lt_decl_all_varnames], [_$0(m4_quote(m4_default([$1], [[, ]])), m4_if([$2], [], m4_quote(lt_decl_varnames), m4_quote(m4_shift($@))))[]dnl ]) m4_define([_lt_decl_all_varnames], [lt_join($@, lt_decl_varnames_tagged([$1], lt_decl_tag_varnames([[, ]], m4_shift($@))))dnl ]) # _LT_CONFIG_STATUS_DECLARE([VARNAME]) # ------------------------------------ # Quote a variable value, and forward it to `config.status' so that its # declaration there will have the same value as in 
`configure'. VARNAME # must have a single quote delimited value for this to work. m4_define([_LT_CONFIG_STATUS_DECLARE], [$1='`$ECHO "X$][$1" | $Xsed -e "$delay_single_quote_subst"`']) # _LT_CONFIG_STATUS_DECLARATIONS # ------------------------------ # We delimit libtool config variables with single quotes, so when # we write them to config.status, we have to be sure to quote all # embedded single quotes properly. In configure, this macro expands # each variable declared with _LT_DECL (and _LT_TAGDECL) into: # # ='`$ECHO "X$" | $Xsed -e "$delay_single_quote_subst"`' m4_defun([_LT_CONFIG_STATUS_DECLARATIONS], [m4_foreach([_lt_var], m4_quote(lt_decl_all_varnames), [m4_n([_LT_CONFIG_STATUS_DECLARE(_lt_var)])])]) # _LT_LIBTOOL_TAGS # ---------------- # Output comment and list of tags supported by the script m4_defun([_LT_LIBTOOL_TAGS], [_LT_FORMAT_COMMENT([The names of the tagged configurations supported by this script])dnl available_tags="_LT_TAGS"dnl ]) # _LT_LIBTOOL_DECLARE(VARNAME, [TAG]) # ----------------------------------- # Extract the dictionary values for VARNAME (optionally with TAG) and # expand to a commented shell variable setting: # # # Some comment about what VAR is for. # visible_name=$lt_internal_name m4_define([_LT_LIBTOOL_DECLARE], [_LT_FORMAT_COMMENT(m4_quote(lt_dict_fetch([lt_decl_dict], [$1], [description])))[]dnl m4_pushdef([_libtool_name], m4_quote(lt_dict_fetch([lt_decl_dict], [$1], [libtool_name])))[]dnl m4_case(m4_quote(lt_dict_fetch([lt_decl_dict], [$1], [value])), [0], [_libtool_name=[$]$1], [1], [_libtool_name=$lt_[]$1], [2], [_libtool_name=$lt_[]$1], [_libtool_name=lt_dict_fetch([lt_decl_dict], [$1], [value])])[]dnl m4_ifval([$2], [_$2])[]m4_popdef([_libtool_name])[]dnl ]) # _LT_LIBTOOL_CONFIG_VARS # ----------------------- # Produce commented declarations of non-tagged libtool config variables # suitable for insertion in the LIBTOOL CONFIG section of the `libtool' # script. Tagged libtool config variables (even for the LIBTOOL CONFIG # section) are produced by _LT_LIBTOOL_TAG_VARS. m4_defun([_LT_LIBTOOL_CONFIG_VARS], [m4_foreach([_lt_var], m4_quote(_lt_decl_filter([tagged?], [no], [], lt_decl_varnames)), [m4_n([_LT_LIBTOOL_DECLARE(_lt_var)])])]) # _LT_LIBTOOL_TAG_VARS(TAG) # ------------------------- m4_define([_LT_LIBTOOL_TAG_VARS], [m4_foreach([_lt_var], m4_quote(lt_decl_tag_varnames), [m4_n([_LT_LIBTOOL_DECLARE(_lt_var, [$1])])])]) # _LT_TAGVAR(VARNAME, [TAGNAME]) # ------------------------------ m4_define([_LT_TAGVAR], [m4_ifval([$2], [$1_$2], [$1])]) # _LT_CONFIG_COMMANDS # ------------------- # Send accumulated output to $CONFIG_STATUS. Thanks to the lists of # variables for single and double quote escaping we saved from calls # to _LT_DECL, we can put quote escaped variables declarations # into `config.status', and then the shell code to quote escape them in # for loops in `config.status'. Finally, any additional code accumulated # from calls to _LT_CONFIG_LIBTOOL_INIT is expanded. 
m4_defun([_LT_CONFIG_COMMANDS], [AC_PROVIDE_IFELSE([LT_OUTPUT], dnl If the libtool generation code has been placed in $CONFIG_LT, dnl instead of duplicating it all over again into config.status, dnl then we will have config.status run $CONFIG_LT later, so it dnl needs to know what name is stored there: [AC_CONFIG_COMMANDS([libtool], [$SHELL $CONFIG_LT || AS_EXIT(1)], [CONFIG_LT='$CONFIG_LT'])], dnl If the libtool generation code is destined for config.status, dnl expand the accumulated commands and init code now: [AC_CONFIG_COMMANDS([libtool], [_LT_OUTPUT_LIBTOOL_COMMANDS], [_LT_OUTPUT_LIBTOOL_COMMANDS_INIT])]) ])#_LT_CONFIG_COMMANDS # Initialize. m4_define([_LT_OUTPUT_LIBTOOL_COMMANDS_INIT], [ # The HP-UX ksh and POSIX shell print the target directory to stdout # if CDPATH is set. (unset CDPATH) >/dev/null 2>&1 && unset CDPATH sed_quote_subst='$sed_quote_subst' double_quote_subst='$double_quote_subst' delay_variable_subst='$delay_variable_subst' _LT_CONFIG_STATUS_DECLARATIONS LTCC='$LTCC' LTCFLAGS='$LTCFLAGS' compiler='$compiler_DEFAULT' # Quote evaled strings. for var in lt_decl_all_varnames([[ \ ]], lt_decl_quote_varnames); do case \`eval \\\\\$ECHO "X\\\\\$\$var"\` in *[[\\\\\\\`\\"\\\$]]*) eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"X\\\$\$var\\" | \\\$Xsed -e \\"\\\$sed_quote_subst\\"\\\`\\\\\\"" ;; *) eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" ;; esac done # Double-quote double-evaled strings. for var in lt_decl_all_varnames([[ \ ]], lt_decl_dquote_varnames); do case \`eval \\\\\$ECHO "X\\\\\$\$var"\` in *[[\\\\\\\`\\"\\\$]]*) eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"X\\\$\$var\\" | \\\$Xsed -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\"" ;; *) eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" ;; esac done # Fix-up fallback echo if it was mangled by the above quoting rules. case \$lt_ECHO in *'\\\[$]0 --fallback-echo"')dnl " lt_ECHO=\`\$ECHO "X\$lt_ECHO" | \$Xsed -e 's/\\\\\\\\\\\\\\\[$]0 --fallback-echo"\[$]/\[$]0 --fallback-echo"/'\` ;; esac _LT_OUTPUT_LIBTOOL_INIT ]) # LT_OUTPUT # --------- # This macro allows early generation of the libtool script (before # AC_OUTPUT is called), incase it is used in configure for compilation # tests. AC_DEFUN([LT_OUTPUT], [: ${CONFIG_LT=./config.lt} AC_MSG_NOTICE([creating $CONFIG_LT]) cat >"$CONFIG_LT" <<_LTEOF #! $SHELL # Generated by $as_me. # Run this file to recreate a libtool stub with the current configuration. lt_cl_silent=false SHELL=\${CONFIG_SHELL-$SHELL} _LTEOF cat >>"$CONFIG_LT" <<\_LTEOF AS_SHELL_SANITIZE _AS_PREPARE exec AS_MESSAGE_FD>&1 exec AS_MESSAGE_LOG_FD>>config.log { echo AS_BOX([Running $as_me.]) } >&AS_MESSAGE_LOG_FD lt_cl_help="\ \`$as_me' creates a local libtool stub from the current configuration, for use in further configure time tests before the real libtool is generated. Usage: $[0] [[OPTIONS]] -h, --help print this help, then exit -V, --version print version number, then exit -q, --quiet do not print progress messages -d, --debug don't remove temporary files Report bugs to ." lt_cl_version="\ m4_ifset([AC_PACKAGE_NAME], [AC_PACKAGE_NAME ])config.lt[]dnl m4_ifset([AC_PACKAGE_VERSION], [ AC_PACKAGE_VERSION]) configured by $[0], generated by m4_PACKAGE_STRING. Copyright (C) 2008 Free Software Foundation, Inc. This config.lt script is free software; the Free Software Foundation gives unlimited permision to copy, distribute and modify it." 
while test $[#] != 0 do case $[1] in --version | --v* | -V ) echo "$lt_cl_version"; exit 0 ;; --help | --h* | -h ) echo "$lt_cl_help"; exit 0 ;; --debug | --d* | -d ) debug=: ;; --quiet | --q* | --silent | --s* | -q ) lt_cl_silent=: ;; -*) AC_MSG_ERROR([unrecognized option: $[1] Try \`$[0] --help' for more information.]) ;; *) AC_MSG_ERROR([unrecognized argument: $[1] Try \`$[0] --help' for more information.]) ;; esac shift done if $lt_cl_silent; then exec AS_MESSAGE_FD>/dev/null fi _LTEOF cat >>"$CONFIG_LT" <<_LTEOF _LT_OUTPUT_LIBTOOL_COMMANDS_INIT _LTEOF cat >>"$CONFIG_LT" <<\_LTEOF AC_MSG_NOTICE([creating $ofile]) _LT_OUTPUT_LIBTOOL_COMMANDS AS_EXIT(0) _LTEOF chmod +x "$CONFIG_LT" # configure is writing to config.log, but config.lt does its own redirection, # appending to config.log, which fails on DOS, as config.log is still kept # open by configure. Here we exec the FD to /dev/null, effectively closing # config.log, so it can be properly (re)opened and appended to by config.lt. if test "$no_create" != yes; then lt_cl_success=: test "$silent" = yes && lt_config_lt_args="$lt_config_lt_args --quiet" exec AS_MESSAGE_LOG_FD>/dev/null $SHELL "$CONFIG_LT" $lt_config_lt_args || lt_cl_success=false exec AS_MESSAGE_LOG_FD>>config.log $lt_cl_success || AS_EXIT(1) fi ])# LT_OUTPUT # _LT_CONFIG(TAG) # --------------- # If TAG is the built-in tag, create an initial libtool script with a # default configuration from the untagged config vars. Otherwise add code # to config.status for appending the configuration named by TAG from the # matching tagged config vars. m4_defun([_LT_CONFIG], [m4_require([_LT_FILEUTILS_DEFAULTS])dnl _LT_CONFIG_SAVE_COMMANDS([ m4_define([_LT_TAG], m4_if([$1], [], [C], [$1]))dnl m4_if(_LT_TAG, [C], [ # See if we are running on zsh, and set the options which allow our # commands through without removal of \ escapes. if test -n "${ZSH_VERSION+set}" ; then setopt NO_GLOB_SUBST fi cfgfile="${ofile}T" trap "$RM \"$cfgfile\"; exit 1" 1 2 15 $RM "$cfgfile" cat <<_LT_EOF >> "$cfgfile" #! $SHELL # `$ECHO "$ofile" | sed 's%^.*/%%'` - Provide generalized library-building support services. # Generated automatically by $as_me ($PACKAGE$TIMESTAMP) $VERSION # Libtool was configured on host `(hostname || uname -n) 2>/dev/null | sed 1q`: # NOTE: Changes made to this file will be lost: look at ltmain.sh. # _LT_COPYING _LT_LIBTOOL_TAGS # ### BEGIN LIBTOOL CONFIG _LT_LIBTOOL_CONFIG_VARS _LT_LIBTOOL_TAG_VARS # ### END LIBTOOL CONFIG _LT_EOF case $host_os in aix3*) cat <<\_LT_EOF >> "$cfgfile" # AIX sometimes has problems with the GCC collect2 program. For some # reason, if we set the COLLECT_NAMES environment variable, the problems # vanish in a puff of smoke. if test "X${COLLECT_NAMES+set}" != Xset; then COLLECT_NAMES= export COLLECT_NAMES fi _LT_EOF ;; esac _LT_PROG_LTMAIN # We use sed instead of cat because bash on DJGPP gets confused if # if finds mixed CR/LF and LF-only lines. Since sed operates in # text mode, it properly converts lines to CR/LF. This bash problem # is reportedly fixed, but why not run on old versions too? 
sed '/^# Generated shell functions inserted here/q' "$ltmain" >> "$cfgfile" \ || (rm -f "$cfgfile"; exit 1) _LT_PROG_XSI_SHELLFNS sed -n '/^# Generated shell functions inserted here/,$p' "$ltmain" >> "$cfgfile" \ || (rm -f "$cfgfile"; exit 1) mv -f "$cfgfile" "$ofile" || (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") chmod +x "$ofile" ], [cat <<_LT_EOF >> "$ofile" dnl Unfortunately we have to use $1 here, since _LT_TAG is not expanded dnl in a comment (ie after a #). # ### BEGIN LIBTOOL TAG CONFIG: $1 _LT_LIBTOOL_TAG_VARS(_LT_TAG) # ### END LIBTOOL TAG CONFIG: $1 _LT_EOF ])dnl /m4_if ], [m4_if([$1], [], [ PACKAGE='$PACKAGE' VERSION='$VERSION' TIMESTAMP='$TIMESTAMP' RM='$RM' ofile='$ofile'], []) ])dnl /_LT_CONFIG_SAVE_COMMANDS ])# _LT_CONFIG # LT_SUPPORTED_TAG(TAG) # --------------------- # Trace this macro to discover what tags are supported by the libtool # --tag option, using: # autoconf --trace 'LT_SUPPORTED_TAG:$1' AC_DEFUN([LT_SUPPORTED_TAG], []) # C support is built-in for now m4_define([_LT_LANG_C_enabled], []) m4_define([_LT_TAGS], []) # LT_LANG(LANG) # ------------- # Enable libtool support for the given language if not already enabled. AC_DEFUN([LT_LANG], [AC_BEFORE([$0], [LT_OUTPUT])dnl m4_case([$1], [C], [_LT_LANG(C)], [C++], [_LT_LANG(CXX)], [Java], [_LT_LANG(GCJ)], [Fortran 77], [_LT_LANG(F77)], [Fortran], [_LT_LANG(FC)], [Windows Resource], [_LT_LANG(RC)], [m4_ifdef([_LT_LANG_]$1[_CONFIG], [_LT_LANG($1)], [m4_fatal([$0: unsupported language: "$1"])])])dnl ])# LT_LANG # _LT_LANG(LANGNAME) # ------------------ m4_defun([_LT_LANG], [m4_ifdef([_LT_LANG_]$1[_enabled], [], [LT_SUPPORTED_TAG([$1])dnl m4_append([_LT_TAGS], [$1 ])dnl m4_define([_LT_LANG_]$1[_enabled], [])dnl _LT_LANG_$1_CONFIG($1)])dnl ])# _LT_LANG # _LT_LANG_DEFAULT_CONFIG # ----------------------- m4_defun([_LT_LANG_DEFAULT_CONFIG], [AC_PROVIDE_IFELSE([AC_PROG_CXX], [LT_LANG(CXX)], [m4_define([AC_PROG_CXX], defn([AC_PROG_CXX])[LT_LANG(CXX)])]) AC_PROVIDE_IFELSE([AC_PROG_F77], [LT_LANG(F77)], [m4_define([AC_PROG_F77], defn([AC_PROG_F77])[LT_LANG(F77)])]) AC_PROVIDE_IFELSE([AC_PROG_FC], [LT_LANG(FC)], [m4_define([AC_PROG_FC], defn([AC_PROG_FC])[LT_LANG(FC)])]) dnl The call to [A][M_PROG_GCJ] is quoted like that to stop aclocal dnl pulling things in needlessly. 
AC_PROVIDE_IFELSE([AC_PROG_GCJ], [LT_LANG(GCJ)], [AC_PROVIDE_IFELSE([A][M_PROG_GCJ], [LT_LANG(GCJ)], [AC_PROVIDE_IFELSE([LT_PROG_GCJ], [LT_LANG(GCJ)], [m4_ifdef([AC_PROG_GCJ], [m4_define([AC_PROG_GCJ], defn([AC_PROG_GCJ])[LT_LANG(GCJ)])]) m4_ifdef([A][M_PROG_GCJ], [m4_define([A][M_PROG_GCJ], defn([A][M_PROG_GCJ])[LT_LANG(GCJ)])]) m4_ifdef([LT_PROG_GCJ], [m4_define([LT_PROG_GCJ], defn([LT_PROG_GCJ])[LT_LANG(GCJ)])])])])]) AC_PROVIDE_IFELSE([LT_PROG_RC], [LT_LANG(RC)], [m4_define([LT_PROG_RC], defn([LT_PROG_RC])[LT_LANG(RC)])]) ])# _LT_LANG_DEFAULT_CONFIG # Obsolete macros: AU_DEFUN([AC_LIBTOOL_CXX], [LT_LANG(C++)]) AU_DEFUN([AC_LIBTOOL_F77], [LT_LANG(Fortran 77)]) AU_DEFUN([AC_LIBTOOL_FC], [LT_LANG(Fortran)]) AU_DEFUN([AC_LIBTOOL_GCJ], [LT_LANG(Java)]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_LIBTOOL_CXX], []) dnl AC_DEFUN([AC_LIBTOOL_F77], []) dnl AC_DEFUN([AC_LIBTOOL_FC], []) dnl AC_DEFUN([AC_LIBTOOL_GCJ], []) # _LT_TAG_COMPILER # ---------------- m4_defun([_LT_TAG_COMPILER], [AC_REQUIRE([AC_PROG_CC])dnl _LT_DECL([LTCC], [CC], [1], [A C compiler])dnl _LT_DECL([LTCFLAGS], [CFLAGS], [1], [LTCC compiler flags])dnl _LT_TAGDECL([CC], [compiler], [1], [A language specific compiler])dnl _LT_TAGDECL([with_gcc], [GCC], [0], [Is the compiler the GNU compiler?])dnl # If no C compiler was specified, use CC. LTCC=${LTCC-"$CC"} # If no C compiler flags were specified, use CFLAGS. LTCFLAGS=${LTCFLAGS-"$CFLAGS"} # Allow CC to be a program name with arguments. compiler=$CC ])# _LT_TAG_COMPILER # _LT_COMPILER_BOILERPLATE # ------------------------ # Check for compiler boilerplate output or warnings with # the simple compiler test code. m4_defun([_LT_COMPILER_BOILERPLATE], [m4_require([_LT_DECL_SED])dnl ac_outfile=conftest.$ac_objext echo "$lt_simple_compile_test_code" >conftest.$ac_ext eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err _lt_compiler_boilerplate=`cat conftest.err` $RM conftest* ])# _LT_COMPILER_BOILERPLATE # _LT_LINKER_BOILERPLATE # ---------------------- # Check for linker boilerplate output or warnings with # the simple link test code. m4_defun([_LT_LINKER_BOILERPLATE], [m4_require([_LT_DECL_SED])dnl ac_outfile=conftest.$ac_objext echo "$lt_simple_link_test_code" >conftest.$ac_ext eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err _lt_linker_boilerplate=`cat conftest.err` $RM -r conftest* ])# _LT_LINKER_BOILERPLATE # _LT_REQUIRED_DARWIN_CHECKS # ------------------------- m4_defun_once([_LT_REQUIRED_DARWIN_CHECKS],[ case $host_os in rhapsody* | darwin*) AC_CHECK_TOOL([DSYMUTIL], [dsymutil], [:]) AC_CHECK_TOOL([NMEDIT], [nmedit], [:]) AC_CHECK_TOOL([LIPO], [lipo], [:]) AC_CHECK_TOOL([OTOOL], [otool], [:]) AC_CHECK_TOOL([OTOOL64], [otool64], [:]) _LT_DECL([], [DSYMUTIL], [1], [Tool to manipulate archived DWARF debug symbol files on Mac OS X]) _LT_DECL([], [NMEDIT], [1], [Tool to change global to local symbols on Mac OS X]) _LT_DECL([], [LIPO], [1], [Tool to manipulate fat objects and archives on Mac OS X]) _LT_DECL([], [OTOOL], [1], [ldd/readelf like tool for Mach-O binaries on Mac OS X]) _LT_DECL([], [OTOOL64], [1], [ldd/readelf like tool for 64 bit Mach-O binaries on Mac OS X 10.4]) AC_CACHE_CHECK([for -single_module linker flag],[lt_cv_apple_cc_single_mod], [lt_cv_apple_cc_single_mod=no if test -z "${LT_MULTI_MODULE}"; then # By default we will add the -single_module flag. You can override # by either setting the environment variable LT_MULTI_MODULE # non-empty at configure time, or by adding -multi_module to the # link flags. 
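# Illustrative sketch (not taken from this file): because the probe below
# only runs when LT_MULTI_MODULE is empty, it can be skipped entirely by
# exporting any non-empty value at configure time, for example:
#   LT_MULTI_MODULE=1 ./configure
# in which case lt_cv_apple_cc_single_mod simply keeps its default of no.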
rm -rf libconftest.dylib* echo "int foo(void){return 1;}" > conftest.c echo "$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \ -dynamiclib -Wl,-single_module conftest.c" >&AS_MESSAGE_LOG_FD $LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \ -dynamiclib -Wl,-single_module conftest.c 2>conftest.err _lt_result=$? if test -f libconftest.dylib && test ! -s conftest.err && test $_lt_result = 0; then lt_cv_apple_cc_single_mod=yes else cat conftest.err >&AS_MESSAGE_LOG_FD fi rm -rf libconftest.dylib* rm -f conftest.* fi]) AC_CACHE_CHECK([for -exported_symbols_list linker flag], [lt_cv_ld_exported_symbols_list], [lt_cv_ld_exported_symbols_list=no save_LDFLAGS=$LDFLAGS echo "_main" > conftest.sym LDFLAGS="$LDFLAGS -Wl,-exported_symbols_list,conftest.sym" AC_LINK_IFELSE([AC_LANG_PROGRAM([],[])], [lt_cv_ld_exported_symbols_list=yes], [lt_cv_ld_exported_symbols_list=no]) LDFLAGS="$save_LDFLAGS" ]) case $host_os in rhapsody* | darwin1.[[012]]) _lt_dar_allow_undefined='${wl}-undefined ${wl}suppress' ;; darwin1.*) _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;; darwin*) # darwin 5.x on # if running on 10.5 or later, the deployment target defaults # to the OS version, if on x86, and 10.4, the deployment # target defaults to 10.4. Don't you love it? case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in 10.0,*86*-darwin8*|10.0,*-darwin[[91]]*) _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;; 10.[[012]]*) _lt_dar_allow_undefined='${wl}-flat_namespace ${wl}-undefined ${wl}suppress' ;; 10.*) _lt_dar_allow_undefined='${wl}-undefined ${wl}dynamic_lookup' ;; esac ;; esac if test "$lt_cv_apple_cc_single_mod" = "yes"; then _lt_dar_single_mod='$single_module' fi if test "$lt_cv_ld_exported_symbols_list" = "yes"; then _lt_dar_export_syms=' ${wl}-exported_symbols_list,$output_objdir/${libname}-symbols.expsym' else _lt_dar_export_syms='~$NMEDIT -s $output_objdir/${libname}-symbols.expsym ${lib}' fi if test "$DSYMUTIL" != ":"; then _lt_dsymutil='~$DSYMUTIL $lib || :' else _lt_dsymutil= fi ;; esac ]) # _LT_DARWIN_LINKER_FEATURES # -------------------------- # Checks for linker and compiler features on darwin m4_defun([_LT_DARWIN_LINKER_FEATURES], [ m4_require([_LT_REQUIRED_DARWIN_CHECKS]) _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_automatic, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported _LT_TAGVAR(whole_archive_flag_spec, $1)='' _LT_TAGVAR(link_all_deplibs, $1)=yes _LT_TAGVAR(allow_undefined_flag, $1)="$_lt_dar_allow_undefined" case $cc_basename in ifort*) _lt_dar_can_shared=yes ;; *) _lt_dar_can_shared=$GCC ;; esac if test "$_lt_dar_can_shared" = "yes"; then output_verbose_link_cmd=echo _LT_TAGVAR(archive_cmds, $1)="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod${_lt_dsymutil}" _LT_TAGVAR(module_cmds, $1)="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags${_lt_dsymutil}" _LT_TAGVAR(archive_expsym_cmds, $1)="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring ${_lt_dar_single_mod}${_lt_dar_export_syms}${_lt_dsymutil}" _LT_TAGVAR(module_expsym_cmds, $1)="sed -e 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs 
\$compiler_flags${_lt_dar_export_syms}${_lt_dsymutil}" m4_if([$1], [CXX], [ if test "$lt_cv_apple_cc_single_mod" != "yes"; then _LT_TAGVAR(archive_cmds, $1)="\$CC -r -keep_private_externs -nostdlib -o \${lib}-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \${lib}-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring${_lt_dsymutil}" _LT_TAGVAR(archive_expsym_cmds, $1)="sed 's,^,_,' < \$export_symbols > \$output_objdir/\${libname}-symbols.expsym~\$CC -r -keep_private_externs -nostdlib -o \${lib}-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \${lib}-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring${_lt_dar_export_syms}${_lt_dsymutil}" fi ],[]) else _LT_TAGVAR(ld_shlibs, $1)=no fi ]) # _LT_SYS_MODULE_PATH_AIX # ----------------------- # Links a minimal program and checks the executable # for the system default hardcoded library path. In most cases, # this is /usr/lib:/lib, but when the MPI compilers are used # the location of the communication and MPI libs are included too. # If we don't find anything, use the default library path according # to the aix ld manual. m4_defun([_LT_SYS_MODULE_PATH_AIX], [m4_require([_LT_DECL_SED])dnl AC_LINK_IFELSE(AC_LANG_PROGRAM,[ lt_aix_libpath_sed=' /Import File Strings/,/^$/ { /^0/ { s/^0 *\(.*\)$/\1/ p } }' aix_libpath=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` # Check for a 64-bit object if we didn't find anything. if test -z "$aix_libpath"; then aix_libpath=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` fi],[]) if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib"; fi ])# _LT_SYS_MODULE_PATH_AIX # _LT_SHELL_INIT(ARG) # ------------------- m4_define([_LT_SHELL_INIT], [ifdef([AC_DIVERSION_NOTICE], [AC_DIVERT_PUSH(AC_DIVERSION_NOTICE)], [AC_DIVERT_PUSH(NOTICE)]) $1 AC_DIVERT_POP ])# _LT_SHELL_INIT # _LT_PROG_ECHO_BACKSLASH # ----------------------- # Add some code to the start of the generated configure script which # will find an echo command which doesn't interpret backslashes. m4_defun([_LT_PROG_ECHO_BACKSLASH], [_LT_SHELL_INIT([ # Check that we are running under the correct shell. SHELL=${CONFIG_SHELL-/bin/sh} case X$lt_ECHO in X*--fallback-echo) # Remove one level of quotation (which was required for Make). ECHO=`echo "$lt_ECHO" | sed 's,\\\\\[$]\\[$]0,'[$]0','` ;; esac ECHO=${lt_ECHO-echo} if test "X[$]1" = X--no-reexec; then # Discard the --no-reexec flag, and continue. shift elif test "X[$]1" = X--fallback-echo; then # Avoid inline document here, it may be left over : elif test "X`{ $ECHO '\t'; } 2>/dev/null`" = 'X\t' ; then # Yippee, $ECHO works! : else # Restart under the correct shell. exec $SHELL "[$]0" --no-reexec ${1+"[$]@"} fi if test "X[$]1" = X--fallback-echo; then # used as fallback echo shift cat <<_LT_EOF [$]* _LT_EOF exit 0 fi # The HP-UX ksh and POSIX shell print the target directory to stdout # if CDPATH is set. (unset CDPATH) >/dev/null 2>&1 && unset CDPATH if test -z "$lt_ECHO"; then if test "X${echo_test_string+set}" != Xset; then # find a string as large as possible, as long as the shell can cope with it for cmd in 'sed 50q "[$]0"' 'sed 20q "[$]0"' 'sed 10q "[$]0"' 'sed 2q "[$]0"' 'echo test'; do # expected sizes: less than 2Kb, 1Kb, 512 bytes, 16 bytes, ... 
if { echo_test_string=`eval $cmd`; } 2>/dev/null && { test "X$echo_test_string" = "X$echo_test_string"; } 2>/dev/null then break fi done fi if test "X`{ $ECHO '\t'; } 2>/dev/null`" = 'X\t' && echo_testing_string=`{ $ECHO "$echo_test_string"; } 2>/dev/null` && test "X$echo_testing_string" = "X$echo_test_string"; then : else # The Solaris, AIX, and Digital Unix default echo programs unquote # backslashes. This makes it impossible to quote backslashes using # echo "$something" | sed 's/\\/\\\\/g' # # So, first we look for a working echo in the user's PATH. lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR for dir in $PATH /usr/ucb; do IFS="$lt_save_ifs" if (test -f $dir/echo || test -f $dir/echo$ac_exeext) && test "X`($dir/echo '\t') 2>/dev/null`" = 'X\t' && echo_testing_string=`($dir/echo "$echo_test_string") 2>/dev/null` && test "X$echo_testing_string" = "X$echo_test_string"; then ECHO="$dir/echo" break fi done IFS="$lt_save_ifs" if test "X$ECHO" = Xecho; then # We didn't find a better echo, so look for alternatives. if test "X`{ print -r '\t'; } 2>/dev/null`" = 'X\t' && echo_testing_string=`{ print -r "$echo_test_string"; } 2>/dev/null` && test "X$echo_testing_string" = "X$echo_test_string"; then # This shell has a builtin print -r that does the trick. ECHO='print -r' elif { test -f /bin/ksh || test -f /bin/ksh$ac_exeext; } && test "X$CONFIG_SHELL" != X/bin/ksh; then # If we have ksh, try running configure again with it. ORIGINAL_CONFIG_SHELL=${CONFIG_SHELL-/bin/sh} export ORIGINAL_CONFIG_SHELL CONFIG_SHELL=/bin/ksh export CONFIG_SHELL exec $CONFIG_SHELL "[$]0" --no-reexec ${1+"[$]@"} else # Try using printf. ECHO='printf %s\n' if test "X`{ $ECHO '\t'; } 2>/dev/null`" = 'X\t' && echo_testing_string=`{ $ECHO "$echo_test_string"; } 2>/dev/null` && test "X$echo_testing_string" = "X$echo_test_string"; then # Cool, printf works : elif echo_testing_string=`($ORIGINAL_CONFIG_SHELL "[$]0" --fallback-echo '\t') 2>/dev/null` && test "X$echo_testing_string" = 'X\t' && echo_testing_string=`($ORIGINAL_CONFIG_SHELL "[$]0" --fallback-echo "$echo_test_string") 2>/dev/null` && test "X$echo_testing_string" = "X$echo_test_string"; then CONFIG_SHELL=$ORIGINAL_CONFIG_SHELL export CONFIG_SHELL SHELL="$CONFIG_SHELL" export SHELL ECHO="$CONFIG_SHELL [$]0 --fallback-echo" elif echo_testing_string=`($CONFIG_SHELL "[$]0" --fallback-echo '\t') 2>/dev/null` && test "X$echo_testing_string" = 'X\t' && echo_testing_string=`($CONFIG_SHELL "[$]0" --fallback-echo "$echo_test_string") 2>/dev/null` && test "X$echo_testing_string" = "X$echo_test_string"; then ECHO="$CONFIG_SHELL [$]0 --fallback-echo" else # maybe with a smaller string... prev=: for cmd in 'echo test' 'sed 2q "[$]0"' 'sed 10q "[$]0"' 'sed 20q "[$]0"' 'sed 50q "[$]0"'; do if { test "X$echo_test_string" = "X`eval $cmd`"; } 2>/dev/null then break fi prev="$cmd" done if test "$prev" != 'sed 50q "[$]0"'; then echo_test_string=`eval $prev` export echo_test_string exec ${ORIGINAL_CONFIG_SHELL-${CONFIG_SHELL-/bin/sh}} "[$]0" ${1+"[$]@"} else # Oops. We lost completely, so just stick with echo. ECHO=echo fi fi fi fi fi fi # Copy echo and quote the copy suitably for passing to libtool from # the Makefile, instead of quoting the original, which is used later. 
lt_ECHO=$ECHO if test "X$lt_ECHO" = "X$CONFIG_SHELL [$]0 --fallback-echo"; then lt_ECHO="$CONFIG_SHELL \\\$\[$]0 --fallback-echo" fi AC_SUBST(lt_ECHO) ]) _LT_DECL([], [SHELL], [1], [Shell to use when invoking shell scripts]) _LT_DECL([], [ECHO], [1], [An echo program that does not interpret backslashes]) ])# _LT_PROG_ECHO_BACKSLASH # _LT_ENABLE_LOCK # --------------- m4_defun([_LT_ENABLE_LOCK], [AC_ARG_ENABLE([libtool-lock], [AS_HELP_STRING([--disable-libtool-lock], [avoid locking (might break parallel builds)])]) test "x$enable_libtool_lock" != xno && enable_libtool_lock=yes # Some flags need to be propagated to the compiler or linker for good # libtool support. case $host in ia64-*-hpux*) # Find out which ABI we are using. echo 'int i;' > conftest.$ac_ext if AC_TRY_EVAL(ac_compile); then case `/usr/bin/file conftest.$ac_objext` in *ELF-32*) HPUX_IA64_MODE="32" ;; *ELF-64*) HPUX_IA64_MODE="64" ;; esac fi rm -rf conftest* ;; *-*-irix6*) # Find out which ABI we are using. echo '[#]line __oline__ "configure"' > conftest.$ac_ext if AC_TRY_EVAL(ac_compile); then if test "$lt_cv_prog_gnu_ld" = yes; then case `/usr/bin/file conftest.$ac_objext` in *32-bit*) LD="${LD-ld} -melf32bsmip" ;; *N32*) LD="${LD-ld} -melf32bmipn32" ;; *64-bit*) LD="${LD-ld} -melf64bmip" ;; esac else case `/usr/bin/file conftest.$ac_objext` in *32-bit*) LD="${LD-ld} -32" ;; *N32*) LD="${LD-ld} -n32" ;; *64-bit*) LD="${LD-ld} -64" ;; esac fi fi rm -rf conftest* ;; x86_64-*kfreebsd*-gnu|x86_64-*linux*|ppc*-*linux*|powerpc*-*linux*| \ s390*-*linux*|s390*-*tpf*|sparc*-*linux*) # Find out which ABI we are using. echo 'int i;' > conftest.$ac_ext if AC_TRY_EVAL(ac_compile); then case `/usr/bin/file conftest.o` in *32-bit*) case $host in x86_64-*kfreebsd*-gnu) LD="${LD-ld} -m elf_i386_fbsd" ;; x86_64-*linux*) LD="${LD-ld} -m elf_i386" ;; ppc64-*linux*|powerpc64-*linux*) LD="${LD-ld} -m elf32ppclinux" ;; s390x-*linux*) LD="${LD-ld} -m elf_s390" ;; sparc64-*linux*) LD="${LD-ld} -m elf32_sparc" ;; esac ;; *64-bit*) case $host in x86_64-*kfreebsd*-gnu) LD="${LD-ld} -m elf_x86_64_fbsd" ;; x86_64-*linux*) LD="${LD-ld} -m elf_x86_64" ;; ppc*-*linux*|powerpc*-*linux*) LD="${LD-ld} -m elf64ppc" ;; s390*-*linux*|s390*-*tpf*) LD="${LD-ld} -m elf64_s390" ;; sparc*-*linux*) LD="${LD-ld} -m elf64_sparc" ;; esac ;; esac fi rm -rf conftest* ;; *-*-sco3.2v5*) # On SCO OpenServer 5, we need -belf to get full-featured binaries. SAVE_CFLAGS="$CFLAGS" CFLAGS="$CFLAGS -belf" AC_CACHE_CHECK([whether the C compiler needs -belf], lt_cv_cc_needs_belf, [AC_LANG_PUSH(C) AC_LINK_IFELSE([AC_LANG_PROGRAM([[]],[[]])],[lt_cv_cc_needs_belf=yes],[lt_cv_cc_needs_belf=no]) AC_LANG_POP]) if test x"$lt_cv_cc_needs_belf" != x"yes"; then # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf CFLAGS="$SAVE_CFLAGS" fi ;; sparc*-*solaris*) # Find out which ABI we are using. 
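# Illustrative sketch of the check that follows (the exact wording printed
# by file is an assumption, not taken from this script): a 64-bit SPARC
# object makes /usr/bin/file print a description containing the string
# 64-bit, which with GNU ld selects LD="ld -m elf64_sparc" and with the
# native linker appends -64 after a successful trial relink of the object.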
echo 'int i;' > conftest.$ac_ext if AC_TRY_EVAL(ac_compile); then case `/usr/bin/file conftest.o` in *64-bit*) case $lt_cv_prog_gnu_ld in yes*) LD="${LD-ld} -m elf64_sparc" ;; *) if ${LD-ld} -64 -r -o conftest2.o conftest.o >/dev/null 2>&1; then LD="${LD-ld} -64" fi ;; esac ;; esac fi rm -rf conftest* ;; esac need_locks="$enable_libtool_lock" ])# _LT_ENABLE_LOCK # _LT_CMD_OLD_ARCHIVE # ------------------- m4_defun([_LT_CMD_OLD_ARCHIVE], [AC_CHECK_TOOL(AR, ar, false) test -z "$AR" && AR=ar test -z "$AR_FLAGS" && AR_FLAGS=cru _LT_DECL([], [AR], [1], [The archiver]) _LT_DECL([], [AR_FLAGS], [1]) AC_CHECK_TOOL(STRIP, strip, :) test -z "$STRIP" && STRIP=: _LT_DECL([], [STRIP], [1], [A symbol stripping program]) AC_CHECK_TOOL(RANLIB, ranlib, :) test -z "$RANLIB" && RANLIB=: _LT_DECL([], [RANLIB], [1], [Commands used to install an old-style archive]) # Determine commands to create old-style static archives. old_archive_cmds='$AR $AR_FLAGS $oldlib$oldobjs' old_postinstall_cmds='chmod 644 $oldlib' old_postuninstall_cmds= if test -n "$RANLIB"; then case $host_os in openbsd*) old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB -t \$oldlib" ;; *) old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB \$oldlib" ;; esac old_archive_cmds="$old_archive_cmds~\$RANLIB \$oldlib" fi _LT_DECL([], [old_postinstall_cmds], [2]) _LT_DECL([], [old_postuninstall_cmds], [2]) _LT_TAGDECL([], [old_archive_cmds], [2], [Commands used to build an old-style archive]) ])# _LT_CMD_OLD_ARCHIVE # _LT_COMPILER_OPTION(MESSAGE, VARIABLE-NAME, FLAGS, # [OUTPUT-FILE], [ACTION-SUCCESS], [ACTION-FAILURE]) # ---------------------------------------------------------------- # Check whether the given compiler option works AC_DEFUN([_LT_COMPILER_OPTION], [m4_require([_LT_FILEUTILS_DEFAULTS])dnl m4_require([_LT_DECL_SED])dnl AC_CACHE_CHECK([$1], [$2], [$2=no m4_if([$4], , [ac_outfile=conftest.$ac_objext], [ac_outfile=$4]) echo "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="$3" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. # The option is referenced via a variable to avoid confusing sed. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [[^ ]]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:__oline__: $lt_compile\"" >&AS_MESSAGE_LOG_FD) (eval "$lt_compile" 2>conftest.err) ac_status=$? cat conftest.err >&AS_MESSAGE_LOG_FD echo "$as_me:__oline__: \$? = $ac_status" >&AS_MESSAGE_LOG_FD if (exit $ac_status) && test -s "$ac_outfile"; then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings other than the usual output. $ECHO "X$_lt_compiler_boilerplate" | $Xsed -e '/^$/d' >conftest.exp $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 if test ! 
-s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then $2=yes fi fi $RM conftest* ]) if test x"[$]$2" = xyes; then m4_if([$5], , :, [$5]) else m4_if([$6], , :, [$6]) fi ])# _LT_COMPILER_OPTION # Old name: AU_ALIAS([AC_LIBTOOL_COMPILER_OPTION], [_LT_COMPILER_OPTION]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_LIBTOOL_COMPILER_OPTION], []) # _LT_LINKER_OPTION(MESSAGE, VARIABLE-NAME, FLAGS, # [ACTION-SUCCESS], [ACTION-FAILURE]) # ---------------------------------------------------- # Check whether the given linker option works AC_DEFUN([_LT_LINKER_OPTION], [m4_require([_LT_FILEUTILS_DEFAULTS])dnl m4_require([_LT_DECL_SED])dnl AC_CACHE_CHECK([$1], [$2], [$2=no save_LDFLAGS="$LDFLAGS" LDFLAGS="$LDFLAGS $3" echo "$lt_simple_link_test_code" > conftest.$ac_ext if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then # The linker can only warn and ignore the option if not recognized # So say no if there are warnings if test -s conftest.err; then # Append any errors to the config.log. cat conftest.err 1>&AS_MESSAGE_LOG_FD $ECHO "X$_lt_linker_boilerplate" | $Xsed -e '/^$/d' > conftest.exp $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 if diff conftest.exp conftest.er2 >/dev/null; then $2=yes fi else $2=yes fi fi $RM -r conftest* LDFLAGS="$save_LDFLAGS" ]) if test x"[$]$2" = xyes; then m4_if([$4], , :, [$4]) else m4_if([$5], , :, [$5]) fi ])# _LT_LINKER_OPTION # Old name: AU_ALIAS([AC_LIBTOOL_LINKER_OPTION], [_LT_LINKER_OPTION]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_LIBTOOL_LINKER_OPTION], []) # LT_CMD_MAX_LEN #--------------- AC_DEFUN([LT_CMD_MAX_LEN], [AC_REQUIRE([AC_CANONICAL_HOST])dnl # find the maximum length of command line arguments AC_MSG_CHECKING([the maximum length of command line arguments]) AC_CACHE_VAL([lt_cv_sys_max_cmd_len], [dnl i=0 teststring="ABCD" case $build_os in msdosdjgpp*) # On DJGPP, this test can blow up pretty badly due to problems in libc # (any single argument exceeding 2000 bytes causes a buffer overrun # during glob expansion). Even if it were fixed, the result of this # check would be larger than it should be. lt_cv_sys_max_cmd_len=12288; # 12K is about right ;; gnu*) # Under GNU Hurd, this test is not required because there is # no limit to the length of command line arguments. # Libtool will interpret -1 as no limit whatsoever lt_cv_sys_max_cmd_len=-1; ;; cygwin* | mingw* | cegcc*) # On Win9x/ME, this test blows up -- it succeeds, but takes # about 5 minutes as the teststring grows exponentially. # Worse, since 9x/ME are not pre-emptively multitasking, # you end up with a "frozen" computer, even though with patience # the test eventually succeeds (with a max line length of 256k). # Instead, let's just punt: use the minimum linelength reported by # all of the supported platforms: 8192 (on NT/2K/XP). lt_cv_sys_max_cmd_len=8192; ;; amigaos*) # On AmigaOS with pdksh, this test takes hours, literally. # So we just punt and use a minimum line length of 8192. lt_cv_sys_max_cmd_len=8192; ;; netbsd* | freebsd* | openbsd* | darwin* | dragonfly*) # This has been around since 386BSD, at least. Likely further. 
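# Worked example of the safety margin applied below, using only numbers
# already present in this macro: kern.argmax reports the raw kernel limit,
# which is then scaled to three quarters, so a reported 262144 becomes
# 262144 / 4 * 3 = 196608, the same figure the interix branch hardcodes.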
if test -x /sbin/sysctl; then lt_cv_sys_max_cmd_len=`/sbin/sysctl -n kern.argmax` elif test -x /usr/sbin/sysctl; then lt_cv_sys_max_cmd_len=`/usr/sbin/sysctl -n kern.argmax` else lt_cv_sys_max_cmd_len=65536 # usable default for all BSDs fi # And add a safety zone lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4` lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3` ;; interix*) # We know the value 262144 and hardcode it with a safety zone (like BSD) lt_cv_sys_max_cmd_len=196608 ;; osf*) # Dr. Hans Ekkehard Plesser reports seeing a kernel panic running configure # due to this test when exec_disable_arg_limit is 1 on Tru64. It is not # nice to cause kernel panics so lets avoid the loop below. # First set a reasonable default. lt_cv_sys_max_cmd_len=16384 # if test -x /sbin/sysconfig; then case `/sbin/sysconfig -q proc exec_disable_arg_limit` in *1*) lt_cv_sys_max_cmd_len=-1 ;; esac fi ;; sco3.2v5*) lt_cv_sys_max_cmd_len=102400 ;; sysv5* | sco5v6* | sysv4.2uw2*) kargmax=`grep ARG_MAX /etc/conf/cf.d/stune 2>/dev/null` if test -n "$kargmax"; then lt_cv_sys_max_cmd_len=`echo $kargmax | sed 's/.*[[ ]]//'` else lt_cv_sys_max_cmd_len=32768 fi ;; *) lt_cv_sys_max_cmd_len=`(getconf ARG_MAX) 2> /dev/null` if test -n "$lt_cv_sys_max_cmd_len"; then lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4` lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3` else # Make teststring a little bigger before we do anything with it. # a 1K string should be a reasonable start. for i in 1 2 3 4 5 6 7 8 ; do teststring=$teststring$teststring done SHELL=${SHELL-${CONFIG_SHELL-/bin/sh}} # If test is not a shell built-in, we'll probably end up computing a # maximum length that is only half of the actual maximum length, but # we can't tell. while { test "X"`$SHELL [$]0 --fallback-echo "X$teststring$teststring" 2>/dev/null` \ = "XX$teststring$teststring"; } >/dev/null 2>&1 && test $i != 17 # 1/2 MB should be enough do i=`expr $i + 1` teststring=$teststring$teststring done # Only check the string length outside the loop. lt_cv_sys_max_cmd_len=`expr "X$teststring" : ".*" 2>&1` teststring= # Add a significant safety factor because C++ compilers can tack on # massive amounts of additional arguments before passing them to the # linker. It appears as though 1/2 is a usable value. 
lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 2` fi ;; esac ]) if test -n $lt_cv_sys_max_cmd_len ; then AC_MSG_RESULT($lt_cv_sys_max_cmd_len) else AC_MSG_RESULT(none) fi max_cmd_len=$lt_cv_sys_max_cmd_len _LT_DECL([], [max_cmd_len], [0], [What is the maximum length of a command?]) ])# LT_CMD_MAX_LEN # Old name: AU_ALIAS([AC_LIBTOOL_SYS_MAX_CMD_LEN], [LT_CMD_MAX_LEN]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_LIBTOOL_SYS_MAX_CMD_LEN], []) # _LT_HEADER_DLFCN # ---------------- m4_defun([_LT_HEADER_DLFCN], [AC_CHECK_HEADERS([dlfcn.h], [], [], [AC_INCLUDES_DEFAULT])dnl ])# _LT_HEADER_DLFCN # _LT_TRY_DLOPEN_SELF (ACTION-IF-TRUE, ACTION-IF-TRUE-W-USCORE, # ACTION-IF-FALSE, ACTION-IF-CROSS-COMPILING) # ---------------------------------------------------------------- m4_defun([_LT_TRY_DLOPEN_SELF], [m4_require([_LT_HEADER_DLFCN])dnl if test "$cross_compiling" = yes; then : [$4] else lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 lt_status=$lt_dlunknown cat > conftest.$ac_ext <<_LT_EOF [#line __oline__ "configure" #include "confdefs.h" #if HAVE_DLFCN_H #include #endif #include #ifdef RTLD_GLOBAL # define LT_DLGLOBAL RTLD_GLOBAL #else # ifdef DL_GLOBAL # define LT_DLGLOBAL DL_GLOBAL # else # define LT_DLGLOBAL 0 # endif #endif /* We may have to define LT_DLLAZY_OR_NOW in the command line if we find out it does not work in some platform. */ #ifndef LT_DLLAZY_OR_NOW # ifdef RTLD_LAZY # define LT_DLLAZY_OR_NOW RTLD_LAZY # else # ifdef DL_LAZY # define LT_DLLAZY_OR_NOW DL_LAZY # else # ifdef RTLD_NOW # define LT_DLLAZY_OR_NOW RTLD_NOW # else # ifdef DL_NOW # define LT_DLLAZY_OR_NOW DL_NOW # else # define LT_DLLAZY_OR_NOW 0 # endif # endif # endif # endif #endif void fnord() { int i=42;} int main () { void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); int status = $lt_dlunknown; if (self) { if (dlsym (self,"fnord")) status = $lt_dlno_uscore; else if (dlsym( self,"_fnord")) status = $lt_dlneed_uscore; /* dlclose (self); */ } else puts (dlerror ()); return status; }] _LT_EOF if AC_TRY_EVAL(ac_link) && test -s conftest${ac_exeext} 2>/dev/null; then (./conftest; exit; ) >&AS_MESSAGE_LOG_FD 2>/dev/null lt_status=$? 
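# Illustrative reading of the exit codes tested below (the platform notes
# are assumptions, not from this file): the conftest program exits with
# lt_dlno_uscore when dlsym resolves the plain symbol name, as on typical
# ELF systems, and with lt_dlneed_uscore when only the underscore-prefixed
# name resolves, as on targets whose object format prepends an underscore.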
case x$lt_status in x$lt_dlno_uscore) $1 ;; x$lt_dlneed_uscore) $2 ;; x$lt_dlunknown|x*) $3 ;; esac else : # compilation failed $3 fi fi rm -fr conftest* ])# _LT_TRY_DLOPEN_SELF # LT_SYS_DLOPEN_SELF # ------------------ AC_DEFUN([LT_SYS_DLOPEN_SELF], [m4_require([_LT_HEADER_DLFCN])dnl if test "x$enable_dlopen" != xyes; then enable_dlopen=unknown enable_dlopen_self=unknown enable_dlopen_self_static=unknown else lt_cv_dlopen=no lt_cv_dlopen_libs= case $host_os in beos*) lt_cv_dlopen="load_add_on" lt_cv_dlopen_libs= lt_cv_dlopen_self=yes ;; mingw* | pw32* | cegcc*) lt_cv_dlopen="LoadLibrary" lt_cv_dlopen_libs= ;; cygwin*) lt_cv_dlopen="dlopen" lt_cv_dlopen_libs= ;; darwin*) # if libdl is installed we need to link against it AC_CHECK_LIB([dl], [dlopen], [lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"],[ lt_cv_dlopen="dyld" lt_cv_dlopen_libs= lt_cv_dlopen_self=yes ]) ;; *) AC_CHECK_FUNC([shl_load], [lt_cv_dlopen="shl_load"], [AC_CHECK_LIB([dld], [shl_load], [lt_cv_dlopen="shl_load" lt_cv_dlopen_libs="-ldld"], [AC_CHECK_FUNC([dlopen], [lt_cv_dlopen="dlopen"], [AC_CHECK_LIB([dl], [dlopen], [lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-ldl"], [AC_CHECK_LIB([svld], [dlopen], [lt_cv_dlopen="dlopen" lt_cv_dlopen_libs="-lsvld"], [AC_CHECK_LIB([dld], [dld_link], [lt_cv_dlopen="dld_link" lt_cv_dlopen_libs="-ldld"]) ]) ]) ]) ]) ]) ;; esac if test "x$lt_cv_dlopen" != xno; then enable_dlopen=yes else enable_dlopen=no fi case $lt_cv_dlopen in dlopen) save_CPPFLAGS="$CPPFLAGS" test "x$ac_cv_header_dlfcn_h" = xyes && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H" save_LDFLAGS="$LDFLAGS" wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\" save_LIBS="$LIBS" LIBS="$lt_cv_dlopen_libs $LIBS" AC_CACHE_CHECK([whether a program can dlopen itself], lt_cv_dlopen_self, [dnl _LT_TRY_DLOPEN_SELF( lt_cv_dlopen_self=yes, lt_cv_dlopen_self=yes, lt_cv_dlopen_self=no, lt_cv_dlopen_self=cross) ]) if test "x$lt_cv_dlopen_self" = xyes; then wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $lt_prog_compiler_static\" AC_CACHE_CHECK([whether a statically linked program can dlopen itself], lt_cv_dlopen_self_static, [dnl _LT_TRY_DLOPEN_SELF( lt_cv_dlopen_self_static=yes, lt_cv_dlopen_self_static=yes, lt_cv_dlopen_self_static=no, lt_cv_dlopen_self_static=cross) ]) fi CPPFLAGS="$save_CPPFLAGS" LDFLAGS="$save_LDFLAGS" LIBS="$save_LIBS" ;; esac case $lt_cv_dlopen_self in yes|no) enable_dlopen_self=$lt_cv_dlopen_self ;; *) enable_dlopen_self=unknown ;; esac case $lt_cv_dlopen_self_static in yes|no) enable_dlopen_self_static=$lt_cv_dlopen_self_static ;; *) enable_dlopen_self_static=unknown ;; esac fi _LT_DECL([dlopen_support], [enable_dlopen], [0], [Whether dlopen is supported]) _LT_DECL([dlopen_self], [enable_dlopen_self], [0], [Whether dlopen of programs is supported]) _LT_DECL([dlopen_self_static], [enable_dlopen_self_static], [0], [Whether dlopen of statically linked programs is supported]) ])# LT_SYS_DLOPEN_SELF # Old name: AU_ALIAS([AC_LIBTOOL_DLOPEN_SELF], [LT_SYS_DLOPEN_SELF]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_LIBTOOL_DLOPEN_SELF], []) # _LT_COMPILER_C_O([TAGNAME]) # --------------------------- # Check to see if options -c and -o are simultaneously supported by compiler. # This macro does not hard code the compiler like AC_PROG_CC_C_O. 
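# A minimal sketch of what the macro below effectively runs (the out/ and
# conftest2 names come from the macro body, the command line is paraphrase):
#
#   mkdir conftest && cd conftest && mkdir out
#   $CC $CFLAGS -c conftest.c -o out/conftest2.o
#
# The option pair is recorded as supported only if out/conftest2.o was
# produced and the compiler emitted no diagnostics beyond its usual
# boilerplate output.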
m4_defun([_LT_COMPILER_C_O], [m4_require([_LT_DECL_SED])dnl m4_require([_LT_FILEUTILS_DEFAULTS])dnl m4_require([_LT_TAG_COMPILER])dnl AC_CACHE_CHECK([if $compiler supports -c -o file.$ac_objext], [_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)], [_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=no $RM -r conftest 2>/dev/null mkdir conftest cd conftest mkdir out echo "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="-o out/conftest2.$ac_objext" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [[^ ]]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:__oline__: $lt_compile\"" >&AS_MESSAGE_LOG_FD) (eval "$lt_compile" 2>out/conftest.err) ac_status=$? cat out/conftest.err >&AS_MESSAGE_LOG_FD echo "$as_me:__oline__: \$? = $ac_status" >&AS_MESSAGE_LOG_FD if (exit $ac_status) && test -s out/conftest2.$ac_objext then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings $ECHO "X$_lt_compiler_boilerplate" | $Xsed -e '/^$/d' > out/conftest.exp $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then _LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=yes fi fi chmod u+w . 2>&AS_MESSAGE_LOG_FD $RM conftest* # SGI C++ compiler will create directory out/ii_files/ for # template instantiation test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files $RM out/* && rmdir out cd .. $RM -r conftest $RM conftest* ]) _LT_TAGDECL([compiler_c_o], [lt_cv_prog_compiler_c_o], [1], [Does compiler simultaneously support -c and -o options?]) ])# _LT_COMPILER_C_O # _LT_COMPILER_FILE_LOCKS([TAGNAME]) # ---------------------------------- # Check to see if we can do hard links to lock some files if needed m4_defun([_LT_COMPILER_FILE_LOCKS], [m4_require([_LT_ENABLE_LOCK])dnl m4_require([_LT_FILEUTILS_DEFAULTS])dnl _LT_COMPILER_C_O([$1]) hard_links="nottested" if test "$_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)" = no && test "$need_locks" != no; then # do not overwrite the value of need_locks provided by the user AC_MSG_CHECKING([if we can lock with hard links]) hard_links=yes $RM conftest* ln conftest.a conftest.b 2>/dev/null && hard_links=no touch conftest.a ln conftest.a conftest.b 2>&5 || hard_links=no ln conftest.a conftest.b 2>/dev/null && hard_links=no AC_MSG_RESULT([$hard_links]) if test "$hard_links" = no; then AC_MSG_WARN([`$CC' does not support `-c -o', so `make -j' may be unsafe]) need_locks=warn fi else need_locks=no fi _LT_DECL([], [need_locks], [1], [Must we lock files when doing compilation?]) ])# _LT_COMPILER_FILE_LOCKS # _LT_CHECK_OBJDIR # ---------------- m4_defun([_LT_CHECK_OBJDIR], [AC_CACHE_CHECK([for objdir], [lt_cv_objdir], [rm -f .libs 2>/dev/null mkdir .libs 2>/dev/null if test -d .libs; then lt_cv_objdir=.libs else # MS-DOS does not allow filenames that begin with a dot. 
lt_cv_objdir=_libs fi rmdir .libs 2>/dev/null]) objdir=$lt_cv_objdir _LT_DECL([], [objdir], [0], [The name of the directory that contains temporary libtool files])dnl m4_pattern_allow([LT_OBJDIR])dnl AC_DEFINE_UNQUOTED(LT_OBJDIR, "$lt_cv_objdir/", [Define to the sub-directory in which libtool stores uninstalled libraries.]) ])# _LT_CHECK_OBJDIR # _LT_LINKER_HARDCODE_LIBPATH([TAGNAME]) # -------------------------------------- # Check hardcoding attributes. m4_defun([_LT_LINKER_HARDCODE_LIBPATH], [AC_MSG_CHECKING([how to hardcode library paths into programs]) _LT_TAGVAR(hardcode_action, $1)= if test -n "$_LT_TAGVAR(hardcode_libdir_flag_spec, $1)" || test -n "$_LT_TAGVAR(runpath_var, $1)" || test "X$_LT_TAGVAR(hardcode_automatic, $1)" = "Xyes" ; then # We can hardcode non-existent directories. if test "$_LT_TAGVAR(hardcode_direct, $1)" != no && # If the only mechanism to avoid hardcoding is shlibpath_var, we # have to relink, otherwise we might link with an installed library # when we should be linking with a yet-to-be-installed one ## test "$_LT_TAGVAR(hardcode_shlibpath_var, $1)" != no && test "$_LT_TAGVAR(hardcode_minus_L, $1)" != no; then # Linking always hardcodes the temporary library directory. _LT_TAGVAR(hardcode_action, $1)=relink else # We can link without hardcoding, and we can hardcode nonexisting dirs. _LT_TAGVAR(hardcode_action, $1)=immediate fi else # We cannot hardcode anything, or else we can only hardcode existing # directories. _LT_TAGVAR(hardcode_action, $1)=unsupported fi AC_MSG_RESULT([$_LT_TAGVAR(hardcode_action, $1)]) if test "$_LT_TAGVAR(hardcode_action, $1)" = relink || test "$_LT_TAGVAR(inherit_rpath, $1)" = yes; then # Fast installation is not supported enable_fast_install=no elif test "$shlibpath_overrides_runpath" = yes || test "$enable_shared" = no; then # Fast installation is not necessary enable_fast_install=needless fi _LT_TAGDECL([], [hardcode_action], [0], [How to hardcode a shared library path into an executable]) ])# _LT_LINKER_HARDCODE_LIBPATH # _LT_CMD_STRIPLIB # ---------------- m4_defun([_LT_CMD_STRIPLIB], [m4_require([_LT_DECL_EGREP]) striplib= old_striplib= AC_MSG_CHECKING([whether stripping libraries is possible]) if test -n "$STRIP" && $STRIP -V 2>&1 | $GREP "GNU strip" >/dev/null; then test -z "$old_striplib" && old_striplib="$STRIP --strip-debug" test -z "$striplib" && striplib="$STRIP --strip-unneeded" AC_MSG_RESULT([yes]) else # FIXME - insert some real tests, host_os isn't really good enough case $host_os in darwin*) if test -n "$STRIP" ; then striplib="$STRIP -x" old_striplib="$STRIP -S" AC_MSG_RESULT([yes]) else AC_MSG_RESULT([no]) fi ;; *) AC_MSG_RESULT([no]) ;; esac fi _LT_DECL([], [old_striplib], [1], [Commands to strip libraries]) _LT_DECL([], [striplib], [1]) ])# _LT_CMD_STRIPLIB # _LT_SYS_DYNAMIC_LINKER([TAG]) # ----------------------------- # PORTME Fill in your ld.so characteristics m4_defun([_LT_SYS_DYNAMIC_LINKER], [AC_REQUIRE([AC_CANONICAL_HOST])dnl m4_require([_LT_DECL_EGREP])dnl m4_require([_LT_FILEUTILS_DEFAULTS])dnl m4_require([_LT_DECL_OBJDUMP])dnl m4_require([_LT_DECL_SED])dnl AC_MSG_CHECKING([dynamic linker characteristics]) m4_if([$1], [], [ if test "$GCC" = yes; then case $host_os in darwin*) lt_awk_arg="/^libraries:/,/LR/" ;; *) lt_awk_arg="/^libraries:/" ;; esac lt_search_path_spec=`$CC -print-search-dirs | awk $lt_awk_arg | $SED -e "s/^libraries://" -e "s,=/,/,g"` if $ECHO "$lt_search_path_spec" | $GREP ';' >/dev/null ; then # if the path contains ";" then we assume it to be the separator # otherwise default to the 
standard path separator (i.e. ":") - it is # assumed that no part of a normal pathname contains ";" but that should # okay in the real world where ";" in dirpaths is itself problematic. lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED -e 's/;/ /g'` else lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` fi # Ok, now we have the path, separated by spaces, we can step through it # and add multilib dir if necessary. lt_tmp_lt_search_path_spec= lt_multi_os_dir=`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null` for lt_sys_path in $lt_search_path_spec; do if test -d "$lt_sys_path/$lt_multi_os_dir"; then lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path/$lt_multi_os_dir" else test -d "$lt_sys_path" && \ lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path" fi done lt_search_path_spec=`$ECHO $lt_tmp_lt_search_path_spec | awk ' BEGIN {RS=" "; FS="/|\n";} { lt_foo=""; lt_count=0; for (lt_i = NF; lt_i > 0; lt_i--) { if ($lt_i != "" && $lt_i != ".") { if ($lt_i == "..") { lt_count++; } else { if (lt_count == 0) { lt_foo="/" $lt_i lt_foo; } else { lt_count--; } } } } if (lt_foo != "") { lt_freq[[lt_foo]]++; } if (lt_freq[[lt_foo]] == 1) { print lt_foo; } }'` sys_lib_search_path_spec=`$ECHO $lt_search_path_spec` else sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib" fi]) library_names_spec= libname_spec='lib$name' soname_spec= shrext_cmds=".so" postinstall_cmds= postuninstall_cmds= finish_cmds= finish_eval= shlibpath_var= shlibpath_overrides_runpath=unknown version_type=none dynamic_linker="$host_os ld.so" sys_lib_dlsearch_path_spec="/lib /usr/lib" need_lib_prefix=unknown hardcode_into_libs=no # when you set need_version to no, make sure it does not cause -set_version # flags to be left without arguments need_version=unknown case $host_os in aix3*) version_type=linux library_names_spec='${libname}${release}${shared_ext}$versuffix $libname.a' shlibpath_var=LIBPATH # AIX 3 has no versioning support, so we append a major version to the name. soname_spec='${libname}${release}${shared_ext}$major' ;; aix[[4-9]]*) version_type=linux need_lib_prefix=no need_version=no hardcode_into_libs=yes if test "$host_cpu" = ia64; then # AIX 5 supports IA64 library_names_spec='${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext}$versuffix $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH else # With GCC up to 2.95.x, collect2 would create an import file # for dependence libraries. The import file would start with # the line `#! .'. This would cause the generated library to # depend on `.', always an invalid library. This was fixed in # development snapshots of GCC prior to 3.0. case $host_os in aix4 | aix4.[[01]] | aix4.[[01]].*) if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)' echo ' yes ' echo '#endif'; } | ${CC} -E - | $GREP yes > /dev/null; then : else can_build_shared=no fi ;; esac # AIX (on Power*) has no versioning support, so currently we can not hardcode correct # soname into executable. Probably we can add versioning support to # collect2, so additional links can be useful in future. if test "$aix_use_runtimelinking" = yes; then # If using run time linking (on AIX 4.2 or later) use lib.so # instead of lib.a to let people know that these are not # typical AIX shared libraries. 
library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' else # We preserve .a as extension for shared libraries through AIX4.2 # and later when we are not doing run time linking. library_names_spec='${libname}${release}.a $libname.a' soname_spec='${libname}${release}${shared_ext}$major' fi shlibpath_var=LIBPATH fi ;; amigaos*) case $host_cpu in powerpc) # Since July 2007 AmigaOS4 officially supports .so libraries. # When compiling the executable, add -use-dynld -Lsobjs: to the compileline. library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' ;; m68k) library_names_spec='$libname.ixlibrary $libname.a' # Create ${libname}_ixlibrary.a entries in /sys/libs. finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`$ECHO "X$lib" | $Xsed -e '\''s%^.*/\([[^/]]*\)\.ixlibrary$%\1%'\''`; test $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' ;; esac ;; beos*) library_names_spec='${libname}${shared_ext}' dynamic_linker="$host_os ld.so" shlibpath_var=LIBRARY_PATH ;; bsdi[[45]]*) version_type=linux need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir' shlibpath_var=LD_LIBRARY_PATH sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib" sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib" # the default ld.so.conf also contains /usr/contrib/lib and # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow # libtool to hard-code these into programs ;; cygwin* | mingw* | pw32* | cegcc*) version_type=windows shrext_cmds=".dll" need_version=no need_lib_prefix=no case $GCC,$host_os in yes,cygwin* | yes,mingw* | yes,pw32* | yes,cegcc*) library_names_spec='$libname.dll.a' # DLL is installed to $(libdir)/../bin by postinstall_cmds postinstall_cmds='base_file=`basename \${file}`~ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\${base_file}'\''i; echo \$dlname'\''`~ dldir=$destdir/`dirname \$dlpath`~ test -d \$dldir || mkdir -p \$dldir~ $install_prog $dir/$dlname \$dldir/$dlname~ chmod a+x \$dldir/$dlname~ if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then eval '\''$striplib \$dldir/$dlname'\'' || exit \$?; fi' postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ dlpath=$dir/\$dldll~ $RM \$dlpath' shlibpath_overrides_runpath=yes case $host_os in cygwin*) # Cygwin DLLs use 'cyg' prefix rather than 'lib' soname_spec='`echo ${libname} | sed -e 's/^lib/cyg/'``echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}' sys_lib_search_path_spec="/usr/lib /lib/w32api /lib /usr/local/lib" ;; mingw* | cegcc*) # MinGW DLLs use traditional 'lib' prefix soname_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}' sys_lib_search_path_spec=`$CC -print-search-dirs | $GREP "^libraries:" | $SED -e "s/^libraries://" -e "s,=/,/,g"` if $ECHO "$sys_lib_search_path_spec" | [$GREP ';[c-zC-Z]:/' >/dev/null]; then # It is most probably a Windows format PATH printed by # mingw gcc, but we are running on Cygwin. Gcc prints its search # path with ; separators, and with drive letters. 
We can handle the # drive letters (cygwin fileutils understands them), so leave them, # especially as we might pass files found there to a mingw objdump, # which wouldn't understand a cygwinified path. Ahh. sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` else sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` fi ;; pw32*) # pw32 DLLs use 'pw' prefix rather than 'lib' library_names_spec='`echo ${libname} | sed -e 's/^lib/pw/'``echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext}' ;; esac ;; *) library_names_spec='${libname}`echo ${release} | $SED -e 's/[[.]]/-/g'`${versuffix}${shared_ext} $libname.lib' ;; esac dynamic_linker='Win32 ld.exe' # FIXME: first we should search . and the directory the executable is in shlibpath_var=PATH ;; darwin* | rhapsody*) dynamic_linker="$host_os dyld" version_type=darwin need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${major}$shared_ext ${libname}$shared_ext' soname_spec='${libname}${release}${major}$shared_ext' shlibpath_overrides_runpath=yes shlibpath_var=DYLD_LIBRARY_PATH shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`' m4_if([$1], [],[ sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/local/lib"]) sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib' ;; dgux*) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname$shared_ext' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH ;; freebsd1*) dynamic_linker=no ;; freebsd* | dragonfly*) # DragonFly does not have aout. When/if they implement a new # versioning mechanism, adjust this. if test -x /usr/bin/objformat; then objformat=`/usr/bin/objformat` else case $host_os in freebsd[[123]]*) objformat=aout ;; *) objformat=elf ;; esac fi version_type=freebsd-$objformat case $version_type in freebsd-elf*) library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' need_version=no need_lib_prefix=no ;; freebsd-*) library_names_spec='${libname}${release}${shared_ext}$versuffix $libname${shared_ext}$versuffix' need_version=yes ;; esac shlibpath_var=LD_LIBRARY_PATH case $host_os in freebsd2*) shlibpath_overrides_runpath=yes ;; freebsd3.[[01]]* | freebsdelf3.[[01]]*) shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; freebsd3.[[2-9]]* | freebsdelf3.[[2-9]]* | \ freebsd4.[[0-5]] | freebsdelf4.[[0-5]] | freebsd4.1.1 | freebsdelf4.1.1) shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; *) # from 4.6 on, and DragonFly shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; esac ;; gnu*) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}${major} ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH hardcode_into_libs=yes ;; hpux9* | hpux10* | hpux11*) # Give a soname corresponding to the major version so that dld.sl refuses to # link against other versions. version_type=sunos need_lib_prefix=no need_version=no case $host_cpu in ia64*) shrext_cmds='.so' hardcode_into_libs=yes dynamic_linker="$host_os dld.so" shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. 
library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' if test "X$HPUX_IA64_MODE" = X32; then sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib" else sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64" fi sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec ;; hppa*64*) shrext_cmds='.sl' hardcode_into_libs=yes dynamic_linker="$host_os dld.sl" shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64" sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec ;; *) shrext_cmds='.sl' dynamic_linker="$host_os dld.sl" shlibpath_var=SHLIB_PATH shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' ;; esac # HP-UX runs *really* slowly unless shared libraries are mode 555. postinstall_cmds='chmod 555 $lib' ;; interix[[3-9]]*) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; irix5* | irix6* | nonstopux*) case $host_os in nonstopux*) version_type=nonstopux ;; *) if test "$lt_cv_prog_gnu_ld" = yes; then version_type=linux else version_type=irix fi ;; esac need_lib_prefix=no need_version=no soname_spec='${libname}${release}${shared_ext}$major' library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${release}${shared_ext} $libname${shared_ext}' case $host_os in irix5* | nonstopux*) libsuff= shlibsuff= ;; *) case $LD in # libtool.m4 will add one of these switches to LD *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ") libsuff= shlibsuff= libmagic=32-bit;; *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ") libsuff=32 shlibsuff=N32 libmagic=N32;; *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ") libsuff=64 shlibsuff=64 libmagic=64-bit;; *) libsuff= shlibsuff= libmagic=never-match;; esac ;; esac shlibpath_var=LD_LIBRARY${shlibsuff}_PATH shlibpath_overrides_runpath=no sys_lib_search_path_spec="/usr/lib${libsuff} /lib${libsuff} /usr/local/lib${libsuff}" sys_lib_dlsearch_path_spec="/usr/lib${libsuff} /lib${libsuff}" hardcode_into_libs=yes ;; # No shared lib support for Linux oldld, aout, or coff. linux*oldld* | linux*aout* | linux*coff*) dynamic_linker=no ;; # This must be Linux ELF. 
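# Illustrative expansion of the specs in the branch below, using a made-up
# library name and version numbers: with libname libfoo, an empty release,
# shared_ext .so, versuffix .1.2.3 and major .1, library_names_spec yields
#   libfoo.so.1.2.3 libfoo.so.1 libfoo.so
# where the first name is the real file and the rest are links, and
# soname_spec names libfoo.so.1 as the soname.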
linux* | k*bsd*-gnu | kopensolaris*-gnu) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no # Some binutils ld are patched to set DT_RUNPATH save_LDFLAGS=$LDFLAGS save_libdir=$libdir eval "libdir=/foo; wl=\"$_LT_TAGVAR(lt_prog_compiler_wl, $1)\"; \ LDFLAGS=\"\$LDFLAGS $_LT_TAGVAR(hardcode_libdir_flag_spec, $1)\"" AC_LINK_IFELSE([AC_LANG_PROGRAM([],[])], [AS_IF([ ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null], [shlibpath_overrides_runpath=yes])]) LDFLAGS=$save_LDFLAGS libdir=$save_libdir # This implies no fast_install, which is unacceptable. # Some rework will be needed to allow for fast_install # before this can be enabled. hardcode_into_libs=yes # Append ld.so.conf contents to the search path if test -f /etc/ld.so.conf; then lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \[$]2)); skip = 1; } { if (!skip) print \[$]0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;/^$/d' | tr '\n' ' '` sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra" fi # We used to test for /lib/ld.so.1 and disable shared libraries on # powerpc, because MkLinux only supported shared libraries with the # GNU dynamic linker. Since this was broken with cross compilers, # most powerpc-linux boxes support dynamic linking these days and # people can always --disable-shared, the test was removed, and we # assume the GNU/Linux dynamic linker is in use. dynamic_linker='GNU/Linux ld.so' ;; netbsdelf*-gnu) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes dynamic_linker='NetBSD ld.elf_so' ;; netbsd*) version_type=sunos need_lib_prefix=no need_version=no if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' dynamic_linker='NetBSD (a.out) ld.so' else library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' dynamic_linker='NetBSD ld.elf_so' fi shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; newsos6) version_type=linux library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes ;; *nto* | *qnx*) version_type=qnx need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes dynamic_linker='ldqnx.so' ;; openbsd*) version_type=sunos sys_lib_dlsearch_path_spec="/usr/lib" need_lib_prefix=no # Some older versions of OpenBSD (3.3 at least) *do* need versioned libs. 
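# Illustrative note on need_version in this branch (an inference from the
# sunos-style library_names_spec below, not a statement from this file):
# when need_version is yes every generated name keeps its version suffix,
# for example a hypothetical libfoo.so.1.0, and no bare libfoo.so link is
# produced.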
case $host_os in openbsd3.3 | openbsd3.3.*) need_version=yes ;; *) need_version=no ;; esac library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' shlibpath_var=LD_LIBRARY_PATH if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then case $host_os in openbsd2.[[89]] | openbsd2.[[89]].*) shlibpath_overrides_runpath=no ;; *) shlibpath_overrides_runpath=yes ;; esac else shlibpath_overrides_runpath=yes fi ;; os2*) libname_spec='$name' shrext_cmds=".dll" need_lib_prefix=no library_names_spec='$libname${shared_ext} $libname.a' dynamic_linker='OS/2 ld.exe' shlibpath_var=LIBPATH ;; osf3* | osf4* | osf5*) version_type=osf need_lib_prefix=no need_version=no soname_spec='${libname}${release}${shared_ext}$major' library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib" sys_lib_dlsearch_path_spec="$sys_lib_search_path_spec" ;; rdos*) dynamic_linker=no ;; solaris*) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes # ldd complains unless libraries are executable postinstall_cmds='chmod +x $lib' ;; sunos4*) version_type=sunos library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${shared_ext}$versuffix' finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes if test "$with_gnu_ld" = yes; then need_lib_prefix=no fi need_version=yes ;; sysv4 | sysv4.3*) version_type=linux library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH case $host_vendor in sni) shlibpath_overrides_runpath=no need_lib_prefix=no runpath_var=LD_RUN_PATH ;; siemens) need_lib_prefix=no ;; motorola) need_lib_prefix=no need_version=no shlibpath_overrides_runpath=no sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib' ;; esac ;; sysv4*MP*) if test -d /usr/nec ;then version_type=linux library_names_spec='$libname${shared_ext}.$versuffix $libname${shared_ext}.$major $libname${shared_ext}' soname_spec='$libname${shared_ext}.$major' shlibpath_var=LD_LIBRARY_PATH fi ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) version_type=freebsd-elf need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext} $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes if test "$with_gnu_ld" = yes; then sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib' else sys_lib_search_path_spec='/usr/ccs/lib /usr/lib' case $host_os in sco3.2v5*) sys_lib_search_path_spec="$sys_lib_search_path_spec /lib" ;; esac fi sys_lib_dlsearch_path_spec='/usr/lib' ;; tpf*) # TPF is a cross-target only. Preferred cross-host = GNU/Linux. 
version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; uts4*) version_type=linux library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major $libname${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH ;; *) dynamic_linker=no ;; esac AC_MSG_RESULT([$dynamic_linker]) test "$dynamic_linker" = no && can_build_shared=no variables_saved_for_relink="PATH $shlibpath_var $runpath_var" if test "$GCC" = yes; then variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH" fi if test "${lt_cv_sys_lib_search_path_spec+set}" = set; then sys_lib_search_path_spec="$lt_cv_sys_lib_search_path_spec" fi if test "${lt_cv_sys_lib_dlsearch_path_spec+set}" = set; then sys_lib_dlsearch_path_spec="$lt_cv_sys_lib_dlsearch_path_spec" fi _LT_DECL([], [variables_saved_for_relink], [1], [Variables whose values should be saved in libtool wrapper scripts and restored at link time]) _LT_DECL([], [need_lib_prefix], [0], [Do we need the "lib" prefix for modules?]) _LT_DECL([], [need_version], [0], [Do we need a version for libraries?]) _LT_DECL([], [version_type], [0], [Library versioning type]) _LT_DECL([], [runpath_var], [0], [Shared library runtime path variable]) _LT_DECL([], [shlibpath_var], [0],[Shared library path variable]) _LT_DECL([], [shlibpath_overrides_runpath], [0], [Is shlibpath searched before the hard-coded library search path?]) _LT_DECL([], [libname_spec], [1], [Format of library name prefix]) _LT_DECL([], [library_names_spec], [1], [[List of archive names. First name is the real one, the rest are links. The last name is the one that the linker finds with -lNAME]]) _LT_DECL([], [soname_spec], [1], [[The coded name of the library, if different from the real name]]) _LT_DECL([], [postinstall_cmds], [2], [Command to use after installation of a shared archive]) _LT_DECL([], [postuninstall_cmds], [2], [Command to use after uninstallation of a shared archive]) _LT_DECL([], [finish_cmds], [2], [Commands used to finish a libtool library installation in a directory]) _LT_DECL([], [finish_eval], [1], [[As "finish_cmds", except a single script fragment to be evaled but not shown]]) _LT_DECL([], [hardcode_into_libs], [0], [Whether we should hardcode library paths into libraries]) _LT_DECL([], [sys_lib_search_path_spec], [2], [Compile-time system search path for libraries]) _LT_DECL([], [sys_lib_dlsearch_path_spec], [2], [Run-time system search path for libraries]) ])# _LT_SYS_DYNAMIC_LINKER # _LT_PATH_TOOL_PREFIX(TOOL) # -------------------------- # find a file program which can recognize shared library AC_DEFUN([_LT_PATH_TOOL_PREFIX], [m4_require([_LT_DECL_EGREP])dnl AC_MSG_CHECKING([for $1]) AC_CACHE_VAL(lt_cv_path_MAGIC_CMD, [case $MAGIC_CMD in [[\\/*] | ?:[\\/]*]) lt_cv_path_MAGIC_CMD="$MAGIC_CMD" # Let the user override the test with a path. ;; *) lt_save_MAGIC_CMD="$MAGIC_CMD" lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR dnl $ac_dummy forces splitting on constant user-supplied paths. dnl POSIX.2 word splitting is done only on the output of word expansions, dnl not every word. This closes a longstanding sh security hole. ac_dummy="m4_if([$2], , $PATH, [$2])" for ac_dir in $ac_dummy; do IFS="$lt_save_ifs" test -z "$ac_dir" && ac_dir=. 
if test -f $ac_dir/$1; then lt_cv_path_MAGIC_CMD="$ac_dir/$1" if test -n "$file_magic_test_file"; then case $deplibs_check_method in "file_magic "*) file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"` MAGIC_CMD="$lt_cv_path_MAGIC_CMD" if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null | $EGREP "$file_magic_regex" > /dev/null; then : else cat <<_LT_EOF 1>&2 *** Warning: the command libtool uses to detect shared libraries, *** $file_magic_cmd, produces output that libtool cannot recognize. *** The result is that libtool may fail to recognize shared libraries *** as such. This will affect the creation of libtool libraries that *** depend on shared libraries, but programs linked with such libtool *** libraries will work regardless of this problem. Nevertheless, you *** may want to report the problem to your system manager and/or to *** bug-libtool@gnu.org _LT_EOF fi ;; esac fi break fi done IFS="$lt_save_ifs" MAGIC_CMD="$lt_save_MAGIC_CMD" ;; esac]) MAGIC_CMD="$lt_cv_path_MAGIC_CMD" if test -n "$MAGIC_CMD"; then AC_MSG_RESULT($MAGIC_CMD) else AC_MSG_RESULT(no) fi _LT_DECL([], [MAGIC_CMD], [0], [Used to examine libraries when file_magic_cmd begins with "file"])dnl ])# _LT_PATH_TOOL_PREFIX # Old name: AU_ALIAS([AC_PATH_TOOL_PREFIX], [_LT_PATH_TOOL_PREFIX]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_PATH_TOOL_PREFIX], []) # _LT_PATH_MAGIC # -------------- # find a file program which can recognize a shared library m4_defun([_LT_PATH_MAGIC], [_LT_PATH_TOOL_PREFIX(${ac_tool_prefix}file, /usr/bin$PATH_SEPARATOR$PATH) if test -z "$lt_cv_path_MAGIC_CMD"; then if test -n "$ac_tool_prefix"; then _LT_PATH_TOOL_PREFIX(file, /usr/bin$PATH_SEPARATOR$PATH) else MAGIC_CMD=: fi fi ])# _LT_PATH_MAGIC # LT_PATH_LD # ---------- # find the pathname to the GNU or non-GNU linker AC_DEFUN([LT_PATH_LD], [AC_REQUIRE([AC_PROG_CC])dnl AC_REQUIRE([AC_CANONICAL_HOST])dnl AC_REQUIRE([AC_CANONICAL_BUILD])dnl m4_require([_LT_DECL_SED])dnl m4_require([_LT_DECL_EGREP])dnl AC_ARG_WITH([gnu-ld], [AS_HELP_STRING([--with-gnu-ld], [assume the C compiler uses GNU ld @<:@default=no@:>@])], [test "$withval" = no || with_gnu_ld=yes], [with_gnu_ld=no])dnl ac_prog=ld if test "$GCC" = yes; then # Check if gcc -print-prog-name=ld gives a path. AC_MSG_CHECKING([for ld used by $CC]) case $host in *-*-mingw*) # gcc leaves a trailing carriage return which upsets mingw ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;; *) ac_prog=`($CC -print-prog-name=ld) 2>&5` ;; esac case $ac_prog in # Accept absolute paths. [[\\/]]* | ?:[[\\/]]*) re_direlt='/[[^/]][[^/]]*/\.\./' # Canonicalize the pathname of ld ac_prog=`$ECHO "$ac_prog"| $SED 's%\\\\%/%g'` while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"` done test -z "$LD" && LD="$ac_prog" ;; "") # If it fails, then pretend we aren't using GCC. ac_prog=ld ;; *) # If it is relative, then search for the first ld in PATH. with_gnu_ld=unknown ;; esac elif test "$with_gnu_ld" = yes; then AC_MSG_CHECKING([for GNU ld]) else AC_MSG_CHECKING([for non-GNU ld]) fi AC_CACHE_VAL(lt_cv_path_LD, [if test -z "$LD"; then lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR for ac_dir in $PATH; do IFS="$lt_save_ifs" test -z "$ac_dir" && ac_dir=. if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then lt_cv_path_LD="$ac_dir/$ac_prog" # Check to see if the program is GNU ld. I'd rather use --version, # but apparently some variants of GNU ld only accept -v. 
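# Illustrative sketch (not part of the macro): the GNU-versus-native-ld distinction
# that the cached result feeds amounts to inspecting the linker's version banner
# (assuming $LD has already been set to a linker found above):
case `$LD -v 2>&1 </dev/null` in
*GNU* | *'with BFD'*) echo "GNU ld detected" ;;
*)                    echo "native (non-GNU) ld detected" ;;
esac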
# Break only if it was the GNU/non-GNU ld that we prefer. case `"$lt_cv_path_LD" -v 2>&1 &1 /dev/null 2>&1; then lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' lt_cv_file_magic_cmd='func_win32_libid' else lt_cv_deplibs_check_method='file_magic file format pei*-i386(.*architecture: i386)?' lt_cv_file_magic_cmd='$OBJDUMP -f' fi ;; cegcc) # use the weaker test based on 'objdump'. See mingw*. lt_cv_deplibs_check_method='file_magic file format pe-arm-.*little(.*architecture: arm)?' lt_cv_file_magic_cmd='$OBJDUMP -f' ;; darwin* | rhapsody*) lt_cv_deplibs_check_method=pass_all ;; freebsd* | dragonfly*) if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then case $host_cpu in i*86 ) # Not sure whether the presence of OpenBSD here was a mistake. # Let's accept both of them until this is cleared up. lt_cv_deplibs_check_method='file_magic (FreeBSD|OpenBSD|DragonFly)/i[[3-9]]86 (compact )?demand paged shared library' lt_cv_file_magic_cmd=/usr/bin/file lt_cv_file_magic_test_file=`echo /usr/lib/libc.so.*` ;; esac else lt_cv_deplibs_check_method=pass_all fi ;; gnu*) lt_cv_deplibs_check_method=pass_all ;; hpux10.20* | hpux11*) lt_cv_file_magic_cmd=/usr/bin/file case $host_cpu in ia64*) lt_cv_deplibs_check_method='file_magic (s[[0-9]][[0-9]][[0-9]]|ELF-[[0-9]][[0-9]]) shared object file - IA64' lt_cv_file_magic_test_file=/usr/lib/hpux32/libc.so ;; hppa*64*) [lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF-[0-9][0-9]) shared object file - PA-RISC [0-9].[0-9]'] lt_cv_file_magic_test_file=/usr/lib/pa20_64/libc.sl ;; *) lt_cv_deplibs_check_method='file_magic (s[[0-9]][[0-9]][[0-9]]|PA-RISC[[0-9]].[[0-9]]) shared library' lt_cv_file_magic_test_file=/usr/lib/libc.sl ;; esac ;; interix[[3-9]]*) # PIC code is broken on Interix 3.x, that's why |\.a not |_pic\.a here lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so|\.a)$' ;; irix5* | irix6* | nonstopux*) case $LD in *-32|*"-32 ") libmagic=32-bit;; *-n32|*"-n32 ") libmagic=N32;; *-64|*"-64 ") libmagic=64-bit;; *) libmagic=never-match;; esac lt_cv_deplibs_check_method=pass_all ;; # This must be Linux ELF. 
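# Illustrative sketch (not part of the macro): where deplibs_check_method is a
# "file_magic" entry rather than pass_all, the later check boils down to running
# $file_magic_cmd on a known shared library and grepping its output.  The values
# below are made up, and on GNU/Linux the table above uses pass_all, so this path
# is not taken there:
file_magic_cmd=/usr/bin/file
file_magic_regex='ELF [0-9][0-9]*-bit [LM]SB shared object'
file_magic_test_file=/lib/libdemo.so.1        # hypothetical test library
if eval $file_magic_cmd \$file_magic_test_file 2>/dev/null \
   | grep -E "$file_magic_regex" >/dev/null; then
  echo "'file' output recognized: file_magic checking will work"
else
  echo "'file' output not recognized: libtool would emit the warning shown earlier"
fi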
linux* | k*bsd*-gnu | kopensolaris*-gnu) lt_cv_deplibs_check_method=pass_all ;; netbsd* | netbsdelf*-gnu) if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|_pic\.a)$' else lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so|_pic\.a)$' fi ;; newos6*) lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (executable|dynamic lib)' lt_cv_file_magic_cmd=/usr/bin/file lt_cv_file_magic_test_file=/usr/lib/libnls.so ;; *nto* | *qnx*) lt_cv_deplibs_check_method=pass_all ;; openbsd*) if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|\.so|_pic\.a)$' else lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|_pic\.a)$' fi ;; osf3* | osf4* | osf5*) lt_cv_deplibs_check_method=pass_all ;; rdos*) lt_cv_deplibs_check_method=pass_all ;; solaris*) lt_cv_deplibs_check_method=pass_all ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) lt_cv_deplibs_check_method=pass_all ;; sysv4 | sysv4.3*) case $host_vendor in motorola) lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (shared object|dynamic lib) M[[0-9]][[0-9]]* Version [[0-9]]' lt_cv_file_magic_test_file=`echo /usr/lib/libc.so*` ;; ncr) lt_cv_deplibs_check_method=pass_all ;; sequent) lt_cv_file_magic_cmd='/bin/file' lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[LM]]SB (shared object|dynamic lib )' ;; sni) lt_cv_file_magic_cmd='/bin/file' lt_cv_deplibs_check_method="file_magic ELF [[0-9]][[0-9]]*-bit [[LM]]SB dynamic lib" lt_cv_file_magic_test_file=/lib/libc.so ;; siemens) lt_cv_deplibs_check_method=pass_all ;; pc) lt_cv_deplibs_check_method=pass_all ;; esac ;; tpf*) lt_cv_deplibs_check_method=pass_all ;; esac ]) file_magic_cmd=$lt_cv_file_magic_cmd deplibs_check_method=$lt_cv_deplibs_check_method test -z "$deplibs_check_method" && deplibs_check_method=unknown _LT_DECL([], [deplibs_check_method], [1], [Method to check whether dependent libraries are shared objects]) _LT_DECL([], [file_magic_cmd], [1], [Command to use when deplibs_check_method == "file_magic"]) ])# _LT_CHECK_MAGIC_METHOD # LT_PATH_NM # ---------- # find the pathname to a BSD- or MS-compatible name lister AC_DEFUN([LT_PATH_NM], [AC_REQUIRE([AC_PROG_CC])dnl AC_CACHE_CHECK([for BSD- or MS-compatible name lister (nm)], lt_cv_path_NM, [if test -n "$NM"; then # Let the user override the test. lt_cv_path_NM="$NM" else lt_nm_to_check="${ac_tool_prefix}nm" if test -n "$ac_tool_prefix" && test "$build" = "$host"; then lt_nm_to_check="$lt_nm_to_check nm" fi for lt_tmp_nm in $lt_nm_to_check; do lt_save_ifs="$IFS"; IFS=$PATH_SEPARATOR for ac_dir in $PATH /usr/ccs/bin/elf /usr/ccs/bin /usr/ucb /bin; do IFS="$lt_save_ifs" test -z "$ac_dir" && ac_dir=. tmp_nm="$ac_dir/$lt_tmp_nm" if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext" ; then # Check to see if the nm accepts a BSD-compat flag. 
# Adding the `sed 1q' prevents false positives on HP-UX, which says: # nm: unknown option "B" ignored # Tru64's nm complains that /dev/null is an invalid object file case `"$tmp_nm" -B /dev/null 2>&1 | sed '1q'` in */dev/null* | *'Invalid file or object type'*) lt_cv_path_NM="$tmp_nm -B" break ;; *) case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in */dev/null*) lt_cv_path_NM="$tmp_nm -p" break ;; *) lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but continue # so that we can try to find one that supports BSD flags ;; esac ;; esac fi done IFS="$lt_save_ifs" done : ${lt_cv_path_NM=no} fi]) if test "$lt_cv_path_NM" != "no"; then NM="$lt_cv_path_NM" else # Didn't find any BSD compatible name lister, look for dumpbin. AC_CHECK_TOOLS(DUMPBIN, ["dumpbin -symbols" "link -dump -symbols"], :) AC_SUBST([DUMPBIN]) if test "$DUMPBIN" != ":"; then NM="$DUMPBIN" fi fi test -z "$NM" && NM=nm AC_SUBST([NM]) _LT_DECL([], [NM], [1], [A BSD- or MS-compatible name lister])dnl AC_CACHE_CHECK([the name lister ($NM) interface], [lt_cv_nm_interface], [lt_cv_nm_interface="BSD nm" echo "int some_variable = 0;" > conftest.$ac_ext (eval echo "\"\$as_me:__oline__: $ac_compile\"" >&AS_MESSAGE_LOG_FD) (eval "$ac_compile" 2>conftest.err) cat conftest.err >&AS_MESSAGE_LOG_FD (eval echo "\"\$as_me:__oline__: $NM \\\"conftest.$ac_objext\\\"\"" >&AS_MESSAGE_LOG_FD) (eval "$NM \"conftest.$ac_objext\"" 2>conftest.err > conftest.out) cat conftest.err >&AS_MESSAGE_LOG_FD (eval echo "\"\$as_me:__oline__: output\"" >&AS_MESSAGE_LOG_FD) cat conftest.out >&AS_MESSAGE_LOG_FD if $GREP 'External.*some_variable' conftest.out > /dev/null; then lt_cv_nm_interface="MS dumpbin" fi rm -f conftest*]) ])# LT_PATH_NM # Old names: AU_ALIAS([AM_PROG_NM], [LT_PATH_NM]) AU_ALIAS([AC_PROG_NM], [LT_PATH_NM]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AM_PROG_NM], []) dnl AC_DEFUN([AC_PROG_NM], []) # LT_LIB_M # -------- # check for math library AC_DEFUN([LT_LIB_M], [AC_REQUIRE([AC_CANONICAL_HOST])dnl LIBM= case $host in *-*-beos* | *-*-cygwin* | *-*-pw32* | *-*-darwin*) # These system don't have libm, or don't need it ;; *-ncr-sysv4.3*) AC_CHECK_LIB(mw, _mwvalidcheckl, LIBM="-lmw") AC_CHECK_LIB(m, cos, LIBM="$LIBM -lm") ;; *) AC_CHECK_LIB(m, cos, LIBM="-lm") ;; esac AC_SUBST([LIBM]) ])# LT_LIB_M # Old name: AU_ALIAS([AC_CHECK_LIBM], [LT_LIB_M]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_CHECK_LIBM], []) # _LT_COMPILER_NO_RTTI([TAGNAME]) # ------------------------------- m4_defun([_LT_COMPILER_NO_RTTI], [m4_require([_LT_TAG_COMPILER])dnl _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)= if test "$GCC" = yes; then _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -fno-builtin' _LT_COMPILER_OPTION([if $compiler supports -fno-rtti -fno-exceptions], lt_cv_prog_compiler_rtti_exceptions, [-fno-rtti -fno-exceptions], [], [_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)="$_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1) -fno-rtti -fno-exceptions"]) fi _LT_TAGDECL([no_builtin_flag], [lt_prog_compiler_no_builtin_flag], [1], [Compiler flag to turn off builtin functions]) ])# _LT_COMPILER_NO_RTTI # _LT_CMD_GLOBAL_SYMBOLS # ---------------------- m4_defun([_LT_CMD_GLOBAL_SYMBOLS], [AC_REQUIRE([AC_CANONICAL_HOST])dnl AC_REQUIRE([AC_PROG_CC])dnl AC_REQUIRE([LT_PATH_NM])dnl AC_REQUIRE([LT_PATH_LD])dnl m4_require([_LT_DECL_SED])dnl m4_require([_LT_DECL_EGREP])dnl m4_require([_LT_TAG_COMPILER])dnl # Check for command to grab the raw symbol name followed by C symbol from nm. 
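# Illustrative sketch (not part of the macro): the BSD-compat probe above, run by
# hand against whichever nm is first on PATH.  The first flag whose output still
# names /dev/null (rather than an option error) is the one libtool keeps.
for lt_flag in -B -p; do
  case `nm $lt_flag /dev/null 2>&1 | sed '1q'` in
  */dev/null* | *'Invalid file or object type'*)
    echo "nm accepts '$lt_flag'"
    break
    ;;
  esac
done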
AC_MSG_CHECKING([command to parse $NM output from $compiler object]) AC_CACHE_VAL([lt_cv_sys_global_symbol_pipe], [ # These are sane defaults that work on at least a few old systems. # [They come from Ultrix. What could be older than Ultrix?!! ;)] # Character class describing NM global symbol codes. symcode='[[BCDEGRST]]' # Regexp to match symbols that can be accessed directly from C. sympat='\([[_A-Za-z]][[_A-Za-z0-9]]*\)' # Define system-specific variables. case $host_os in aix*) symcode='[[BCDT]]' ;; cygwin* | mingw* | pw32* | cegcc*) symcode='[[ABCDGISTW]]' ;; hpux*) if test "$host_cpu" = ia64; then symcode='[[ABCDEGRST]]' fi ;; irix* | nonstopux*) symcode='[[BCDEGRST]]' ;; osf*) symcode='[[BCDEGQRST]]' ;; solaris*) symcode='[[BDRT]]' ;; sco3.2v5*) symcode='[[DT]]' ;; sysv4.2uw2*) symcode='[[DT]]' ;; sysv5* | sco5v6* | unixware* | OpenUNIX*) symcode='[[ABDT]]' ;; sysv4) symcode='[[DFNSTU]]' ;; esac # If we're using GNU nm, then use its standard symbol codes. case `$NM -V 2>&1` in *GNU* | *'with BFD'*) symcode='[[ABCDGIRSTW]]' ;; esac # Transform an extracted symbol line into a proper C declaration. # Some systems (esp. on ia64) link data and code symbols differently, # so use this general approach. lt_cv_sys_global_symbol_to_cdecl="sed -n -e 's/^T .* \(.*\)$/extern int \1();/p' -e 's/^$symcode* .* \(.*\)$/extern char \1;/p'" # Transform an extracted symbol line into symbol name and symbol address lt_cv_sys_global_symbol_to_c_name_address="sed -n -e 's/^: \([[^ ]]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"\2\", (void *) \&\2},/p'" lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n -e 's/^: \([[^ ]]*\) $/ {\\\"\1\\\", (void *) 0},/p' -e 's/^$symcode* \([[^ ]]*\) \(lib[[^ ]]*\)$/ {\"\2\", (void *) \&\2},/p' -e 's/^$symcode* \([[^ ]]*\) \([[^ ]]*\)$/ {\"lib\2\", (void *) \&\2},/p'" # Handle CRLF in mingw tool chain opt_cr= case $build_os in mingw*) opt_cr=`$ECHO 'x\{0,1\}' | tr x '\015'` # option cr in regexp ;; esac # Try without a prefix underscore, then with it. for ac_symprfx in "" "_"; do # Transform symcode, sympat, and symprfx into a raw symbol and a C symbol. symxfrm="\\1 $ac_symprfx\\2 \\2" # Write the raw and C identifiers. if test "$lt_cv_nm_interface" = "MS dumpbin"; then # Fake it for dumpbin and say T for any non-static function # and D for any global variable. # Also find C++ and __fastcall symbols from MSVC++, # which start with @ or ?. lt_cv_sys_global_symbol_pipe="$AWK ['"\ " {last_section=section; section=\$ 3};"\ " /Section length .*#relocs.*(pick any)/{hide[last_section]=1};"\ " \$ 0!~/External *\|/{next};"\ " / 0+ UNDEF /{next}; / UNDEF \([^|]\)*()/{next};"\ " {if(hide[section]) next};"\ " {f=0}; \$ 0~/\(\).*\|/{f=1}; {printf f ? \"T \" : \"D \"};"\ " {split(\$ 0, a, /\||\r/); split(a[2], s)};"\ " s[1]~/^[@?]/{print s[1], s[1]; next};"\ " s[1]~prfx {split(s[1],t,\"@\"); print t[1], substr(t[1],length(prfx))}"\ " ' prfx=^$ac_symprfx]" else lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[[ ]]\($symcode$symcode*\)[[ ]][[ ]]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" fi # Check to see that the pipe works correctly. pipe_works=no rm -f conftest* cat > conftest.$ac_ext <<_LT_EOF #ifdef __cplusplus extern "C" { #endif char nm_test_var; void nm_test_func(void); void nm_test_func(void){} #ifdef __cplusplus } #endif int main(){nm_test_var='a';nm_test_func();return(0);} _LT_EOF if AC_TRY_EVAL(ac_compile); then # Now try to grab the symbols. 
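# Worked example (not part of the macro): for BSD-style nm output such as
#   0000000000601040 D nm_test_var
#   0000000000400570 T nm_test_func
# the pipe built above (with symcode='[BCDGRST]' and an empty symbol prefix)
# prints "code name C-name" triples:
#   D nm_test_var nm_test_var
#   T nm_test_func nm_test_func
# and global_symbol_to_cdecl then rewrites those lines as
#   extern char nm_test_var;
#   extern int nm_test_func();
# Addresses are made up; the exact symbol-code class depends on $host_os as set above.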
nlist=conftest.nm if AC_TRY_EVAL(NM conftest.$ac_objext \| $lt_cv_sys_global_symbol_pipe \> $nlist) && test -s "$nlist"; then # Try sorting and uniquifying the output. if sort "$nlist" | uniq > "$nlist"T; then mv -f "$nlist"T "$nlist" else rm -f "$nlist"T fi # Make sure that we snagged all the symbols we need. if $GREP ' nm_test_var$' "$nlist" >/dev/null; then if $GREP ' nm_test_func$' "$nlist" >/dev/null; then cat <<_LT_EOF > conftest.$ac_ext #ifdef __cplusplus extern "C" { #endif _LT_EOF # Now generate the symbol file. eval "$lt_cv_sys_global_symbol_to_cdecl"' < "$nlist" | $GREP -v main >> conftest.$ac_ext' cat <<_LT_EOF >> conftest.$ac_ext /* The mapping between symbol names and symbols. */ const struct { const char *name; void *address; } lt__PROGRAM__LTX_preloaded_symbols[[]] = { { "@PROGRAM@", (void *) 0 }, _LT_EOF $SED "s/^$symcode$symcode* \(.*\) \(.*\)$/ {\"\2\", (void *) \&\2},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext cat <<\_LT_EOF >> conftest.$ac_ext {0, (void *) 0} }; /* This works around a problem in FreeBSD linker */ #ifdef FREEBSD_WORKAROUND static const void *lt_preloaded_setup() { return lt__PROGRAM__LTX_preloaded_symbols; } #endif #ifdef __cplusplus } #endif _LT_EOF # Now try linking the two files. mv conftest.$ac_objext conftstm.$ac_objext lt_save_LIBS="$LIBS" lt_save_CFLAGS="$CFLAGS" LIBS="conftstm.$ac_objext" CFLAGS="$CFLAGS$_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)" if AC_TRY_EVAL(ac_link) && test -s conftest${ac_exeext}; then pipe_works=yes fi LIBS="$lt_save_LIBS" CFLAGS="$lt_save_CFLAGS" else echo "cannot find nm_test_func in $nlist" >&AS_MESSAGE_LOG_FD fi else echo "cannot find nm_test_var in $nlist" >&AS_MESSAGE_LOG_FD fi else echo "cannot run $lt_cv_sys_global_symbol_pipe" >&AS_MESSAGE_LOG_FD fi else echo "$progname: failed program was:" >&AS_MESSAGE_LOG_FD cat conftest.$ac_ext >&5 fi rm -rf conftest* conftst* # Do not use the global_symbol_pipe unless it works. if test "$pipe_works" = yes; then break else lt_cv_sys_global_symbol_pipe= fi done ]) if test -z "$lt_cv_sys_global_symbol_pipe"; then lt_cv_sys_global_symbol_to_cdecl= fi if test -z "$lt_cv_sys_global_symbol_pipe$lt_cv_sys_global_symbol_to_cdecl"; then AC_MSG_RESULT(failed) else AC_MSG_RESULT(ok) fi _LT_DECL([global_symbol_pipe], [lt_cv_sys_global_symbol_pipe], [1], [Take the output of nm and produce a listing of raw symbols and C names]) _LT_DECL([global_symbol_to_cdecl], [lt_cv_sys_global_symbol_to_cdecl], [1], [Transform the output of nm in a proper C declaration]) _LT_DECL([global_symbol_to_c_name_address], [lt_cv_sys_global_symbol_to_c_name_address], [1], [Transform the output of nm in a C name address pair]) _LT_DECL([global_symbol_to_c_name_address_lib_prefix], [lt_cv_sys_global_symbol_to_c_name_address_lib_prefix], [1], [Transform the output of nm in a C name address pair when lib prefix is needed]) ]) # _LT_CMD_GLOBAL_SYMBOLS # _LT_COMPILER_PIC([TAGNAME]) # --------------------------- m4_defun([_LT_COMPILER_PIC], [m4_require([_LT_TAG_COMPILER])dnl _LT_TAGVAR(lt_prog_compiler_wl, $1)= _LT_TAGVAR(lt_prog_compiler_pic, $1)= _LT_TAGVAR(lt_prog_compiler_static, $1)= AC_MSG_CHECKING([for $compiler option to produce PIC]) m4_if([$1], [CXX], [ # C++ specific cases for pic, static, wl, etc. if test "$GXX" = yes; then _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' case $host_os in aix*) # All AIX code is PIC. 
if test "$host_cpu" = ia64; then # AIX 5 now supports IA64 processor _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' fi ;; amigaos*) case $host_cpu in powerpc) # see comment about AmigaOS4 .so support _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' ;; m68k) # FIXME: we need at least 68020 code to build shared libraries, but # adding the `-m68020' flag to GCC prevents building anything better, # like `-m68040'. _LT_TAGVAR(lt_prog_compiler_pic, $1)='-m68020 -resident32 -malways-restore-a4' ;; esac ;; beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*) # PIC is the default for these OSes. ;; mingw* | cygwin* | os2* | pw32* | cegcc*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). # Although the cygwin gcc ignores -fPIC, still need this for old-style # (--disable-auto-import) libraries m4_if([$1], [GCJ], [], [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT']) ;; darwin* | rhapsody*) # PIC is the default on this platform # Common symbols not allowed in MH_DYLIB files _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fno-common' ;; *djgpp*) # DJGPP does not support shared libraries at all _LT_TAGVAR(lt_prog_compiler_pic, $1)= ;; interix[[3-9]]*) # Interix 3.x gcc -fpic/-fPIC options generate broken code. # Instead, we relocate shared libraries at runtime. ;; sysv4*MP*) if test -d /usr/nec; then _LT_TAGVAR(lt_prog_compiler_pic, $1)=-Kconform_pic fi ;; hpux*) # PIC is the default for 64-bit PA HP-UX, but not for 32-bit # PA HP-UX. On IA64 HP-UX, PIC is the default but the pic flag # sets the default TLS model and affects inlining. case $host_cpu in hppa*64*) ;; *) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' ;; esac ;; *qnx* | *nto*) # QNX uses GNU C++, but need to define -shared option too, otherwise # it will coredump. _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared' ;; *) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' ;; esac else case $host_os in aix[[4-9]]*) # All AIX code is PIC. if test "$host_cpu" = ia64; then # AIX 5 now supports IA64 processor _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' else _LT_TAGVAR(lt_prog_compiler_static, $1)='-bnso -bI:/lib/syscalls.exp' fi ;; chorus*) case $cc_basename in cxch68*) # Green Hills C++ Compiler # _LT_TAGVAR(lt_prog_compiler_static, $1)="--no_auto_instantiation -u __main -u __premain -u _abort -r $COOL_DIR/lib/libOrb.a $MVME_DIR/lib/CC/libC.a $MVME_DIR/lib/classix/libcx.s.a" ;; esac ;; dgux*) case $cc_basename in ec++*) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' ;; ghcx*) # Green Hills C++ Compiler _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic' ;; *) ;; esac ;; freebsd* | dragonfly*) # FreeBSD uses GNU C++ ;; hpux9* | hpux10* | hpux11*) case $cc_basename in CC*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive' if test "$host_cpu" != ia64; then _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z' fi ;; aCC*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive' case $host_cpu in hppa*64*|ia64*) # +Z the default ;; *) _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z' ;; esac ;; *) ;; esac ;; interix*) # This is c89, which is MS Visual C++ (no shared libs) # Anyone wants to do a port? ;; irix5* | irix6* | nonstopux*) case $cc_basename in CC*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' # CC pic flag -KPIC is the default. 
;; *) ;; esac ;; linux* | k*bsd*-gnu | kopensolaris*-gnu) case $cc_basename in KCC*) # KAI C++ Compiler _LT_TAGVAR(lt_prog_compiler_wl, $1)='--backend -Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' ;; ecpc* ) # old Intel C++ for x86_64 which still supported -KPIC. _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' ;; icpc* ) # Intel C++, used to be incompatible with GCC. # ICC 10 doesn't accept -KPIC any more. _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' ;; pgCC* | pgcpp*) # Portland Group C++ compiler _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fpic' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; cxx*) # Compaq C++ # Make sure the PIC flag is empty. It appears that all Alpha # Linux and Compaq Tru64 Unix objects are PIC. _LT_TAGVAR(lt_prog_compiler_pic, $1)= _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' ;; xlc* | xlC*) # IBM XL 8.0 on PPC _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-qpic' _LT_TAGVAR(lt_prog_compiler_static, $1)='-qstaticlink' ;; *) case `$CC -V 2>&1 | sed 5q` in *Sun\ C*) # Sun C++ 5.9 _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ' ;; esac ;; esac ;; lynxos*) ;; m88k*) ;; mvs*) case $cc_basename in cxx*) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-W c,exportall' ;; *) ;; esac ;; netbsd* | netbsdelf*-gnu) ;; *qnx* | *nto*) # QNX uses GNU C++, but need to define -shared option too, otherwise # it will coredump. _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared' ;; osf3* | osf4* | osf5*) case $cc_basename in KCC*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='--backend -Wl,' ;; RCC*) # Rational C++ 2.4.1 _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic' ;; cxx*) # Digital/Compaq C++ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' # Make sure the PIC flag is empty. It appears that all Alpha # Linux and Compaq Tru64 Unix objects are PIC. _LT_TAGVAR(lt_prog_compiler_pic, $1)= _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' ;; *) ;; esac ;; psos*) ;; solaris*) case $cc_basename in CC*) # Sun C++ 4.2, 5.x and Centerline C++ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ' ;; gcx*) # Green Hills C++ Compiler _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC' ;; *) ;; esac ;; sunos4*) case $cc_basename in CC*) # Sun C++ 4.x _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; lcc*) # Lucid _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic' ;; *) ;; esac ;; sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*) case $cc_basename in CC*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; esac ;; tandem*) case $cc_basename in NCC*) # NonStop-UX NCC 3.20 _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' ;; *) ;; esac ;; vxworks*) ;; *) _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no ;; esac fi ], [ if test "$GCC" = yes; then _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' case $host_os in aix*) # All AIX code is PIC. 
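# Illustrative sketch (not part of the macros): the "does the PIC flag work" test
# applied further down via _LT_COMPILER_OPTION boils down to compiling a trivial
# file with the candidate flag and treating unexpected stderr output as failure.
# Compiler name and flags here are assumptions (gcc with -fPIC -DPIC):
cat > conftest-pic.c <<'EOF'
int conftest_fn(void) { return 0; }
EOF
if gcc -fPIC -DPIC -c conftest-pic.c -o conftest-pic.o 2>conftest-pic.err \
   && ! test -s conftest-pic.err; then
  echo "-fPIC -DPIC accepted cleanly"
else
  echo "PIC flag rejected or warned about; libtool would discard it"
fi
rm -f conftest-pic.c conftest-pic.o conftest-pic.err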
if test "$host_cpu" = ia64; then # AIX 5 now supports IA64 processor _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' fi ;; amigaos*) case $host_cpu in powerpc) # see comment about AmigaOS4 .so support _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' ;; m68k) # FIXME: we need at least 68020 code to build shared libraries, but # adding the `-m68020' flag to GCC prevents building anything better, # like `-m68040'. _LT_TAGVAR(lt_prog_compiler_pic, $1)='-m68020 -resident32 -malways-restore-a4' ;; esac ;; beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*) # PIC is the default for these OSes. ;; mingw* | cygwin* | pw32* | os2* | cegcc*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). # Although the cygwin gcc ignores -fPIC, still need this for old-style # (--disable-auto-import) libraries m4_if([$1], [GCJ], [], [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT']) ;; darwin* | rhapsody*) # PIC is the default on this platform # Common symbols not allowed in MH_DYLIB files _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fno-common' ;; hpux*) # PIC is the default for 64-bit PA HP-UX, but not for 32-bit # PA HP-UX. On IA64 HP-UX, PIC is the default but the pic flag # sets the default TLS model and affects inlining. case $host_cpu in hppa*64*) # +Z the default ;; *) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' ;; esac ;; interix[[3-9]]*) # Interix 3.x gcc -fpic/-fPIC options generate broken code. # Instead, we relocate shared libraries at runtime. ;; msdosdjgpp*) # Just because we use GCC doesn't mean we suddenly get shared libraries # on systems that don't support them. _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no enable_shared=no ;; *nto* | *qnx*) # QNX uses GNU C++, but need to define -shared option too, otherwise # it will coredump. _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared' ;; sysv4*MP*) if test -d /usr/nec; then _LT_TAGVAR(lt_prog_compiler_pic, $1)=-Kconform_pic fi ;; *) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' ;; esac else # PORTME Check for flag to pass linker flags through the system compiler. case $host_os in aix*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' if test "$host_cpu" = ia64; then # AIX 5 now supports IA64 processor _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' else _LT_TAGVAR(lt_prog_compiler_static, $1)='-bnso -bI:/lib/syscalls.exp' fi ;; mingw* | cygwin* | pw32* | os2* | cegcc*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). m4_if([$1], [GCJ], [], [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT']) ;; hpux9* | hpux10* | hpux11*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but # not for PA HP-UX. case $host_cpu in hppa*64*|ia64*) # +Z the default ;; *) _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z' ;; esac # Is there a better lt_prog_compiler_static that works with the bundled CC? _LT_TAGVAR(lt_prog_compiler_static, $1)='${wl}-a ${wl}archive' ;; irix5* | irix6* | nonstopux*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' # PIC (with -KPIC) is the default. _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' ;; linux* | k*bsd*-gnu | kopensolaris*-gnu) case $cc_basename in # old Intel for x86_64 which still supported -KPIC. ecc*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' ;; # icc used to be incompatible with GCC. 
# ICC 10 doesn't accept -KPIC any more. icc* | ifort*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' ;; # Lahey Fortran 8.1. lf95*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='--shared' _LT_TAGVAR(lt_prog_compiler_static, $1)='--static' ;; pgcc* | pgf77* | pgf90* | pgf95*) # Portland Group compilers (*not* the Pentium gcc compiler, # which looks to be a dead project) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fpic' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; ccc*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' # All Alpha code is PIC. _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' ;; xl*) # IBM XL C 8.0/Fortran 10.1 on PPC _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-qpic' _LT_TAGVAR(lt_prog_compiler_static, $1)='-qstaticlink' ;; *) case `$CC -V 2>&1 | sed 5q` in *Sun\ C*) # Sun C 5.9 _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' ;; *Sun\ F*) # Sun Fortran 8.3 passes all unrecognized flags to the linker _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' _LT_TAGVAR(lt_prog_compiler_wl, $1)='' ;; esac ;; esac ;; newsos6) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; *nto* | *qnx*) # QNX uses GNU C++, but need to define -shared option too, otherwise # it will coredump. _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared' ;; osf3* | osf4* | osf5*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' # All OSF/1 code is PIC. _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' ;; rdos*) _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' ;; solaris*) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' case $cc_basename in f77* | f90* | f95*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ';; *) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,';; esac ;; sunos4*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; sysv4 | sysv4.2uw2* | sysv4.3*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; sysv4*MP*) if test -d /usr/nec ;then _LT_TAGVAR(lt_prog_compiler_pic, $1)='-Kconform_pic' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' fi ;; sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; unicos*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no ;; uts4*) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; *) _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no ;; esac fi ]) case $host_os in # For platforms which do not support PIC, -DPIC is meaningless: *djgpp*) _LT_TAGVAR(lt_prog_compiler_pic, $1)= ;; *) _LT_TAGVAR(lt_prog_compiler_pic, $1)="$_LT_TAGVAR(lt_prog_compiler_pic, $1)@&t@m4_if([$1],[],[ -DPIC],[m4_if([$1],[CXX],[ -DPIC],[])])" ;; esac AC_MSG_RESULT([$_LT_TAGVAR(lt_prog_compiler_pic, $1)]) _LT_TAGDECL([wl], [lt_prog_compiler_wl], [1], [How to pass a linker flag through the compiler]) # # Check to make sure the PIC flag actually 
works. # if test -n "$_LT_TAGVAR(lt_prog_compiler_pic, $1)"; then _LT_COMPILER_OPTION([if $compiler PIC flag $_LT_TAGVAR(lt_prog_compiler_pic, $1) works], [_LT_TAGVAR(lt_cv_prog_compiler_pic_works, $1)], [$_LT_TAGVAR(lt_prog_compiler_pic, $1)@&t@m4_if([$1],[],[ -DPIC],[m4_if([$1],[CXX],[ -DPIC],[])])], [], [case $_LT_TAGVAR(lt_prog_compiler_pic, $1) in "" | " "*) ;; *) _LT_TAGVAR(lt_prog_compiler_pic, $1)=" $_LT_TAGVAR(lt_prog_compiler_pic, $1)" ;; esac], [_LT_TAGVAR(lt_prog_compiler_pic, $1)= _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no]) fi _LT_TAGDECL([pic_flag], [lt_prog_compiler_pic], [1], [Additional compiler flags for building library objects]) # # Check to make sure the static flag actually works. # wl=$_LT_TAGVAR(lt_prog_compiler_wl, $1) eval lt_tmp_static_flag=\"$_LT_TAGVAR(lt_prog_compiler_static, $1)\" _LT_LINKER_OPTION([if $compiler static flag $lt_tmp_static_flag works], _LT_TAGVAR(lt_cv_prog_compiler_static_works, $1), $lt_tmp_static_flag, [], [_LT_TAGVAR(lt_prog_compiler_static, $1)=]) _LT_TAGDECL([link_static_flag], [lt_prog_compiler_static], [1], [Compiler flag to prevent dynamic linking]) ])# _LT_COMPILER_PIC # _LT_LINKER_SHLIBS([TAGNAME]) # ---------------------------- # See if the linker supports building shared libraries. m4_defun([_LT_LINKER_SHLIBS], [AC_REQUIRE([LT_PATH_LD])dnl AC_REQUIRE([LT_PATH_NM])dnl m4_require([_LT_FILEUTILS_DEFAULTS])dnl m4_require([_LT_DECL_EGREP])dnl m4_require([_LT_DECL_SED])dnl m4_require([_LT_CMD_GLOBAL_SYMBOLS])dnl m4_require([_LT_TAG_COMPILER])dnl AC_MSG_CHECKING([whether the $compiler linker ($LD) supports shared libraries]) m4_if([$1], [CXX], [ _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' case $host_os in aix[[4-9]]*) # If we're using GNU nm, then we don't want the "-C" option. 
# -C means demangle to AIX nm, but means don't demangle with GNU nm if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then _LT_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' else _LT_TAGVAR(export_symbols_cmds, $1)='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' fi ;; pw32*) _LT_TAGVAR(export_symbols_cmds, $1)="$ltdll_cmds" ;; cygwin* | mingw* | cegcc*) _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;/^.*[[ ]]__nm__/s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols' ;; linux* | k*bsd*-gnu) _LT_TAGVAR(link_all_deplibs, $1)=no ;; *) _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' ;; esac _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'] ], [ runpath_var= _LT_TAGVAR(allow_undefined_flag, $1)= _LT_TAGVAR(always_export_symbols, $1)=no _LT_TAGVAR(archive_cmds, $1)= _LT_TAGVAR(archive_expsym_cmds, $1)= _LT_TAGVAR(compiler_needs_object, $1)=no _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no _LT_TAGVAR(export_dynamic_flag_spec, $1)= _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' _LT_TAGVAR(hardcode_automatic, $1)=no _LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_direct_absolute, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)= _LT_TAGVAR(hardcode_libdir_flag_spec_ld, $1)= _LT_TAGVAR(hardcode_libdir_separator, $1)= _LT_TAGVAR(hardcode_minus_L, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported _LT_TAGVAR(inherit_rpath, $1)=no _LT_TAGVAR(link_all_deplibs, $1)=unknown _LT_TAGVAR(module_cmds, $1)= _LT_TAGVAR(module_expsym_cmds, $1)= _LT_TAGVAR(old_archive_from_new_cmds, $1)= _LT_TAGVAR(old_archive_from_expsyms_cmds, $1)= _LT_TAGVAR(thread_safe_flag_spec, $1)= _LT_TAGVAR(whole_archive_flag_spec, $1)= # include_expsyms should be a list of space-separated symbols to be *always* # included in the symbol list _LT_TAGVAR(include_expsyms, $1)= # exclude_expsyms can be an extended regexp of symbols to exclude # it will be wrapped by ` (' and `)$', so one must not match beginning or # end of line. Example: `a|bc|.*d.*' will exclude the symbols `a' and `bc', # as well as any symbol that contains `d'. _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'] # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out # platforms (ab)use it in PIC code, but their linkers get confused if # the symbol is explicitly referenced. Since portable code cannot # rely on this symbol name, it's probably fine to never include it in # preloaded symbol tables. # Exclude shared library initialization/finalization symbols. dnl Note also adjust exclude_expsyms for C++ above. extract_expsyms_cmds= case $host_os in cygwin* | mingw* | pw32* | cegcc*) # FIXME: the MSVC++ port hasn't been tested in a loooong time # When not using gcc, we currently assume that we are using # Microsoft Visual C++. 
if test "$GCC" != yes; then with_gnu_ld=no fi ;; interix*) # we just hope/assume this is gcc and not c89 (= MSVC++) with_gnu_ld=yes ;; openbsd*) with_gnu_ld=no ;; linux* | k*bsd*-gnu) _LT_TAGVAR(link_all_deplibs, $1)=no ;; esac _LT_TAGVAR(ld_shlibs, $1)=yes if test "$with_gnu_ld" = yes; then # If archive_cmds runs LD, not CC, wlarc should be empty wlarc='${wl}' # Set some defaults for GNU ld with shared library support. These # are reset later if shared libraries are not supported. Putting them # here allows them to be overridden if necessary. runpath_var=LD_RUN_PATH _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' # ancient GNU ld didn't support --whole-archive et. al. if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then _LT_TAGVAR(whole_archive_flag_spec, $1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' else _LT_TAGVAR(whole_archive_flag_spec, $1)= fi supports_anon_versioning=no case `$LD -v 2>&1` in *GNU\ gold*) supports_anon_versioning=yes ;; *\ [[01]].* | *\ 2.[[0-9]].* | *\ 2.10.*) ;; # catch versions < 2.11 *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ... *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ... *\ 2.11.*) ;; # other 2.11 versions *) supports_anon_versioning=yes ;; esac # See if GNU ld supports shared libraries. case $host_os in aix[[3-9]]*) # On AIX/PPC, the GNU linker is very broken if test "$host_cpu" != ia64; then _LT_TAGVAR(ld_shlibs, $1)=no cat <<_LT_EOF 1>&2 *** Warning: the GNU linker, at least up to release 2.9.1, is reported *** to be unable to reliably create shared libraries on AIX. *** Therefore, libtool is disabling shared libraries support. If you *** really care for shared libraries, you may want to modify your PATH *** so that a non-GNU linker is found, and then restart. _LT_EOF fi ;; amigaos*) case $host_cpu in powerpc) # see comment about AmigaOS4 .so support _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='' ;; m68k) _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(hardcode_minus_L, $1)=yes ;; esac ;; beos*) if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then _LT_TAGVAR(allow_undefined_flag, $1)=unsupported # Joseph Beckenbach says some releases of gcc # support --undefined. This deserves some investigation. FIXME _LT_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; cygwin* | mingw* | pw32* | cegcc*) # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless, # as there is no search path for DLLs. 
_LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(allow_undefined_flag, $1)=unsupported _LT_TAGVAR(always_export_symbols, $1)=no _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/'\'' | $SED -e '\''/^[[AITW]][[ ]]/s/.*[[ ]]//'\'' | sort | uniq > $export_symbols' if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' # If the export-symbols file already is a .def file (1st line # is EXPORTS), use it as is; otherwise, prepend... _LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then cp $export_symbols $output_objdir/$soname.def; else echo EXPORTS > $output_objdir/$soname.def; cat $export_symbols >> $output_objdir/$soname.def; fi~ $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; interix[[3-9]]*) _LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc. # Instead, shared libraries are loaded at an image base (0x10000000 by # default) and relocated if they conflict, which is a slow very memory # consuming and fragmenting process. To avoid this, we pick a random, # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link # time. Moving up from 0x10000000 also allows more sbrk(2) space. 
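# Worked example (not part of the macro): the image-base expression used in the
# link commands just below always yields a 256 KiB-aligned address in the range
# described above.  For instance, with RANDOM=12345:
#   12345 % 4096 = 57;  57 / 2 = 28;  28 * 262144 = 0x00700000
#   0x00700000 + 1342177280 (0x50000000) = 0x50700000
expr 12345 % 4096 / 2 \* 262144 + 1342177280   # prints 1349517312 (0x50700000)
# The largest possible value is 2047 * 262144 + 0x50000000 = 0x6FFC0000.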
_LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' ;; gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu) tmp_diet=no if test "$host_os" = linux-dietlibc; then case $cc_basename in diet\ *) tmp_diet=yes;; # linux-dietlibc with static linking (!diet-dyn) esac fi if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \ && test "$tmp_diet" = no then tmp_addflag= tmp_sharedflag='-shared' case $cc_basename,$host_cpu in pgcc*) # Portland Group C compiler _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; $ECHO \"$new_convenience\"` ${wl}--no-whole-archive' tmp_addflag=' $pic_flag' ;; pgf77* | pgf90* | pgf95*) # Portland Group f77 and f90 compilers _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; $ECHO \"$new_convenience\"` ${wl}--no-whole-archive' tmp_addflag=' $pic_flag -Mnomain' ;; ecc*,ia64* | icc*,ia64*) # Intel C compiler on ia64 tmp_addflag=' -i_dynamic' ;; efc*,ia64* | ifort*,ia64*) # Intel Fortran compiler on ia64 tmp_addflag=' -i_dynamic -nofor_main' ;; ifc* | ifort*) # Intel Fortran compiler tmp_addflag=' -nofor_main' ;; lf95*) # Lahey Fortran 8.1 _LT_TAGVAR(whole_archive_flag_spec, $1)= tmp_sharedflag='--shared' ;; xl[[cC]]*) # IBM XL C 8.0 on PPC (deal with xlf below) tmp_sharedflag='-qmkshrobj' tmp_addflag= ;; esac case `$CC -V 2>&1 | sed 5q` in *Sun\ C*) # Sun C 5.9 _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; $ECHO \"$new_convenience\"` ${wl}--no-whole-archive' _LT_TAGVAR(compiler_needs_object, $1)=yes tmp_sharedflag='-G' ;; *Sun\ F*) # Sun Fortran 8.3 tmp_sharedflag='-G' ;; esac _LT_TAGVAR(archive_cmds, $1)='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' if test "x$supports_anon_versioning" = xyes; then _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ echo "local: *; };" >> $output_objdir/$libname.ver~ $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib' fi case $cc_basename in xlf*) # IBM XL Fortran 10.1 on PPC cannot create shared libs itself _LT_TAGVAR(whole_archive_flag_spec, $1)='--whole-archive$convenience --no-whole-archive' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)= _LT_TAGVAR(hardcode_libdir_flag_spec_ld, $1)='-rpath $libdir' _LT_TAGVAR(archive_cmds, $1)='$LD -shared $libobjs $deplibs $compiler_flags -soname $soname -o $lib' if test "x$supports_anon_versioning" = xyes; then _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ echo "local: *; };" >> $output_objdir/$libname.ver~ $LD 
-shared $libobjs $deplibs $compiler_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' fi ;; esac else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; netbsd* | netbsdelf*-gnu) if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' wlarc= else _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' fi ;; solaris*) if $LD -v 2>&1 | $GREP 'BFD 2\.8' > /dev/null; then _LT_TAGVAR(ld_shlibs, $1)=no cat <<_LT_EOF 1>&2 *** Warning: The releases 2.8.* of the GNU linker cannot reliably *** create shared libraries on Solaris systems. Therefore, libtool *** is disabling shared libraries support. We urge you to upgrade GNU *** binutils to release 2.9.1 or newer. Another option is to modify *** your PATH or compiler configuration so that the native linker is *** used, and then restart. _LT_EOF elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*) case `$LD -v 2>&1` in *\ [[01]].* | *\ 2.[[0-9]].* | *\ 2.1[[0-5]].*) _LT_TAGVAR(ld_shlibs, $1)=no cat <<_LT_EOF 1>&2 *** Warning: Releases of the GNU linker prior to 2.16.91.0.3 can not *** reliably create shared libraries on SCO systems. Therefore, libtool *** is disabling shared libraries support. We urge you to upgrade GNU *** binutils to release 2.16.91.0.3 or newer. Another option is to modify *** your PATH or compiler configuration so that the native linker is *** used, and then restart. _LT_EOF ;; *) # For security reasons, it is highly recommended that you always # use absolute paths for naming shared libraries, and exclude the # DT_RUNPATH tag from executables and libraries. But doing so # requires that you compile everything twice, which is a pain. 
if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; esac ;; sunos4*) _LT_TAGVAR(archive_cmds, $1)='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags' wlarc= _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; *) if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; esac if test "$_LT_TAGVAR(ld_shlibs, $1)" = no; then runpath_var= _LT_TAGVAR(hardcode_libdir_flag_spec, $1)= _LT_TAGVAR(export_dynamic_flag_spec, $1)= _LT_TAGVAR(whole_archive_flag_spec, $1)= fi else # PORTME fill in a description of your system's linker (not GNU ld) case $host_os in aix3*) _LT_TAGVAR(allow_undefined_flag, $1)=unsupported _LT_TAGVAR(always_export_symbols, $1)=yes _LT_TAGVAR(archive_expsym_cmds, $1)='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib $output_objdir/$soname' # Note: this linker hardcodes the directories in LIBPATH if there # are no directories specified by -L. _LT_TAGVAR(hardcode_minus_L, $1)=yes if test "$GCC" = yes && test -z "$lt_prog_compiler_static"; then # Neither direct hardcoding nor static linking is supported with a # broken collect2. _LT_TAGVAR(hardcode_direct, $1)=unsupported fi ;; aix[[4-9]]*) if test "$host_cpu" = ia64; then # On IA64, the linker does run time linking by default, so we don't # have to do anything special. aix_use_runtimelinking=no exp_sym_flag='-Bexport' no_entry_flag="" else # If we're using GNU nm, then we don't want the "-C" option. # -C means demangle to AIX nm, but means don't demangle with GNU nm if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then _LT_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' else _LT_TAGVAR(export_symbols_cmds, $1)='$NM -BCpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B")) && ([substr](\$ 3,1,1) != ".")) { print \$ 3 } }'\'' | sort -u > $export_symbols' fi aix_use_runtimelinking=no # Test if we are trying to use run time linking or normal # AIX style linking. If -brtl is somewhere in LDFLAGS, we # need to do runtime linking. case $host_os in aix4.[[23]]|aix4.[[23]].*|aix[[5-9]]*) for ld_flag in $LDFLAGS; do if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then aix_use_runtimelinking=yes break fi done ;; esac exp_sym_flag='-bexport' no_entry_flag='-bnoentry' fi # When large executables or shared objects are built, AIX ld can # have problems creating the table of contents. If linking a library # or program results in "error TOC overflow" add -mminimal-toc to # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS. 
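# Illustrative sketch (not part of the macro): acting on the advice above when an
# AIX link stops with "TOC overflow".  The flags are the ones named in the comment;
# the configure invocation itself is only an example:
./configure CFLAGS="-g -O2 -mminimal-toc"
# or, if that is still not enough:
./configure LDFLAGS="-Wl,-bbigtoc"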
_LT_TAGVAR(archive_cmds, $1)='' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_direct_absolute, $1)=yes _LT_TAGVAR(hardcode_libdir_separator, $1)=':' _LT_TAGVAR(link_all_deplibs, $1)=yes _LT_TAGVAR(file_list_spec, $1)='${wl}-f,' if test "$GCC" = yes; then case $host_os in aix4.[[012]]|aix4.[[012]].*) # We only want to do this on AIX 4.2 and lower, the check # below for broken collect2 doesn't work under 4.3+ collect2name=`${CC} -print-prog-name=collect2` if test -f "$collect2name" && strings "$collect2name" | $GREP resolve_lib_name >/dev/null then # We have reworked collect2 : else # We have old collect2 _LT_TAGVAR(hardcode_direct, $1)=unsupported # It fails to find uninstalled libraries when the uninstalled # path is not listed in the libpath. Setting hardcode_minus_L # to unsupported forces relinking _LT_TAGVAR(hardcode_minus_L, $1)=yes _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)= fi ;; esac shared_flag='-shared' if test "$aix_use_runtimelinking" = yes; then shared_flag="$shared_flag "'${wl}-G' fi _LT_TAGVAR(link_all_deplibs, $1)=no else # not using gcc if test "$host_cpu" = ia64; then # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release # chokes on -Wl,-G. The following line is correct: shared_flag='-G' else if test "$aix_use_runtimelinking" = yes; then shared_flag='${wl}-G' else shared_flag='${wl}-bM:SRE' fi fi fi _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-bexpall' # It seems that -bexpall does not export symbols beginning with # underscore (_), so it is better to generate a list of symbols to export. _LT_TAGVAR(always_export_symbols, $1)=yes if test "$aix_use_runtimelinking" = yes; then # Warning - without using the other runtime loading flags (-brtl), # -berok will link without error, but may produce a broken library. _LT_TAGVAR(allow_undefined_flag, $1)='-berok' # Determine the default libpath from the value encoded in an # empty executable. _LT_SYS_MODULE_PATH_AIX _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then $ECHO "X${wl}${allow_undefined_flag}" | $Xsed; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" else if test "$host_cpu" = ia64; then _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $libdir:/usr/lib:/lib' _LT_TAGVAR(allow_undefined_flag, $1)="-z nodefs" _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols" else # Determine the default libpath from the value encoded in an # empty executable. _LT_SYS_MODULE_PATH_AIX _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" # Warning - without using the other run time loading flags, # -berok will link without error, but may produce a broken library. _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-bernotok' _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-berok' # Exported symbols can be pulled into shared objects from archives _LT_TAGVAR(whole_archive_flag_spec, $1)='$convenience' _LT_TAGVAR(archive_cmds_need_lc, $1)=yes # This is similar to how AIX traditionally builds its shared libraries. 
_LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname' fi fi ;; amigaos*) case $host_cpu in powerpc) # see comment about AmigaOS4 .so support _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='' ;; m68k) _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(hardcode_minus_L, $1)=yes ;; esac ;; bsdi[[45]]*) _LT_TAGVAR(export_dynamic_flag_spec, $1)=-rdynamic ;; cygwin* | mingw* | pw32* | cegcc*) # When not using gcc, we currently assume that we are using # Microsoft Visual C++. # hardcode_libdir_flag_spec is actually meaningless, as there is # no search path for DLLs. _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' ' _LT_TAGVAR(allow_undefined_flag, $1)=unsupported # Tell ltmain to make .lib files, not .a files. libext=lib # Tell ltmain to make .dll files, not .so files. shrext_cmds=".dll" # FIXME: Setting linknames here is a bad hack. _LT_TAGVAR(archive_cmds, $1)='$CC -o $lib $libobjs $compiler_flags `$ECHO "X$deplibs" | $Xsed -e '\''s/ -lc$//'\''` -link -dll~linknames=' # The linker will automatically build a .lib file if we build a DLL. _LT_TAGVAR(old_archive_from_new_cmds, $1)='true' # FIXME: Should let the user specify the lib program. _LT_TAGVAR(old_archive_cmds, $1)='lib -OUT:$oldlib$oldobjs$old_deplibs' _LT_TAGVAR(fix_srcfile_path, $1)='`cygpath -w "$srcfile"`' _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes ;; darwin* | rhapsody*) _LT_DARWIN_LINKER_FEATURES($1) ;; dgux*) _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; freebsd1*) _LT_TAGVAR(ld_shlibs, $1)=no ;; # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor # support. Future versions do this automatically, but an explicit c++rt0.o # does not break anything, and helps significantly (at the cost of a little # extra space). freebsd2.2*) _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; # Unfortunately, older versions of FreeBSD 2 do not have this feature. freebsd2*) _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_minus_L, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; # FreeBSD 3 and greater uses gcc -shared to do shared libraries. 
freebsd* | dragonfly*) _LT_TAGVAR(archive_cmds, $1)='$CC -shared -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; hpux9*) if test "$GCC" = yes; then _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' else _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' fi _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: _LT_TAGVAR(hardcode_direct, $1)=yes # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. _LT_TAGVAR(hardcode_minus_L, $1)=yes _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' ;; hpux10*) if test "$GCC" = yes -a "$with_gnu_ld" = no; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' else _LT_TAGVAR(archive_cmds, $1)='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' fi if test "$with_gnu_ld" = no; then _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir' _LT_TAGVAR(hardcode_libdir_flag_spec_ld, $1)='+b $libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_direct_absolute, $1)=yes _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. _LT_TAGVAR(hardcode_minus_L, $1)=yes fi ;; hpux11*) if test "$GCC" = yes -a "$with_gnu_ld" = no; then case $host_cpu in hppa*64*) _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' ;; ia64*) _LT_TAGVAR(archive_cmds, $1)='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' ;; *) _LT_TAGVAR(archive_cmds, $1)='$CC -shared -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' ;; esac else case $host_cpu in hppa*64*) _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' ;; ia64*) _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' ;; *) _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $libobjs $deplibs $compiler_flags' ;; esac fi if test "$with_gnu_ld" = no; then _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: case $host_cpu in hppa*64*|ia64*) _LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; *) _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_direct_absolute, $1)=yes _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. 
_LT_TAGVAR(hardcode_minus_L, $1)=yes ;; esac fi ;; irix5* | irix6* | nonstopux*) if test "$GCC" = yes; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' # Try to use the -exported_symbol ld option, if it does not # work, assume that -exports_file does not work either and # implicitly export all symbols. save_LDFLAGS="$LDFLAGS" LDFLAGS="$LDFLAGS -shared ${wl}-exported_symbol ${wl}foo ${wl}-update_registry ${wl}/dev/null" AC_LINK_IFELSE(int foo(void) {}, _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations ${wl}-exports_file ${wl}$export_symbols -o $lib' ) LDFLAGS="$save_LDFLAGS" else _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -exports_file $export_symbols -o $lib' fi _LT_TAGVAR(archive_cmds_need_lc, $1)='no' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: _LT_TAGVAR(inherit_rpath, $1)=yes _LT_TAGVAR(link_all_deplibs, $1)=yes ;; netbsd* | netbsdelf*-gnu) if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' # a.out else _LT_TAGVAR(archive_cmds, $1)='$LD -shared -o $lib $libobjs $deplibs $linker_flags' # ELF fi _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; newsos6) _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; *nto* | *qnx*) ;; openbsd*) if test -f /usr/libexec/ld.so; then _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no _LT_TAGVAR(hardcode_direct_absolute, $1)=yes if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags ${wl}-retain-symbols-file,$export_symbols' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' else case $host_os in openbsd[[01]].* | openbsd2.[[0-7]] | openbsd2.[[0-7]].*) _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' ;; *) _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' ;; esac fi else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; os2*) _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' 
_LT_TAGVAR(hardcode_minus_L, $1)=yes _LT_TAGVAR(allow_undefined_flag, $1)=unsupported _LT_TAGVAR(archive_cmds, $1)='$ECHO "LIBRARY $libname INITINSTANCE" > $output_objdir/$libname.def~$ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~$ECHO DATA >> $output_objdir/$libname.def~$ECHO " SINGLE NONSHARED" >> $output_objdir/$libname.def~$ECHO EXPORTS >> $output_objdir/$libname.def~emxexp $libobjs >> $output_objdir/$libname.def~$CC -Zdll -Zcrtdll -o $lib $libobjs $deplibs $compiler_flags $output_objdir/$libname.def' _LT_TAGVAR(old_archive_from_new_cmds, $1)='emximp -o $output_objdir/$libname.a $output_objdir/$libname.def' ;; osf3*) if test "$GCC" = yes; then _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*' _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' else _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*' _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib' fi _LT_TAGVAR(archive_cmds_need_lc, $1)='no' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: ;; osf4* | osf5*) # as osf3* with the addition of -msym flag if test "$GCC" = yes; then _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*' _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' else _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*' _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~ $CC -shared${allow_undefined_flag} ${wl}-input ${wl}$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib~$RM $lib.exp' # Both c and cxx compiler support -rpath directly _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir' fi _LT_TAGVAR(archive_cmds_need_lc, $1)='no' _LT_TAGVAR(hardcode_libdir_separator, $1)=: ;; solaris*) _LT_TAGVAR(no_undefined_flag, $1)=' -z defs' if test "$GCC" = yes; then wlarc='${wl}' _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-z ${wl}text ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -shared ${wl}-z ${wl}text ${wl}-M ${wl}$lib.exp ${wl}-h ${wl}$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' else case `$CC -V 2>&1` in *"Compilers 5.0"*) wlarc='' _LT_TAGVAR(archive_cmds, $1)='$LD -G${allow_undefined_flag} -h $soname -o $lib 
$libobjs $deplibs $linker_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $LD -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp' ;; *) wlarc='${wl}' _LT_TAGVAR(archive_cmds, $1)='$CC -G${allow_undefined_flag} -h $soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -G${allow_undefined_flag} -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' ;; esac fi _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' _LT_TAGVAR(hardcode_shlibpath_var, $1)=no case $host_os in solaris2.[[0-5]] | solaris2.[[0-5]].*) ;; *) # The compiler driver will combine and reorder linker options, # but understands `-z linker_flag'. GCC discards it without `$wl', # but is careful enough not to reorder. # Supported since Solaris 2.6 (maybe 2.5.1?) if test "$GCC" = yes; then _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract' else _LT_TAGVAR(whole_archive_flag_spec, $1)='-z allextract$convenience -z defaultextract' fi ;; esac _LT_TAGVAR(link_all_deplibs, $1)=yes ;; sunos4*) if test "x$host_vendor" = xsequent; then # Use $CC to link under sequent, because it throws in some extra .o # files that make .init and .fini sections work. _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h $soname -o $lib $libobjs $deplibs $compiler_flags' else _LT_TAGVAR(archive_cmds, $1)='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags' fi _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_minus_L, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; sysv4) case $host_vendor in sni) _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(hardcode_direct, $1)=yes # is this really true??? ;; siemens) ## LD is ld it makes a PLAMLIB ## CC just makes a GrossModule. 
_LT_TAGVAR(archive_cmds, $1)='$LD -G -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(reload_cmds, $1)='$CC -r -o $output$reload_objs' _LT_TAGVAR(hardcode_direct, $1)=no ;; motorola) _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(hardcode_direct, $1)=no #Motorola manual says yes, but my tests say they lie ;; esac runpath_var='LD_RUN_PATH' _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; sysv4.3*) _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(hardcode_shlibpath_var, $1)=no _LT_TAGVAR(export_dynamic_flag_spec, $1)='-Bexport' ;; sysv4*MP*) if test -d /usr/nec; then _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(hardcode_shlibpath_var, $1)=no runpath_var=LD_RUN_PATH hardcode_runpath_var=yes _LT_TAGVAR(ld_shlibs, $1)=yes fi ;; sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[[01]].[[10]]* | unixware7* | sco3.2v5.0.[[024]]*) _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text' _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=no runpath_var='LD_RUN_PATH' if test "$GCC" = yes; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' else _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' fi ;; sysv5* | sco3.2v5* | sco5v6*) # Note: We can NOT use -z defs as we might desire, because we do not # link with -lc, and that would cause any symbols used from libc to # always be unresolved, which means just about no library would # ever link correctly. If we're not using GNU ld we use -z text # though, which does catch some bad symbols but isn't as heavy-handed # as -z defs. 
_LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text' _LT_TAGVAR(allow_undefined_flag, $1)='${wl}-z,nodefs' _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R,$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=':' _LT_TAGVAR(link_all_deplibs, $1)=yes _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-Bexport' runpath_var='LD_RUN_PATH' if test "$GCC" = yes; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' else _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' fi ;; uts4*) _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; *) _LT_TAGVAR(ld_shlibs, $1)=no ;; esac if test x$host_vendor = xsni; then case $host in sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*) _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-Blargedynsym' ;; esac fi fi ]) AC_MSG_RESULT([$_LT_TAGVAR(ld_shlibs, $1)]) test "$_LT_TAGVAR(ld_shlibs, $1)" = no && can_build_shared=no _LT_TAGVAR(with_gnu_ld, $1)=$with_gnu_ld _LT_DECL([], [libext], [0], [Old archive suffix (normally "a")])dnl _LT_DECL([], [shrext_cmds], [1], [Shared library suffix (normally ".so")])dnl _LT_DECL([], [extract_expsyms_cmds], [2], [The commands to extract the exported symbol list from a shared archive]) # # Do we need to explicitly link libc? # case "x$_LT_TAGVAR(archive_cmds_need_lc, $1)" in x|xyes) # Assume -lc should be added _LT_TAGVAR(archive_cmds_need_lc, $1)=yes if test "$enable_shared" = yes && test "$GCC" = yes; then case $_LT_TAGVAR(archive_cmds, $1) in *'~'*) # FIXME: we may have to deal with multi-command sequences. ;; '$CC '*) # Test whether the compiler implicitly links with -lc since on some # systems, -lgcc has to come before -lc. If gcc already passes -lc # to ld, don't add -lc before -lgcc. AC_MSG_CHECKING([whether -lc should be explicitly linked in]) $RM conftest* echo "$lt_simple_compile_test_code" > conftest.$ac_ext if AC_TRY_EVAL(ac_compile) 2>conftest.err; then soname=conftest lib=conftest libobjs=conftest.$ac_objext deplibs= wl=$_LT_TAGVAR(lt_prog_compiler_wl, $1) pic_flag=$_LT_TAGVAR(lt_prog_compiler_pic, $1) compiler_flags=-v linker_flags=-v verstring= output_objdir=. 
libname=conftest lt_save_allow_undefined_flag=$_LT_TAGVAR(allow_undefined_flag, $1) _LT_TAGVAR(allow_undefined_flag, $1)= if AC_TRY_EVAL(_LT_TAGVAR(archive_cmds, $1) 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1) then _LT_TAGVAR(archive_cmds_need_lc, $1)=no else _LT_TAGVAR(archive_cmds_need_lc, $1)=yes fi _LT_TAGVAR(allow_undefined_flag, $1)=$lt_save_allow_undefined_flag else cat conftest.err 1>&5 fi $RM conftest* AC_MSG_RESULT([$_LT_TAGVAR(archive_cmds_need_lc, $1)]) ;; esac fi ;; esac _LT_TAGDECL([build_libtool_need_lc], [archive_cmds_need_lc], [0], [Whether or not to add -lc for building shared libraries]) _LT_TAGDECL([allow_libtool_libs_with_static_runtimes], [enable_shared_with_static_runtimes], [0], [Whether or not to disallow shared libs when runtime libs are static]) _LT_TAGDECL([], [export_dynamic_flag_spec], [1], [Compiler flag to allow reflexive dlopens]) _LT_TAGDECL([], [whole_archive_flag_spec], [1], [Compiler flag to generate shared objects directly from archives]) _LT_TAGDECL([], [compiler_needs_object], [1], [Whether the compiler copes with passing no objects directly]) _LT_TAGDECL([], [old_archive_from_new_cmds], [2], [Create an old-style archive from a shared archive]) _LT_TAGDECL([], [old_archive_from_expsyms_cmds], [2], [Create a temporary old-style archive to link instead of a shared archive]) _LT_TAGDECL([], [archive_cmds], [2], [Commands used to build a shared archive]) _LT_TAGDECL([], [archive_expsym_cmds], [2]) _LT_TAGDECL([], [module_cmds], [2], [Commands used to build a loadable module if different from building a shared archive.]) _LT_TAGDECL([], [module_expsym_cmds], [2]) _LT_TAGDECL([], [with_gnu_ld], [1], [Whether we are building with GNU ld or not]) _LT_TAGDECL([], [allow_undefined_flag], [1], [Flag that allows shared libraries with undefined symbols to be built]) _LT_TAGDECL([], [no_undefined_flag], [1], [Flag that enforces no undefined symbols]) _LT_TAGDECL([], [hardcode_libdir_flag_spec], [1], [Flag to hardcode $libdir into a binary during linking. This must work even if $libdir does not exist]) _LT_TAGDECL([], [hardcode_libdir_flag_spec_ld], [1], [[If ld is used when linking, flag to hardcode $libdir into a binary during linking. 
This must work even if $libdir does not exist]]) _LT_TAGDECL([], [hardcode_libdir_separator], [1], [Whether we need a single "-rpath" flag with a separated argument]) _LT_TAGDECL([], [hardcode_direct], [0], [Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes DIR into the resulting binary]) _LT_TAGDECL([], [hardcode_direct_absolute], [0], [Set to "yes" if using DIR/libNAME${shared_ext} during linking hardcodes DIR into the resulting binary and the resulting library dependency is "absolute", i.e impossible to change by setting ${shlibpath_var} if the library is relocated]) _LT_TAGDECL([], [hardcode_minus_L], [0], [Set to "yes" if using the -LDIR flag during linking hardcodes DIR into the resulting binary]) _LT_TAGDECL([], [hardcode_shlibpath_var], [0], [Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR into the resulting binary]) _LT_TAGDECL([], [hardcode_automatic], [0], [Set to "yes" if building a shared library automatically hardcodes DIR into the library and all subsequent libraries and executables linked against it]) _LT_TAGDECL([], [inherit_rpath], [0], [Set to yes if linker adds runtime paths of dependent libraries to runtime path list]) _LT_TAGDECL([], [link_all_deplibs], [0], [Whether libtool must link a program against all its dependency libraries]) _LT_TAGDECL([], [fix_srcfile_path], [1], [Fix the shell variable $srcfile for the compiler]) _LT_TAGDECL([], [always_export_symbols], [0], [Set to "yes" if exported symbols are required]) _LT_TAGDECL([], [export_symbols_cmds], [2], [The commands to list exported symbols]) _LT_TAGDECL([], [exclude_expsyms], [1], [Symbols that should not be listed in the preloaded symbols]) _LT_TAGDECL([], [include_expsyms], [1], [Symbols that must always be exported]) _LT_TAGDECL([], [prelink_cmds], [2], [Commands necessary for linking programs (against libraries) with templates]) _LT_TAGDECL([], [file_list_spec], [1], [Specify filename containing input files]) dnl FIXME: Not yet implemented dnl _LT_TAGDECL([], [thread_safe_flag_spec], [1], dnl [Compiler flag to generate thread safe objects]) ])# _LT_LINKER_SHLIBS # _LT_LANG_C_CONFIG([TAG]) # ------------------------ # Ensure that the configuration variables for a C compiler are suitably # defined. These variables are subsequently used by _LT_CONFIG to write # the compiler configuration to `libtool'. m4_defun([_LT_LANG_C_CONFIG], [m4_require([_LT_DECL_EGREP])dnl lt_save_CC="$CC" AC_LANG_PUSH(C) # Source file extension for C test sources. ac_ext=c # Object file extension for compiled C test sources. objext=o _LT_TAGVAR(objext, $1)=$objext # Code to be used in simple compile tests lt_simple_compile_test_code="int some_variable = 0;" # Code to be used in simple link tests lt_simple_link_test_code='int main(){return(0);}' _LT_TAG_COMPILER # Save the default compiler, since it gets overwritten when the other # tags are being tested, and _LT_TAGVAR(compiler, []) is a NOP. 
compiler_DEFAULT=$CC # save warnings/boilerplate of simple test code _LT_COMPILER_BOILERPLATE _LT_LINKER_BOILERPLATE if test -n "$compiler"; then _LT_COMPILER_NO_RTTI($1) _LT_COMPILER_PIC($1) _LT_COMPILER_C_O($1) _LT_COMPILER_FILE_LOCKS($1) _LT_LINKER_SHLIBS($1) _LT_SYS_DYNAMIC_LINKER($1) _LT_LINKER_HARDCODE_LIBPATH($1) LT_SYS_DLOPEN_SELF _LT_CMD_STRIPLIB # Report which library types will actually be built AC_MSG_CHECKING([if libtool supports shared libraries]) AC_MSG_RESULT([$can_build_shared]) AC_MSG_CHECKING([whether to build shared libraries]) test "$can_build_shared" = "no" && enable_shared=no # On AIX, shared libraries and static libraries use the same namespace, and # are all built from PIC. case $host_os in aix3*) test "$enable_shared" = yes && enable_static=no if test -n "$RANLIB"; then archive_cmds="$archive_cmds~\$RANLIB \$lib" postinstall_cmds='$RANLIB $lib' fi ;; aix[[4-9]]*) if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then test "$enable_shared" = yes && enable_static=no fi ;; esac AC_MSG_RESULT([$enable_shared]) AC_MSG_CHECKING([whether to build static libraries]) # Make sure either enable_shared or enable_static is yes. test "$enable_shared" = yes || enable_static=yes AC_MSG_RESULT([$enable_static]) _LT_CONFIG($1) fi AC_LANG_POP CC="$lt_save_CC" ])# _LT_LANG_C_CONFIG # _LT_PROG_CXX # ------------ # Since AC_PROG_CXX is broken, in that it returns g++ if there is no c++ # compiler, we have our own version here. m4_defun([_LT_PROG_CXX], [ pushdef([AC_MSG_ERROR], [_lt_caught_CXX_error=yes]) AC_PROG_CXX if test -n "$CXX" && ( test "X$CXX" != "Xno" && ( (test "X$CXX" = "Xg++" && `g++ -v >/dev/null 2>&1` ) || (test "X$CXX" != "Xg++"))) ; then AC_PROG_CXXCPP else _lt_caught_CXX_error=yes fi popdef([AC_MSG_ERROR]) ])# _LT_PROG_CXX dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([_LT_PROG_CXX], []) # _LT_LANG_CXX_CONFIG([TAG]) # -------------------------- # Ensure that the configuration variables for a C++ compiler are suitably # defined. These variables are subsequently used by _LT_CONFIG to write # the compiler configuration to `libtool'. m4_defun([_LT_LANG_CXX_CONFIG], [AC_REQUIRE([_LT_PROG_CXX])dnl m4_require([_LT_FILEUTILS_DEFAULTS])dnl m4_require([_LT_DECL_EGREP])dnl AC_LANG_PUSH(C++) _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(allow_undefined_flag, $1)= _LT_TAGVAR(always_export_symbols, $1)=no _LT_TAGVAR(archive_expsym_cmds, $1)= _LT_TAGVAR(compiler_needs_object, $1)=no _LT_TAGVAR(export_dynamic_flag_spec, $1)= _LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_direct_absolute, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)= _LT_TAGVAR(hardcode_libdir_flag_spec_ld, $1)= _LT_TAGVAR(hardcode_libdir_separator, $1)= _LT_TAGVAR(hardcode_minus_L, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported _LT_TAGVAR(hardcode_automatic, $1)=no _LT_TAGVAR(inherit_rpath, $1)=no _LT_TAGVAR(module_cmds, $1)= _LT_TAGVAR(module_expsym_cmds, $1)= _LT_TAGVAR(link_all_deplibs, $1)=unknown _LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds _LT_TAGVAR(no_undefined_flag, $1)= _LT_TAGVAR(whole_archive_flag_spec, $1)= _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no # Source file extension for C++ test sources. ac_ext=cpp # Object file extension for compiled C++ test sources. objext=o _LT_TAGVAR(objext, $1)=$objext # No sense in running all these tests if we already determined that # the CXX compiler isn't working. 
Some variables (like enable_shared) # are currently assumed to apply to all compilers on this platform, # and will be corrupted by setting them based on a non-working compiler. if test "$_lt_caught_CXX_error" != yes; then # Code to be used in simple compile tests lt_simple_compile_test_code="int some_variable = 0;" # Code to be used in simple link tests lt_simple_link_test_code='int main(int, char *[[]]) { return(0); }' # ltmain only uses $CC for tagged configurations so make sure $CC is set. _LT_TAG_COMPILER # save warnings/boilerplate of simple test code _LT_COMPILER_BOILERPLATE _LT_LINKER_BOILERPLATE # Allow CC to be a program name with arguments. lt_save_CC=$CC lt_save_LD=$LD lt_save_GCC=$GCC GCC=$GXX lt_save_with_gnu_ld=$with_gnu_ld lt_save_path_LD=$lt_cv_path_LD if test -n "${lt_cv_prog_gnu_ldcxx+set}"; then lt_cv_prog_gnu_ld=$lt_cv_prog_gnu_ldcxx else $as_unset lt_cv_prog_gnu_ld fi if test -n "${lt_cv_path_LDCXX+set}"; then lt_cv_path_LD=$lt_cv_path_LDCXX else $as_unset lt_cv_path_LD fi test -z "${LDCXX+set}" || LD=$LDCXX CC=${CXX-"c++"} compiler=$CC _LT_TAGVAR(compiler, $1)=$CC _LT_CC_BASENAME([$compiler]) if test -n "$compiler"; then # We don't want -fno-exception when compiling C++ code, so set the # no_builtin_flag separately if test "$GXX" = yes; then _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -fno-builtin' else _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)= fi if test "$GXX" = yes; then # Set up default GNU C++ configuration LT_PATH_LD # Check if GNU C++ uses GNU ld as the underlying linker, since the # archiving commands below assume that GNU ld is being used. if test "$with_gnu_ld" = yes; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' # If archive_cmds runs LD, not CC, wlarc should be empty # XXX I think wlarc can be eliminated in ltcf-cxx, but I need to # investigate it a little bit more. (MM) wlarc='${wl}' # ancient GNU ld didn't support --whole-archive et. al. if eval "`$CC -print-prog-name=ld` --help 2>&1" | $GREP 'no-whole-archive' > /dev/null; then _LT_TAGVAR(whole_archive_flag_spec, $1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' else _LT_TAGVAR(whole_archive_flag_spec, $1)= fi else with_gnu_ld=no wlarc= # A generic and very simple default shared library creation # command for GNU C++ for the case where it uses the native # linker, instead of GNU ld. If possible, this setting should # overridden to take advantage of the native linker features on # the platform it is being used on. _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib' fi # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. 
output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "\-L"' else GXX=no with_gnu_ld=no wlarc= fi # PORTME: fill in a description of your system's C++ link characteristics AC_MSG_CHECKING([whether the $compiler linker ($LD) supports shared libraries]) _LT_TAGVAR(ld_shlibs, $1)=yes case $host_os in aix3*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; aix[[4-9]]*) if test "$host_cpu" = ia64; then # On IA64, the linker does run time linking by default, so we don't # have to do anything special. aix_use_runtimelinking=no exp_sym_flag='-Bexport' no_entry_flag="" else aix_use_runtimelinking=no # Test if we are trying to use run time linking or normal # AIX style linking. If -brtl is somewhere in LDFLAGS, we # need to do runtime linking. case $host_os in aix4.[[23]]|aix4.[[23]].*|aix[[5-9]]*) for ld_flag in $LDFLAGS; do case $ld_flag in *-brtl*) aix_use_runtimelinking=yes break ;; esac done ;; esac exp_sym_flag='-bexport' no_entry_flag='-bnoentry' fi # When large executables or shared objects are built, AIX ld can # have problems creating the table of contents. If linking a library # or program results in "error TOC overflow" add -mminimal-toc to # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS. _LT_TAGVAR(archive_cmds, $1)='' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_direct_absolute, $1)=yes _LT_TAGVAR(hardcode_libdir_separator, $1)=':' _LT_TAGVAR(link_all_deplibs, $1)=yes _LT_TAGVAR(file_list_spec, $1)='${wl}-f,' if test "$GXX" = yes; then case $host_os in aix4.[[012]]|aix4.[[012]].*) # We only want to do this on AIX 4.2 and lower, the check # below for broken collect2 doesn't work under 4.3+ collect2name=`${CC} -print-prog-name=collect2` if test -f "$collect2name" && strings "$collect2name" | $GREP resolve_lib_name >/dev/null then # We have reworked collect2 : else # We have old collect2 _LT_TAGVAR(hardcode_direct, $1)=unsupported # It fails to find uninstalled libraries when the uninstalled # path is not listed in the libpath. Setting hardcode_minus_L # to unsupported forces relinking _LT_TAGVAR(hardcode_minus_L, $1)=yes _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)= fi esac shared_flag='-shared' if test "$aix_use_runtimelinking" = yes; then shared_flag="$shared_flag "'${wl}-G' fi else # not using gcc if test "$host_cpu" = ia64; then # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release # chokes on -Wl,-G. The following line is correct: shared_flag='-G' else if test "$aix_use_runtimelinking" = yes; then shared_flag='${wl}-G' else shared_flag='${wl}-bM:SRE' fi fi fi _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-bexpall' # It seems that -bexpall does not export symbols beginning with # underscore (_), so it is better to generate a list of symbols to # export. _LT_TAGVAR(always_export_symbols, $1)=yes if test "$aix_use_runtimelinking" = yes; then # Warning - without using the other runtime loading flags (-brtl), # -berok will link without error, but may produce a broken library. _LT_TAGVAR(allow_undefined_flag, $1)='-berok' # Determine the default libpath from the value encoded in an empty # executable. 
_LT_SYS_MODULE_PATH_AIX _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags `if test "x${allow_undefined_flag}" != "x"; then $ECHO "X${wl}${allow_undefined_flag}" | $Xsed; else :; fi` '"\${wl}$exp_sym_flag:\$export_symbols $shared_flag" else if test "$host_cpu" = ia64; then _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $libdir:/usr/lib:/lib' _LT_TAGVAR(allow_undefined_flag, $1)="-z nodefs" _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\${wl}$no_entry_flag"' $compiler_flags ${wl}${allow_undefined_flag} '"\${wl}$exp_sym_flag:\$export_symbols" else # Determine the default libpath from the value encoded in an # empty executable. _LT_SYS_MODULE_PATH_AIX _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-blibpath:$libdir:'"$aix_libpath" # Warning - without using the other run time loading flags, # -berok will link without error, but may produce a broken library. _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-bernotok' _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-berok' # Exported symbols can be pulled into shared objects from archives _LT_TAGVAR(whole_archive_flag_spec, $1)='$convenience' _LT_TAGVAR(archive_cmds_need_lc, $1)=yes # This is similar to how AIX traditionally builds its shared # libraries. _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs ${wl}-bnoentry $compiler_flags ${wl}-bE:$export_symbols${allow_undefined_flag}~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$soname' fi fi ;; beos*) if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then _LT_TAGVAR(allow_undefined_flag, $1)=unsupported # Joseph Beckenbach says some releases of gcc # support --undefined. This deserves some investigation. FIXME _LT_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; chorus*) case $cc_basename in *) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; esac ;; cygwin* | mingw* | pw32* | cegcc*) # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless, # as there is no search path for DLLs. _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(allow_undefined_flag, $1)=unsupported _LT_TAGVAR(always_export_symbols, $1)=no _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' # If the export-symbols file already is a .def file (1st line # is EXPORTS), use it as is; otherwise, prepend... 
_LT_TAGVAR(archive_expsym_cmds, $1)='if test "x`$SED 1q $export_symbols`" = xEXPORTS; then cp $export_symbols $output_objdir/$soname.def; else echo EXPORTS > $output_objdir/$soname.def; cat $export_symbols >> $output_objdir/$soname.def; fi~ $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname ${wl}--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; darwin* | rhapsody*) _LT_DARWIN_LINKER_FEATURES($1) ;; dgux*) case $cc_basename in ec++*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; ghcx*) # Green Hills C++ Compiler # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; *) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; esac ;; freebsd[[12]]*) # C++ shared libraries reported to be fairly broken before # switch to ELF _LT_TAGVAR(ld_shlibs, $1)=no ;; freebsd-elf*) _LT_TAGVAR(archive_cmds_need_lc, $1)=no ;; freebsd* | dragonfly*) # FreeBSD 3 and later use GNU C++ and GNU ld with standard ELF # conventions _LT_TAGVAR(ld_shlibs, $1)=yes ;; gnu*) ;; hpux9*) _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_minus_L, $1)=yes # Not in the search PATH, # but as the default # location of the library. case $cc_basename in CC*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; aCC*) _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -b ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $EGREP "\-L"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; $ECHO "X$list" | $Xsed' ;; *) if test "$GXX" = yes; then _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -nostdlib -fPIC ${wl}+b ${wl}$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test $output_objdir/$soname = $lib || mv $output_objdir/$soname $lib' else # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no fi ;; esac ;; hpux10*|hpux11*) if test $with_gnu_ld = no; then _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}+b ${wl}$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: case $host_cpu in hppa*64*|ia64*) ;; *) _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' ;; esac fi case $host_cpu in hppa*64*|ia64*) _LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; *) _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_direct_absolute, $1)=yes _LT_TAGVAR(hardcode_minus_L, $1)=yes # Not in the search PATH, # but as the default # location of the library. 
;; esac case $cc_basename in CC*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; aCC*) case $host_cpu in hppa*64*) _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; ia64*) _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; *) _LT_TAGVAR(archive_cmds, $1)='$CC -b ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; esac # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $GREP "\-L"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; $ECHO "X$list" | $Xsed' ;; *) if test "$GXX" = yes; then if test $with_gnu_ld = no; then case $host_cpu in hppa*64*) _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; ia64*) _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; *) _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC ${wl}+h ${wl}$soname ${wl}+b ${wl}$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; esac fi else # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no fi ;; esac ;; interix[[3-9]]*) _LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc. # Instead, shared libraries are loaded at an image base (0x10000000 by # default) and relocated if they conflict, which is a slow very memory # consuming and fragmenting process. To avoid this, we pick a random, # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link # time. Moving up from 0x10000000 also allows more sbrk(2) space. _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='sed "s,^,_," $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags ${wl}-h,$soname ${wl}--retain-symbols-file,$output_objdir/$soname.expsym ${wl}--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' ;; irix5* | irix6*) case $cc_basename in CC*) # SGI C++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -all -multigot $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib' # Archives containing C++ object files must be created using # "CC -ar", where "CC" is the IRIX C++ compiler. 
This is # necessary to make sure instantiated templates are included # in the archive. _LT_TAGVAR(old_archive_cmds, $1)='$CC -ar -WR,-u -o $oldlib $oldobjs' ;; *) if test "$GXX" = yes; then if test "$with_gnu_ld" = no; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' else _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` -o $lib' fi fi _LT_TAGVAR(link_all_deplibs, $1)=yes ;; esac _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: _LT_TAGVAR(inherit_rpath, $1)=yes ;; linux* | k*bsd*-gnu | kopensolaris*-gnu) case $cc_basename in KCC*) # Kuck and Associates, Inc. (KAI) C++ Compiler # KCC will only create a shared library if the output file # ends with ".so" (or ".sl" for HP-UX), so rename the library # to its proper name (with version) after linking. _LT_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib ${wl}-retain-symbols-file,$export_symbols; mv \$templib $lib' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. output_verbose_link_cmd='templist=`$CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1 | $GREP "ld"`; rm -f libconftest$shared_ext; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; $ECHO "X$list" | $Xsed' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' # Archives containing C++ object files must be created using # "CC -Bstatic", where "CC" is the KAI C++ compiler. _LT_TAGVAR(old_archive_cmds, $1)='$CC -Bstatic -o $oldlib $oldobjs' ;; icpc* | ecpc* ) # Intel C++ with_gnu_ld=yes # version 8.0 and above of icpc choke on multiply defined symbols # if we add $predep_objects and $postdep_objects, however 7.1 and # earlier do not add the objects themselves. 
case `$CC -V 2>&1` in *"Version 7."*) _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ;; *) # Version 8.0 or newer tmp_idyn= case $host_cpu in ia64*) tmp_idyn=' -i_dynamic';; esac _LT_TAGVAR(archive_cmds, $1)='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-retain-symbols-file $wl$export_symbols -o $lib' ;; esac _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive$convenience ${wl}--no-whole-archive' ;; pgCC* | pgcpp*) # Portland Group C++ compiler case `$CC -V` in *pgCC\ [[1-5]]* | *pgcpp\ [[1-5]]*) _LT_TAGVAR(prelink_cmds, $1)='tpldir=Template.dir~ rm -rf $tpldir~ $CC --prelink_objects --instantiation_dir $tpldir $objs $libobjs $compile_deplibs~ compile_command="$compile_command `find $tpldir -name \*.o | $NL2SP`"' _LT_TAGVAR(old_archive_cmds, $1)='tpldir=Template.dir~ rm -rf $tpldir~ $CC --prelink_objects --instantiation_dir $tpldir $oldobjs$old_deplibs~ $AR $AR_FLAGS $oldlib$oldobjs$old_deplibs `find $tpldir -name \*.o | $NL2SP`~ $RANLIB $oldlib' _LT_TAGVAR(archive_cmds, $1)='tpldir=Template.dir~ rm -rf $tpldir~ $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='tpldir=Template.dir~ rm -rf $tpldir~ $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | $NL2SP` $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib' ;; *) # Version 6 will use weak symbols _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname ${wl}-retain-symbols-file ${wl}$export_symbols -o $lib' ;; esac _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}--rpath ${wl}$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; $ECHO \"$new_convenience\"` ${wl}--no-whole-archive' ;; cxx*) # Compaq C++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $wl$soname -o $lib ${wl}-retain-symbols-file $wl$export_symbols' runpath_var=LD_RUN_PATH _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir' 
_LT_TAGVAR(hardcode_libdir_separator, $1)=: # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld"`; templist=`$ECHO "X$templist" | $Xsed -e "s/\(^.*ld.*\)\( .*ld .*$\)/\1/"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; $ECHO "X$list" | $Xsed' ;; xl*) # IBM XL 8.0 on PPC, with GNU ld _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}--export-dynamic' _LT_TAGVAR(archive_cmds, $1)='$CC -qmkshrobj $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname -o $lib' if test "x$supports_anon_versioning" = xyes; then _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ echo "local: *; };" >> $output_objdir/$libname.ver~ $CC -qmkshrobj $libobjs $deplibs $compiler_flags ${wl}-soname $wl$soname ${wl}-version-script ${wl}$output_objdir/$libname.ver -o $lib' fi ;; *) case `$CC -V 2>&1 | sed 5q` in *Sun\ C*) # Sun C++ 5.9 _LT_TAGVAR(no_undefined_flag, $1)=' -zdefs' _LT_TAGVAR(archive_cmds, $1)='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-retain-symbols-file ${wl}$export_symbols' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; $ECHO \"$new_convenience\"` ${wl}--no-whole-archive' _LT_TAGVAR(compiler_needs_object, $1)=yes # Not sure whether something based on # $CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1 # would be better. output_verbose_link_cmd='echo' # Archives containing C++ object files must be created using # "CC -xar", where "CC" is the Sun C++ compiler. This is # necessary to make sure instantiated templates are included # in the archive. 
_LT_TAGVAR(old_archive_cmds, $1)='$CC -xar -o $oldlib $oldobjs' ;; esac ;; esac ;; lynxos*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; m88k*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; mvs*) case $cc_basename in cxx*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; *) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; esac ;; netbsd*) if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $predep_objects $libobjs $deplibs $postdep_objects $linker_flags' wlarc= _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no fi # Workaround some broken pre-1.5 toolchains output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP conftest.$objext | $SED -e "s:-lgcc -lc -lgcc::"' ;; *nto* | *qnx*) _LT_TAGVAR(ld_shlibs, $1)=yes ;; openbsd2*) # C++ shared libraries are fairly broken _LT_TAGVAR(ld_shlibs, $1)=no ;; openbsd*) if test -f /usr/libexec/ld.so; then _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no _LT_TAGVAR(hardcode_direct_absolute, $1)=yes _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-retain-symbols-file,$export_symbols -o $lib' _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-E' _LT_TAGVAR(whole_archive_flag_spec, $1)="$wlarc"'--whole-archive$convenience '"$wlarc"'--no-whole-archive' fi output_verbose_link_cmd=echo else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; osf3* | osf4* | osf5*) case $cc_basename in KCC*) # Kuck and Associates, Inc. (KAI) C++ Compiler # KCC will only create a shared library if the output file # ends with ".so" (or ".sl" for HP-UX), so rename the library # to its proper name (with version) after linking. _LT_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo "$lib" | $SED -e "s/\${tempext}\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath,$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: # Archives containing C++ object files must be created using # the KAI C++ compiler. 
case $host in osf3*) _LT_TAGVAR(old_archive_cmds, $1)='$CC -Bstatic -o $oldlib $oldobjs' ;; *) _LT_TAGVAR(old_archive_cmds, $1)='$CC -o $oldlib $oldobjs' ;; esac ;; RCC*) # Rational C++ 2.4.1 # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; cxx*) case $host in osf3*) _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*' _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname $soname `test -n "$verstring" && $ECHO "X${wl}-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' ;; *) _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*' _LT_TAGVAR(archive_cmds, $1)='$CC -shared${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done~ echo "-hidden">> $lib.exp~ $CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname ${wl}-input ${wl}$lib.exp `test -n "$verstring" && $ECHO "X-set_version $verstring" | $Xsed` -update_registry ${output_objdir}/so_locations -o $lib~ $RM $lib.exp' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir' ;; esac _LT_TAGVAR(hardcode_libdir_separator, $1)=: # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld" | $GREP -v "ld:"`; templist=`$ECHO "X$templist" | $Xsed -e "s/\(^.*ld.*\)\( .*ld.*$\)/\1/"`; list=""; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; $ECHO "X$list" | $Xsed' ;; *) if test "$GXX" = yes && test "$with_gnu_ld" = no; then _LT_TAGVAR(allow_undefined_flag, $1)=' ${wl}-expect_unresolved ${wl}\*' case $host in osf3*) _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "X${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' ;; *) _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib ${allow_undefined_flag} $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-msym ${wl}-soname ${wl}$soname `test -n "$verstring" && $ECHO "${wl}-set_version ${wl}$verstring" | $Xsed` ${wl}-update_registry ${wl}${output_objdir}/so_locations -o $lib' ;; esac _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-rpath ${wl}$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. 
output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "\-L"' else # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no fi ;; esac ;; psos*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; sunos4*) case $cc_basename in CC*) # Sun C++ 4.x # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; lcc*) # Lucid # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; *) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; esac ;; solaris*) case $cc_basename in CC*) # Sun C++ 4.2, 5.x and Centerline C++ _LT_TAGVAR(archive_cmds_need_lc,$1)=yes _LT_TAGVAR(no_undefined_flag, $1)=' -zdefs' _LT_TAGVAR(archive_cmds, $1)='$CC -G${allow_undefined_flag} -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -G${allow_undefined_flag} ${wl}-M ${wl}$lib.exp -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' _LT_TAGVAR(hardcode_shlibpath_var, $1)=no case $host_os in solaris2.[[0-5]] | solaris2.[[0-5]].*) ;; *) # The compiler driver will combine and reorder linker options, # but understands `-z linker_flag'. # Supported since Solaris 2.6 (maybe 2.5.1?) _LT_TAGVAR(whole_archive_flag_spec, $1)='-z allextract$convenience -z defaultextract' ;; esac _LT_TAGVAR(link_all_deplibs, $1)=yes output_verbose_link_cmd='echo' # Archives containing C++ object files must be created using # "CC -xar", where "CC" is the Sun C++ compiler. This is # necessary to make sure instantiated templates are included # in the archive. _LT_TAGVAR(old_archive_cmds, $1)='$CC -xar -o $oldlib $oldobjs' ;; gcx*) # Green Hills C++ Compiler _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' # The C++ compiler must be used to create the archive. _LT_TAGVAR(old_archive_cmds, $1)='$CC $LDFLAGS -archive -o $oldlib $oldobjs' ;; *) # GNU C++ compiler with Solaris linker if test "$GXX" = yes && test "$with_gnu_ld" = no; then _LT_TAGVAR(no_undefined_flag, $1)=' ${wl}-z ${wl}defs' if $CC --version | $GREP -v '^2\.7' > /dev/null; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -shared -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "\-L"' else # g++ 2.7 appears to require `-G' NOT `-shared' on this # platform. 
_LT_TAGVAR(archive_cmds, $1)='$CC -G -nostdlib $LDFLAGS $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags ${wl}-h $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -G -nostdlib ${wl}-M $wl$lib.exp -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. output_verbose_link_cmd='$CC -G $CFLAGS -v conftest.$objext 2>&1 | $GREP "\-L"' fi _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R $wl$libdir' case $host_os in solaris2.[[0-5]] | solaris2.[[0-5]].*) ;; *) _LT_TAGVAR(whole_archive_flag_spec, $1)='${wl}-z ${wl}allextract$convenience ${wl}-z ${wl}defaultextract' ;; esac fi ;; esac ;; sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[[01]].[[10]]* | unixware7* | sco3.2v5.0.[[024]]*) _LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text' _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=no runpath_var='LD_RUN_PATH' case $cc_basename in CC*) _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' ;; *) _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' ;; esac ;; sysv5* | sco3.2v5* | sco5v6*) # Note: We can NOT use -z defs as we might desire, because we do not # link with -lc, and that would cause any symbols used from libc to # always be unresolved, which means just about no library would # ever link correctly. If we're not using GNU ld we use -z text # though, which does catch some bad symbols but isn't as heavy-handed # as -z defs. 
_LT_TAGVAR(no_undefined_flag, $1)='${wl}-z,text' _LT_TAGVAR(allow_undefined_flag, $1)='${wl}-z,nodefs' _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='${wl}-R,$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=':' _LT_TAGVAR(link_all_deplibs, $1)=yes _LT_TAGVAR(export_dynamic_flag_spec, $1)='${wl}-Bexport' runpath_var='LD_RUN_PATH' case $cc_basename in CC*) _LT_TAGVAR(archive_cmds, $1)='$CC -G ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' ;; *) _LT_TAGVAR(archive_cmds, $1)='$CC -shared ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared ${wl}-Bexport:$export_symbols ${wl}-h,$soname -o $lib $libobjs $deplibs $compiler_flags' ;; esac ;; tandem*) case $cc_basename in NCC*) # NonStop-UX NCC 3.20 # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; *) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; esac ;; vxworks*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; *) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; esac AC_MSG_RESULT([$_LT_TAGVAR(ld_shlibs, $1)]) test "$_LT_TAGVAR(ld_shlibs, $1)" = no && can_build_shared=no _LT_TAGVAR(GCC, $1)="$GXX" _LT_TAGVAR(LD, $1)="$LD" ## CAVEAT EMPTOR: ## There is no encapsulation within the following macros, do not change ## the running order or otherwise move them around unless you know exactly ## what you are doing... _LT_SYS_HIDDEN_LIBDEPS($1) _LT_COMPILER_PIC($1) _LT_COMPILER_C_O($1) _LT_COMPILER_FILE_LOCKS($1) _LT_LINKER_SHLIBS($1) _LT_SYS_DYNAMIC_LINKER($1) _LT_LINKER_HARDCODE_LIBPATH($1) _LT_CONFIG($1) fi # test -n "$compiler" CC=$lt_save_CC LDCXX=$LD LD=$lt_save_LD GCC=$lt_save_GCC with_gnu_ld=$lt_save_with_gnu_ld lt_cv_path_LDCXX=$lt_cv_path_LD lt_cv_path_LD=$lt_save_path_LD lt_cv_prog_gnu_ldcxx=$lt_cv_prog_gnu_ld lt_cv_prog_gnu_ld=$lt_save_with_gnu_ld fi # test "$_lt_caught_CXX_error" != yes AC_LANG_POP ])# _LT_LANG_CXX_CONFIG # _LT_SYS_HIDDEN_LIBDEPS([TAGNAME]) # --------------------------------- # Figure out "hidden" library dependencies from verbose # compiler output when linking a shared library. # Parse the compiler output and extract the necessary # objects, libraries and library flags. m4_defun([_LT_SYS_HIDDEN_LIBDEPS], [m4_require([_LT_FILEUTILS_DEFAULTS])dnl # Dependencies to place before and after the object being linked: _LT_TAGVAR(predep_objects, $1)= _LT_TAGVAR(postdep_objects, $1)= _LT_TAGVAR(predeps, $1)= _LT_TAGVAR(postdeps, $1)= _LT_TAGVAR(compiler_lib_search_path, $1)= dnl we can't use the lt_simple_compile_test_code here, dnl because it contains code intended for an executable, dnl not a library. It's possible we should let each dnl tag define a new lt_????_link_test_code variable, dnl but it's only used here... 
m4_if([$1], [], [cat > conftest.$ac_ext <<_LT_EOF int a; void foo (void) { a = 0; } _LT_EOF ], [$1], [CXX], [cat > conftest.$ac_ext <<_LT_EOF class Foo { public: Foo (void) { a = 0; } private: int a; }; _LT_EOF ], [$1], [F77], [cat > conftest.$ac_ext <<_LT_EOF subroutine foo implicit none integer*4 a a=0 return end _LT_EOF ], [$1], [FC], [cat > conftest.$ac_ext <<_LT_EOF subroutine foo implicit none integer a a=0 return end _LT_EOF ], [$1], [GCJ], [cat > conftest.$ac_ext <<_LT_EOF public class foo { private int a; public void bar (void) { a = 0; } }; _LT_EOF ]) dnl Parse the compiler output and extract the necessary dnl objects, libraries and library flags. if AC_TRY_EVAL(ac_compile); then # Parse the compiler output and extract the necessary # objects, libraries and library flags. # Sentinel used to keep track of whether or not we are before # the conftest object file. pre_test_object_deps_done=no for p in `eval "$output_verbose_link_cmd"`; do case $p in -L* | -R* | -l*) # Some compilers place space between "-{L,R}" and the path. # Remove the space. if test $p = "-L" || test $p = "-R"; then prev=$p continue else prev= fi if test "$pre_test_object_deps_done" = no; then case $p in -L* | -R*) # Internal compiler library paths should come after those # provided the user. The postdeps already come after the # user supplied libs so there is no need to process them. if test -z "$_LT_TAGVAR(compiler_lib_search_path, $1)"; then _LT_TAGVAR(compiler_lib_search_path, $1)="${prev}${p}" else _LT_TAGVAR(compiler_lib_search_path, $1)="${_LT_TAGVAR(compiler_lib_search_path, $1)} ${prev}${p}" fi ;; # The "-l" case would never come before the object being # linked, so don't bother handling this case. esac else if test -z "$_LT_TAGVAR(postdeps, $1)"; then _LT_TAGVAR(postdeps, $1)="${prev}${p}" else _LT_TAGVAR(postdeps, $1)="${_LT_TAGVAR(postdeps, $1)} ${prev}${p}" fi fi ;; *.$objext) # This assumes that the test object file only shows up # once in the compiler output. if test "$p" = "conftest.$objext"; then pre_test_object_deps_done=yes continue fi if test "$pre_test_object_deps_done" = no; then if test -z "$_LT_TAGVAR(predep_objects, $1)"; then _LT_TAGVAR(predep_objects, $1)="$p" else _LT_TAGVAR(predep_objects, $1)="$_LT_TAGVAR(predep_objects, $1) $p" fi else if test -z "$_LT_TAGVAR(postdep_objects, $1)"; then _LT_TAGVAR(postdep_objects, $1)="$p" else _LT_TAGVAR(postdep_objects, $1)="$_LT_TAGVAR(postdep_objects, $1) $p" fi fi ;; *) ;; # Ignore the rest. esac done # Clean up. rm -f a.out a.exe else echo "libtool.m4: error: problem compiling $1 test program" fi $RM -f confest.$objext # PORTME: override above test on systems where it is broken m4_if([$1], [CXX], [case $host_os in interix[[3-9]]*) # Interix 3.5 installs completely hosed .la files for C++, so rather than # hack all around it, let's just trust "g++" to DTRT. _LT_TAGVAR(predep_objects,$1)= _LT_TAGVAR(postdep_objects,$1)= _LT_TAGVAR(postdeps,$1)= ;; linux*) case `$CC -V 2>&1 | sed 5q` in *Sun\ C*) # Sun C++ 5.9 # The more standards-conforming stlport4 library is # incompatible with the Cstd library. Avoid specifying # it if it's in CXXFLAGS. Ignore libCrun as # -library=stlport4 depends on it. case " $CXX $CXXFLAGS " in *" -library=stlport4 "*) solaris_use_stlport4=yes ;; esac if test "$solaris_use_stlport4" != yes; then _LT_TAGVAR(postdeps,$1)='-library=Cstd -library=Crun' fi ;; esac ;; solaris*) case $cc_basename in CC*) # The more standards-conforming stlport4 library is # incompatible with the Cstd library. 
Avoid specifying # it if it's in CXXFLAGS. Ignore libCrun as # -library=stlport4 depends on it. case " $CXX $CXXFLAGS " in *" -library=stlport4 "*) solaris_use_stlport4=yes ;; esac # Adding this requires a known-good setup of shared libraries for # Sun compiler versions before 5.6, else PIC objects from an old # archive will be linked into the output, leading to subtle bugs. if test "$solaris_use_stlport4" != yes; then _LT_TAGVAR(postdeps,$1)='-library=Cstd -library=Crun' fi ;; esac ;; esac ]) case " $_LT_TAGVAR(postdeps, $1) " in *" -lc "*) _LT_TAGVAR(archive_cmds_need_lc, $1)=no ;; esac _LT_TAGVAR(compiler_lib_search_dirs, $1)= if test -n "${_LT_TAGVAR(compiler_lib_search_path, $1)}"; then _LT_TAGVAR(compiler_lib_search_dirs, $1)=`echo " ${_LT_TAGVAR(compiler_lib_search_path, $1)}" | ${SED} -e 's! -L! !g' -e 's!^ !!'` fi _LT_TAGDECL([], [compiler_lib_search_dirs], [1], [The directories searched by this compiler when creating a shared library]) _LT_TAGDECL([], [predep_objects], [1], [Dependencies to place before and after the objects being linked to create a shared library]) _LT_TAGDECL([], [postdep_objects], [1]) _LT_TAGDECL([], [predeps], [1]) _LT_TAGDECL([], [postdeps], [1]) _LT_TAGDECL([], [compiler_lib_search_path], [1], [The library search path used internally by the compiler when linking a shared library]) ])# _LT_SYS_HIDDEN_LIBDEPS # _LT_PROG_F77 # ------------ # Since AC_PROG_F77 is broken, in that it returns the empty string # if there is no fortran compiler, we have our own version here. m4_defun([_LT_PROG_F77], [ pushdef([AC_MSG_ERROR], [_lt_disable_F77=yes]) AC_PROG_F77 if test -z "$F77" || test "X$F77" = "Xno"; then _lt_disable_F77=yes fi popdef([AC_MSG_ERROR]) ])# _LT_PROG_F77 dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([_LT_PROG_F77], []) # _LT_LANG_F77_CONFIG([TAG]) # -------------------------- # Ensure that the configuration variables for a Fortran 77 compiler are # suitably defined. These variables are subsequently used by _LT_CONFIG # to write the compiler configuration to `libtool'. m4_defun([_LT_LANG_F77_CONFIG], [AC_REQUIRE([_LT_PROG_F77])dnl AC_LANG_PUSH(Fortran 77) _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(allow_undefined_flag, $1)= _LT_TAGVAR(always_export_symbols, $1)=no _LT_TAGVAR(archive_expsym_cmds, $1)= _LT_TAGVAR(export_dynamic_flag_spec, $1)= _LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_direct_absolute, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)= _LT_TAGVAR(hardcode_libdir_flag_spec_ld, $1)= _LT_TAGVAR(hardcode_libdir_separator, $1)= _LT_TAGVAR(hardcode_minus_L, $1)=no _LT_TAGVAR(hardcode_automatic, $1)=no _LT_TAGVAR(inherit_rpath, $1)=no _LT_TAGVAR(module_cmds, $1)= _LT_TAGVAR(module_expsym_cmds, $1)= _LT_TAGVAR(link_all_deplibs, $1)=unknown _LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds _LT_TAGVAR(no_undefined_flag, $1)= _LT_TAGVAR(whole_archive_flag_spec, $1)= _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no # Source file extension for f77 test sources. ac_ext=f # Object file extension for compiled f77 test sources. objext=o _LT_TAGVAR(objext, $1)=$objext # No sense in running all these tests if we already determined that # the F77 compiler isn't working. Some variables (like enable_shared) # are currently assumed to apply to all compilers on this platform, # and will be corrupted by setting them based on a non-working compiler. 
if test "$_lt_disable_F77" != yes; then # Code to be used in simple compile tests lt_simple_compile_test_code="\ subroutine t return end " # Code to be used in simple link tests lt_simple_link_test_code="\ program t end " # ltmain only uses $CC for tagged configurations so make sure $CC is set. _LT_TAG_COMPILER # save warnings/boilerplate of simple test code _LT_COMPILER_BOILERPLATE _LT_LINKER_BOILERPLATE # Allow CC to be a program name with arguments. lt_save_CC="$CC" lt_save_GCC=$GCC CC=${F77-"f77"} compiler=$CC _LT_TAGVAR(compiler, $1)=$CC _LT_CC_BASENAME([$compiler]) GCC=$G77 if test -n "$compiler"; then AC_MSG_CHECKING([if libtool supports shared libraries]) AC_MSG_RESULT([$can_build_shared]) AC_MSG_CHECKING([whether to build shared libraries]) test "$can_build_shared" = "no" && enable_shared=no # On AIX, shared libraries and static libraries use the same namespace, and # are all built from PIC. case $host_os in aix3*) test "$enable_shared" = yes && enable_static=no if test -n "$RANLIB"; then archive_cmds="$archive_cmds~\$RANLIB \$lib" postinstall_cmds='$RANLIB $lib' fi ;; aix[[4-9]]*) if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then test "$enable_shared" = yes && enable_static=no fi ;; esac AC_MSG_RESULT([$enable_shared]) AC_MSG_CHECKING([whether to build static libraries]) # Make sure either enable_shared or enable_static is yes. test "$enable_shared" = yes || enable_static=yes AC_MSG_RESULT([$enable_static]) _LT_TAGVAR(GCC, $1)="$G77" _LT_TAGVAR(LD, $1)="$LD" ## CAVEAT EMPTOR: ## There is no encapsulation within the following macros, do not change ## the running order or otherwise move them around unless you know exactly ## what you are doing... _LT_COMPILER_PIC($1) _LT_COMPILER_C_O($1) _LT_COMPILER_FILE_LOCKS($1) _LT_LINKER_SHLIBS($1) _LT_SYS_DYNAMIC_LINKER($1) _LT_LINKER_HARDCODE_LIBPATH($1) _LT_CONFIG($1) fi # test -n "$compiler" GCC=$lt_save_GCC CC="$lt_save_CC" fi # test "$_lt_disable_F77" != yes AC_LANG_POP ])# _LT_LANG_F77_CONFIG # _LT_PROG_FC # ----------- # Since AC_PROG_FC is broken, in that it returns the empty string # if there is no fortran compiler, we have our own version here. m4_defun([_LT_PROG_FC], [ pushdef([AC_MSG_ERROR], [_lt_disable_FC=yes]) AC_PROG_FC if test -z "$FC" || test "X$FC" = "Xno"; then _lt_disable_FC=yes fi popdef([AC_MSG_ERROR]) ])# _LT_PROG_FC dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([_LT_PROG_FC], []) # _LT_LANG_FC_CONFIG([TAG]) # ------------------------- # Ensure that the configuration variables for a Fortran compiler are # suitably defined. These variables are subsequently used by _LT_CONFIG # to write the compiler configuration to `libtool'. 
m4_defun([_LT_LANG_FC_CONFIG], [AC_REQUIRE([_LT_PROG_FC])dnl AC_LANG_PUSH(Fortran) _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(allow_undefined_flag, $1)= _LT_TAGVAR(always_export_symbols, $1)=no _LT_TAGVAR(archive_expsym_cmds, $1)= _LT_TAGVAR(export_dynamic_flag_spec, $1)= _LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_direct_absolute, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)= _LT_TAGVAR(hardcode_libdir_flag_spec_ld, $1)= _LT_TAGVAR(hardcode_libdir_separator, $1)= _LT_TAGVAR(hardcode_minus_L, $1)=no _LT_TAGVAR(hardcode_automatic, $1)=no _LT_TAGVAR(inherit_rpath, $1)=no _LT_TAGVAR(module_cmds, $1)= _LT_TAGVAR(module_expsym_cmds, $1)= _LT_TAGVAR(link_all_deplibs, $1)=unknown _LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds _LT_TAGVAR(no_undefined_flag, $1)= _LT_TAGVAR(whole_archive_flag_spec, $1)= _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no # Source file extension for fc test sources. ac_ext=${ac_fc_srcext-f} # Object file extension for compiled fc test sources. objext=o _LT_TAGVAR(objext, $1)=$objext # No sense in running all these tests if we already determined that # the FC compiler isn't working. Some variables (like enable_shared) # are currently assumed to apply to all compilers on this platform, # and will be corrupted by setting them based on a non-working compiler. if test "$_lt_disable_FC" != yes; then # Code to be used in simple compile tests lt_simple_compile_test_code="\ subroutine t return end " # Code to be used in simple link tests lt_simple_link_test_code="\ program t end " # ltmain only uses $CC for tagged configurations so make sure $CC is set. _LT_TAG_COMPILER # save warnings/boilerplate of simple test code _LT_COMPILER_BOILERPLATE _LT_LINKER_BOILERPLATE # Allow CC to be a program name with arguments. lt_save_CC="$CC" lt_save_GCC=$GCC CC=${FC-"f95"} compiler=$CC GCC=$ac_cv_fc_compiler_gnu _LT_TAGVAR(compiler, $1)=$CC _LT_CC_BASENAME([$compiler]) if test -n "$compiler"; then AC_MSG_CHECKING([if libtool supports shared libraries]) AC_MSG_RESULT([$can_build_shared]) AC_MSG_CHECKING([whether to build shared libraries]) test "$can_build_shared" = "no" && enable_shared=no # On AIX, shared libraries and static libraries use the same namespace, and # are all built from PIC. case $host_os in aix3*) test "$enable_shared" = yes && enable_static=no if test -n "$RANLIB"; then archive_cmds="$archive_cmds~\$RANLIB \$lib" postinstall_cmds='$RANLIB $lib' fi ;; aix[[4-9]]*) if test "$host_cpu" != ia64 && test "$aix_use_runtimelinking" = no ; then test "$enable_shared" = yes && enable_static=no fi ;; esac AC_MSG_RESULT([$enable_shared]) AC_MSG_CHECKING([whether to build static libraries]) # Make sure either enable_shared or enable_static is yes. test "$enable_shared" = yes || enable_static=yes AC_MSG_RESULT([$enable_static]) _LT_TAGVAR(GCC, $1)="$ac_cv_fc_compiler_gnu" _LT_TAGVAR(LD, $1)="$LD" ## CAVEAT EMPTOR: ## There is no encapsulation within the following macros, do not change ## the running order or otherwise move them around unless you know exactly ## what you are doing... _LT_SYS_HIDDEN_LIBDEPS($1) _LT_COMPILER_PIC($1) _LT_COMPILER_C_O($1) _LT_COMPILER_FILE_LOCKS($1) _LT_LINKER_SHLIBS($1) _LT_SYS_DYNAMIC_LINKER($1) _LT_LINKER_HARDCODE_LIBPATH($1) _LT_CONFIG($1) fi # test -n "$compiler" GCC=$lt_save_GCC CC="$lt_save_CC" fi # test "$_lt_disable_FC" != yes AC_LANG_POP ])# _LT_LANG_FC_CONFIG # _LT_LANG_GCJ_CONFIG([TAG]) # -------------------------- # Ensure that the configuration variables for the GNU Java Compiler compiler # are suitably defined. 
These variables are subsequently used by _LT_CONFIG # to write the compiler configuration to `libtool'. m4_defun([_LT_LANG_GCJ_CONFIG], [AC_REQUIRE([LT_PROG_GCJ])dnl AC_LANG_SAVE # Source file extension for Java test sources. ac_ext=java # Object file extension for compiled Java test sources. objext=o _LT_TAGVAR(objext, $1)=$objext # Code to be used in simple compile tests lt_simple_compile_test_code="class foo {}" # Code to be used in simple link tests lt_simple_link_test_code='public class conftest { public static void main(String[[]] argv) {}; }' # ltmain only uses $CC for tagged configurations so make sure $CC is set. _LT_TAG_COMPILER # save warnings/boilerplate of simple test code _LT_COMPILER_BOILERPLATE _LT_LINKER_BOILERPLATE # Allow CC to be a program name with arguments. lt_save_CC="$CC" lt_save_GCC=$GCC GCC=yes CC=${GCJ-"gcj"} compiler=$CC _LT_TAGVAR(compiler, $1)=$CC _LT_TAGVAR(LD, $1)="$LD" _LT_CC_BASENAME([$compiler]) # GCJ did not exist at the time GCC didn't implicitly link libc in. _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds if test -n "$compiler"; then _LT_COMPILER_NO_RTTI($1) _LT_COMPILER_PIC($1) _LT_COMPILER_C_O($1) _LT_COMPILER_FILE_LOCKS($1) _LT_LINKER_SHLIBS($1) _LT_LINKER_HARDCODE_LIBPATH($1) _LT_CONFIG($1) fi AC_LANG_RESTORE GCC=$lt_save_GCC CC="$lt_save_CC" ])# _LT_LANG_GCJ_CONFIG # _LT_LANG_RC_CONFIG([TAG]) # ------------------------- # Ensure that the configuration variables for the Windows resource compiler # are suitably defined. These variables are subsequently used by _LT_CONFIG # to write the compiler configuration to `libtool'. m4_defun([_LT_LANG_RC_CONFIG], [AC_REQUIRE([LT_PROG_RC])dnl AC_LANG_SAVE # Source file extension for RC test sources. ac_ext=rc # Object file extension for compiled RC test sources. objext=o _LT_TAGVAR(objext, $1)=$objext # Code to be used in simple compile tests lt_simple_compile_test_code='sample MENU { MENUITEM "&Soup", 100, CHECKED }' # Code to be used in simple link tests lt_simple_link_test_code="$lt_simple_compile_test_code" # ltmain only uses $CC for tagged configurations so make sure $CC is set. _LT_TAG_COMPILER # save warnings/boilerplate of simple test code _LT_COMPILER_BOILERPLATE _LT_LINKER_BOILERPLATE # Allow CC to be a program name with arguments. lt_save_CC="$CC" lt_save_GCC=$GCC GCC= CC=${RC-"windres"} compiler=$CC _LT_TAGVAR(compiler, $1)=$CC _LT_CC_BASENAME([$compiler]) _LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=yes if test -n "$compiler"; then : _LT_CONFIG($1) fi GCC=$lt_save_GCC AC_LANG_RESTORE CC="$lt_save_CC" ])# _LT_LANG_RC_CONFIG # LT_PROG_GCJ # ----------- AC_DEFUN([LT_PROG_GCJ], [m4_ifdef([AC_PROG_GCJ], [AC_PROG_GCJ], [m4_ifdef([A][M_PROG_GCJ], [A][M_PROG_GCJ], [AC_CHECK_TOOL(GCJ, gcj,) test "x${GCJFLAGS+set}" = xset || GCJFLAGS="-g -O2" AC_SUBST(GCJFLAGS)])])[]dnl ]) # Old name: AU_ALIAS([LT_AC_PROG_GCJ], [LT_PROG_GCJ]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([LT_AC_PROG_GCJ], []) # LT_PROG_RC # ---------- AC_DEFUN([LT_PROG_RC], [AC_CHECK_TOOL(RC, windres,) ]) # Old name: AU_ALIAS([LT_AC_PROG_RC], [LT_PROG_RC]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([LT_AC_PROG_RC], []) # _LT_DECL_EGREP # -------------- # If we don't have a new enough Autoconf to choose the best grep # available, choose the one first in the user's PATH. 
m4_defun([_LT_DECL_EGREP], [AC_REQUIRE([AC_PROG_EGREP])dnl AC_REQUIRE([AC_PROG_FGREP])dnl test -z "$GREP" && GREP=grep _LT_DECL([], [GREP], [1], [A grep program that handles long lines]) _LT_DECL([], [EGREP], [1], [An ERE matcher]) _LT_DECL([], [FGREP], [1], [A literal string matcher]) dnl Non-bleeding-edge autoconf doesn't subst GREP, so do it here too AC_SUBST([GREP]) ]) # _LT_DECL_OBJDUMP # -------------- # If we don't have a new enough Autoconf to choose the best objdump # available, choose the one first in the user's PATH. m4_defun([_LT_DECL_OBJDUMP], [AC_CHECK_TOOL(OBJDUMP, objdump, false) test -z "$OBJDUMP" && OBJDUMP=objdump _LT_DECL([], [OBJDUMP], [1], [An object symbol dumper]) AC_SUBST([OBJDUMP]) ]) # _LT_DECL_SED # ------------ # Check for a fully-functional sed program, that truncates # as few characters as possible. Prefer GNU sed if found. m4_defun([_LT_DECL_SED], [AC_PROG_SED test -z "$SED" && SED=sed Xsed="$SED -e 1s/^X//" _LT_DECL([], [SED], [1], [A sed program that does not truncate output]) _LT_DECL([], [Xsed], ["\$SED -e 1s/^X//"], [Sed that helps us avoid accidentally triggering echo(1) options like -n]) ])# _LT_DECL_SED m4_ifndef([AC_PROG_SED], [ # NOTE: This macro has been submitted for inclusion into # # GNU Autoconf as AC_PROG_SED. When it is available in # # a released version of Autoconf we should remove this # # macro and use it instead. # m4_defun([AC_PROG_SED], [AC_MSG_CHECKING([for a sed that does not truncate output]) AC_CACHE_VAL(lt_cv_path_SED, [# Loop through the user's path and test for sed and gsed. # Then use that list of sed's as ones to test for truncation. as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for lt_ac_prog in sed gsed; do for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$lt_ac_prog$ac_exec_ext"; then lt_ac_sed_list="$lt_ac_sed_list $as_dir/$lt_ac_prog$ac_exec_ext" fi done done done IFS=$as_save_IFS lt_ac_max=0 lt_ac_count=0 # Add /usr/xpg4/bin/sed as it is typically found on Solaris # along with /bin/sed that truncates output. for lt_ac_sed in $lt_ac_sed_list /usr/xpg4/bin/sed; do test ! -f $lt_ac_sed && continue cat /dev/null > conftest.in lt_ac_count=0 echo $ECHO_N "0123456789$ECHO_C" >conftest.in # Check for GNU sed and select it if it is found. if "$lt_ac_sed" --version 2>&1 < /dev/null | grep 'GNU' > /dev/null; then lt_cv_path_SED=$lt_ac_sed break fi while true; do cat conftest.in conftest.in >conftest.tmp mv conftest.tmp conftest.in cp conftest.in conftest.nl echo >>conftest.nl $lt_ac_sed -e 's/a$//' < conftest.nl >conftest.out || break cmp -s conftest.out conftest.nl || break # 10000 chars as input seems more than enough test $lt_ac_count -gt 10 && break lt_ac_count=`expr $lt_ac_count + 1` if test $lt_ac_count -gt $lt_ac_max; then lt_ac_max=$lt_ac_count lt_cv_path_SED=$lt_ac_sed fi done done ]) SED=$lt_cv_path_SED AC_SUBST([SED]) AC_MSG_RESULT([$SED]) ])#AC_PROG_SED ])#m4_ifndef # Old name: AU_ALIAS([LT_AC_PROG_SED], [AC_PROG_SED]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([LT_AC_PROG_SED], []) # _LT_CHECK_SHELL_FEATURES # ------------------------ # Find out whether the shell is Bourne or XSI compatible, # or has some other useful features. 
m4_defun([_LT_CHECK_SHELL_FEATURES], [AC_MSG_CHECKING([whether the shell understands some XSI constructs]) # Try some XSI features xsi_shell=no ( _lt_dummy="a/b/c" test "${_lt_dummy##*/},${_lt_dummy%/*},"${_lt_dummy%"$_lt_dummy"}, \ = c,a/b,, \ && eval 'test $(( 1 + 1 )) -eq 2 \ && test "${#_lt_dummy}" -eq 5' ) >/dev/null 2>&1 \ && xsi_shell=yes AC_MSG_RESULT([$xsi_shell]) _LT_CONFIG_LIBTOOL_INIT([xsi_shell='$xsi_shell']) AC_MSG_CHECKING([whether the shell understands "+="]) lt_shell_append=no ( foo=bar; set foo baz; eval "$[1]+=\$[2]" && test "$foo" = barbaz ) \ >/dev/null 2>&1 \ && lt_shell_append=yes AC_MSG_RESULT([$lt_shell_append]) _LT_CONFIG_LIBTOOL_INIT([lt_shell_append='$lt_shell_append']) if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then lt_unset=unset else lt_unset=false fi _LT_DECL([], [lt_unset], [0], [whether the shell understands "unset"])dnl # test EBCDIC or ASCII case `echo X|tr X '\101'` in A) # ASCII based system # \n is not interpreted correctly by Solaris 8 /usr/ucb/tr lt_SP2NL='tr \040 \012' lt_NL2SP='tr \015\012 \040\040' ;; *) # EBCDIC based system lt_SP2NL='tr \100 \n' lt_NL2SP='tr \r\n \100\100' ;; esac _LT_DECL([SP2NL], [lt_SP2NL], [1], [turn spaces into newlines])dnl _LT_DECL([NL2SP], [lt_NL2SP], [1], [turn newlines into spaces])dnl ])# _LT_CHECK_SHELL_FEATURES # _LT_PROG_XSI_SHELLFNS # --------------------- # Bourne and XSI compatible variants of some useful shell functions. m4_defun([_LT_PROG_XSI_SHELLFNS], [case $xsi_shell in yes) cat << \_LT_EOF >> "$cfgfile" # func_dirname file append nondir_replacement # Compute the dirname of FILE. If nonempty, add APPEND to the result, # otherwise set result to NONDIR_REPLACEMENT. func_dirname () { case ${1} in */*) func_dirname_result="${1%/*}${2}" ;; * ) func_dirname_result="${3}" ;; esac } # func_basename file func_basename () { func_basename_result="${1##*/}" } # func_dirname_and_basename file append nondir_replacement # perform func_basename and func_dirname in a single function # call: # dirname: Compute the dirname of FILE. If nonempty, # add APPEND to the result, otherwise set result # to NONDIR_REPLACEMENT. # value returned in "$func_dirname_result" # basename: Compute filename of FILE. # value retuned in "$func_basename_result" # Implementation must be kept synchronized with func_dirname # and func_basename. For efficiency, we do not delegate to # those functions but instead duplicate the functionality here. func_dirname_and_basename () { case ${1} in */*) func_dirname_result="${1%/*}${2}" ;; * ) func_dirname_result="${3}" ;; esac func_basename_result="${1##*/}" } # func_stripname prefix suffix name # strip PREFIX and SUFFIX off of NAME. # PREFIX and SUFFIX must not contain globbing or regex special # characters, hashes, percent signs, but SUFFIX may contain a leading # dot (in which case that matches only a dot). func_stripname () { # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are # positional parameters, so assign one to ordinary parameter first. func_stripname_result=${3} func_stripname_result=${func_stripname_result#"${1}"} func_stripname_result=${func_stripname_result%"${2}"} } # func_opt_split func_opt_split () { func_opt_split_opt=${1%%=*} func_opt_split_arg=${1#*=} } # func_lo2o object func_lo2o () { case ${1} in *.lo) func_lo2o_result=${1%.lo}.${objext} ;; *) func_lo2o_result=${1} ;; esac } # func_xform libobj-or-source func_xform () { func_xform_result=${1%.*}.lo } # func_arith arithmetic-term... 
func_arith () { func_arith_result=$(( $[*] )) } # func_len string # STRING may not start with a hyphen. func_len () { func_len_result=${#1} } _LT_EOF ;; *) # Bourne compatible functions. cat << \_LT_EOF >> "$cfgfile" # func_dirname file append nondir_replacement # Compute the dirname of FILE. If nonempty, add APPEND to the result, # otherwise set result to NONDIR_REPLACEMENT. func_dirname () { # Extract subdirectory from the argument. func_dirname_result=`$ECHO "X${1}" | $Xsed -e "$dirname"` if test "X$func_dirname_result" = "X${1}"; then func_dirname_result="${3}" else func_dirname_result="$func_dirname_result${2}" fi } # func_basename file func_basename () { func_basename_result=`$ECHO "X${1}" | $Xsed -e "$basename"` } dnl func_dirname_and_basename dnl A portable version of this function is already defined in general.m4sh dnl so there is no need for it here. # func_stripname prefix suffix name # strip PREFIX and SUFFIX off of NAME. # PREFIX and SUFFIX must not contain globbing or regex special # characters, hashes, percent signs, but SUFFIX may contain a leading # dot (in which case that matches only a dot). # func_strip_suffix prefix name func_stripname () { case ${2} in .*) func_stripname_result=`$ECHO "X${3}" \ | $Xsed -e "s%^${1}%%" -e "s%\\\\${2}\$%%"`;; *) func_stripname_result=`$ECHO "X${3}" \ | $Xsed -e "s%^${1}%%" -e "s%${2}\$%%"`;; esac } # sed scripts: my_sed_long_opt='1s/^\(-[[^=]]*\)=.*/\1/;q' my_sed_long_arg='1s/^-[[^=]]*=//' # func_opt_split func_opt_split () { func_opt_split_opt=`$ECHO "X${1}" | $Xsed -e "$my_sed_long_opt"` func_opt_split_arg=`$ECHO "X${1}" | $Xsed -e "$my_sed_long_arg"` } # func_lo2o object func_lo2o () { func_lo2o_result=`$ECHO "X${1}" | $Xsed -e "$lo2o"` } # func_xform libobj-or-source func_xform () { func_xform_result=`$ECHO "X${1}" | $Xsed -e 's/\.[[^.]]*$/.lo/'` } # func_arith arithmetic-term... func_arith () { func_arith_result=`expr "$[@]"` } # func_len string # STRING may not start with a hyphen. func_len () { func_len_result=`expr "$[1]" : ".*" 2>/dev/null || echo $max_cmd_len` } _LT_EOF esac case $lt_shell_append in yes) cat << \_LT_EOF >> "$cfgfile" # func_append var value # Append VALUE to the end of shell variable VAR. func_append () { eval "$[1]+=\$[2]" } _LT_EOF ;; *) cat << \_LT_EOF >> "$cfgfile" # func_append var value # Append VALUE to the end of shell variable VAR. func_append () { eval "$[1]=\$$[1]\$[2]" } _LT_EOF ;; esac ]) # Helper functions for option handling. -*- Autoconf -*- # # Copyright (C) 2004, 2005, 2007, 2008 Free Software Foundation, Inc. # Written by Gary V. Vaughan, 2004 # # This file is free software; the Free Software Foundation gives # unlimited permission to copy and/or distribute it, with or without # modifications, as long as this notice is preserved. # serial 6 ltoptions.m4 # This is to help aclocal find these macros, as it can't see m4_define. AC_DEFUN([LTOPTIONS_VERSION], [m4_if([1])]) # _LT_MANGLE_OPTION(MACRO-NAME, OPTION-NAME) # ------------------------------------------ m4_define([_LT_MANGLE_OPTION], [[_LT_OPTION_]m4_bpatsubst($1__$2, [[^a-zA-Z0-9_]], [_])]) # _LT_SET_OPTION(MACRO-NAME, OPTION-NAME) # --------------------------------------- # Set option OPTION-NAME for macro MACRO-NAME, and if there is a # matching handler defined, dispatch to it. Other OPTION-NAMEs are # saved as a flag. 
m4_define([_LT_SET_OPTION], [m4_define(_LT_MANGLE_OPTION([$1], [$2]))dnl m4_ifdef(_LT_MANGLE_DEFUN([$1], [$2]), _LT_MANGLE_DEFUN([$1], [$2]), [m4_warning([Unknown $1 option `$2'])])[]dnl ]) # _LT_IF_OPTION(MACRO-NAME, OPTION-NAME, IF-SET, [IF-NOT-SET]) # ------------------------------------------------------------ # Execute IF-SET if OPTION is set, IF-NOT-SET otherwise. m4_define([_LT_IF_OPTION], [m4_ifdef(_LT_MANGLE_OPTION([$1], [$2]), [$3], [$4])]) # _LT_UNLESS_OPTIONS(MACRO-NAME, OPTION-LIST, IF-NOT-SET) # ------------------------------------------------------- # Execute IF-NOT-SET unless all options in OPTION-LIST for MACRO-NAME # are set. m4_define([_LT_UNLESS_OPTIONS], [m4_foreach([_LT_Option], m4_split(m4_normalize([$2])), [m4_ifdef(_LT_MANGLE_OPTION([$1], _LT_Option), [m4_define([$0_found])])])[]dnl m4_ifdef([$0_found], [m4_undefine([$0_found])], [$3 ])[]dnl ]) # _LT_SET_OPTIONS(MACRO-NAME, OPTION-LIST) # ---------------------------------------- # OPTION-LIST is a space-separated list of Libtool options associated # with MACRO-NAME. If any OPTION has a matching handler declared with # LT_OPTION_DEFINE, dispatch to that macro; otherwise complain about # the unknown option and exit. m4_defun([_LT_SET_OPTIONS], [# Set options m4_foreach([_LT_Option], m4_split(m4_normalize([$2])), [_LT_SET_OPTION([$1], _LT_Option)]) m4_if([$1],[LT_INIT],[ dnl dnl Simply set some default values (i.e off) if boolean options were not dnl specified: _LT_UNLESS_OPTIONS([LT_INIT], [dlopen], [enable_dlopen=no ]) _LT_UNLESS_OPTIONS([LT_INIT], [win32-dll], [enable_win32_dll=no ]) dnl dnl If no reference was made to various pairs of opposing options, then dnl we run the default mode handler for the pair. For example, if neither dnl `shared' nor `disable-shared' was passed, we enable building of shared dnl archives by default: _LT_UNLESS_OPTIONS([LT_INIT], [shared disable-shared], [_LT_ENABLE_SHARED]) _LT_UNLESS_OPTIONS([LT_INIT], [static disable-static], [_LT_ENABLE_STATIC]) _LT_UNLESS_OPTIONS([LT_INIT], [pic-only no-pic], [_LT_WITH_PIC]) _LT_UNLESS_OPTIONS([LT_INIT], [fast-install disable-fast-install], [_LT_ENABLE_FAST_INSTALL]) ]) ])# _LT_SET_OPTIONS # _LT_MANGLE_DEFUN(MACRO-NAME, OPTION-NAME) # ----------------------------------------- m4_define([_LT_MANGLE_DEFUN], [[_LT_OPTION_DEFUN_]m4_bpatsubst(m4_toupper([$1__$2]), [[^A-Z0-9_]], [_])]) # LT_OPTION_DEFINE(MACRO-NAME, OPTION-NAME, CODE) # ----------------------------------------------- m4_define([LT_OPTION_DEFINE], [m4_define(_LT_MANGLE_DEFUN([$1], [$2]), [$3])[]dnl ])# LT_OPTION_DEFINE # dlopen # ------ LT_OPTION_DEFINE([LT_INIT], [dlopen], [enable_dlopen=yes ]) AU_DEFUN([AC_LIBTOOL_DLOPEN], [_LT_SET_OPTION([LT_INIT], [dlopen]) AC_DIAGNOSE([obsolete], [$0: Remove this warning and the call to _LT_SET_OPTION when you put the `dlopen' option into LT_INIT's first parameter.]) ]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_LIBTOOL_DLOPEN], []) # win32-dll # --------- # Declare package support for building win32 dll's. 
LT_OPTION_DEFINE([LT_INIT], [win32-dll], [enable_win32_dll=yes case $host in *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-cegcc*) AC_CHECK_TOOL(AS, as, false) AC_CHECK_TOOL(DLLTOOL, dlltool, false) AC_CHECK_TOOL(OBJDUMP, objdump, false) ;; esac test -z "$AS" && AS=as _LT_DECL([], [AS], [0], [Assembler program])dnl test -z "$DLLTOOL" && DLLTOOL=dlltool _LT_DECL([], [DLLTOOL], [0], [DLL creation program])dnl test -z "$OBJDUMP" && OBJDUMP=objdump _LT_DECL([], [OBJDUMP], [0], [Object dumper program])dnl ])# win32-dll AU_DEFUN([AC_LIBTOOL_WIN32_DLL], [AC_REQUIRE([AC_CANONICAL_HOST])dnl _LT_SET_OPTION([LT_INIT], [win32-dll]) AC_DIAGNOSE([obsolete], [$0: Remove this warning and the call to _LT_SET_OPTION when you put the `win32-dll' option into LT_INIT's first parameter.]) ]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_LIBTOOL_WIN32_DLL], []) # _LT_ENABLE_SHARED([DEFAULT]) # ---------------------------- # implement the --enable-shared flag, and supports the `shared' and # `disable-shared' LT_INIT options. # DEFAULT is either `yes' or `no'. If omitted, it defaults to `yes'. m4_define([_LT_ENABLE_SHARED], [m4_define([_LT_ENABLE_SHARED_DEFAULT], [m4_if($1, no, no, yes)])dnl AC_ARG_ENABLE([shared], [AS_HELP_STRING([--enable-shared@<:@=PKGS@:>@], [build shared libraries @<:@default=]_LT_ENABLE_SHARED_DEFAULT[@:>@])], [p=${PACKAGE-default} case $enableval in yes) enable_shared=yes ;; no) enable_shared=no ;; *) enable_shared=no # Look at the argument we got. We use all the common list separators. lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," for pkg in $enableval; do IFS="$lt_save_ifs" if test "X$pkg" = "X$p"; then enable_shared=yes fi done IFS="$lt_save_ifs" ;; esac], [enable_shared=]_LT_ENABLE_SHARED_DEFAULT) _LT_DECL([build_libtool_libs], [enable_shared], [0], [Whether or not to build shared libraries]) ])# _LT_ENABLE_SHARED LT_OPTION_DEFINE([LT_INIT], [shared], [_LT_ENABLE_SHARED([yes])]) LT_OPTION_DEFINE([LT_INIT], [disable-shared], [_LT_ENABLE_SHARED([no])]) # Old names: AC_DEFUN([AC_ENABLE_SHARED], [_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[shared]) ]) AC_DEFUN([AC_DISABLE_SHARED], [_LT_SET_OPTION([LT_INIT], [disable-shared]) ]) AU_DEFUN([AM_ENABLE_SHARED], [AC_ENABLE_SHARED($@)]) AU_DEFUN([AM_DISABLE_SHARED], [AC_DISABLE_SHARED($@)]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AM_ENABLE_SHARED], []) dnl AC_DEFUN([AM_DISABLE_SHARED], []) # _LT_ENABLE_STATIC([DEFAULT]) # ---------------------------- # implement the --enable-static flag, and support the `static' and # `disable-static' LT_INIT options. # DEFAULT is either `yes' or `no'. If omitted, it defaults to `yes'. m4_define([_LT_ENABLE_STATIC], [m4_define([_LT_ENABLE_STATIC_DEFAULT], [m4_if($1, no, no, yes)])dnl AC_ARG_ENABLE([static], [AS_HELP_STRING([--enable-static@<:@=PKGS@:>@], [build static libraries @<:@default=]_LT_ENABLE_STATIC_DEFAULT[@:>@])], [p=${PACKAGE-default} case $enableval in yes) enable_static=yes ;; no) enable_static=no ;; *) enable_static=no # Look at the argument we got. We use all the common list separators. 
lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," for pkg in $enableval; do IFS="$lt_save_ifs" if test "X$pkg" = "X$p"; then enable_static=yes fi done IFS="$lt_save_ifs" ;; esac], [enable_static=]_LT_ENABLE_STATIC_DEFAULT) _LT_DECL([build_old_libs], [enable_static], [0], [Whether or not to build static libraries]) ])# _LT_ENABLE_STATIC LT_OPTION_DEFINE([LT_INIT], [static], [_LT_ENABLE_STATIC([yes])]) LT_OPTION_DEFINE([LT_INIT], [disable-static], [_LT_ENABLE_STATIC([no])]) # Old names: AC_DEFUN([AC_ENABLE_STATIC], [_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[static]) ]) AC_DEFUN([AC_DISABLE_STATIC], [_LT_SET_OPTION([LT_INIT], [disable-static]) ]) AU_DEFUN([AM_ENABLE_STATIC], [AC_ENABLE_STATIC($@)]) AU_DEFUN([AM_DISABLE_STATIC], [AC_DISABLE_STATIC($@)]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AM_ENABLE_STATIC], []) dnl AC_DEFUN([AM_DISABLE_STATIC], []) # _LT_ENABLE_FAST_INSTALL([DEFAULT]) # ---------------------------------- # implement the --enable-fast-install flag, and support the `fast-install' # and `disable-fast-install' LT_INIT options. # DEFAULT is either `yes' or `no'. If omitted, it defaults to `yes'. m4_define([_LT_ENABLE_FAST_INSTALL], [m4_define([_LT_ENABLE_FAST_INSTALL_DEFAULT], [m4_if($1, no, no, yes)])dnl AC_ARG_ENABLE([fast-install], [AS_HELP_STRING([--enable-fast-install@<:@=PKGS@:>@], [optimize for fast installation @<:@default=]_LT_ENABLE_FAST_INSTALL_DEFAULT[@:>@])], [p=${PACKAGE-default} case $enableval in yes) enable_fast_install=yes ;; no) enable_fast_install=no ;; *) enable_fast_install=no # Look at the argument we got. We use all the common list separators. lt_save_ifs="$IFS"; IFS="${IFS}$PATH_SEPARATOR," for pkg in $enableval; do IFS="$lt_save_ifs" if test "X$pkg" = "X$p"; then enable_fast_install=yes fi done IFS="$lt_save_ifs" ;; esac], [enable_fast_install=]_LT_ENABLE_FAST_INSTALL_DEFAULT) _LT_DECL([fast_install], [enable_fast_install], [0], [Whether or not to optimize for fast installation])dnl ])# _LT_ENABLE_FAST_INSTALL LT_OPTION_DEFINE([LT_INIT], [fast-install], [_LT_ENABLE_FAST_INSTALL([yes])]) LT_OPTION_DEFINE([LT_INIT], [disable-fast-install], [_LT_ENABLE_FAST_INSTALL([no])]) # Old names: AU_DEFUN([AC_ENABLE_FAST_INSTALL], [_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[fast-install]) AC_DIAGNOSE([obsolete], [$0: Remove this warning and the call to _LT_SET_OPTION when you put the `fast-install' option into LT_INIT's first parameter.]) ]) AU_DEFUN([AC_DISABLE_FAST_INSTALL], [_LT_SET_OPTION([LT_INIT], [disable-fast-install]) AC_DIAGNOSE([obsolete], [$0: Remove this warning and the call to _LT_SET_OPTION when you put the `disable-fast-install' option into LT_INIT's first parameter.]) ]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_ENABLE_FAST_INSTALL], []) dnl AC_DEFUN([AM_DISABLE_FAST_INSTALL], []) # _LT_WITH_PIC([MODE]) # -------------------- # implement the --with-pic flag, and support the `pic-only' and `no-pic' # LT_INIT options. # MODE is either `yes' or `no'. If omitted, it defaults to `both'. 
m4_define([_LT_WITH_PIC], [AC_ARG_WITH([pic], [AS_HELP_STRING([--with-pic], [try to use only PIC/non-PIC objects @<:@default=use both@:>@])], [pic_mode="$withval"], [pic_mode=default]) test -z "$pic_mode" && pic_mode=m4_default([$1], [default]) _LT_DECL([], [pic_mode], [0], [What type of objects to build])dnl ])# _LT_WITH_PIC LT_OPTION_DEFINE([LT_INIT], [pic-only], [_LT_WITH_PIC([yes])]) LT_OPTION_DEFINE([LT_INIT], [no-pic], [_LT_WITH_PIC([no])]) # Old name: AU_DEFUN([AC_LIBTOOL_PICMODE], [_LT_SET_OPTION([LT_INIT], [pic-only]) AC_DIAGNOSE([obsolete], [$0: Remove this warning and the call to _LT_SET_OPTION when you put the `pic-only' option into LT_INIT's first parameter.]) ]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_LIBTOOL_PICMODE], []) m4_define([_LTDL_MODE], []) LT_OPTION_DEFINE([LTDL_INIT], [nonrecursive], [m4_define([_LTDL_MODE], [nonrecursive])]) LT_OPTION_DEFINE([LTDL_INIT], [recursive], [m4_define([_LTDL_MODE], [recursive])]) LT_OPTION_DEFINE([LTDL_INIT], [subproject], [m4_define([_LTDL_MODE], [subproject])]) m4_define([_LTDL_TYPE], []) LT_OPTION_DEFINE([LTDL_INIT], [installable], [m4_define([_LTDL_TYPE], [installable])]) LT_OPTION_DEFINE([LTDL_INIT], [convenience], [m4_define([_LTDL_TYPE], [convenience])]) # ltsugar.m4 -- libtool m4 base layer. -*-Autoconf-*- # # Copyright (C) 2004, 2005, 2007, 2008 Free Software Foundation, Inc. # Written by Gary V. Vaughan, 2004 # # This file is free software; the Free Software Foundation gives # unlimited permission to copy and/or distribute it, with or without # modifications, as long as this notice is preserved. # serial 6 ltsugar.m4 # This is to help aclocal find these macros, as it can't see m4_define. AC_DEFUN([LTSUGAR_VERSION], [m4_if([0.1])]) # lt_join(SEP, ARG1, [ARG2...]) # ----------------------------- # Produce ARG1SEPARG2...SEPARGn, omitting [] arguments and their # associated separator. # Needed until we can rely on m4_join from Autoconf 2.62, since all earlier # versions in m4sugar had bugs. m4_define([lt_join], [m4_if([$#], [1], [], [$#], [2], [[$2]], [m4_if([$2], [], [], [[$2]_])$0([$1], m4_shift(m4_shift($@)))])]) m4_define([_lt_join], [m4_if([$#$2], [2], [], [m4_if([$2], [], [], [[$1$2]])$0([$1], m4_shift(m4_shift($@)))])]) # lt_car(LIST) # lt_cdr(LIST) # ------------ # Manipulate m4 lists. # These macros are necessary as long as will still need to support # Autoconf-2.59 which quotes differently. m4_define([lt_car], [[$1]]) m4_define([lt_cdr], [m4_if([$#], 0, [m4_fatal([$0: cannot be called without arguments])], [$#], 1, [], [m4_dquote(m4_shift($@))])]) m4_define([lt_unquote], $1) # lt_append(MACRO-NAME, STRING, [SEPARATOR]) # ------------------------------------------ # Redefine MACRO-NAME to hold its former content plus `SEPARATOR'`STRING'. # Note that neither SEPARATOR nor STRING are expanded; they are appended # to MACRO-NAME as is (leaving the expansion for when MACRO-NAME is invoked). # No SEPARATOR is output if MACRO-NAME was previously undefined (different # than defined and empty). # # This macro is needed until we can rely on Autoconf 2.62, since earlier # versions of m4sugar mistakenly expanded SEPARATOR but not STRING. m4_define([lt_append], [m4_define([$1], m4_ifdef([$1], [m4_defn([$1])[$3]])[$2])]) # lt_combine(SEP, PREFIX-LIST, INFIX, SUFFIX1, [SUFFIX2...]) # ---------------------------------------------------------- # Produce a SEP delimited list of all paired combinations of elements of # PREFIX-LIST with SUFFIX1 through SUFFIXn. Each element of the list # has the form PREFIXmINFIXSUFFIXn. 
# Needed until we can rely on m4_combine added in Autoconf 2.62. m4_define([lt_combine], [m4_if(m4_eval([$# > 3]), [1], [m4_pushdef([_Lt_sep], [m4_define([_Lt_sep], m4_defn([lt_car]))])]]dnl [[m4_foreach([_Lt_prefix], [$2], [m4_foreach([_Lt_suffix], ]m4_dquote(m4_dquote(m4_shift(m4_shift(m4_shift($@)))))[, [_Lt_sep([$1])[]m4_defn([_Lt_prefix])[$3]m4_defn([_Lt_suffix])])])])]) # lt_if_append_uniq(MACRO-NAME, VARNAME, [SEPARATOR], [UNIQ], [NOT-UNIQ]) # ----------------------------------------------------------------------- # Iff MACRO-NAME does not yet contain VARNAME, then append it (delimited # by SEPARATOR if supplied) and expand UNIQ, else NOT-UNIQ. m4_define([lt_if_append_uniq], [m4_ifdef([$1], [m4_if(m4_index([$3]m4_defn([$1])[$3], [$3$2$3]), [-1], [lt_append([$1], [$2], [$3])$4], [$5])], [lt_append([$1], [$2], [$3])$4])]) # lt_dict_add(DICT, KEY, VALUE) # ----------------------------- m4_define([lt_dict_add], [m4_define([$1($2)], [$3])]) # lt_dict_add_subkey(DICT, KEY, SUBKEY, VALUE) # -------------------------------------------- m4_define([lt_dict_add_subkey], [m4_define([$1($2:$3)], [$4])]) # lt_dict_fetch(DICT, KEY, [SUBKEY]) # ---------------------------------- m4_define([lt_dict_fetch], [m4_ifval([$3], m4_ifdef([$1($2:$3)], [m4_defn([$1($2:$3)])]), m4_ifdef([$1($2)], [m4_defn([$1($2)])]))]) # lt_if_dict_fetch(DICT, KEY, [SUBKEY], VALUE, IF-TRUE, [IF-FALSE]) # ----------------------------------------------------------------- m4_define([lt_if_dict_fetch], [m4_if(lt_dict_fetch([$1], [$2], [$3]), [$4], [$5], [$6])]) # lt_dict_filter(DICT, [SUBKEY], VALUE, [SEPARATOR], KEY, [...]) # -------------------------------------------------------------- m4_define([lt_dict_filter], [m4_if([$5], [], [], [lt_join(m4_quote(m4_default([$4], [[, ]])), lt_unquote(m4_split(m4_normalize(m4_foreach(_Lt_key, lt_car([m4_shiftn(4, $@)]), [lt_if_dict_fetch([$1], _Lt_key, [$2], [$3], [_Lt_key ])])))))])[]dnl ]) # ltversion.m4 -- version numbers -*- Autoconf -*- # # Copyright (C) 2004 Free Software Foundation, Inc. # Written by Scott James Remnant, 2004 # # This file is free software; the Free Software Foundation gives # unlimited permission to copy and/or distribute it, with or without # modifications, as long as this notice is preserved. # Generated from ltversion.in. # serial 3017 ltversion.m4 # This file is part of GNU Libtool m4_define([LT_PACKAGE_VERSION], [2.2.6b]) m4_define([LT_PACKAGE_REVISION], [1.3017]) AC_DEFUN([LTVERSION_VERSION], [macro_version='2.2.6b' macro_revision='1.3017' _LT_DECL(, macro_version, 0, [Which release of libtool.m4 was used?]) _LT_DECL(, macro_revision, 0) ]) # lt~obsolete.m4 -- aclocal satisfying obsolete definitions. -*-Autoconf-*- # # Copyright (C) 2004, 2005, 2007 Free Software Foundation, Inc. # Written by Scott James Remnant, 2004. # # This file is free software; the Free Software Foundation gives # unlimited permission to copy and/or distribute it, with or without # modifications, as long as this notice is preserved. # serial 4 lt~obsolete.m4 # These exist entirely to fool aclocal when bootstrapping libtool. # # In the past libtool.m4 has provided macros via AC_DEFUN (or AU_DEFUN) # which have later been changed to m4_define as they aren't part of the # exported API, or moved to Autoconf or Automake where they belong. # # The trouble is, aclocal is a bit thick. 
It'll see the old AC_DEFUN # in /usr/share/aclocal/libtool.m4 and remember it, then when it sees us # using a macro with the same name in our local m4/libtool.m4 it'll # pull the old libtool.m4 in (it doesn't see our shiny new m4_define # and doesn't know about Autoconf macros at all.) # # So we provide this file, which has a silly filename so it's always # included after everything else. This provides aclocal with the # AC_DEFUNs it wants, but when m4 processes it, it doesn't do anything # because those macros already exist, or will be overwritten later. # We use AC_DEFUN over AU_DEFUN for compatibility with aclocal-1.6. # # Anytime we withdraw an AC_DEFUN or AU_DEFUN, remember to add it here. # Yes, that means every name once taken will need to remain here until # we give up compatibility with versions before 1.7, at which point # we need to keep only those names which we still refer to. # This is to help aclocal find these macros, as it can't see m4_define. AC_DEFUN([LTOBSOLETE_VERSION], [m4_if([1])]) m4_ifndef([AC_LIBTOOL_LINKER_OPTION], [AC_DEFUN([AC_LIBTOOL_LINKER_OPTION])]) m4_ifndef([AC_PROG_EGREP], [AC_DEFUN([AC_PROG_EGREP])]) m4_ifndef([_LT_AC_PROG_ECHO_BACKSLASH], [AC_DEFUN([_LT_AC_PROG_ECHO_BACKSLASH])]) m4_ifndef([_LT_AC_SHELL_INIT], [AC_DEFUN([_LT_AC_SHELL_INIT])]) m4_ifndef([_LT_AC_SYS_LIBPATH_AIX], [AC_DEFUN([_LT_AC_SYS_LIBPATH_AIX])]) m4_ifndef([_LT_PROG_LTMAIN], [AC_DEFUN([_LT_PROG_LTMAIN])]) m4_ifndef([_LT_AC_TAGVAR], [AC_DEFUN([_LT_AC_TAGVAR])]) m4_ifndef([AC_LTDL_ENABLE_INSTALL], [AC_DEFUN([AC_LTDL_ENABLE_INSTALL])]) m4_ifndef([AC_LTDL_PREOPEN], [AC_DEFUN([AC_LTDL_PREOPEN])]) m4_ifndef([_LT_AC_SYS_COMPILER], [AC_DEFUN([_LT_AC_SYS_COMPILER])]) m4_ifndef([_LT_AC_LOCK], [AC_DEFUN([_LT_AC_LOCK])]) m4_ifndef([AC_LIBTOOL_SYS_OLD_ARCHIVE], [AC_DEFUN([AC_LIBTOOL_SYS_OLD_ARCHIVE])]) m4_ifndef([_LT_AC_TRY_DLOPEN_SELF], [AC_DEFUN([_LT_AC_TRY_DLOPEN_SELF])]) m4_ifndef([AC_LIBTOOL_PROG_CC_C_O], [AC_DEFUN([AC_LIBTOOL_PROG_CC_C_O])]) m4_ifndef([AC_LIBTOOL_SYS_HARD_LINK_LOCKS], [AC_DEFUN([AC_LIBTOOL_SYS_HARD_LINK_LOCKS])]) m4_ifndef([AC_LIBTOOL_OBJDIR], [AC_DEFUN([AC_LIBTOOL_OBJDIR])]) m4_ifndef([AC_LTDL_OBJDIR], [AC_DEFUN([AC_LTDL_OBJDIR])]) m4_ifndef([AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH], [AC_DEFUN([AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH])]) m4_ifndef([AC_LIBTOOL_SYS_LIB_STRIP], [AC_DEFUN([AC_LIBTOOL_SYS_LIB_STRIP])]) m4_ifndef([AC_PATH_MAGIC], [AC_DEFUN([AC_PATH_MAGIC])]) m4_ifndef([AC_PROG_LD_GNU], [AC_DEFUN([AC_PROG_LD_GNU])]) m4_ifndef([AC_PROG_LD_RELOAD_FLAG], [AC_DEFUN([AC_PROG_LD_RELOAD_FLAG])]) m4_ifndef([AC_DEPLIBS_CHECK_METHOD], [AC_DEFUN([AC_DEPLIBS_CHECK_METHOD])]) m4_ifndef([AC_LIBTOOL_PROG_COMPILER_NO_RTTI], [AC_DEFUN([AC_LIBTOOL_PROG_COMPILER_NO_RTTI])]) m4_ifndef([AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE], [AC_DEFUN([AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE])]) m4_ifndef([AC_LIBTOOL_PROG_COMPILER_PIC], [AC_DEFUN([AC_LIBTOOL_PROG_COMPILER_PIC])]) m4_ifndef([AC_LIBTOOL_PROG_LD_SHLIBS], [AC_DEFUN([AC_LIBTOOL_PROG_LD_SHLIBS])]) m4_ifndef([AC_LIBTOOL_POSTDEP_PREDEP], [AC_DEFUN([AC_LIBTOOL_POSTDEP_PREDEP])]) m4_ifndef([LT_AC_PROG_EGREP], [AC_DEFUN([LT_AC_PROG_EGREP])]) m4_ifndef([LT_AC_PROG_SED], [AC_DEFUN([LT_AC_PROG_SED])]) m4_ifndef([_LT_CC_BASENAME], [AC_DEFUN([_LT_CC_BASENAME])]) m4_ifndef([_LT_COMPILER_BOILERPLATE], [AC_DEFUN([_LT_COMPILER_BOILERPLATE])]) m4_ifndef([_LT_LINKER_BOILERPLATE], [AC_DEFUN([_LT_LINKER_BOILERPLATE])]) m4_ifndef([_AC_PROG_LIBTOOL], [AC_DEFUN([_AC_PROG_LIBTOOL])]) m4_ifndef([AC_LIBTOOL_SETUP], [AC_DEFUN([AC_LIBTOOL_SETUP])]) m4_ifndef([_LT_AC_CHECK_DLFCN], 
[AC_DEFUN([_LT_AC_CHECK_DLFCN])]) m4_ifndef([AC_LIBTOOL_SYS_DYNAMIC_LINKER], [AC_DEFUN([AC_LIBTOOL_SYS_DYNAMIC_LINKER])]) m4_ifndef([_LT_AC_TAGCONFIG], [AC_DEFUN([_LT_AC_TAGCONFIG])]) m4_ifndef([AC_DISABLE_FAST_INSTALL], [AC_DEFUN([AC_DISABLE_FAST_INSTALL])]) m4_ifndef([_LT_AC_LANG_CXX], [AC_DEFUN([_LT_AC_LANG_CXX])]) m4_ifndef([_LT_AC_LANG_F77], [AC_DEFUN([_LT_AC_LANG_F77])]) m4_ifndef([_LT_AC_LANG_GCJ], [AC_DEFUN([_LT_AC_LANG_GCJ])]) m4_ifndef([AC_LIBTOOL_RC], [AC_DEFUN([AC_LIBTOOL_RC])]) m4_ifndef([AC_LIBTOOL_LANG_C_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_C_CONFIG])]) m4_ifndef([_LT_AC_LANG_C_CONFIG], [AC_DEFUN([_LT_AC_LANG_C_CONFIG])]) m4_ifndef([AC_LIBTOOL_LANG_CXX_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_CXX_CONFIG])]) m4_ifndef([_LT_AC_LANG_CXX_CONFIG], [AC_DEFUN([_LT_AC_LANG_CXX_CONFIG])]) m4_ifndef([AC_LIBTOOL_LANG_F77_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_F77_CONFIG])]) m4_ifndef([_LT_AC_LANG_F77_CONFIG], [AC_DEFUN([_LT_AC_LANG_F77_CONFIG])]) m4_ifndef([AC_LIBTOOL_LANG_GCJ_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_GCJ_CONFIG])]) m4_ifndef([_LT_AC_LANG_GCJ_CONFIG], [AC_DEFUN([_LT_AC_LANG_GCJ_CONFIG])]) m4_ifndef([AC_LIBTOOL_LANG_RC_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_RC_CONFIG])]) m4_ifndef([_LT_AC_LANG_RC_CONFIG], [AC_DEFUN([_LT_AC_LANG_RC_CONFIG])]) m4_ifndef([AC_LIBTOOL_CONFIG], [AC_DEFUN([AC_LIBTOOL_CONFIG])]) m4_ifndef([_LT_AC_FILE_LTDLL_C], [AC_DEFUN([_LT_AC_FILE_LTDLL_C])]) # Copyright (C) 2002, 2003, 2005, 2006, 2007, 2008 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_AUTOMAKE_VERSION(VERSION) # ---------------------------- # Automake X.Y traces this macro to ensure aclocal.m4 has been # generated from the m4 files accompanying Automake X.Y. # (This private macro should not be called outside this file.) AC_DEFUN([AM_AUTOMAKE_VERSION], [am__api_version='1.11' dnl Some users find AM_AUTOMAKE_VERSION and mistake it for a way to dnl require some minimum version. Point them to the right macro. m4_if([$1], [1.11.1], [], [AC_FATAL([Do not call $0, use AM_INIT_AUTOMAKE([$1]).])])dnl ]) # _AM_AUTOCONF_VERSION(VERSION) # ----------------------------- # aclocal traces this macro to find the Autoconf version. # This is a private macro too. Using m4_define simplifies # the logic in aclocal, which can simply ignore this definition. m4_define([_AM_AUTOCONF_VERSION], []) # AM_SET_CURRENT_AUTOMAKE_VERSION # ------------------------------- # Call AM_AUTOMAKE_VERSION and AM_AUTOMAKE_VERSION so they can be traced. # This function is AC_REQUIREd by AM_INIT_AUTOMAKE. AC_DEFUN([AM_SET_CURRENT_AUTOMAKE_VERSION], [AM_AUTOMAKE_VERSION([1.11.1])dnl m4_ifndef([AC_AUTOCONF_VERSION], [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl _AM_AUTOCONF_VERSION(m4_defn([AC_AUTOCONF_VERSION]))]) # AM_AUX_DIR_EXPAND -*- Autoconf -*- # Copyright (C) 2001, 2003, 2005 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # For projects using AC_CONFIG_AUX_DIR([foo]), Autoconf sets # $ac_aux_dir to `$srcdir/foo'. In other projects, it is set to # `$srcdir', `$srcdir/..', or `$srcdir/../..'. # # Of course, Automake must honor this variable whenever it calls a # tool from the auxiliary directory. 
The problem is that $srcdir (and # therefore $ac_aux_dir as well) can be either absolute or relative, # depending on how configure is run. This is pretty annoying, since # it makes $ac_aux_dir quite unusable in subdirectories: in the top # source directory, any form will work fine, but in subdirectories a # relative path needs to be adjusted first. # # $ac_aux_dir/missing # fails when called from a subdirectory if $ac_aux_dir is relative # $top_srcdir/$ac_aux_dir/missing # fails if $ac_aux_dir is absolute, # fails when called from a subdirectory in a VPATH build with # a relative $ac_aux_dir # # The reason of the latter failure is that $top_srcdir and $ac_aux_dir # are both prefixed by $srcdir. In an in-source build this is usually # harmless because $srcdir is `.', but things will broke when you # start a VPATH build or use an absolute $srcdir. # # So we could use something similar to $top_srcdir/$ac_aux_dir/missing, # iff we strip the leading $srcdir from $ac_aux_dir. That would be: # am_aux_dir='\$(top_srcdir)/'`expr "$ac_aux_dir" : "$srcdir//*\(.*\)"` # and then we would define $MISSING as # MISSING="\${SHELL} $am_aux_dir/missing" # This will work as long as MISSING is not called from configure, because # unfortunately $(top_srcdir) has no meaning in configure. # However there are other variables, like CC, which are often used in # configure, and could therefore not use this "fixed" $ac_aux_dir. # # Another solution, used here, is to always expand $ac_aux_dir to an # absolute PATH. The drawback is that using absolute paths prevent a # configured tree to be moved without reconfiguration. AC_DEFUN([AM_AUX_DIR_EXPAND], [dnl Rely on autoconf to set up CDPATH properly. AC_PREREQ([2.50])dnl # expand $ac_aux_dir to an absolute path am_aux_dir=`cd $ac_aux_dir && pwd` ]) # AM_CONDITIONAL -*- Autoconf -*- # Copyright (C) 1997, 2000, 2001, 2003, 2004, 2005, 2006, 2008 # Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # serial 9 # AM_CONDITIONAL(NAME, SHELL-CONDITION) # ------------------------------------- # Define a conditional. AC_DEFUN([AM_CONDITIONAL], [AC_PREREQ(2.52)dnl ifelse([$1], [TRUE], [AC_FATAL([$0: invalid condition: $1])], [$1], [FALSE], [AC_FATAL([$0: invalid condition: $1])])dnl AC_SUBST([$1_TRUE])dnl AC_SUBST([$1_FALSE])dnl _AM_SUBST_NOTMAKE([$1_TRUE])dnl _AM_SUBST_NOTMAKE([$1_FALSE])dnl m4_define([_AM_COND_VALUE_$1], [$2])dnl if $2; then $1_TRUE= $1_FALSE='#' else $1_TRUE='#' $1_FALSE= fi AC_CONFIG_COMMANDS_PRE( [if test -z "${$1_TRUE}" && test -z "${$1_FALSE}"; then AC_MSG_ERROR([[conditional "$1" was never defined. Usually this means the macro was only invoked conditionally.]]) fi])]) # Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2006, 2009 # Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # serial 10 # There are a few dirty hacks below to avoid letting `AC_PROG_CC' be # written in clear, in which case automake, when reading aclocal.m4, # will think it sees a *use*, and therefore will trigger all it's # C support machinery. Also note that it means that autoscan, seeing # CC etc. in the Makefile, will ask for an AC_PROG_CC use... 
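# (A hedged usage sketch of the AM_CONDITIONAL macro defined further above;
#  the USE_UPX name and the --enable-upx switch are illustrative only, not
#  taken from this package's configure.ac:
#
#      configure.ac:  AC_ARG_ENABLE([upx], [  --enable-upx  compress binaries])
#                     AM_CONDITIONAL([USE_UPX], [test "x$enable_upx" = xyes])
#
#      Makefile.am:   if USE_UPX
#                     ... rules used only when the condition holds ...
#                     endif
#
#  At configure time the shell condition sets USE_UPX_TRUE/USE_UPX_FALSE to
#  either an empty string or `#', which keeps or comments out the guarded
#  lines in the generated Makefile.)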
# _AM_DEPENDENCIES(NAME) # ---------------------- # See how the compiler implements dependency checking. # NAME is "CC", "CXX", "GCJ", or "OBJC". # We try a few techniques and use that to set a single cache variable. # # We don't AC_REQUIRE the corresponding AC_PROG_CC since the latter was # modified to invoke _AM_DEPENDENCIES(CC); we would have a circular # dependency, and given that the user is not expected to run this macro, # just rely on AC_PROG_CC. AC_DEFUN([_AM_DEPENDENCIES], [AC_REQUIRE([AM_SET_DEPDIR])dnl AC_REQUIRE([AM_OUTPUT_DEPENDENCY_COMMANDS])dnl AC_REQUIRE([AM_MAKE_INCLUDE])dnl AC_REQUIRE([AM_DEP_TRACK])dnl ifelse([$1], CC, [depcc="$CC" am_compiler_list=], [$1], CXX, [depcc="$CXX" am_compiler_list=], [$1], OBJC, [depcc="$OBJC" am_compiler_list='gcc3 gcc'], [$1], UPC, [depcc="$UPC" am_compiler_list=], [$1], GCJ, [depcc="$GCJ" am_compiler_list='gcc3 gcc'], [depcc="$$1" am_compiler_list=]) AC_CACHE_CHECK([dependency style of $depcc], [am_cv_$1_dependencies_compiler_type], [if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then # We make a subdir and do the tests there. Otherwise we can end up # making bogus files that we don't know about and never remove. For # instance it was reported that on HP-UX the gcc test will end up # making a dummy file named `D' -- because `-MD' means `put the output # in D'. mkdir conftest.dir # Copy depcomp to subdir because otherwise we won't find it if we're # using a relative directory. cp "$am_depcomp" conftest.dir cd conftest.dir # We will build objects and dependencies in a subdirectory because # it helps to detect inapplicable dependency modes. For instance # both Tru64's cc and ICC support -MD to output dependencies as a # side effect of compilation, but ICC will put the dependencies in # the current directory while Tru64 will put them in the object # directory. mkdir sub am_cv_$1_dependencies_compiler_type=none if test "$am_compiler_list" = ""; then am_compiler_list=`sed -n ['s/^#*\([a-zA-Z0-9]*\))$/\1/p'] < ./depcomp` fi am__universal=false m4_case([$1], [CC], [case " $depcc " in #( *\ -arch\ *\ -arch\ *) am__universal=true ;; esac], [CXX], [case " $depcc " in #( *\ -arch\ *\ -arch\ *) am__universal=true ;; esac]) for depmode in $am_compiler_list; do # Setup a source with many dependencies, because some compilers # like to wrap large dependency lists on column 80 (with \), and # we should not choose a depcomp mode which is confused by this. # # We need to recreate these files for each test, as the compiler may # overwrite some of them when testing with obscure command lines. # This happens at least with the AIX C compiler. : > sub/conftest.c for i in 1 2 3 4 5 6; do echo '#include "conftst'$i'.h"' >> sub/conftest.c # Using `: > sub/conftst$i.h' creates only sub/conftst1.h with # Solaris 8's {/usr,}/bin/sh. touch sub/conftst$i.h done echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf # We check with `-c' and `-o' for the sake of the "dashmstdout" # mode. It turns out that the SunPro C++ compiler does not properly # handle `-M -o', and we need to detect this. Also, some Intel # versions had trouble with output in subdirs am__obj=sub/conftest.${OBJEXT-o} am__minus_obj="-o $am__obj" case $depmode in gcc) # This depmode causes a compiler race in universal mode. 
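# (Here "universal" means the Darwin fat-binary case detected above, where
#  the compiler is passed more than one -arch flag; an illustrative compile
#  line of that kind would be
#      cc -arch i386 -arch x86_64 -MD -c conftest.c
#  and the per-architecture sub-compilations can then clash over the single
#  dependency output file, which is presumably the race referred to, so this
#  depmode is skipped for universal builds just below.)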
test "$am__universal" = false || continue ;; nosideeffect) # after this tag, mechanisms are not by side-effect, so they'll # only be used when explicitly requested if test "x$enable_dependency_tracking" = xyes; then continue else break fi ;; msvisualcpp | msvcmsys) # This compiler won't grok `-c -o', but also, the minuso test has # not run yet. These depmodes are late enough in the game, and # so weak that their functioning should not be impacted. am__obj=conftest.${OBJEXT-o} am__minus_obj= ;; none) break ;; esac if depmode=$depmode \ source=sub/conftest.c object=$am__obj \ depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \ $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \ >/dev/null 2>conftest.err && grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 && grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 && grep $am__obj sub/conftest.Po > /dev/null 2>&1 && ${MAKE-make} -s -f confmf > /dev/null 2>&1; then # icc doesn't choke on unknown options, it will just issue warnings # or remarks (even with -Werror). So we grep stderr for any message # that says an option was ignored or not supported. # When given -MP, icc 7.0 and 7.1 complain thusly: # icc: Command line warning: ignoring option '-M'; no argument required # The diagnosis changed in icc 8.0: # icc: Command line remark: option '-MP' not supported if (grep 'ignoring option' conftest.err || grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else am_cv_$1_dependencies_compiler_type=$depmode break fi fi done cd .. rm -rf conftest.dir else am_cv_$1_dependencies_compiler_type=none fi ]) AC_SUBST([$1DEPMODE], [depmode=$am_cv_$1_dependencies_compiler_type]) AM_CONDITIONAL([am__fastdep$1], [ test "x$enable_dependency_tracking" != xno \ && test "$am_cv_$1_dependencies_compiler_type" = gcc3]) ]) # AM_SET_DEPDIR # ------------- # Choose a directory name for dependency files. # This macro is AC_REQUIREd in _AM_DEPENDENCIES AC_DEFUN([AM_SET_DEPDIR], [AC_REQUIRE([AM_SET_LEADING_DOT])dnl AC_SUBST([DEPDIR], ["${am__leading_dot}deps"])dnl ]) # AM_DEP_TRACK # ------------ AC_DEFUN([AM_DEP_TRACK], [AC_ARG_ENABLE(dependency-tracking, [ --disable-dependency-tracking speeds up one-time build --enable-dependency-tracking do not reject slow dependency extractors]) if test "x$enable_dependency_tracking" != xno; then am_depcomp="$ac_aux_dir/depcomp" AMDEPBACKSLASH='\' fi AM_CONDITIONAL([AMDEP], [test "x$enable_dependency_tracking" != xno]) AC_SUBST([AMDEPBACKSLASH])dnl _AM_SUBST_NOTMAKE([AMDEPBACKSLASH])dnl ]) # Generate code to set up dependency tracking. -*- Autoconf -*- # Copyright (C) 1999, 2000, 2001, 2002, 2003, 2004, 2005, 2008 # Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. #serial 5 # _AM_OUTPUT_DEPENDENCY_COMMANDS # ------------------------------ AC_DEFUN([_AM_OUTPUT_DEPENDENCY_COMMANDS], [{ # Autoconf 2.62 quotes --file arguments for eval, but not when files # are listed without --file. Let's play safe and only enable the eval # if we detect the quoting. case $CONFIG_FILES in *\'*) eval set x "$CONFIG_FILES" ;; *) set x $CONFIG_FILES ;; esac shift for mf do # Strip MF so we end up with the name of the file. mf=`echo "$mf" | sed -e 's/:.*$//'` # Check whether this is an Automake generated Makefile or not. # We used to match only the files named `Makefile.in', but # some people rename them; so instead we look at the file content. 
# Grep'ing the first line is not enough: some people post-process # each Makefile.in and add a new line on top of each file to say so. # Grep'ing the whole file is not good either: AIX grep has a line # limit of 2048, but all sed's we know have understand at least 4000. if sed -n 's,^#.*generated by automake.*,X,p' "$mf" | grep X >/dev/null 2>&1; then dirpart=`AS_DIRNAME("$mf")` else continue fi # Extract the definition of DEPDIR, am__include, and am__quote # from the Makefile without running `make'. DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"` test -z "$DEPDIR" && continue am__include=`sed -n 's/^am__include = //p' < "$mf"` test -z "am__include" && continue am__quote=`sed -n 's/^am__quote = //p' < "$mf"` # When using ansi2knr, U may be empty or an underscore; expand it U=`sed -n 's/^U = //p' < "$mf"` # Find all dependency output files, they are included files with # $(DEPDIR) in their names. We invoke sed twice because it is the # simplest approach to changing $(DEPDIR) to its actual value in the # expansion. for file in `sed -n " s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \ sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g' -e 's/\$U/'"$U"'/g'`; do # Make sure the directory exists. test -f "$dirpart/$file" && continue fdir=`AS_DIRNAME(["$file"])` AS_MKDIR_P([$dirpart/$fdir]) # echo "creating $dirpart/$file" echo '# dummy' > "$dirpart/$file" done done } ])# _AM_OUTPUT_DEPENDENCY_COMMANDS # AM_OUTPUT_DEPENDENCY_COMMANDS # ----------------------------- # This macro should only be invoked once -- use via AC_REQUIRE. # # This code is only required when automatic dependency tracking # is enabled. FIXME. This creates each `.P' file that we will # need in order to bootstrap the dependency handling code. AC_DEFUN([AM_OUTPUT_DEPENDENCY_COMMANDS], [AC_CONFIG_COMMANDS([depfiles], [test x"$AMDEP_TRUE" != x"" || _AM_OUTPUT_DEPENDENCY_COMMANDS], [AMDEP_TRUE="$AMDEP_TRUE" ac_aux_dir="$ac_aux_dir"]) ]) # Do all the work for Automake. -*- Autoconf -*- # Copyright (C) 1996, 1997, 1998, 1999, 2000, 2001, 2002, 2003, 2004, # 2005, 2006, 2008, 2009 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # serial 16 # This macro actually does too much. Some checks are only needed if # your package does certain things. But this isn't really a big deal. # AM_INIT_AUTOMAKE(PACKAGE, VERSION, [NO-DEFINE]) # AM_INIT_AUTOMAKE([OPTIONS]) # ----------------------------------------------- # The call with PACKAGE and VERSION arguments is the old style # call (pre autoconf-2.50), which is being phased out. PACKAGE # and VERSION should now be passed to AC_INIT and removed from # the call to AM_INIT_AUTOMAKE. # We support both call styles for the transition. After # the next Automake release, Autoconf can make the AC_INIT # arguments mandatory, and then we can depend on a new Autoconf # release and drop the old call support. AC_DEFUN([AM_INIT_AUTOMAKE], [AC_PREREQ([2.62])dnl dnl Autoconf wants to disallow AM_ names. We explicitly allow dnl the ones we care about. m4_pattern_allow([^AM_[A-Z]+FLAGS$])dnl AC_REQUIRE([AM_SET_CURRENT_AUTOMAKE_VERSION])dnl AC_REQUIRE([AC_PROG_INSTALL])dnl if test "`cd $srcdir && pwd`" != "`pwd`"; then # Use -I$(srcdir) only when $(srcdir) != ., so that make's output # is not polluted with repeated "-I." 
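# (A concrete illustration of the effect, taken from the generated
#  Makefile.in files later in this tree:
#      DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir)
#  in-tree builds end up with plain -I., while VPATH builds also get
#  -I$(srcdir) through the am__isrc substitution set just below.)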
AC_SUBST([am__isrc], [' -I$(srcdir)'])_AM_SUBST_NOTMAKE([am__isrc])dnl # test to see if srcdir already configured if test -f $srcdir/config.status; then AC_MSG_ERROR([source directory already configured; run "make distclean" there first]) fi fi # test whether we have cygpath if test -z "$CYGPATH_W"; then if (cygpath --version) >/dev/null 2>/dev/null; then CYGPATH_W='cygpath -w' else CYGPATH_W=echo fi fi AC_SUBST([CYGPATH_W]) # Define the identity of the package. dnl Distinguish between old-style and new-style calls. m4_ifval([$2], [m4_ifval([$3], [_AM_SET_OPTION([no-define])])dnl AC_SUBST([PACKAGE], [$1])dnl AC_SUBST([VERSION], [$2])], [_AM_SET_OPTIONS([$1])dnl dnl Diagnose old-style AC_INIT with new-style AM_AUTOMAKE_INIT. m4_if(m4_ifdef([AC_PACKAGE_NAME], 1)m4_ifdef([AC_PACKAGE_VERSION], 1), 11,, [m4_fatal([AC_INIT should be called with package and version arguments])])dnl AC_SUBST([PACKAGE], ['AC_PACKAGE_TARNAME'])dnl AC_SUBST([VERSION], ['AC_PACKAGE_VERSION'])])dnl _AM_IF_OPTION([no-define],, [AC_DEFINE_UNQUOTED(PACKAGE, "$PACKAGE", [Name of package]) AC_DEFINE_UNQUOTED(VERSION, "$VERSION", [Version number of package])])dnl # Some tools Automake needs. AC_REQUIRE([AM_SANITY_CHECK])dnl AC_REQUIRE([AC_ARG_PROGRAM])dnl AM_MISSING_PROG(ACLOCAL, aclocal-${am__api_version}) AM_MISSING_PROG(AUTOCONF, autoconf) AM_MISSING_PROG(AUTOMAKE, automake-${am__api_version}) AM_MISSING_PROG(AUTOHEADER, autoheader) AM_MISSING_PROG(MAKEINFO, makeinfo) AC_REQUIRE([AM_PROG_INSTALL_SH])dnl AC_REQUIRE([AM_PROG_INSTALL_STRIP])dnl AC_REQUIRE([AM_PROG_MKDIR_P])dnl # We need awk for the "check" target. The system "awk" is bad on # some platforms. AC_REQUIRE([AC_PROG_AWK])dnl AC_REQUIRE([AC_PROG_MAKE_SET])dnl AC_REQUIRE([AM_SET_LEADING_DOT])dnl _AM_IF_OPTION([tar-ustar], [_AM_PROG_TAR([ustar])], [_AM_IF_OPTION([tar-pax], [_AM_PROG_TAR([pax])], [_AM_PROG_TAR([v7])])]) _AM_IF_OPTION([no-dependencies],, [AC_PROVIDE_IFELSE([AC_PROG_CC], [_AM_DEPENDENCIES(CC)], [define([AC_PROG_CC], defn([AC_PROG_CC])[_AM_DEPENDENCIES(CC)])])dnl AC_PROVIDE_IFELSE([AC_PROG_CXX], [_AM_DEPENDENCIES(CXX)], [define([AC_PROG_CXX], defn([AC_PROG_CXX])[_AM_DEPENDENCIES(CXX)])])dnl AC_PROVIDE_IFELSE([AC_PROG_OBJC], [_AM_DEPENDENCIES(OBJC)], [define([AC_PROG_OBJC], defn([AC_PROG_OBJC])[_AM_DEPENDENCIES(OBJC)])])dnl ]) _AM_IF_OPTION([silent-rules], [AC_REQUIRE([AM_SILENT_RULES])])dnl dnl The `parallel-tests' driver may need to know about EXEEXT, so add the dnl `am__EXEEXT' conditional if _AM_COMPILER_EXEEXT was seen. This macro dnl is hooked onto _AC_COMPILER_EXEEXT early, see below. AC_CONFIG_COMMANDS_PRE(dnl [m4_provide_if([_AM_COMPILER_EXEEXT], [AM_CONDITIONAL([am__EXEEXT], [test -n "$EXEEXT"])])])dnl ]) dnl Hook into `_AC_COMPILER_EXEEXT' early to learn its expansion. Do not dnl add the conditional right here, as _AC_COMPILER_EXEEXT may be further dnl mangled by Autoconf and run in a shell conditional statement. m4_define([_AC_COMPILER_EXEEXT], m4_defn([_AC_COMPILER_EXEEXT])[m4_provide([_AM_COMPILER_EXEEXT])]) # When config.status generates a header, we must update the stamp-h file. # This file resides in the same directory as the config header # that is generated. The stamp files are numbered to have different names. # Autoconf calls _AC_AM_CONFIG_HEADER_HOOK (when defined) in the # loop where config.status creates the headers, so we can generate # our stamp files there. AC_DEFUN([_AC_AM_CONFIG_HEADER_HOOK], [# Compute $1's index in $config_headers. 
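# (For instance, with config.h as the first header declared in configure.ac,
#  this hook ends up running, in effect:
#      echo "timestamp for config.h" > ./stamp-h1
#  and a second configured header, if any, would get ./stamp-h2.)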
_am_arg=$1 _am_stamp_count=1 for _am_header in $config_headers :; do case $_am_header in $_am_arg | $_am_arg:* ) break ;; * ) _am_stamp_count=`expr $_am_stamp_count + 1` ;; esac done echo "timestamp for $_am_arg" >`AS_DIRNAME(["$_am_arg"])`/stamp-h[]$_am_stamp_count]) # Copyright (C) 2001, 2003, 2005, 2008 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_PROG_INSTALL_SH # ------------------ # Define $install_sh. AC_DEFUN([AM_PROG_INSTALL_SH], [AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl if test x"${install_sh}" != xset; then case $am_aux_dir in *\ * | *\ *) install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;; *) install_sh="\${SHELL} $am_aux_dir/install-sh" esac fi AC_SUBST(install_sh)]) # Copyright (C) 2003, 2005 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # serial 2 # Check whether the underlying file-system supports filenames # with a leading dot. For instance MS-DOS doesn't. AC_DEFUN([AM_SET_LEADING_DOT], [rm -rf .tst 2>/dev/null mkdir .tst 2>/dev/null if test -d .tst; then am__leading_dot=. else am__leading_dot=_ fi rmdir .tst 2>/dev/null AC_SUBST([am__leading_dot])]) # Check to see how 'make' treats includes. -*- Autoconf -*- # Copyright (C) 2001, 2002, 2003, 2005, 2009 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # serial 4 # AM_MAKE_INCLUDE() # ----------------- # Check to see how make treats includes. AC_DEFUN([AM_MAKE_INCLUDE], [am_make=${MAKE-make} cat > confinc << 'END' am__doit: @echo this is the am__doit target .PHONY: am__doit END # If we don't find an include directive, just comment out the code. AC_MSG_CHECKING([for style of include used by $am_make]) am__include="#" am__quote= _am_result=none # First try GNU make style include. echo "include confinc" > confmf # Ignore all kinds of additional output from `make'. case `$am_make -s -f confmf 2> /dev/null` in #( *the\ am__doit\ target*) am__include=include am__quote= _am_result=GNU ;; esac # Now try BSD make style include. if test "$am__include" = "#"; then echo '.include "confinc"' > confmf case `$am_make -s -f confmf 2> /dev/null` in #( *the\ am__doit\ target*) am__include=.include am__quote="\"" _am_result=BSD ;; esac fi AC_SUBST([am__include]) AC_SUBST([am__quote]) AC_MSG_RESULT([$_am_result]) rm -f confinc confmf ]) # Fake the existence of programs that GNU maintainers use. -*- Autoconf -*- # Copyright (C) 1997, 1999, 2000, 2001, 2003, 2004, 2005, 2008 # Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # serial 6 # AM_MISSING_PROG(NAME, PROGRAM) # ------------------------------ AC_DEFUN([AM_MISSING_PROG], [AC_REQUIRE([AM_MISSING_HAS_RUN]) $1=${$1-"${am_missing_run}$2"} AC_SUBST($1)]) # AM_MISSING_HAS_RUN # ------------------ # Define MISSING if not defined so far and test if it supports --run. # If it does, set am_missing_run to use it, otherwise, to nothing. 
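# A rough sketch of the net effect of AM_MISSING_PROG above, with a purely
# illustrative absolute path for the auxiliary directory:
#     ACLOCAL = ${SHELL} /path/to/dar-2.4.8/missing --run aclocal-1.11
#     AUTOCONF = ${SHELL} /path/to/dar-2.4.8/missing --run autoconf
# so that rebuilding from a tarball without the maintainer tools installed
# produces a helpful message from `missing' rather than a hard failure.
# When `missing' itself is too old to honour --run, am_missing_run is left
# empty below and the tools are invoked directly.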
AC_DEFUN([AM_MISSING_HAS_RUN], [AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl AC_REQUIRE_AUX_FILE([missing])dnl if test x"${MISSING+set}" != xset; then case $am_aux_dir in *\ * | *\ *) MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;; *) MISSING="\${SHELL} $am_aux_dir/missing" ;; esac fi # Use eval to expand $SHELL if eval "$MISSING --run true"; then am_missing_run="$MISSING --run " else am_missing_run= AC_MSG_WARN([`missing' script is too old or missing]) fi ]) # Copyright (C) 2003, 2004, 2005, 2006 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_PROG_MKDIR_P # --------------- # Check for `mkdir -p'. AC_DEFUN([AM_PROG_MKDIR_P], [AC_PREREQ([2.60])dnl AC_REQUIRE([AC_PROG_MKDIR_P])dnl dnl Automake 1.8 to 1.9.6 used to define mkdir_p. We now use MKDIR_P, dnl while keeping a definition of mkdir_p for backward compatibility. dnl @MKDIR_P@ is magic: AC_OUTPUT adjusts its value for each Makefile. dnl However we cannot define mkdir_p as $(MKDIR_P) for the sake of dnl Makefile.ins that do not define MKDIR_P, so we do our own dnl adjustment using top_builddir (which is defined more often than dnl MKDIR_P). AC_SUBST([mkdir_p], ["$MKDIR_P"])dnl case $mkdir_p in [[\\/$]]* | ?:[[\\/]]*) ;; */*) mkdir_p="\$(top_builddir)/$mkdir_p" ;; esac ]) # Helper functions for option handling. -*- Autoconf -*- # Copyright (C) 2001, 2002, 2003, 2005, 2008 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # serial 4 # _AM_MANGLE_OPTION(NAME) # ----------------------- AC_DEFUN([_AM_MANGLE_OPTION], [[_AM_OPTION_]m4_bpatsubst($1, [[^a-zA-Z0-9_]], [_])]) # _AM_SET_OPTION(NAME) # ------------------------------ # Set option NAME. Presently that only means defining a flag for this option. AC_DEFUN([_AM_SET_OPTION], [m4_define(_AM_MANGLE_OPTION([$1]), 1)]) # _AM_SET_OPTIONS(OPTIONS) # ---------------------------------- # OPTIONS is a space-separated list of Automake options. AC_DEFUN([_AM_SET_OPTIONS], [m4_foreach_w([_AM_Option], [$1], [_AM_SET_OPTION(_AM_Option)])]) # _AM_IF_OPTION(OPTION, IF-SET, [IF-NOT-SET]) # ------------------------------------------- # Execute IF-SET if OPTION is set, IF-NOT-SET otherwise. AC_DEFUN([_AM_IF_OPTION], [m4_ifset(_AM_MANGLE_OPTION([$1]), [$2], [$3])]) # Check to make sure that the build environment is sane. -*- Autoconf -*- # Copyright (C) 1996, 1997, 2000, 2001, 2003, 2005, 2008 # Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # serial 5 # AM_SANITY_CHECK # --------------- AC_DEFUN([AM_SANITY_CHECK], [AC_MSG_CHECKING([whether build environment is sane]) # Just in case sleep 1 echo timestamp > conftest.file # Reject unsafe characters in $srcdir or the absolute working directory # name. Accept space and tab only in the latter. am_lf=' ' case `pwd` in *[[\\\"\#\$\&\'\`$am_lf]]*) AC_MSG_ERROR([unsafe absolute working directory name]);; esac case $srcdir in *[[\\\"\#\$\&\'\`$am_lf\ \ ]]*) AC_MSG_ERROR([unsafe srcdir value: `$srcdir']);; esac # Do `set' in a subshell so we don't clobber the current shell's # arguments. 
Must try -L first in case configure is actually a # symlink; some systems play weird games with the mod time of symlinks # (eg FreeBSD returns the mod time of the symlink's containing # directory). if ( set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null` if test "$[*]" = "X"; then # -L didn't work. set X `ls -t "$srcdir/configure" conftest.file` fi rm -f conftest.file if test "$[*]" != "X $srcdir/configure conftest.file" \ && test "$[*]" != "X conftest.file $srcdir/configure"; then # If neither matched, then we have a broken ls. This can happen # if, for instance, CONFIG_SHELL is bash and it inherits a # broken ls alias from the environment. This has actually # happened. Such a system could not be considered "sane". AC_MSG_ERROR([ls -t appears to fail. Make sure there is not a broken alias in your environment]) fi test "$[2]" = conftest.file ) then # Ok. : else AC_MSG_ERROR([newly created file is older than distributed files! Check your system clock]) fi AC_MSG_RESULT(yes)]) # Copyright (C) 2001, 2003, 2005 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_PROG_INSTALL_STRIP # --------------------- # One issue with vendor `install' (even GNU) is that you can't # specify the program used to strip binaries. This is especially # annoying in cross-compiling environments, where the build's strip # is unlikely to handle the host's binaries. # Fortunately install-sh will honor a STRIPPROG variable, so we # always use install-sh in `make install-strip', and initialize # STRIPPROG with the value of the STRIP variable (set by the user). AC_DEFUN([AM_PROG_INSTALL_STRIP], [AC_REQUIRE([AM_PROG_INSTALL_SH])dnl # Installed binaries are usually stripped using `strip' when the user # run `make install-strip'. However `strip' might not be the right # tool to use in cross-compilation environments, therefore Automake # will honor the `STRIP' environment variable to overrule this program. dnl Don't test for $cross_compiling = yes, because it might be `maybe'. if test "$cross_compiling" != no; then AC_CHECK_TOOL([STRIP], [strip], :) fi INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s" AC_SUBST([INSTALL_STRIP_PROGRAM])]) # Copyright (C) 2006, 2008 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # serial 2 # _AM_SUBST_NOTMAKE(VARIABLE) # --------------------------- # Prevent Automake from outputting VARIABLE = @VARIABLE@ in Makefile.in. # This macro is traced by Automake. AC_DEFUN([_AM_SUBST_NOTMAKE]) # AM_SUBST_NOTMAKE(VARIABLE) # --------------------------- # Public sister of _AM_SUBST_NOTMAKE. AC_DEFUN([AM_SUBST_NOTMAKE], [_AM_SUBST_NOTMAKE($@)]) # Check how to create a tarball. -*- Autoconf -*- # Copyright (C) 2004, 2005 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # serial 2 # _AM_PROG_TAR(FORMAT) # -------------------- # Check how to create a tarball in format FORMAT. # FORMAT should be one of `v7', `ustar', or `pax'. # # Substitute a variable $(am__tar) that is a command # writing to stdout a FORMAT-tarball containing the directory # $tardir. 
# tardir=directory && $(am__tar) > result.tar # # Substitute a variable $(am__untar) that extract such # a tarball read from stdin. # $(am__untar) < result.tar AC_DEFUN([_AM_PROG_TAR], [# Always define AMTAR for backward compatibility. AM_MISSING_PROG([AMTAR], [tar]) m4_if([$1], [v7], [am__tar='${AMTAR} chof - "$$tardir"'; am__untar='${AMTAR} xf -'], [m4_case([$1], [ustar],, [pax],, [m4_fatal([Unknown tar format])]) AC_MSG_CHECKING([how to create a $1 tar archive]) # Loop over all known methods to create a tar archive until one works. _am_tools='gnutar m4_if([$1], [ustar], [plaintar]) pax cpio none' _am_tools=${am_cv_prog_tar_$1-$_am_tools} # Do not fold the above two line into one, because Tru64 sh and # Solaris sh will not grok spaces in the rhs of `-'. for _am_tool in $_am_tools do case $_am_tool in gnutar) for _am_tar in tar gnutar gtar; do AM_RUN_LOG([$_am_tar --version]) && break done am__tar="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$$tardir"' am__tar_="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$tardir"' am__untar="$_am_tar -xf -" ;; plaintar) # Must skip GNU tar: if it does not support --format= it doesn't create # ustar tarball either. (tar --version) >/dev/null 2>&1 && continue am__tar='tar chf - "$$tardir"' am__tar_='tar chf - "$tardir"' am__untar='tar xf -' ;; pax) am__tar='pax -L -x $1 -w "$$tardir"' am__tar_='pax -L -x $1 -w "$tardir"' am__untar='pax -r' ;; cpio) am__tar='find "$$tardir" -print | cpio -o -H $1 -L' am__tar_='find "$tardir" -print | cpio -o -H $1 -L' am__untar='cpio -i -H $1 -d' ;; none) am__tar=false am__tar_=false am__untar=false ;; esac # If the value was cached, stop now. We just wanted to have am__tar # and am__untar set. test -n "${am_cv_prog_tar_$1}" && break # tar/untar a dummy directory, and stop if the command works rm -rf conftest.dir mkdir conftest.dir echo GrepMe > conftest.dir/file AM_RUN_LOG([tardir=conftest.dir && eval $am__tar_ >conftest.tar]) rm -rf conftest.dir if test -s conftest.tar; then AM_RUN_LOG([$am__untar /dev/null 2>&1 && break fi done rm -rf conftest.dir AC_CACHE_VAL([am_cv_prog_tar_$1], [am_cv_prog_tar_$1=$_am_tool]) AC_MSG_RESULT([$am_cv_prog_tar_$1])]) AC_SUBST([am__tar]) AC_SUBST([am__untar]) ]) # _AM_PROG_TAR m4_include([m4/gettext.m4]) m4_include([m4/iconv.m4]) m4_include([m4/lib-ld.m4]) m4_include([m4/lib-link.m4]) m4_include([m4/lib-prefix.m4]) m4_include([m4/nls.m4]) m4_include([m4/po.m4]) m4_include([m4/progtest.m4]) dar-2.4.8/src/0000755000175000017520000000000012023053205010055 500000000000000dar-2.4.8/src/Makefile.in0000644000175000017520000004245412023053152012054 00000000000000# Makefile.in generated by automake 1.11.1 from Makefile.am. # @configure_input@ # Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, # 2003, 2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation, # Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. 
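# The @...@ tokens throughout this file are placeholders that config.status
# fills in once the package has been configured; a minimal sketch of the
# usual sequence, run from the top of the build tree:
#     ./configure
#     ./config.status src/Makefile
# which rewrites, for example, `CXX = @CXX@' below into the C++ compiler
# selected by configure.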
@SET_MAKE@ VPATH = @srcdir@ pkgdatadir = $(datadir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ pkglibdir = $(libdir)/@PACKAGE@ pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = src DIST_COMMON = README $(dist_noinst_DATA) $(noinst_HEADERS) \ $(srcdir)/Makefile.am $(srcdir)/Makefile.in ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/iconv.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/nls.m4 $(top_srcdir)/m4/po.m4 \ $(top_srcdir)/m4/progtest.m4 $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = SOURCES = DIST_SOURCES = RECURSIVE_TARGETS = all-recursive check-recursive dvi-recursive \ html-recursive info-recursive install-data-recursive \ install-dvi-recursive install-exec-recursive \ install-html-recursive install-info-recursive \ install-pdf-recursive install-ps-recursive install-recursive \ installcheck-recursive installdirs-recursive pdf-recursive \ ps-recursive uninstall-recursive DATA = $(dist_noinst_DATA) HEADERS = $(noinst_HEADERS) RECURSIVE_CLEAN_TARGETS = mostlyclean-recursive clean-recursive \ distclean-recursive maintainer-clean-recursive AM_RECURSIVE_TARGETS = $(RECURSIVE_TARGETS:-recursive=) \ $(RECURSIVE_CLEAN_TARGETS:-recursive=) tags TAGS ctags CTAGS \ distdir ETAGS = etags CTAGS = ctags DIST_SUBDIRS = libdar dar_suite check examples testing DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) am__relativize = \ dir0=`pwd`; \ sed_first='s,^\([^/]*\)/.*$$,\1,'; \ sed_rest='s,^[^/]*/*,,'; \ sed_last='s,^.*/\([^/]*\)$$,\1,'; \ sed_butlast='s,/*[^/]*$$,,'; \ while test -n "$$dir1"; do \ first=`echo "$$dir1" | sed -e "$$sed_first"`; \ if test "$$first" != "."; then \ if test "$$first" = ".."; then \ dir2=`echo "$$dir0" | sed -e "$$sed_last"`/"$$dir2"; \ dir0=`echo "$$dir0" | sed -e "$$sed_butlast"`; \ else \ first2=`echo "$$dir2" | sed -e "$$sed_first"`; \ if test "$$first2" = "$$first"; then \ dir2=`echo "$$dir2" | sed -e "$$sed_rest"`; \ else \ dir2="../$$dir2"; \ fi; \ dir0="$$dir0"/"$$first"; \ fi; \ fi; \ dir1=`echo "$$dir1" | sed -e "$$sed_rest"`; \ done; \ reldir="$$dir2" ACLOCAL = @ACLOCAL@ AMTAR = @AMTAR@ AR = @AR@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DOXYGEN_PROG = @DOXYGEN_PROG@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GREP = @GREP@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = 
@INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LD = @LD@ LDFLAGS = @LDFLAGS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ NM = @NM@ NMEDIT = @NMEDIT@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ POSUB = @POSUB@ RANLIB = @RANLIB@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ STRIP = @STRIP@ UPX_PROG = @UPX_PROG@ USE_NLS = @USE_NLS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ bindir = @bindir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ datadir = @datadir@ datarootdir = @datarootdir@ docdir = @docdir@ doxygen = @doxygen@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ groff = @groff@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ localedir = @localedir@ localstatedir = @localstatedir@ lt_ECHO = @lt_ECHO@ mandir = @mandir@ mkdir_p = @mkdir_p@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ prefix = @prefix@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp = @tmp@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ upx = @upx@ @MAKE_ALL_DIR_TRUE@OPT_SUBDIR = examples testing SUBDIRS = libdar dar_suite check $(OPT_SUBDIR) noinst_HEADERS = my_config.h dist_noinst_DATA = README gettext.h all: all-recursive .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --gnu src/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --gnu src/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs # This directory's subdirectories are mostly independent; you can cd # into them and run `make' without going through this Makefile. # To change the values of `make' variables: instead of editing Makefiles, # (1) if the variable is set in `config.status', edit `config.status' # (which will cause the Makefiles to be regenerated when you run `make'); # (2) otherwise, pass the desired values on the `make' command line. $(RECURSIVE_TARGETS): @fail= failcom='exit 1'; \ for f in x $$MAKEFLAGS; do \ case $$f in \ *=* | --[!k]*);; \ *k*) failcom='fail=yes';; \ esac; \ done; \ dot_seen=no; \ target=`echo $@ | sed s/-recursive//`; \ list='$(SUBDIRS)'; for subdir in $$list; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ dot_seen=yes; \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done; \ if test "$$dot_seen" = "no"; then \ $(MAKE) $(AM_MAKEFLAGS) "$$target-am" || exit 1; \ fi; test -z "$$fail" $(RECURSIVE_CLEAN_TARGETS): @fail= failcom='exit 1'; \ for f in x $$MAKEFLAGS; do \ case $$f in \ *=* | --[!k]*);; \ *k*) failcom='fail=yes';; \ esac; \ done; \ dot_seen=no; \ case "$@" in \ distclean-* | maintainer-clean-*) list='$(DIST_SUBDIRS)' ;; \ *) list='$(SUBDIRS)' ;; \ esac; \ rev=''; for subdir in $$list; do \ if test "$$subdir" = "."; then :; else \ rev="$$subdir $$rev"; \ fi; \ done; \ rev="$$rev ."; \ target=`echo $@ | sed s/-recursive//`; \ for subdir in $$rev; do \ echo "Making $$target in $$subdir"; \ if test "$$subdir" = "."; then \ local_target="$$target-am"; \ else \ local_target="$$target"; \ fi; \ ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) $$local_target) \ || eval $$failcom; \ done && test -z "$$fail" tags-recursive: list='$(SUBDIRS)'; for subdir in $$list; do \ test "$$subdir" = . || ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) tags); \ done ctags-recursive: list='$(SUBDIRS)'; for subdir in $$list; do \ test "$$subdir" = . || ($(am__cd) $$subdir && $(MAKE) $(AM_MAKEFLAGS) ctags); \ done ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES) list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ mkid -fID $$unique tags: TAGS TAGS: tags-recursive $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) set x; \ here=`pwd`; \ if ($(ETAGS) --etags-include --version) >/dev/null 2>&1; then \ include_option=--etags-include; \ empty_fix=.; \ else \ include_option=--include; \ empty_fix=; \ fi; \ list='$(SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test ! 
-f $$subdir/TAGS || \ set "$$@" "$$include_option=$$here/$$subdir/TAGS"; \ fi; \ done; \ list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: CTAGS CTAGS: ctags-recursive $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ test -d "$(distdir)/$$subdir" \ || $(MKDIR_P) "$(distdir)/$$subdir" \ || exit 1; \ fi; \ done @list='$(DIST_SUBDIRS)'; for subdir in $$list; do \ if test "$$subdir" = .; then :; else \ dir1=$$subdir; dir2="$(distdir)/$$subdir"; \ $(am__relativize); \ new_distdir=$$reldir; \ dir1=$$subdir; dir2="$(top_distdir)"; \ $(am__relativize); \ new_top_distdir=$$reldir; \ echo " (cd $$subdir && $(MAKE) $(AM_MAKEFLAGS) top_distdir="$$new_top_distdir" distdir="$$new_distdir" \\"; \ echo " am__remove_distdir=: am__skip_length_check=: am__skip_mode_fix=: distdir)"; \ ($(am__cd) $$subdir && \ $(MAKE) $(AM_MAKEFLAGS) \ top_distdir="$$new_top_distdir" \ distdir="$$new_distdir" \ am__remove_distdir=: \ am__skip_length_check=: \ am__skip_mode_fix=: \ distdir) \ || exit 1; \ fi; \ done check-am: all-am check: check-recursive all-am: Makefile $(DATA) $(HEADERS) installdirs: installdirs-recursive installdirs-am: install: install-recursive install-exec: install-exec-recursive install-data: install-data-recursive uninstall: uninstall-recursive install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-recursive install-strip: $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ `test -z '$(STRIP)' || \ echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
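# As the comment above the $(RECURSIVE_TARGETS) rule notes, the
# subdirectories are mostly independent; a short usage sketch (the target
# names are the standard automake-generated ones in this file):
#     make all             # walks libdar, dar_suite, check and, if enabled,
#                          # examples and testing
#     make clean           # same directories, visited in reverse order
#     (cd libdar && make)  # or build a single subdirectory directly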
clean: clean-recursive clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-recursive -rm -f Makefile distclean-am: clean-am distclean-generic distclean-tags dvi: dvi-recursive dvi-am: html: html-recursive html-am: info: info-recursive info-am: install-data-am: install-dvi: install-dvi-recursive install-dvi-am: install-exec-am: install-html: install-html-recursive install-html-am: install-info: install-info-recursive install-info-am: install-man: install-pdf: install-pdf-recursive install-pdf-am: install-ps: install-ps-recursive install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-recursive -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-recursive mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-recursive pdf-am: ps: ps-recursive ps-am: uninstall-am: .MAKE: $(RECURSIVE_CLEAN_TARGETS) $(RECURSIVE_TARGETS) ctags-recursive \ install-am install-strip tags-recursive .PHONY: $(RECURSIVE_CLEAN_TARGETS) $(RECURSIVE_TARGETS) CTAGS GTAGS \ all all-am check check-am clean clean-generic clean-libtool \ ctags ctags-recursive distclean distclean-generic \ distclean-libtool distclean-tags distdir dvi dvi-am html \ html-am info info-am install install-am install-data \ install-data-am install-dvi install-dvi-am install-exec \ install-exec-am install-html install-html-am install-info \ install-info-am install-man install-pdf install-pdf-am \ install-ps install-ps-am install-strip installcheck \ installcheck-am installdirs installdirs-am maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-generic \ mostlyclean-libtool pdf pdf-am ps ps-am tags tags-recursive \ uninstall uninstall-am # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: dar-2.4.8/src/examples/0000755000175000017520000000000012023053205011673 500000000000000dar-2.4.8/src/examples/Makefile.in0000644000175000017520000006123612023053152013671 00000000000000# Makefile.in generated by automake 1.11.1 from Makefile.am. # @configure_input@ # Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, # 2003, 2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation, # Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. 
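# A brief orientation sketch for this generated file: it builds the
# non-installed example programs factoriel, prime and formula and links
# them against the libdar variant selected at configure time.  MYLIB below
# expands to dar, dar32 or dar64 according to the BUILD_MODE32/BUILD_MODE64
# conditionals, so LDADD effectively becomes, for instance,
#     LDADD = -L../libdar -ldar64
# when BUILD_MODE64 is enabled; the examples therefore assume libdar has
# already been built in ../libdar.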
@SET_MAKE@ VPATH = @srcdir@ pkgdatadir = $(datadir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ pkglibdir = $(libdir)/@PACKAGE@ pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ noinst_PROGRAMS = factoriel$(EXEEXT) prime$(EXEEXT) formula$(EXEEXT) subdir = src/examples DIST_COMMON = README $(dist_noinst_DATA) $(srcdir)/Makefile.am \ $(srcdir)/Makefile.in ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/iconv.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/nls.m4 $(top_srcdir)/m4/po.m4 \ $(top_srcdir)/m4/progtest.m4 $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = PROGRAMS = $(noinst_PROGRAMS) am_factoriel_OBJECTS = factoriel.$(OBJEXT) shell_interaction.$(OBJEXT) \ dar_suite.$(OBJEXT) factoriel_OBJECTS = $(am_factoriel_OBJECTS) factoriel_LDADD = $(LDADD) am_formula_OBJECTS = formula.$(OBJEXT) shell_interaction.$(OBJEXT) \ dar_suite.$(OBJEXT) crit_action_cmd_line.$(OBJEXT) \ line_tools.$(OBJEXT) formula_OBJECTS = $(am_formula_OBJECTS) formula_LDADD = $(LDADD) am_prime_OBJECTS = prime.$(OBJEXT) shell_interaction.$(OBJEXT) \ dar_suite.$(OBJEXT) prime_OBJECTS = $(am_prime_OBJECTS) prime_LDADD = $(LDADD) DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir) depcomp = $(SHELL) $(top_srcdir)/depcomp am__depfiles_maybe = depfiles am__mv = mv -f CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) LTCXXCOMPILE = $(LIBTOOL) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \ --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) CXXLD = $(CXX) CXXLINK = $(LIBTOOL) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \ --mode=link $(CXXLD) $(AM_CXXFLAGS) $(CXXFLAGS) $(AM_LDFLAGS) \ $(LDFLAGS) -o $@ SOURCES = $(factoriel_SOURCES) $(formula_SOURCES) $(prime_SOURCES) DIST_SOURCES = $(factoriel_SOURCES) $(formula_SOURCES) \ $(prime_SOURCES) DATA = $(dist_noinst_DATA) ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) ACLOCAL = @ACLOCAL@ AMTAR = @AMTAR@ AR = @AR@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DOXYGEN_PROG = @DOXYGEN_PROG@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GREP = @GREP@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = 
@INTL_MACOSX_LIBS@ LD = @LD@ LDFLAGS = @LDFLAGS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ NM = @NM@ NMEDIT = @NMEDIT@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ POSUB = @POSUB@ RANLIB = @RANLIB@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ STRIP = @STRIP@ UPX_PROG = @UPX_PROG@ USE_NLS = @USE_NLS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ bindir = @bindir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ datadir = @datadir@ datarootdir = @datarootdir@ docdir = @docdir@ doxygen = @doxygen@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ groff = @groff@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ localedir = @localedir@ localstatedir = @localstatedir@ lt_ECHO = @lt_ECHO@ mandir = @mandir@ mkdir_p = @mkdir_p@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ prefix = @prefix@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp = @tmp@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ upx = @upx@ @BUILD_MODE32_FALSE@@BUILD_MODE64_FALSE@MYLIB = dar @BUILD_MODE32_FALSE@@BUILD_MODE64_TRUE@MYLIB = dar64 @BUILD_MODE32_TRUE@MYLIB = dar32 @BUILD_MODE32_FALSE@@BUILD_MODE64_FALSE@AM_CPPFLAGS = -I../libdar -DDAR_LOCALEDIR=\"$(localedir)\" @BUILD_MODE32_FALSE@@BUILD_MODE64_TRUE@AM_CPPFLAGS = -DLIBDAR_MODE=64 -I../libdar -DDAR_LOCALEDIR=\"$(localedir)\" @BUILD_MODE32_TRUE@AM_CPPFLAGS = -DLIBDAR_MODE=32 -I../libdar -DDAR_LOCALEDIR=\"$(localedir)\" dist_noinst_DATA = README LDADD = -L../libdar -l$(MYLIB) factoriel_SOURCES = factoriel.cpp ../dar_suite/shell_interaction.cpp ../dar_suite/dar_suite.cpp factoriel_DEPENDENCIES = ../libdar/lib$(MYLIB).la ../dar_suite/shell_interaction.cpp ../dar_suite/dar_suite.cpp prime_SOURCES = prime.cpp ../dar_suite/shell_interaction.cpp ../dar_suite/dar_suite.cpp prime_DEPENDENCIES = ../libdar/lib$(MYLIB).la ../dar_suite/shell_interaction.cpp ../dar_suite/dar_suite.cpp formula_SOURCES = formula.cpp ../dar_suite/shell_interaction.cpp ../dar_suite/dar_suite.cpp ../dar_suite/crit_action_cmd_line.cpp ../dar_suite/line_tools.cpp formula_DEPENDENCIES = ../libdar/lib$(MYLIB).la ../dar_suite/line_tools.cpp ../dar_suite/shell_interaction.cpp 
../dar_suite/crit_action_cmd_line.cpp ../dar_suite/dar_suite.cpp all: all-am .SUFFIXES: .SUFFIXES: .cpp .lo .o .obj $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --gnu src/examples/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --gnu src/examples/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): clean-noinstPROGRAMS: @list='$(noinst_PROGRAMS)'; test -n "$$list" || exit 0; \ echo " rm -f" $$list; \ rm -f $$list || exit $$?; \ test -n "$(EXEEXT)" || exit 0; \ list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \ echo " rm -f" $$list; \ rm -f $$list factoriel$(EXEEXT): $(factoriel_OBJECTS) $(factoriel_DEPENDENCIES) @rm -f factoriel$(EXEEXT) $(CXXLINK) $(factoriel_OBJECTS) $(factoriel_LDADD) $(LIBS) formula$(EXEEXT): $(formula_OBJECTS) $(formula_DEPENDENCIES) @rm -f formula$(EXEEXT) $(CXXLINK) $(formula_OBJECTS) $(formula_LDADD) $(LIBS) prime$(EXEEXT): $(prime_OBJECTS) $(prime_DEPENDENCIES) @rm -f prime$(EXEEXT) $(CXXLINK) $(prime_OBJECTS) $(prime_LDADD) $(LIBS) mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/crit_action_cmd_line.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/dar_suite.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/factoriel.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/formula.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/line_tools.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/prime.Po@am__quote@ @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/shell_interaction.Po@am__quote@ .cpp.o: @am__fastdepCXX_TRUE@ $(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(CXXCOMPILE) -c -o $@ $< .cpp.obj: @am__fastdepCXX_TRUE@ $(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'` @am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'` .cpp.lo: @am__fastdepCXX_TRUE@ $(LTCXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo 
@AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(LTCXXCOMPILE) -c -o $@ $< shell_interaction.o: ../dar_suite/shell_interaction.cpp @am__fastdepCXX_TRUE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT shell_interaction.o -MD -MP -MF $(DEPDIR)/shell_interaction.Tpo -c -o shell_interaction.o `test -f '../dar_suite/shell_interaction.cpp' || echo '$(srcdir)/'`../dar_suite/shell_interaction.cpp @am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/shell_interaction.Tpo $(DEPDIR)/shell_interaction.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='../dar_suite/shell_interaction.cpp' object='shell_interaction.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o shell_interaction.o `test -f '../dar_suite/shell_interaction.cpp' || echo '$(srcdir)/'`../dar_suite/shell_interaction.cpp shell_interaction.obj: ../dar_suite/shell_interaction.cpp @am__fastdepCXX_TRUE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT shell_interaction.obj -MD -MP -MF $(DEPDIR)/shell_interaction.Tpo -c -o shell_interaction.obj `if test -f '../dar_suite/shell_interaction.cpp'; then $(CYGPATH_W) '../dar_suite/shell_interaction.cpp'; else $(CYGPATH_W) '$(srcdir)/../dar_suite/shell_interaction.cpp'; fi` @am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/shell_interaction.Tpo $(DEPDIR)/shell_interaction.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='../dar_suite/shell_interaction.cpp' object='shell_interaction.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o shell_interaction.obj `if test -f '../dar_suite/shell_interaction.cpp'; then $(CYGPATH_W) '../dar_suite/shell_interaction.cpp'; else $(CYGPATH_W) '$(srcdir)/../dar_suite/shell_interaction.cpp'; fi` dar_suite.o: ../dar_suite/dar_suite.cpp @am__fastdepCXX_TRUE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT dar_suite.o -MD -MP -MF $(DEPDIR)/dar_suite.Tpo -c -o dar_suite.o `test -f '../dar_suite/dar_suite.cpp' || echo '$(srcdir)/'`../dar_suite/dar_suite.cpp @am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/dar_suite.Tpo $(DEPDIR)/dar_suite.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='../dar_suite/dar_suite.cpp' object='dar_suite.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o dar_suite.o `test -f '../dar_suite/dar_suite.cpp' || echo '$(srcdir)/'`../dar_suite/dar_suite.cpp dar_suite.obj: ../dar_suite/dar_suite.cpp @am__fastdepCXX_TRUE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT dar_suite.obj -MD -MP -MF $(DEPDIR)/dar_suite.Tpo -c -o dar_suite.obj `if test -f '../dar_suite/dar_suite.cpp'; then $(CYGPATH_W) '../dar_suite/dar_suite.cpp'; else $(CYGPATH_W) '$(srcdir)/../dar_suite/dar_suite.cpp'; fi` @am__fastdepCXX_TRUE@ 
$(am__mv) $(DEPDIR)/dar_suite.Tpo $(DEPDIR)/dar_suite.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='../dar_suite/dar_suite.cpp' object='dar_suite.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o dar_suite.obj `if test -f '../dar_suite/dar_suite.cpp'; then $(CYGPATH_W) '../dar_suite/dar_suite.cpp'; else $(CYGPATH_W) '$(srcdir)/../dar_suite/dar_suite.cpp'; fi` crit_action_cmd_line.o: ../dar_suite/crit_action_cmd_line.cpp @am__fastdepCXX_TRUE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT crit_action_cmd_line.o -MD -MP -MF $(DEPDIR)/crit_action_cmd_line.Tpo -c -o crit_action_cmd_line.o `test -f '../dar_suite/crit_action_cmd_line.cpp' || echo '$(srcdir)/'`../dar_suite/crit_action_cmd_line.cpp @am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/crit_action_cmd_line.Tpo $(DEPDIR)/crit_action_cmd_line.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='../dar_suite/crit_action_cmd_line.cpp' object='crit_action_cmd_line.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o crit_action_cmd_line.o `test -f '../dar_suite/crit_action_cmd_line.cpp' || echo '$(srcdir)/'`../dar_suite/crit_action_cmd_line.cpp crit_action_cmd_line.obj: ../dar_suite/crit_action_cmd_line.cpp @am__fastdepCXX_TRUE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT crit_action_cmd_line.obj -MD -MP -MF $(DEPDIR)/crit_action_cmd_line.Tpo -c -o crit_action_cmd_line.obj `if test -f '../dar_suite/crit_action_cmd_line.cpp'; then $(CYGPATH_W) '../dar_suite/crit_action_cmd_line.cpp'; else $(CYGPATH_W) '$(srcdir)/../dar_suite/crit_action_cmd_line.cpp'; fi` @am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/crit_action_cmd_line.Tpo $(DEPDIR)/crit_action_cmd_line.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='../dar_suite/crit_action_cmd_line.cpp' object='crit_action_cmd_line.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o crit_action_cmd_line.obj `if test -f '../dar_suite/crit_action_cmd_line.cpp'; then $(CYGPATH_W) '../dar_suite/crit_action_cmd_line.cpp'; else $(CYGPATH_W) '$(srcdir)/../dar_suite/crit_action_cmd_line.cpp'; fi` line_tools.o: ../dar_suite/line_tools.cpp @am__fastdepCXX_TRUE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT line_tools.o -MD -MP -MF $(DEPDIR)/line_tools.Tpo -c -o line_tools.o `test -f '../dar_suite/line_tools.cpp' || echo '$(srcdir)/'`../dar_suite/line_tools.cpp @am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/line_tools.Tpo $(DEPDIR)/line_tools.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='../dar_suite/line_tools.cpp' object='line_tools.o' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o line_tools.o `test -f '../dar_suite/line_tools.cpp' || echo 
'$(srcdir)/'`../dar_suite/line_tools.cpp line_tools.obj: ../dar_suite/line_tools.cpp @am__fastdepCXX_TRUE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -MT line_tools.obj -MD -MP -MF $(DEPDIR)/line_tools.Tpo -c -o line_tools.obj `if test -f '../dar_suite/line_tools.cpp'; then $(CYGPATH_W) '../dar_suite/line_tools.cpp'; else $(CYGPATH_W) '$(srcdir)/../dar_suite/line_tools.cpp'; fi` @am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/line_tools.Tpo $(DEPDIR)/line_tools.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='../dar_suite/line_tools.cpp' object='line_tools.obj' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) -c -o line_tools.obj `if test -f '../dar_suite/line_tools.cpp'; then $(CYGPATH_W) '../dar_suite/line_tools.cpp'; else $(CYGPATH_W) '$(srcdir)/../dar_suite/line_tools.cpp'; fi` mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES) list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ mkid -fID $$unique tags: TAGS TAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) set x; \ here=`pwd`; \ list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: CTAGS CTAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(PROGRAMS) $(DATA) installdirs: install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ `test -z '$(STRIP)' || \ echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." clean: clean-am clean-am: clean-generic clean-libtool clean-noinstPROGRAMS \ mostlyclean-am distclean: distclean-am -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: .MAKE: install-am install-strip .PHONY: CTAGS GTAGS all all-am check check-am clean clean-generic \ clean-libtool clean-noinstPROGRAMS ctags distclean \ distclean-compile distclean-generic distclean-libtool \ distclean-tags distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am install-man \ install-pdf install-pdf-am install-ps install-ps-am \ install-strip installcheck installcheck-am installdirs \ maintainer-clean maintainer-clean-generic mostlyclean \ mostlyclean-compile mostlyclean-generic mostlyclean-libtool \ pdf pdf-am ps ps-am tags uninstall uninstall-am # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT: dar-2.4.8/src/examples/Makefile.am0000644000175000017430000000223612010477202013655 00000000000000localedir = @localedir@ if BUILD_MODE32 MYLIB=dar32 AM_CPPFLAGS=-DLIBDAR_MODE=32 -I../libdar -DDAR_LOCALEDIR=\"$(localedir)\" else if BUILD_MODE64 MYLIB=dar64 AM_CPPFLAGS=-DLIBDAR_MODE=64 -I../libdar -DDAR_LOCALEDIR=\"$(localedir)\" else MYLIB=dar AM_CPPFLAGS=-I../libdar -DDAR_LOCALEDIR=\"$(localedir)\" endif endif noinst_PROGRAMS = factoriel prime formula dist_noinst_DATA = README LDADD = -L../libdar -l$(MYLIB) factoriel_SOURCES = factoriel.cpp ../dar_suite/shell_interaction.cpp ../dar_suite/dar_suite.cpp factoriel_DEPENDENCIES = ../libdar/lib$(MYLIB).la ../dar_suite/shell_interaction.cpp ../dar_suite/dar_suite.cpp prime_SOURCES = prime.cpp ../dar_suite/shell_interaction.cpp ../dar_suite/dar_suite.cpp prime_DEPENDENCIES = ../libdar/lib$(MYLIB).la ../dar_suite/shell_interaction.cpp ../dar_suite/dar_suite.cpp formula_SOURCES = formula.cpp ../dar_suite/shell_interaction.cpp ../dar_suite/dar_suite.cpp ../dar_suite/crit_action_cmd_line.cpp ../dar_suite/line_tools.cpp formula_DEPENDENCIES = ../libdar/lib$(MYLIB).la ../dar_suite/line_tools.cpp ../dar_suite/shell_interaction.cpp ../dar_suite/crit_action_cmd_line.cpp ../dar_suite/dar_suite.cpp dar-2.4.8/src/examples/formula.cpp0000644000175000017430000000652412010477202013776 00000000000000/*********************************************************************/ // dar - disk archive - a backup/restoration program // Copyright (C) 2002-2052 Denis Corbin // // This program is free software; you can redistribute it and/or // modify it under the terms of the GNU General Public License // as published by the Free Software Foundation; either version 2 // of the License, or (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with this program; if not, write to the Free Software // Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
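// The BUILD_MODE32 / BUILD_MODE64 conditionals in the Makefile.am above decide
// whether these example programs link against libdar, libdar32 or libdar64 and
// which LIBDAR_MODE value they are compiled with.  A minimal sketch of selecting
// the mode at configure time; the option name is the one mentioned in the README
// further below, while the values 32 and 64 are inferred from the conditionals:
//
//     ./configure                      # default build: arbitrary-precision (infinint) libdar
//     ./configure --enable-mode=32     # link the examples against libdar32
//     ./configure --enable-mode=64     # link the examples against libdar64
//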
// // to contact the author : http://dar.linux.free.fr/email.html /*********************************************************************/ #include "../my_config.h" #include #include #include "infinint.hpp" #include "deci.hpp" #include "integers.hpp" #include "erreurs.hpp" #include "tools.hpp" #include "../dar_suite/shell_interaction.hpp" #include "../dar_suite/dar_suite.hpp" #include "../dar_suite/crit_action_cmd_line.hpp" #include "../dar_suite/line_tools.hpp" using namespace libdar; using namespace std; int little_main(user_interaction & ui, S_I argc, char * const argv[], const char **env); static infinint calculus(const string & formula); // recusive call int main(S_I argc, char * const argv[], const char **env) { return dar_suite_global(argc, argv, env, &little_main); } int little_main(user_interaction & ui, S_I argc, char * const argv[], const char **env) { if(argc != 2) { cout << "usage: " << argv[0] << " \"arithmetical formula with only positive intergers, the following binary operators +, -, /, *, %, &, ^, | and parenthesis\"" << endl; return EXIT_SYNTAX; } else { string formula = crit_action_canonize_string(argv[1]); deci tmp = calculus(formula); cout << tmp.human() << endl; return EXIT_OK; } } static infinint calculus(const string & formula) { string::const_iterator it; string s1, s2; const string operators = "+-/*%&^|"; string::const_iterator op = operators.begin(); while(op != operators.end() && (it = line_tools_find_last_char_out_of_parenth(formula, *op)) == formula.end()) ++op; if(it != formula.end()) { if(formula.size() < 3) throw Erange("calculus", tools_printf("Unknown meaning for string: %S", &formula)); s1 = string(formula.begin(), it); s2 = string(it + 1, formula.end()); switch(*op) { case '+': return calculus(s1) + calculus(s2); case '-': return calculus(s1) - calculus(s2); case '/': return calculus(s1) / calculus(s2); case '*': return calculus(s1) * calculus(s2); case '%': return calculus(s1) % calculus(s2); case '&': return calculus(s1) & calculus(s2); case '^': return calculus(s1) ^ calculus(s2); case '|': return calculus(s1) | calculus(s2); default: throw SRC_BUG; } } if(*(formula.begin()) == '(' && *(formula.end() - 1) == ')') return calculus(string(formula.begin() + 1, formula.end() - 1)); else // assuming an integer { deci tmp = formula; return tmp.computer(); } } dar-2.4.8/src/examples/factoriel.cpp0000644000175000017430000000535212011672404014301 00000000000000/*********************************************************************/ // dar - disk archive - a backup/restoration program // Copyright (C) 2002-2052 Denis Corbin // // This program is free software; you can redistribute it and/or // modify it under the terms of the GNU General Public License // as published by the Free Software Foundation; either version 2 // of the License, or (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with this program; if not, write to the Free Software // Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
// // to contact the author : http://dar.linux.free.fr/email.html /*********************************************************************/ #include "../my_config.h" extern "C" { #if HAVE_SYS_TYPE_H #include #endif #if HAVE_SYS_STAT_H #include #endif #if HAVE_FCNTL_H #include #endif #if HAVE_UNISTD_H #include #endif #if HAVE_ERRNO_H #include #endif #if HAVE_STRING_H #include #endif #if HAVE_STDLIB_H #include #endif } // end extern "C" #include #include #include "infinint.hpp" #include "deci.hpp" #include "erreurs.hpp" #include "generic_file.hpp" #include "integers.hpp" #include "cygwin_adapt.hpp" #include "fichier.hpp" #include "../dar_suite/shell_interaction.hpp" #include "../dar_suite/dar_suite.hpp" using namespace libdar; using namespace std; static int little_main(user_interaction & ui, int argc, char * const argv[], const char **env); int main(S_I argc, char * const argv[], const char **env) { return dar_suite_global(argc, argv, env, &little_main); } static int little_main(user_interaction & ui, int argc, char * const argv[], const char **env) { if(argc != 2 && argc != 3) exit(1); string s = argv[1]; deci f = s; infinint max = f.computer(); infinint i = 2; infinint p = 1; while(i <= max) { p *= i; ++i; } ui.warning("calcul finished, now computing the decimal representation ... "); f = deci(p); ui.warning(f.human()); if(argc == 3) { S_I fd = ::open(argv[2], O_RDWR|O_CREAT|O_TRUNC|O_BINARY, 0644); if(fd < 0) ui.warning(string("cannot open file for test ! ") + strerror(errno)); else { fichier fic = fichier(ui, fd); infinint cp; p.dump(fic); fic.skip(0); cp = infinint(fic); ui.warning(string("read from file: ") + deci(cp).human()); } } return EXIT_OK; } dar-2.4.8/src/examples/README0000644000175000017430000000126712010477202012504 00000000000000In this directory you will find some example programs. what do they do ? factoriel At the beginning it was a test program for infinint. It computes the factorial of the given integer. If compiled without --enable-mode=... option given to configure, it will always (should at least), return the decimal representation of the asked decimal. The problem is more the time it will take to compute it. And using --enable-mode=... you may get a message reporting that an integer overflow has occurred. prime tests if the given number is a prime factor. It relies on infinint. (also a test program at the beginning). formula calculate a simple arythmetic expression given as argument dar-2.4.8/src/examples/prime.cpp0000644000175000017430000000426712010477214013452 00000000000000/*********************************************************************/ // dar - disk archive - a backup/restoration program // Copyright (C) 2002-2052 Denis Corbin // // This program is free software; you can redistribute it and/or // modify it under the terms of the GNU General Public License // as published by the Free Software Foundation; either version 2 // of the License, or (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with this program; if not, write to the Free Software // Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA 02111-1307, USA. 
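// Illustrative invocations of the three example programs described in the README
// above -- a sketch only: the numeric arguments are arbitrary, and the optional
// second argument of factoriel (a scratch file used to dump and re-read the
// resulting infinint) comes from the argc == 3 branch of factoriel.cpp:
//
//     ./factoriel 1000                  # print 1000! in decimal
//     ./factoriel 1000 /tmp/fact.dump   # also dump the result to a file and read it back
//     ./prime 104729                    # report whether the given number is prime
//     ./formula "(1234 + 8766) * 2"     # evaluate an expression over positive integers
//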
// // to contact the author : http://dar.linux.free.fr/email.html /*********************************************************************/ #include "../my_config.h" extern "C" { #if HAVE_STDLIB_H #include #endif } #include #include "infinint.hpp" #include "deci.hpp" #include "integers.hpp" #include "../dar_suite/shell_interaction.hpp" #include "../dar_suite/dar_suite.hpp" using namespace libdar; using namespace std; int little_main(user_interaction & ui, S_I argc, char * const argv[], const char **env); int main(S_I argc, char * const argv[], const char **env) { return dar_suite_global(argc, argv, env, &little_main); } int little_main(user_interaction & ui, S_I argc, char * const argv[], const char **env) { if(argc != 2) { cout << "usage : " << argv[0] << " " << endl; exit(1); } deci x = string(argv[1]); cout << "converting string to infinint... " << endl; infinint num = x.computer(); cout << "checking whether the number is a prime factor... " << endl; infinint max = (num / 2) + 1; infinint i = 2; while(i < max) if(num%i == 0) break; else ++i; if(i < max) cout << argv[1] << " is NOT prime" << endl; else cout << argv[1] << " is PRIME" << endl; return EXIT_OK; } dar-2.4.8/src/check/0000755000175000017520000000000012023053205011132 500000000000000dar-2.4.8/src/check/loop.sh0000755000175000017430000000321712010477201012367 00000000000000#!/bin/sh if [ ! -x ../dar_suite/dar ] ; then echo "********************************************" echo "" echo "Dar is not built, aborting" echo "" echo "********************************************" exit 2 else export DAR=../dar_suite/dar fi if [ ! -x ./all_features ] ; then echo "********************************************" echo "" echo "all_features program not built, aborting" echo "" echo "********************************************" exit 2 fi if [ `id -u` -ne 0 ]; then echo "********************************************" echo "" echo "need to be run as root" echo "" echo "********************************************" exit 3 fi if ./all_features ; then echo "OK, all required features are available for testing" else exit 3 fi for hash in md5 none sha1 ; do for crypto in bf none scram aes twofish serpent camellia ; do for zip in gzip none bzip2 lzo ; do for slice in 1k none ; do for Slice in 500 none ; do for tape in y n ; do for seq_read in y n ; do for digit in 3 none ; do for sparse_size in 100 0 ; do for keep_compr in y n ; do for recheck_hole in y n ; do ./main.sh $crypto "$zip" "$slice" "$Slice" "$tape" "$seq_read" "$digit" "$sparse_size" "$keep_compr" "$recheck_hole" "$hash" || exit 1 done done done done done done done done done done done echo "+-------------------------------+" echo "| ALL TESTS PASSED SUCCESSFULLY |" echo "+-------------------------------+" dar-2.4.8/src/check/Makefile.in0000644000175000017520000004113312023053152013122 00000000000000# Makefile.in generated by automake 1.11.1 from Makefile.am. # @configure_input@ # Copyright (C) 1994, 1995, 1996, 1997, 1998, 1999, 2000, 2001, 2002, # 2003, 2004, 2005, 2006, 2007, 2008, 2009 Free Software Foundation, # Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ # # conditionnal part: --enable-mode=... 
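# The loop.sh script above drives main.sh (further below) with every combination
# of eleven test parameters: crypto, compression, slice size, first-slice size,
# tape marks, sequential read, minimum digits, sparse-file threshold,
# keep-compressed, recheck-hole and hash.  A sketch of a single manual run,
# picking one value per parameter from the loops in loop.sh and exporting DAR
# the same way loop.sh does:
#
#     DAR=../dar_suite/dar ./main.sh bf gzip 1k 500 y n 3 100 n n md5
#
# Most parameters also accept the literal value "none" (or "n" for the y/n
# flags) to disable the corresponding feature, exactly as loop.sh exercises them.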
# VPATH = @srcdir@ pkgdatadir = $(datadir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ pkglibdir = $(libdir)/@PACKAGE@ pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ noinst_PROGRAMS = all_features$(EXEEXT) check_PROGRAMS = permission$(EXEEXT) subdir = src/check DIST_COMMON = $(dist_noinst_DATA) $(srcdir)/Makefile.am \ $(srcdir)/Makefile.in ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/iconv.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/nls.m4 $(top_srcdir)/m4/po.m4 \ $(top_srcdir)/m4/progtest.m4 $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = PROGRAMS = $(noinst_PROGRAMS) am_all_features_OBJECTS = all_features.$(OBJEXT) all_features_OBJECTS = $(am_all_features_OBJECTS) all_features_LDADD = $(LDADD) am_permission_OBJECTS = permission_OBJECTS = $(am_permission_OBJECTS) permission_LDADD = $(LDADD) permission_DEPENDENCIES = DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir) depcomp = $(SHELL) $(top_srcdir)/depcomp am__depfiles_maybe = depfiles am__mv = mv -f CXXCOMPILE = $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) LTCXXCOMPILE = $(LIBTOOL) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \ --mode=compile $(CXX) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CXXFLAGS) $(CXXFLAGS) CXXLD = $(CXX) CXXLINK = $(LIBTOOL) --tag=CXX $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \ --mode=link $(CXXLD) $(AM_CXXFLAGS) $(CXXFLAGS) $(AM_LDFLAGS) \ $(LDFLAGS) -o $@ COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) LTCOMPILE = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \ --mode=compile $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) \ $(AM_CPPFLAGS) $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) CCLD = $(CC) LINK = $(LIBTOOL) --tag=CC $(AM_LIBTOOLFLAGS) $(LIBTOOLFLAGS) \ --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) $(AM_LDFLAGS) \ $(LDFLAGS) -o $@ SOURCES = $(all_features_SOURCES) $(permission_SOURCES) DIST_SOURCES = $(all_features_SOURCES) $(permission_SOURCES) DATA = $(dist_noinst_DATA) ETAGS = etags CTAGS = ctags DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) ACLOCAL = @ACLOCAL@ AMTAR = @AMTAR@ AR = @AR@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CYGPATH_W = @CYGPATH_W@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DOXYGEN_PROG = @DOXYGEN_PROG@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GREP = @GREP@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = 
@INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LD = @LD@ LDFLAGS = @LDFLAGS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBS = @LIBS@ LIBTOOL = @LIBTOOL@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ MAKEINFO = @MAKEINFO@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ NM = @NM@ NMEDIT = @NMEDIT@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ POSUB = @POSUB@ RANLIB = @RANLIB@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ STRIP = @STRIP@ UPX_PROG = @UPX_PROG@ USE_NLS = @USE_NLS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ bindir = @bindir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ datadir = @datadir@ datarootdir = @datarootdir@ docdir = @docdir@ doxygen = @doxygen@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ groff = @groff@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ localedir = @localedir@ localstatedir = @localstatedir@ lt_ECHO = @lt_ECHO@ mandir = @mandir@ mkdir_p = @mkdir_p@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ prefix = @prefix@ program_transform_name = @program_transform_name@ psdir = @psdir@ sbindir = @sbindir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp = @tmp@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ upx = @upx@ system_path = @sysconfdir@ @BUILD_MODE32_FALSE@@BUILD_MODE64_FALSE@MYLIB = dar @BUILD_MODE32_FALSE@@BUILD_MODE64_TRUE@MYLIB = dar64 @BUILD_MODE32_TRUE@MYLIB = dar32 @BUILD_MODE32_FALSE@@BUILD_MODE64_FALSE@AM_CPPFLAGS = -I../libdar -DDAR_LOCALEDIR=\"$(localedir)\" -DDAR_SYS_DIR=\"$(system_path)\" $(CPP_PROF) @BUILD_MODE32_FALSE@@BUILD_MODE64_TRUE@AM_CPPFLAGS = -DLIBDAR_MODE=64 -I../libdar -DDAR_LOCALEDIR=\"$(localedir)\" -DDAR_SYS_DIR=\"$(system_path)\" $(CPP_PROF) @BUILD_MODE32_TRUE@AM_CPPFLAGS = -DLIBDAR_MODE=32 -I../libdar -DDAR_LOCALEDIR=\"$(localedir)\" -DDAR_SYS_DIR=\"$(system_path)\" $(CPP_PROF) dist_noinst_DATA = build_tree.sh main.sh modif_tree.sh routine.sh loop.sh LDADD = -L../libdar -l$(MYLIB) all_features_SOURCES = all_features.cpp all_features_DEPENDENCIES = ../libdar/lib$(MYLIB).la permission_SOURCES = all: all-am .SUFFIXES: .SUFFIXES: .cpp .lo .o .obj $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) 
$(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --gnu src/check/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --gnu src/check/Makefile .PRECIOUS: Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): clean-checkPROGRAMS: @list='$(check_PROGRAMS)'; test -n "$$list" || exit 0; \ echo " rm -f" $$list; \ rm -f $$list || exit $$?; \ test -n "$(EXEEXT)" || exit 0; \ list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \ echo " rm -f" $$list; \ rm -f $$list clean-noinstPROGRAMS: @list='$(noinst_PROGRAMS)'; test -n "$$list" || exit 0; \ echo " rm -f" $$list; \ rm -f $$list || exit $$?; \ test -n "$(EXEEXT)" || exit 0; \ list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \ echo " rm -f" $$list; \ rm -f $$list all_features$(EXEEXT): $(all_features_OBJECTS) $(all_features_DEPENDENCIES) @rm -f all_features$(EXEEXT) $(CXXLINK) $(all_features_OBJECTS) $(all_features_LDADD) $(LIBS) mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/all_features.Po@am__quote@ .cpp.o: @am__fastdepCXX_TRUE@ $(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(CXXCOMPILE) -c -o $@ $< .cpp.obj: @am__fastdepCXX_TRUE@ $(CXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ `$(CYGPATH_W) '$<'` @am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Po @AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(CXXCOMPILE) -c -o $@ `$(CYGPATH_W) '$<'` .cpp.lo: @am__fastdepCXX_TRUE@ $(LTCXXCOMPILE) -MT $@ -MD -MP -MF $(DEPDIR)/$*.Tpo -c -o $@ $< @am__fastdepCXX_TRUE@ $(am__mv) $(DEPDIR)/$*.Tpo $(DEPDIR)/$*.Plo @AMDEP_TRUE@@am__fastdepCXX_FALSE@ source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCXX_FALSE@ DEPDIR=$(DEPDIR) $(CXXDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCXX_FALSE@ $(LTCXXCOMPILE) -c -o $@ $< mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs ID: $(HEADERS) $(SOURCES) $(LISP) $(TAGS_FILES) list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ mkid -fID $$unique tags: TAGS TAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) set x; \ here=`pwd`; \ 
list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: CTAGS CTAGS: $(HEADERS) $(SOURCES) $(TAGS_DEPENDENCIES) \ $(TAGS_FILES) $(LISP) list='$(SOURCES) $(HEADERS) $(LISP) $(TAGS_FILES)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | \ $(AWK) '{ files[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in files) print i; }; }'`; \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am $(MAKE) $(AM_MAKEFLAGS) $(check_PROGRAMS) check: check-am all-am: Makefile $(PROGRAMS) $(DATA) installdirs: install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ `test -z '$(STRIP)' || \ echo "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'"` install mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-am clean-am: clean-checkPROGRAMS clean-generic clean-libtool clean-local \ clean-noinstPROGRAMS mostlyclean-am distclean: distclean-am -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: .MAKE: check-am install-am install-strip .PHONY: CTAGS GTAGS all all-am check check-am clean \ clean-checkPROGRAMS clean-generic clean-libtool clean-local \ clean-noinstPROGRAMS ctags distclean distclean-compile \ distclean-generic distclean-libtool distclean-tags distdir dvi \ dvi-am html html-am info info-am install install-am \ install-data install-data-am install-dvi install-dvi-am \ install-exec install-exec-am install-html install-html-am \ install-info install-info-am install-man install-pdf \ install-pdf-am install-ps install-ps-am install-strip \ installcheck installcheck-am installdirs maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-compile \ mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \ tags uninstall uninstall-am permission: chmod a+x $(dist_noinst_DATA) loop.sh ./loop.sh clean-local: chmod u+x $(dist_noinst_DATA) OPT=tmp.file DAR=none ./routine.sh NONE rm -f tmp.file # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT: dar-2.4.8/src/check/main.sh0000755000175000017430000000462412010477201012345 00000000000000#!/bin/sh if [ "$1" = "" ] ; then echo "usage $0 " exit 1 fi crypto="$1" zip="$2" slice="$3" Slice="$4" tape_mark="$5" seq_read="$6" digit="$7" sparse="$8" keep_compr="$9" re_hole="${10}" hash="${11}" #echo "crypto = $crypto" #echo "zip = $zip" #echo "slice = $slice" #echo "Slice = $Slice" #echo "tape_mark = $tape_mark" #echo "seq_read = $seq_read" #echo "digit = $digit" #echo "sparse = $sparse" #echo "keep_compr = $keep_compr" #echo "re_hole = $re_hole" #echo "hash = [$hash]" ALL_TESTS="A1 B1 B2 B3 B4 C1 C2 C3 C4 D1 E1 E2 E3 F1 F2 F3" export OPT=tmp.file if [ "$crypto" != "none" ]; then crypto_K="-K $crypto:toto" crypto_J="-J $crypto:toto" crypto_A="'-$' $crypto:toto" else crypto_K="" crypto_J="" crypto_A="" fi if [ "$zip" != "none" ]; then zip=-z$zip else zip="" fi if [ "$slice" != "none" ]; then slicing="-s $slice" if [ "$Slice" != "none" ]; then slicing="$slicing -S $Slice" fi else slicing="" fi if [ "$tape_mark" = "y" ]; then tape="" else tape="-at" fi if [ "$seq_read" = "y" ]; then sequential="--sequential-read" ALL_TESTS=`echo $ALL_TESTS | sed -r -e 's/(F1|F2|F3)//g'` if [ "$tape_mark" != "y" ] ; then ALL_TESTS="none" fi else sequential="" fi if [ "$digit" != "none" ]; then min_digits="--min-digit $digit,$digit,$digit" else min_digits="" fi sparse="-1 $sparse" if [ "$keep_compr" = "y" ]; then keepcompressed="-ak" else keepcompressed="" fi if [ "$re_hole" = "y" ]; then recheck_hole="-ah" else recheck_hole="" fi if [ "$hash" != "none" ]; then hash="--hash $hash" else hash="" fi cat > $OPT < using namespace libdar; using namespace std; int main() { // initializing libdar U_I major; U_I medium; U_I minor; try { get_version(major, medium, minor); } catch(...) { cerr << "libdar library error, cannot initialize library" << endl; exit(1); } try { if(major != LIBDAR_COMPILE_TIME_MAJOR || medium < LIBDAR_COMPILE_TIME_MEDIUM) { cerr << "all_feature program has is not evaluating the expected libdar library, aborting" << endl; throw Efeature("version"); } if(!compile_time::libz()) { cerr << "MISSING GZIP COMPRESSION SUPPORT TO BE ABLE TO PERFORM ALL TESTS, ABORTING" << endl; throw Efeature("libz"); } if(!compile_time::libbz2()) { cerr << "MISSING BZIP2 COMPRESSION SUPPORT TO BE ABLE TO PERFORM ALL TESTS, ABORTING" << endl; throw Efeature("libz2"); } if(!compile_time::liblzo()) { cerr << "MISSING LZO COMPRESSION SUPPORT TO BE ABLE TO PERFORM ALL TESTS, ABORTING5D" << endl; throw Efeature("lzo2"); } if(!compile_time::libgcrypt()) { cerr << "MISSING STRONG ENCRYPTION SUPPORT TO BE ABLE TO PERFORM ALL TESTS, ABORTING" << endl; throw Efeature("libz2"); } } catch(...) { close_and_clean(); exit(2); } close_and_clean(); exit(0); } dar-2.4.8/src/check/build_tree.sh0000755000175000017430000000166712011710234013540 00000000000000#!/bin/sh if [ $# -ne 2 ] ; then echo "usage: $0 " exit 1 fi SUB1=S"$2"B SUB2=S"$2"B2 SUB3=S"$2"B3 mkdir "$1" cd "$1" echo "this is the content of a plain file" > plain_file.txt mkdir "$SUB1" cd "$SUB1" ln ../plain_file.txt hard_linked_inode.txt cd .. mkdir "$SUB2" cd "$SUB2" ln -s "../$SUB1"/hard_linked_inode.txt symlink.txt ln symlink.txt hard_to_symlink.txt dd bs=4096 seek=10 count=1 if=/dev/zero of=sparse.txt 1> /dev/null 2> /dev/null echo "some chars in the middle of holes" >> sparse.txt dd bs=4096 conv=notrunc if=/dev/zero count=10 >> sparse.txt 2> /dev/null cp sparse.txt sparse2.txt mkfifo tube1 cd .. 
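# At this point the test tree holds a plain file, a hard-linked copy of it, a
# symlink that is itself hard linked, two sparse files and a named pipe.  The
# commands below add a further subtree, another pipe, and block/character device
# nodes created with mknod -- the reason loop.sh refuses to run unless invoked
# as root.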
mkdir "$SUB3" cd "$SUB3" ln ../plain_file.txt mkdir T cd T touch "another plain file" > titi.txt cd .. cd .. ln "$SUB2/sparse2.txt" "hard_linked_sparse.txt" mkfifo tube2 cd "$SUB1" ln ../tube2 hard_linked_pipe mknod chardev c 1 1 mknod blockdev b 1 1 ln chardev chardev_hard ln blockdev blockdev_hard dar-2.4.8/src/check/routine.sh0000755000175000017430000002266212010477201013110 00000000000000#!/bin/bash if [ $# -lt 1 ]; then echo "usage: $0 " echo "example: $0 A1 B1 B2 B3" exit 1 fi ## environement required: # OPT environment variable (DCF file for common parameters) if [ "$OPT" = "" ]; then echo '$OPT not set' exit 2 fi if [ "$DAR" = "" ]; then echo '$DAR not set' exit 2 fi # DAR environment variable points to dar # ## relies on: # ./build_tree.sh # ./modif_tree.sh src=SRC dst=DST src2=SRC2 full=full catf=catf catf_fly=catf_fly double_catf=double_catf double_catf_fly=double_catf_fly diff=diff catd=catd catd_fly=catd_fly double_catd=double_catd double_catd_fly=double_catd_fly diff_fly=diff_fly diff_double=diff_double diff_double_fly=diff_double_fly merge_full=merge_full merge_diff=merge_diff full2=full2 diff2=diff2 full3=full3 decr=decr hash="`sed -r -n -e 's/--hash (.*)/\1/p' tmp.file | tail -n 1`" if [ "$hash" = "" ] ; then hash="none" fi function my_diff { if [ "$1" = "" -o "$2" = "" ] ; then echo "usage: $0 " return 1 fi cd "$1" tar -cf "../$1.tmp.tar" . cd "../$2" tar -df "../$1.tmp.tar" local ret1=$? tar -cf "../$2.tmp.tar" . cd "../$1" tar -df "../$2.tmp.tar" local ret2=$? cd .. rm "$1.tmp.tar" "$2.tmp.tar" return `[ $ret1 -eq 0 -a $ret2 -eq 0 ]` } function GO { if [ "$1" == "" ] ; then echo "usage: $0