dar-2.6.8/0000755000175000017520000000000013617552352007311 500000000000000dar-2.6.8/compile0000755000175000017520000001624513042165456010614 00000000000000#! /bin/sh # Wrapper for compilers which do not understand '-c -o'. scriptversion=2012-10-14.11; # UTC # Copyright (C) 1999-2014 Free Software Foundation, Inc. # Written by Tom Tromey . # # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2, or (at your option) # any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see . # As a special exception to the GNU General Public License, if you # distribute this file as part of a program that contains a # configuration script generated by Autoconf, you may include it under # the same distribution terms that you use for the rest of that program. # This file is maintained in Automake, please report # bugs to or send patches to # . nl=' ' # We need space, tab and new line, in precisely that order. Quoting is # there to prevent tools from complaining about whitespace usage. IFS=" "" $nl" file_conv= # func_file_conv build_file lazy # Convert a $build file to $host form and store it in $file # Currently only supports Windows hosts. If the determined conversion # type is listed in (the comma separated) LAZY, no conversion will # take place. 
func_file_conv () { file=$1 case $file in / | /[!/]*) # absolute file, and not a UNC file if test -z "$file_conv"; then # lazily determine how to convert abs files case `uname -s` in MINGW*) file_conv=mingw ;; CYGWIN*) file_conv=cygwin ;; *) file_conv=wine ;; esac fi case $file_conv/,$2, in *,$file_conv,*) ;; mingw/*) file=`cmd //C echo "$file " | sed -e 's/"\(.*\) " *$/\1/'` ;; cygwin/*) file=`cygpath -m "$file" || echo "$file"` ;; wine/*) file=`winepath -w "$file" || echo "$file"` ;; esac ;; esac } # func_cl_dashL linkdir # Make cl look for libraries in LINKDIR func_cl_dashL () { func_file_conv "$1" if test -z "$lib_path"; then lib_path=$file else lib_path="$lib_path;$file" fi linker_opts="$linker_opts -LIBPATH:$file" } # func_cl_dashl library # Do a library search-path lookup for cl func_cl_dashl () { lib=$1 found=no save_IFS=$IFS IFS=';' for dir in $lib_path $LIB do IFS=$save_IFS if $shared && test -f "$dir/$lib.dll.lib"; then found=yes lib=$dir/$lib.dll.lib break fi if test -f "$dir/$lib.lib"; then found=yes lib=$dir/$lib.lib break fi if test -f "$dir/lib$lib.a"; then found=yes lib=$dir/lib$lib.a break fi done IFS=$save_IFS if test "$found" != yes; then lib=$lib.lib fi } # func_cl_wrapper cl arg... # Adjust compile command to suit cl func_cl_wrapper () { # Assume a capable shell lib_path= shared=: linker_opts= for arg do if test -n "$eat"; then eat= else case $1 in -o) # configure might choose to run compile as 'compile cc -o foo foo.c'. 
eat=1 case $2 in *.o | *.[oO][bB][jJ]) func_file_conv "$2" set x "$@" -Fo"$file" shift ;; *) func_file_conv "$2" set x "$@" -Fe"$file" shift ;; esac ;; -I) eat=1 func_file_conv "$2" mingw set x "$@" -I"$file" shift ;; -I*) func_file_conv "${1#-I}" mingw set x "$@" -I"$file" shift ;; -l) eat=1 func_cl_dashl "$2" set x "$@" "$lib" shift ;; -l*) func_cl_dashl "${1#-l}" set x "$@" "$lib" shift ;; -L) eat=1 func_cl_dashL "$2" ;; -L*) func_cl_dashL "${1#-L}" ;; -static) shared=false ;; -Wl,*) arg=${1#-Wl,} save_ifs="$IFS"; IFS=',' for flag in $arg; do IFS="$save_ifs" linker_opts="$linker_opts $flag" done IFS="$save_ifs" ;; -Xlinker) eat=1 linker_opts="$linker_opts $2" ;; -*) set x "$@" "$1" shift ;; *.cc | *.CC | *.cxx | *.CXX | *.[cC]++) func_file_conv "$1" set x "$@" -Tp"$file" shift ;; *.c | *.cpp | *.CPP | *.lib | *.LIB | *.Lib | *.OBJ | *.obj | *.[oO]) func_file_conv "$1" mingw set x "$@" "$file" shift ;; *) set x "$@" "$1" shift ;; esac fi shift done if test -n "$linker_opts"; then linker_opts="-link$linker_opts" fi exec "$@" $linker_opts exit 1 } eat= case $1 in '') echo "$0: No command. Try '$0 --help' for more information." 1>&2 exit 1; ;; -h | --h*) cat <<\EOF Usage: compile [--help] [--version] PROGRAM [ARGS] Wrapper for compilers which do not understand '-c -o'. Remove '-o dest.o' from ARGS, run PROGRAM with the remaining arguments, and rename the output as expected. If you are trying to build a whole package this is not the right script to run: please start by reading the file 'INSTALL'. Report bugs to . EOF exit $? ;; -v | --v*) echo "compile $scriptversion" exit $? ;; cl | *[/\\]cl | cl.exe | *[/\\]cl.exe ) func_cl_wrapper "$@" # Doesn't return... ;; esac ofile= cfile= for arg do if test -n "$eat"; then eat= else case $1 in -o) # configure might choose to run compile as 'compile cc -o foo foo.c'. # So we strip '-o arg' only if arg is an object. 
eat=1 case $2 in *.o | *.obj) ofile=$2 ;; *) set x "$@" -o "$2" shift ;; esac ;; *.c) cfile=$1 set x "$@" "$1" shift ;; *) set x "$@" "$1" shift ;; esac fi shift done if test -z "$ofile" || test -z "$cfile"; then # If no '-o' option was seen then we might have been invoked from a # pattern rule where we don't need one. That is ok -- this is a # normal compilation that the losing compiler can handle. If no # '.c' file was seen then we are probably linking. That is also # ok. exec "$@" fi # Name of file we expect compiler to create. cofile=`echo "$cfile" | sed 's|^.*[\\/]||; s|^[a-zA-Z]:||; s/\.c$/.o/'` # Create the lock directory. # Note: use '[/\\:.-]' here to ensure that we don't use the same name # that we are using for the .o file. Also, base the name on the expected # object file name, since that is what matters with a parallel build. lockdir=`echo "$cofile" | sed -e 's|[/\\:.-]|_|g'`.d while true; do if mkdir "$lockdir" >/dev/null 2>&1; then break fi sleep 1 done # FIXME: race condition here if user kills between mkdir and trap. trap "rmdir '$lockdir'; exit 1" 1 2 15 # Run the compile. "$@" ret=$? if test -f "$cofile"; then test "$cofile" = "$ofile" || mv "$cofile" "$ofile" elif test -f "${cofile}bj"; then test "${cofile}bj" = "$ofile" || mv "${cofile}bj" "$ofile" fi rmdir "$lockdir" exit $ret # Local Variables: # mode: shell-script # sh-indentation: 2 # eval: (add-hook 'write-file-hooks 'time-stamp) # time-stamp-start: "scriptversion=" # time-stamp-format: "%:y-%02m-%02d.%02H" # time-stamp-time-zone: "UTC" # time-stamp-end: "; # UTC" # End: dar-2.6.8/config.rpath0000755000175000017520000004421613617552277011556 00000000000000#! /bin/sh # Output a system dependent set of variables, describing how to set the # run time search path of shared libraries in an executable. # # Copyright 1996-2016 Free Software Foundation, Inc. 
# Taken from GNU libtool, 2001 # Originally by Gordon Matzigkeit , 1996 # # This file is free software; the Free Software Foundation gives # unlimited permission to copy and/or distribute it, with or without # modifications, as long as this notice is preserved. # # The first argument passed to this file is the canonical host specification, # CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM # or # CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM # The environment variables CC, GCC, LDFLAGS, LD, with_gnu_ld # should be set by the caller. # # The set of defined variables is at the end of this script. # Known limitations: # - On IRIX 6.5 with CC="cc", the run time search patch must not be longer # than 256 bytes, otherwise the compiler driver will dump core. The only # known workaround is to choose shorter directory names for the build # directory and/or the installation directory. # All known linkers require a '.a' archive for static linking (except MSVC, # which needs '.lib'). libext=a shrext=.so host="$1" host_cpu=`echo "$host" | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\1/'` host_vendor=`echo "$host" | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\2/'` host_os=`echo "$host" | sed 's/^\([^-]*\)-\([^-]*\)-\(.*\)$/\3/'` # Code taken from libtool.m4's _LT_CC_BASENAME. for cc_temp in $CC""; do case $cc_temp in compile | *[\\/]compile | ccache | *[\\/]ccache ) ;; distcc | *[\\/]distcc | purify | *[\\/]purify ) ;; \-*) ;; *) break;; esac done cc_basename=`echo "$cc_temp" | sed -e 's%^.*/%%'` # Code taken from libtool.m4's _LT_COMPILER_PIC. 
wl= if test "$GCC" = yes; then wl='-Wl,' else case "$host_os" in aix*) wl='-Wl,' ;; mingw* | cygwin* | pw32* | os2* | cegcc*) ;; hpux9* | hpux10* | hpux11*) wl='-Wl,' ;; irix5* | irix6* | nonstopux*) wl='-Wl,' ;; linux* | k*bsd*-gnu | kopensolaris*-gnu) case $cc_basename in ecc*) wl='-Wl,' ;; icc* | ifort*) wl='-Wl,' ;; lf95*) wl='-Wl,' ;; nagfor*) wl='-Wl,-Wl,,' ;; pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) wl='-Wl,' ;; ccc*) wl='-Wl,' ;; xl* | bgxl* | bgf* | mpixl*) wl='-Wl,' ;; como) wl='-lopt=' ;; *) case `$CC -V 2>&1 | sed 5q` in *Sun\ F* | *Sun*Fortran*) wl= ;; *Sun\ C*) wl='-Wl,' ;; esac ;; esac ;; newsos6) ;; *nto* | *qnx*) ;; osf3* | osf4* | osf5*) wl='-Wl,' ;; rdos*) ;; solaris*) case $cc_basename in f77* | f90* | f95* | sunf77* | sunf90* | sunf95*) wl='-Qoption ld ' ;; *) wl='-Wl,' ;; esac ;; sunos4*) wl='-Qoption ld ' ;; sysv4 | sysv4.2uw2* | sysv4.3*) wl='-Wl,' ;; sysv4*MP*) ;; sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*) wl='-Wl,' ;; unicos*) wl='-Wl,' ;; uts4*) ;; esac fi # Code taken from libtool.m4's _LT_LINKER_SHLIBS. hardcode_libdir_flag_spec= hardcode_libdir_separator= hardcode_direct=no hardcode_minus_L=no case "$host_os" in cygwin* | mingw* | pw32* | cegcc*) # FIXME: the MSVC++ port hasn't been tested in a loooong time # When not using gcc, we currently assume that we are using # Microsoft Visual C++. if test "$GCC" != yes; then with_gnu_ld=no fi ;; interix*) # we just hope/assume this is gcc and not c89 (= MSVC++) with_gnu_ld=yes ;; openbsd*) with_gnu_ld=no ;; esac ld_shlibs=yes if test "$with_gnu_ld" = yes; then # Set some defaults for GNU ld with shared library support. These # are reset later if shared libraries are not supported. Putting them # here allows them to be overridden if necessary. # Unlike libtool, we use -rpath here, not --rpath, since the documented # option of GNU ld is called -rpath, not --rpath. 
hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' case "$host_os" in aix[3-9]*) # On AIX/PPC, the GNU linker is very broken if test "$host_cpu" != ia64; then ld_shlibs=no fi ;; amigaos*) case "$host_cpu" in powerpc) ;; m68k) hardcode_libdir_flag_spec='-L$libdir' hardcode_minus_L=yes ;; esac ;; beos*) if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then : else ld_shlibs=no fi ;; cygwin* | mingw* | pw32* | cegcc*) # hardcode_libdir_flag_spec is actually meaningless, as there is # no search path for DLLs. hardcode_libdir_flag_spec='-L$libdir' if $LD --help 2>&1 | grep 'auto-import' > /dev/null; then : else ld_shlibs=no fi ;; haiku*) ;; interix[3-9]*) hardcode_direct=no hardcode_libdir_flag_spec='${wl}-rpath,$libdir' ;; gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu) if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then : else ld_shlibs=no fi ;; netbsd*) ;; solaris*) if $LD -v 2>&1 | grep 'BFD 2\.8' > /dev/null; then ld_shlibs=no elif $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then : else ld_shlibs=no fi ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*) case `$LD -v 2>&1` in *\ [01].* | *\ 2.[0-9].* | *\ 2.1[0-5].*) ld_shlibs=no ;; *) if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then hardcode_libdir_flag_spec='`test -z "$SCOABSPATH" && echo ${wl}-rpath,$libdir`' else ld_shlibs=no fi ;; esac ;; sunos4*) hardcode_direct=yes ;; *) if $LD --help 2>&1 | grep ': supported targets:.* elf' > /dev/null; then : else ld_shlibs=no fi ;; esac if test "$ld_shlibs" = no; then hardcode_libdir_flag_spec= fi else case "$host_os" in aix3*) # Note: this linker hardcodes the directories in LIBPATH if there # are no directories specified by -L. hardcode_minus_L=yes if test "$GCC" = yes; then # Neither direct hardcoding nor static linking is supported with a # broken collect2. 
hardcode_direct=unsupported fi ;; aix[4-9]*) if test "$host_cpu" = ia64; then # On IA64, the linker does run time linking by default, so we don't # have to do anything special. aix_use_runtimelinking=no else aix_use_runtimelinking=no # Test if we are trying to use run time linking or normal # AIX style linking. If -brtl is somewhere in LDFLAGS, we # need to do runtime linking. case $host_os in aix4.[23]|aix4.[23].*|aix[5-9]*) for ld_flag in $LDFLAGS; do if (test $ld_flag = "-brtl" || test $ld_flag = "-Wl,-brtl"); then aix_use_runtimelinking=yes break fi done ;; esac fi hardcode_direct=yes hardcode_libdir_separator=':' if test "$GCC" = yes; then case $host_os in aix4.[012]|aix4.[012].*) collect2name=`${CC} -print-prog-name=collect2` if test -f "$collect2name" && \ strings "$collect2name" | grep resolve_lib_name >/dev/null then # We have reworked collect2 : else # We have old collect2 hardcode_direct=unsupported hardcode_minus_L=yes hardcode_libdir_flag_spec='-L$libdir' hardcode_libdir_separator= fi ;; esac fi # Begin _LT_AC_SYS_LIBPATH_AIX. echo 'int main () { return 0; }' > conftest.c ${CC} ${LDFLAGS} conftest.c -o conftest aix_libpath=`dump -H conftest 2>/dev/null | sed -n -e '/Import File Strings/,/^$/ { /^0/ { s/^0 *\(.*\)$/\1/; p; } }'` if test -z "$aix_libpath"; then aix_libpath=`dump -HX64 conftest 2>/dev/null | sed -n -e '/Import File Strings/,/^$/ { /^0/ { s/^0 *\(.*\)$/\1/; p; } }'` fi if test -z "$aix_libpath"; then aix_libpath="/usr/lib:/lib" fi rm -f conftest.c conftest # End _LT_AC_SYS_LIBPATH_AIX. 
if test "$aix_use_runtimelinking" = yes; then hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" else if test "$host_cpu" = ia64; then hardcode_libdir_flag_spec='${wl}-R $libdir:/usr/lib:/lib' else hardcode_libdir_flag_spec='${wl}-blibpath:$libdir:'"$aix_libpath" fi fi ;; amigaos*) case "$host_cpu" in powerpc) ;; m68k) hardcode_libdir_flag_spec='-L$libdir' hardcode_minus_L=yes ;; esac ;; bsdi[45]*) ;; cygwin* | mingw* | pw32* | cegcc*) # When not using gcc, we currently assume that we are using # Microsoft Visual C++. # hardcode_libdir_flag_spec is actually meaningless, as there is # no search path for DLLs. hardcode_libdir_flag_spec=' ' libext=lib ;; darwin* | rhapsody*) hardcode_direct=no if { case $cc_basename in ifort*) true;; *) test "$GCC" = yes;; esac; }; then : else ld_shlibs=no fi ;; dgux*) hardcode_libdir_flag_spec='-L$libdir' ;; freebsd2.[01]*) hardcode_direct=yes hardcode_minus_L=yes ;; freebsd* | dragonfly*) hardcode_libdir_flag_spec='-R$libdir' hardcode_direct=yes ;; hpux9*) hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' hardcode_libdir_separator=: hardcode_direct=yes # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. hardcode_minus_L=yes ;; hpux10*) if test "$with_gnu_ld" = no; then hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' hardcode_libdir_separator=: hardcode_direct=yes # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. hardcode_minus_L=yes fi ;; hpux11*) if test "$with_gnu_ld" = no; then hardcode_libdir_flag_spec='${wl}+b ${wl}$libdir' hardcode_libdir_separator=: case $host_cpu in hppa*64*|ia64*) hardcode_direct=no ;; *) hardcode_direct=yes # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. 
hardcode_minus_L=yes ;; esac fi ;; irix5* | irix6* | nonstopux*) hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' hardcode_libdir_separator=: ;; netbsd*) hardcode_libdir_flag_spec='-R$libdir' hardcode_direct=yes ;; newsos6) hardcode_direct=yes hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' hardcode_libdir_separator=: ;; *nto* | *qnx*) ;; openbsd*) if test -f /usr/libexec/ld.so; then hardcode_direct=yes if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`" || test "$host_os-$host_cpu" = "openbsd2.8-powerpc"; then hardcode_libdir_flag_spec='${wl}-rpath,$libdir' else case "$host_os" in openbsd[01].* | openbsd2.[0-7] | openbsd2.[0-7].*) hardcode_libdir_flag_spec='-R$libdir' ;; *) hardcode_libdir_flag_spec='${wl}-rpath,$libdir' ;; esac fi else ld_shlibs=no fi ;; os2*) hardcode_libdir_flag_spec='-L$libdir' hardcode_minus_L=yes ;; osf3*) hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' hardcode_libdir_separator=: ;; osf4* | osf5*) if test "$GCC" = yes; then hardcode_libdir_flag_spec='${wl}-rpath ${wl}$libdir' else # Both cc and cxx compiler support -rpath directly hardcode_libdir_flag_spec='-rpath $libdir' fi hardcode_libdir_separator=: ;; solaris*) hardcode_libdir_flag_spec='-R$libdir' ;; sunos4*) hardcode_libdir_flag_spec='-L$libdir' hardcode_direct=yes hardcode_minus_L=yes ;; sysv4) case $host_vendor in sni) hardcode_direct=yes # is this really true??? ;; siemens) hardcode_direct=no ;; motorola) hardcode_direct=no #Motorola manual says yes, but my tests say they lie ;; esac ;; sysv4.3*) ;; sysv4*MP*) if test -d /usr/nec; then ld_shlibs=yes fi ;; sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7* | sco3.2v5.0.[024]*) ;; sysv5* | sco3.2v5* | sco5v6*) hardcode_libdir_flag_spec='`test -z "$SCOABSPATH" && echo ${wl}-R,$libdir`' hardcode_libdir_separator=':' ;; uts4*) hardcode_libdir_flag_spec='-L$libdir' ;; *) ld_shlibs=no ;; esac fi # Check dynamic linker characteristics # Code taken from libtool.m4's _LT_SYS_DYNAMIC_LINKER. 
# Unlike libtool.m4, here we don't care about _all_ names of the library, but # only about the one the linker finds when passed -lNAME. This is the last # element of library_names_spec in libtool.m4, or possibly two of them if the # linker has special search rules. library_names_spec= # the last element of library_names_spec in libtool.m4 libname_spec='lib$name' case "$host_os" in aix3*) library_names_spec='$libname.a' ;; aix[4-9]*) library_names_spec='$libname$shrext' ;; amigaos*) case "$host_cpu" in powerpc*) library_names_spec='$libname$shrext' ;; m68k) library_names_spec='$libname.a' ;; esac ;; beos*) library_names_spec='$libname$shrext' ;; bsdi[45]*) library_names_spec='$libname$shrext' ;; cygwin* | mingw* | pw32* | cegcc*) shrext=.dll library_names_spec='$libname.dll.a $libname.lib' ;; darwin* | rhapsody*) shrext=.dylib library_names_spec='$libname$shrext' ;; dgux*) library_names_spec='$libname$shrext' ;; freebsd[23].*) library_names_spec='$libname$shrext$versuffix' ;; freebsd* | dragonfly*) library_names_spec='$libname$shrext' ;; gnu*) library_names_spec='$libname$shrext' ;; haiku*) library_names_spec='$libname$shrext' ;; hpux9* | hpux10* | hpux11*) case $host_cpu in ia64*) shrext=.so ;; hppa*64*) shrext=.sl ;; *) shrext=.sl ;; esac library_names_spec='$libname$shrext' ;; interix[3-9]*) library_names_spec='$libname$shrext' ;; irix5* | irix6* | nonstopux*) library_names_spec='$libname$shrext' case "$host_os" in irix5* | nonstopux*) libsuff= shlibsuff= ;; *) case $LD in *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ") libsuff= shlibsuff= ;; *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ") libsuff=32 shlibsuff=N32 ;; *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ") libsuff=64 shlibsuff=64 ;; *) libsuff= shlibsuff= ;; esac ;; esac ;; linux*oldld* | linux*aout* | linux*coff*) ;; linux* | k*bsd*-gnu | kopensolaris*-gnu) library_names_spec='$libname$shrext' ;; knetbsd*-gnu) library_names_spec='$libname$shrext' ;; netbsd*) library_names_spec='$libname$shrext' ;; newsos6) 
library_names_spec='$libname$shrext' ;; *nto* | *qnx*) library_names_spec='$libname$shrext' ;; openbsd*) library_names_spec='$libname$shrext$versuffix' ;; os2*) libname_spec='$name' shrext=.dll library_names_spec='$libname.a' ;; osf3* | osf4* | osf5*) library_names_spec='$libname$shrext' ;; rdos*) ;; solaris*) library_names_spec='$libname$shrext' ;; sunos4*) library_names_spec='$libname$shrext$versuffix' ;; sysv4 | sysv4.3*) library_names_spec='$libname$shrext' ;; sysv4*MP*) library_names_spec='$libname$shrext' ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) library_names_spec='$libname$shrext' ;; tpf*) library_names_spec='$libname$shrext' ;; uts4*) library_names_spec='$libname$shrext' ;; esac sed_quote_subst='s/\(["`$\\]\)/\\\1/g' escaped_wl=`echo "X$wl" | sed -e 's/^X//' -e "$sed_quote_subst"` shlibext=`echo "$shrext" | sed -e 's,^\.,,'` escaped_libname_spec=`echo "X$libname_spec" | sed -e 's/^X//' -e "$sed_quote_subst"` escaped_library_names_spec=`echo "X$library_names_spec" | sed -e 's/^X//' -e "$sed_quote_subst"` escaped_hardcode_libdir_flag_spec=`echo "X$hardcode_libdir_flag_spec" | sed -e 's/^X//' -e "$sed_quote_subst"` LC_ALL=C sed -e 's/^\([a-zA-Z0-9_]*\)=/acl_cv_\1=/' <= 1.10 to complain if config.rpath is missing. m4_ifdef([AC_REQUIRE_AUX_FILE], [AC_REQUIRE_AUX_FILE([config.rpath])]) AC_REQUIRE([AC_PROG_CC]) dnl we use $CC, $GCC, $LDFLAGS AC_REQUIRE([AC_LIB_PROG_LD]) dnl we use $LD, $with_gnu_ld AC_REQUIRE([AC_CANONICAL_HOST]) dnl we use $host AC_REQUIRE([AC_CONFIG_AUX_DIR_DEFAULT]) dnl we use $ac_aux_dir AC_CACHE_CHECK([for shared library run path origin], [acl_cv_rpath], [ CC="$CC" GCC="$GCC" LDFLAGS="$LDFLAGS" LD="$LD" with_gnu_ld="$with_gnu_ld" \ ${CONFIG_SHELL-/bin/sh} "$ac_aux_dir/config.rpath" "$host" > conftest.sh . 
./conftest.sh rm -f ./conftest.sh acl_cv_rpath=done ]) wl="$acl_cv_wl" acl_libext="$acl_cv_libext" acl_shlibext="$acl_cv_shlibext" acl_libname_spec="$acl_cv_libname_spec" acl_library_names_spec="$acl_cv_library_names_spec" acl_hardcode_libdir_flag_spec="$acl_cv_hardcode_libdir_flag_spec" acl_hardcode_libdir_separator="$acl_cv_hardcode_libdir_separator" acl_hardcode_direct="$acl_cv_hardcode_direct" acl_hardcode_minus_L="$acl_cv_hardcode_minus_L" dnl Determine whether the user wants rpath handling at all. AC_ARG_ENABLE([rpath], [ --disable-rpath do not hardcode runtime library paths], :, enable_rpath=yes) ]) dnl AC_LIB_FROMPACKAGE(name, package) dnl declares that libname comes from the given package. The configure file dnl will then not have a --with-libname-prefix option but a dnl --with-package-prefix option. Several libraries can come from the same dnl package. This declaration must occur before an AC_LIB_LINKFLAGS or similar dnl macro call that searches for libname. AC_DEFUN([AC_LIB_FROMPACKAGE], [ pushdef([NAME],[m4_translit([$1],[abcdefghijklmnopqrstuvwxyz./+-], [ABCDEFGHIJKLMNOPQRSTUVWXYZ____])]) define([acl_frompackage_]NAME, [$2]) popdef([NAME]) pushdef([PACK],[$2]) pushdef([PACKUP],[m4_translit(PACK,[abcdefghijklmnopqrstuvwxyz./+-], [ABCDEFGHIJKLMNOPQRSTUVWXYZ____])]) define([acl_libsinpackage_]PACKUP, m4_ifdef([acl_libsinpackage_]PACKUP, [m4_defn([acl_libsinpackage_]PACKUP)[, ]],)[lib$1]) popdef([PACKUP]) popdef([PACK]) ]) dnl AC_LIB_LINKFLAGS_BODY(name [, dependencies]) searches for libname and dnl the libraries corresponding to explicit and implicit dependencies. dnl Sets the LIB${NAME}, LTLIB${NAME} and INC${NAME} variables. dnl Also, sets the LIB${NAME}_PREFIX variable to nonempty if libname was found dnl in ${LIB${NAME}_PREFIX}/$acl_libdirstem. 
AC_DEFUN([AC_LIB_LINKFLAGS_BODY], [ AC_REQUIRE([AC_LIB_PREPARE_MULTILIB]) pushdef([NAME],[m4_translit([$1],[abcdefghijklmnopqrstuvwxyz./+-], [ABCDEFGHIJKLMNOPQRSTUVWXYZ____])]) pushdef([PACK],[m4_ifdef([acl_frompackage_]NAME, [acl_frompackage_]NAME, lib[$1])]) pushdef([PACKUP],[m4_translit(PACK,[abcdefghijklmnopqrstuvwxyz./+-], [ABCDEFGHIJKLMNOPQRSTUVWXYZ____])]) pushdef([PACKLIBS],[m4_ifdef([acl_frompackage_]NAME, [acl_libsinpackage_]PACKUP, lib[$1])]) dnl Autoconf >= 2.61 supports dots in --with options. pushdef([P_A_C_K],[m4_if(m4_version_compare(m4_defn([m4_PACKAGE_VERSION]),[2.61]),[-1],[m4_translit(PACK,[.],[_])],PACK)]) dnl By default, look in $includedir and $libdir. use_additional=yes AC_LIB_WITH_FINAL_PREFIX([ eval additional_includedir=\"$includedir\" eval additional_libdir=\"$libdir\" ]) AC_ARG_WITH(P_A_C_K[-prefix], [[ --with-]]P_A_C_K[[-prefix[=DIR] search for ]PACKLIBS[ in DIR/include and DIR/lib --without-]]P_A_C_K[[-prefix don't search for ]PACKLIBS[ in includedir and libdir]], [ if test "X$withval" = "Xno"; then use_additional=no else if test "X$withval" = "X"; then AC_LIB_WITH_FINAL_PREFIX([ eval additional_includedir=\"$includedir\" eval additional_libdir=\"$libdir\" ]) else additional_includedir="$withval/include" additional_libdir="$withval/$acl_libdirstem" if test "$acl_libdirstem2" != "$acl_libdirstem" \ && ! test -d "$withval/$acl_libdirstem"; then additional_libdir="$withval/$acl_libdirstem2" fi fi fi ]) dnl Search the library and its dependencies in $additional_libdir and dnl $LDFLAGS. Using breadth-first-seach. LIB[]NAME= LTLIB[]NAME= INC[]NAME= LIB[]NAME[]_PREFIX= dnl HAVE_LIB${NAME} is an indicator that LIB${NAME}, LTLIB${NAME} have been dnl computed. So it has to be reset here. 
HAVE_LIB[]NAME= rpathdirs= ltrpathdirs= names_already_handled= names_next_round='$1 $2' while test -n "$names_next_round"; do names_this_round="$names_next_round" names_next_round= for name in $names_this_round; do already_handled= for n in $names_already_handled; do if test "$n" = "$name"; then already_handled=yes break fi done if test -z "$already_handled"; then names_already_handled="$names_already_handled $name" dnl See if it was already located by an earlier AC_LIB_LINKFLAGS dnl or AC_LIB_HAVE_LINKFLAGS call. uppername=`echo "$name" | sed -e 'y|abcdefghijklmnopqrstuvwxyz./+-|ABCDEFGHIJKLMNOPQRSTUVWXYZ____|'` eval value=\"\$HAVE_LIB$uppername\" if test -n "$value"; then if test "$value" = yes; then eval value=\"\$LIB$uppername\" test -z "$value" || LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$value" eval value=\"\$LTLIB$uppername\" test -z "$value" || LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }$value" else dnl An earlier call to AC_LIB_HAVE_LINKFLAGS has determined dnl that this library doesn't exist. So just drop it. : fi else dnl Search the library lib$name in $additional_libdir and $LDFLAGS dnl and the already constructed $LIBNAME/$LTLIBNAME. found_dir= found_la= found_so= found_a= eval libname=\"$acl_libname_spec\" # typically: libname=lib$name if test -n "$acl_shlibext"; then shrext=".$acl_shlibext" # typically: shrext=.so else shrext= fi if test $use_additional = yes; then dir="$additional_libdir" dnl The same code as in the loop below: dnl First look for a shared library. if test -n "$acl_shlibext"; then if test -f "$dir/$libname$shrext"; then found_dir="$dir" found_so="$dir/$libname$shrext" else if test "$acl_library_names_spec" = '$libname$shrext$versuffix'; then ver=`(cd "$dir" && \ for f in "$libname$shrext".*; do echo "$f"; done \ | sed -e "s,^$libname$shrext\\\\.,," \ | sort -t '.' 
-n -r -k1,1 -k2,2 -k3,3 -k4,4 -k5,5 \ | sed 1q ) 2>/dev/null` if test -n "$ver" && test -f "$dir/$libname$shrext.$ver"; then found_dir="$dir" found_so="$dir/$libname$shrext.$ver" fi else eval library_names=\"$acl_library_names_spec\" for f in $library_names; do if test -f "$dir/$f"; then found_dir="$dir" found_so="$dir/$f" break fi done fi fi fi dnl Then look for a static library. if test "X$found_dir" = "X"; then if test -f "$dir/$libname.$acl_libext"; then found_dir="$dir" found_a="$dir/$libname.$acl_libext" fi fi if test "X$found_dir" != "X"; then if test -f "$dir/$libname.la"; then found_la="$dir/$libname.la" fi fi fi if test "X$found_dir" = "X"; then for x in $LDFLAGS $LTLIB[]NAME; do AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) case "$x" in -L*) dir=`echo "X$x" | sed -e 's/^X-L//'` dnl First look for a shared library. if test -n "$acl_shlibext"; then if test -f "$dir/$libname$shrext"; then found_dir="$dir" found_so="$dir/$libname$shrext" else if test "$acl_library_names_spec" = '$libname$shrext$versuffix'; then ver=`(cd "$dir" && \ for f in "$libname$shrext".*; do echo "$f"; done \ | sed -e "s,^$libname$shrext\\\\.,," \ | sort -t '.' -n -r -k1,1 -k2,2 -k3,3 -k4,4 -k5,5 \ | sed 1q ) 2>/dev/null` if test -n "$ver" && test -f "$dir/$libname$shrext.$ver"; then found_dir="$dir" found_so="$dir/$libname$shrext.$ver" fi else eval library_names=\"$acl_library_names_spec\" for f in $library_names; do if test -f "$dir/$f"; then found_dir="$dir" found_so="$dir/$f" break fi done fi fi fi dnl Then look for a static library. if test "X$found_dir" = "X"; then if test -f "$dir/$libname.$acl_libext"; then found_dir="$dir" found_a="$dir/$libname.$acl_libext" fi fi if test "X$found_dir" != "X"; then if test -f "$dir/$libname.la"; then found_la="$dir/$libname.la" fi fi ;; esac if test "X$found_dir" != "X"; then break fi done fi if test "X$found_dir" != "X"; then dnl Found the library. 
LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }-L$found_dir -l$name" if test "X$found_so" != "X"; then dnl Linking with a shared library. We attempt to hardcode its dnl directory into the executable's runpath, unless it's the dnl standard /usr/lib. if test "$enable_rpath" = no \ || test "X$found_dir" = "X/usr/$acl_libdirstem" \ || test "X$found_dir" = "X/usr/$acl_libdirstem2"; then dnl No hardcoding is needed. LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$found_so" else dnl Use an explicit option to hardcode DIR into the resulting dnl binary. dnl Potentially add DIR to ltrpathdirs. dnl The ltrpathdirs will be appended to $LTLIBNAME at the end. haveit= for x in $ltrpathdirs; do if test "X$x" = "X$found_dir"; then haveit=yes break fi done if test -z "$haveit"; then ltrpathdirs="$ltrpathdirs $found_dir" fi dnl The hardcoding into $LIBNAME is system dependent. if test "$acl_hardcode_direct" = yes; then dnl Using DIR/libNAME.so during linking hardcodes DIR into the dnl resulting binary. LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$found_so" else if test -n "$acl_hardcode_libdir_flag_spec" && test "$acl_hardcode_minus_L" = no; then dnl Use an explicit option to hardcode DIR into the resulting dnl binary. LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$found_so" dnl Potentially add DIR to rpathdirs. dnl The rpathdirs will be appended to $LIBNAME at the end. haveit= for x in $rpathdirs; do if test "X$x" = "X$found_dir"; then haveit=yes break fi done if test -z "$haveit"; then rpathdirs="$rpathdirs $found_dir" fi else dnl Rely on "-L$found_dir". 
dnl But don't add it if it's already contained in the LDFLAGS dnl or the already constructed $LIBNAME haveit= for x in $LDFLAGS $LIB[]NAME; do AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) if test "X$x" = "X-L$found_dir"; then haveit=yes break fi done if test -z "$haveit"; then LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }-L$found_dir" fi if test "$acl_hardcode_minus_L" != no; then dnl FIXME: Not sure whether we should use dnl "-L$found_dir -l$name" or "-L$found_dir $found_so" dnl here. LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$found_so" else dnl We cannot use $acl_hardcode_runpath_var and LD_RUN_PATH dnl here, because this doesn't fit in flags passed to the dnl compiler. So give up. No hardcoding. This affects only dnl very old systems. dnl FIXME: Not sure whether we should use dnl "-L$found_dir -l$name" or "-L$found_dir $found_so" dnl here. LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }-l$name" fi fi fi fi else if test "X$found_a" != "X"; then dnl Linking with a static library. LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$found_a" else dnl We shouldn't come here, but anyway it's good to have a dnl fallback. LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }-L$found_dir -l$name" fi fi dnl Assume the include files are nearby. additional_includedir= case "$found_dir" in */$acl_libdirstem | */$acl_libdirstem/) basedir=`echo "X$found_dir" | sed -e 's,^X,,' -e "s,/$acl_libdirstem/"'*$,,'` if test "$name" = '$1'; then LIB[]NAME[]_PREFIX="$basedir" fi additional_includedir="$basedir/include" ;; */$acl_libdirstem2 | */$acl_libdirstem2/) basedir=`echo "X$found_dir" | sed -e 's,^X,,' -e "s,/$acl_libdirstem2/"'*$,,'` if test "$name" = '$1'; then LIB[]NAME[]_PREFIX="$basedir" fi additional_includedir="$basedir/include" ;; esac if test "X$additional_includedir" != "X"; then dnl Potentially add $additional_includedir to $INCNAME. dnl But don't add it dnl 1. if it's the standard /usr/include, dnl 2. if it's /usr/local/include and we are using GCC on Linux, dnl 3. 
if it's already present in $CPPFLAGS or the already dnl constructed $INCNAME, dnl 4. if it doesn't exist as a directory. if test "X$additional_includedir" != "X/usr/include"; then haveit= if test "X$additional_includedir" = "X/usr/local/include"; then if test -n "$GCC"; then case $host_os in linux* | gnu* | k*bsd*-gnu) haveit=yes;; esac fi fi if test -z "$haveit"; then for x in $CPPFLAGS $INC[]NAME; do AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) if test "X$x" = "X-I$additional_includedir"; then haveit=yes break fi done if test -z "$haveit"; then if test -d "$additional_includedir"; then dnl Really add $additional_includedir to $INCNAME. INC[]NAME="${INC[]NAME}${INC[]NAME:+ }-I$additional_includedir" fi fi fi fi fi dnl Look for dependencies. if test -n "$found_la"; then dnl Read the .la file. It defines the variables dnl dlname, library_names, old_library, dependency_libs, current, dnl age, revision, installed, dlopen, dlpreopen, libdir. save_libdir="$libdir" case "$found_la" in */* | *\\*) . "$found_la" ;; *) . "./$found_la" ;; esac libdir="$save_libdir" dnl We use only dependency_libs. for dep in $dependency_libs; do case "$dep" in -L*) additional_libdir=`echo "X$dep" | sed -e 's/^X-L//'` dnl Potentially add $additional_libdir to $LIBNAME and $LTLIBNAME. dnl But don't add it dnl 1. if it's the standard /usr/lib, dnl 2. if it's /usr/local/lib and we are using GCC on Linux, dnl 3. if it's already present in $LDFLAGS or the already dnl constructed $LIBNAME, dnl 4. if it doesn't exist as a directory. 
if test "X$additional_libdir" != "X/usr/$acl_libdirstem" \ && test "X$additional_libdir" != "X/usr/$acl_libdirstem2"; then haveit= if test "X$additional_libdir" = "X/usr/local/$acl_libdirstem" \ || test "X$additional_libdir" = "X/usr/local/$acl_libdirstem2"; then if test -n "$GCC"; then case $host_os in linux* | gnu* | k*bsd*-gnu) haveit=yes;; esac fi fi if test -z "$haveit"; then haveit= for x in $LDFLAGS $LIB[]NAME; do AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) if test "X$x" = "X-L$additional_libdir"; then haveit=yes break fi done if test -z "$haveit"; then if test -d "$additional_libdir"; then dnl Really add $additional_libdir to $LIBNAME. LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }-L$additional_libdir" fi fi haveit= for x in $LDFLAGS $LTLIB[]NAME; do AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) if test "X$x" = "X-L$additional_libdir"; then haveit=yes break fi done if test -z "$haveit"; then if test -d "$additional_libdir"; then dnl Really add $additional_libdir to $LTLIBNAME. LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }-L$additional_libdir" fi fi fi fi ;; -R*) dir=`echo "X$dep" | sed -e 's/^X-R//'` if test "$enable_rpath" != no; then dnl Potentially add DIR to rpathdirs. dnl The rpathdirs will be appended to $LIBNAME at the end. haveit= for x in $rpathdirs; do if test "X$x" = "X$dir"; then haveit=yes break fi done if test -z "$haveit"; then rpathdirs="$rpathdirs $dir" fi dnl Potentially add DIR to ltrpathdirs. dnl The ltrpathdirs will be appended to $LTLIBNAME at the end. haveit= for x in $ltrpathdirs; do if test "X$x" = "X$dir"; then haveit=yes break fi done if test -z "$haveit"; then ltrpathdirs="$ltrpathdirs $dir" fi fi ;; -l*) dnl Handle this in the next round. names_next_round="$names_next_round "`echo "X$dep" | sed -e 's/^X-l//'` ;; *.la) dnl Handle this in the next round. Throw away the .la's dnl directory; it is already contained in a preceding -L dnl option. 
names_next_round="$names_next_round "`echo "X$dep" | sed -e 's,^X.*/,,' -e 's,^lib,,' -e 's,\.la$,,'` ;; *) dnl Most likely an immediate library name. LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$dep" LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }$dep" ;; esac done fi else dnl Didn't find the library; assume it is in the system directories dnl known to the linker and runtime loader. (All the system dnl directories known to the linker should also be known to the dnl runtime loader, otherwise the system is severely misconfigured.) LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }-l$name" LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }-l$name" fi fi fi done done if test "X$rpathdirs" != "X"; then if test -n "$acl_hardcode_libdir_separator"; then dnl Weird platform: only the last -rpath option counts, the user must dnl pass all path elements in one option. We can arrange that for a dnl single library, but not when more than one $LIBNAMEs are used. alldirs= for found_dir in $rpathdirs; do alldirs="${alldirs}${alldirs:+$acl_hardcode_libdir_separator}$found_dir" done dnl Note: acl_hardcode_libdir_flag_spec uses $libdir and $wl. acl_save_libdir="$libdir" libdir="$alldirs" eval flag=\"$acl_hardcode_libdir_flag_spec\" libdir="$acl_save_libdir" LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$flag" else dnl The -rpath options are cumulative. for found_dir in $rpathdirs; do acl_save_libdir="$libdir" libdir="$found_dir" eval flag=\"$acl_hardcode_libdir_flag_spec\" libdir="$acl_save_libdir" LIB[]NAME="${LIB[]NAME}${LIB[]NAME:+ }$flag" done fi fi if test "X$ltrpathdirs" != "X"; then dnl When using libtool, the option that works for both libraries and dnl executables is -R. The -R options are cumulative. for found_dir in $ltrpathdirs; do LTLIB[]NAME="${LTLIB[]NAME}${LTLIB[]NAME:+ }-R$found_dir" done fi popdef([P_A_C_K]) popdef([PACKLIBS]) popdef([PACKUP]) popdef([PACK]) popdef([NAME]) ]) dnl AC_LIB_APPENDTOVAR(VAR, CONTENTS) appends the elements of CONTENTS to VAR, dnl unless already present in VAR. 
dnl Works only for CPPFLAGS, not for LIB* variables because that sometimes dnl contains two or three consecutive elements that belong together. AC_DEFUN([AC_LIB_APPENDTOVAR], [ for element in [$2]; do haveit= for x in $[$1]; do AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) if test "X$x" = "X$element"; then haveit=yes break fi done if test -z "$haveit"; then [$1]="${[$1]}${[$1]:+ }$element" fi done ]) dnl For those cases where a variable contains several -L and -l options dnl referring to unknown libraries and directories, this macro determines the dnl necessary additional linker options for the runtime path. dnl AC_LIB_LINKFLAGS_FROM_LIBS([LDADDVAR], [LIBSVALUE], [USE-LIBTOOL]) dnl sets LDADDVAR to linker options needed together with LIBSVALUE. dnl If USE-LIBTOOL evaluates to non-empty, linking with libtool is assumed, dnl otherwise linking without libtool is assumed. AC_DEFUN([AC_LIB_LINKFLAGS_FROM_LIBS], [ AC_REQUIRE([AC_LIB_RPATH]) AC_REQUIRE([AC_LIB_PREPARE_MULTILIB]) $1= if test "$enable_rpath" != no; then if test -n "$acl_hardcode_libdir_flag_spec" && test "$acl_hardcode_minus_L" = no; then dnl Use an explicit option to hardcode directories into the resulting dnl binary. rpathdirs= next= for opt in $2; do if test -n "$next"; then dir="$next" dnl No need to hardcode the standard /usr/lib. if test "X$dir" != "X/usr/$acl_libdirstem" \ && test "X$dir" != "X/usr/$acl_libdirstem2"; then rpathdirs="$rpathdirs $dir" fi next= else case $opt in -L) next=yes ;; -L*) dir=`echo "X$opt" | sed -e 's,^X-L,,'` dnl No need to hardcode the standard /usr/lib. if test "X$dir" != "X/usr/$acl_libdirstem" \ && test "X$dir" != "X/usr/$acl_libdirstem2"; then rpathdirs="$rpathdirs $dir" fi next= ;; *) next= ;; esac fi done if test "X$rpathdirs" != "X"; then if test -n ""$3""; then dnl libtool is used for linking. Use -R options. for dir in $rpathdirs; do $1="${$1}${$1:+ }-R$dir" done else dnl The linker is used for linking directly. 
if test -n "$acl_hardcode_libdir_separator"; then dnl Weird platform: only the last -rpath option counts, the user dnl must pass all path elements in one option. alldirs= for dir in $rpathdirs; do alldirs="${alldirs}${alldirs:+$acl_hardcode_libdir_separator}$dir" done acl_save_libdir="$libdir" libdir="$alldirs" eval flag=\"$acl_hardcode_libdir_flag_spec\" libdir="$acl_save_libdir" $1="$flag" else dnl The -rpath options are cumulative. for dir in $rpathdirs; do acl_save_libdir="$libdir" libdir="$dir" eval flag=\"$acl_hardcode_libdir_flag_spec\" libdir="$acl_save_libdir" $1="${$1}${$1:+ }$flag" done fi fi fi fi fi AC_SUBST([$1]) ]) dar-2.6.8/m4/lib-ld.m40000644000175000017520000000714313617552277011171 00000000000000# lib-ld.m4 serial 6 dnl Copyright (C) 1996-2003, 2009-2016 Free Software Foundation, Inc. dnl This file is free software; the Free Software Foundation dnl gives unlimited permission to copy and/or distribute it, dnl with or without modifications, as long as this notice is preserved. dnl Subroutines of libtool.m4, dnl with replacements s/_*LT_PATH/AC_LIB_PROG/ and s/lt_/acl_/ to avoid dnl collision with libtool.m4. dnl From libtool-2.4. Sets the variable with_gnu_ld to yes or no. AC_DEFUN([AC_LIB_PROG_LD_GNU], [AC_CACHE_CHECK([if the linker ($LD) is GNU ld], [acl_cv_prog_gnu_ld], [# I'd rather use --version here, but apparently some GNU lds only accept -v. case `$LD -v 2>&1 /dev/null 2>&1 \ && { (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 \ || PATH_SEPARATOR=';' } fi ac_prog=ld if test "$GCC" = yes; then # Check if gcc -print-prog-name=ld gives a path. AC_MSG_CHECKING([for ld used by $CC]) case $host in *-*-mingw*) # gcc leaves a trailing carriage return which upsets mingw ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;; *) ac_prog=`($CC -print-prog-name=ld) 2>&5` ;; esac case $ac_prog in # Accept absolute paths. 
[[\\/]]* | ?:[[\\/]]*) re_direlt='/[[^/]][[^/]]*/\.\./' # Canonicalize the pathname of ld ac_prog=`echo "$ac_prog"| sed 's%\\\\%/%g'` while echo "$ac_prog" | grep "$re_direlt" > /dev/null 2>&1; do ac_prog=`echo $ac_prog| sed "s%$re_direlt%/%"` done test -z "$LD" && LD="$ac_prog" ;; "") # If it fails, then pretend we aren't using GCC. ac_prog=ld ;; *) # If it is relative, then search for the first ld in PATH. with_gnu_ld=unknown ;; esac elif test "$with_gnu_ld" = yes; then AC_MSG_CHECKING([for GNU ld]) else AC_MSG_CHECKING([for non-GNU ld]) fi AC_CACHE_VAL([acl_cv_path_LD], [if test -z "$LD"; then acl_save_ifs="$IFS"; IFS=$PATH_SEPARATOR for ac_dir in $PATH; do IFS="$acl_save_ifs" test -z "$ac_dir" && ac_dir=. if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then acl_cv_path_LD="$ac_dir/$ac_prog" # Check to see if the program is GNU ld. I'd rather use --version, # but apparently some variants of GNU ld only accept -v. # Break only if it was the GNU/non-GNU ld that we prefer. case `"$acl_cv_path_LD" -v 2>&1 , 1995-2000. dnl Bruno Haible , 2000-2003. AC_PREREQ([2.50]) AC_DEFUN([AM_NLS], [ AC_MSG_CHECKING([whether NLS is requested]) dnl Default is enabled NLS AC_ARG_ENABLE([nls], [ --disable-nls do not use Native Language Support], USE_NLS=$enableval, USE_NLS=yes) AC_MSG_RESULT([$USE_NLS]) AC_SUBST([USE_NLS]) ]) dar-2.6.8/m4/progtest.m40000644000175000017520000000602413617552277011672 00000000000000# progtest.m4 serial 7 (gettext-0.18.2) dnl Copyright (C) 1996-2003, 2005, 2008-2016 Free Software Foundation, Inc. dnl This file is free software; the Free Software Foundation dnl gives unlimited permission to copy and/or distribute it, dnl with or without modifications, as long as this notice is preserved. dnl dnl This file can be used in projects which are not available under dnl the GNU General Public License or the GNU Library General Public dnl License but which still want to provide support for the GNU gettext dnl functionality. 
dnl Please note that the actual code of the GNU gettext library is covered dnl by the GNU Library General Public License, and the rest of the GNU dnl gettext package is covered by the GNU General Public License. dnl They are *not* in the public domain. dnl Authors: dnl Ulrich Drepper , 1996. AC_PREREQ([2.50]) # Search path for a program which passes the given test. dnl AM_PATH_PROG_WITH_TEST(VARIABLE, PROG-TO-CHECK-FOR, dnl TEST-PERFORMED-ON-FOUND_PROGRAM [, VALUE-IF-NOT-FOUND [, PATH]]) AC_DEFUN([AM_PATH_PROG_WITH_TEST], [ # Prepare PATH_SEPARATOR. # The user is always right. if test "${PATH_SEPARATOR+set}" != set; then # Determine PATH_SEPARATOR by trying to find /bin/sh in a PATH which # contains only /bin. Note that ksh looks also at the FPATH variable, # so we have to set that as well for the test. PATH_SEPARATOR=: (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 \ && { (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 \ || PATH_SEPARATOR=';' } fi # Find out how to test for executable files. Don't use a zero-byte file, # as systems may use methods other than mode bits to determine executability. cat >conf$$.file <<_ASEOF #! /bin/sh exit 0 _ASEOF chmod +x conf$$.file if test -x conf$$.file >/dev/null 2>&1; then ac_executable_p="test -x" else ac_executable_p="test -f" fi rm -f conf$$.file # Extract the first word of "$2", so it can be a program name with args. set dummy $2; ac_word=[$]2 AC_MSG_CHECKING([for $ac_word]) AC_CACHE_VAL([ac_cv_path_$1], [case "[$]$1" in [[\\/]]* | ?:[[\\/]]*) ac_cv_path_$1="[$]$1" # Let the user override the test with a path. ;; *) ac_save_IFS="$IFS"; IFS=$PATH_SEPARATOR for ac_dir in ifelse([$5], , $PATH, [$5]); do IFS="$ac_save_IFS" test -z "$ac_dir" && ac_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $ac_executable_p "$ac_dir/$ac_word$ac_exec_ext"; then echo "$as_me: trying $ac_dir/$ac_word..." 
>&AS_MESSAGE_LOG_FD if [$3]; then ac_cv_path_$1="$ac_dir/$ac_word$ac_exec_ext" break 2 fi fi done done IFS="$ac_save_IFS" dnl If no 4th arg is given, leave the cache variable unset, dnl so AC_PATH_PROGS will keep looking. ifelse([$4], , , [ test -z "[$]ac_cv_path_$1" && ac_cv_path_$1="$4" ])dnl ;; esac])dnl $1="$ac_cv_path_$1" if test ifelse([$4], , [-n "[$]$1"], ["[$]$1" != "$4"]); then AC_MSG_RESULT([$][$1]) else AC_MSG_RESULT([no]) fi AC_SUBST([$1])dnl ]) dar-2.6.8/m4/po.m40000644000175000017520000004503113617552277010442 00000000000000# po.m4 serial 24 (gettext-0.19) dnl Copyright (C) 1995-2014, 2016 Free Software Foundation, Inc. dnl This file is free software; the Free Software Foundation dnl gives unlimited permission to copy and/or distribute it, dnl with or without modifications, as long as this notice is preserved. dnl dnl This file can be used in projects which are not available under dnl the GNU General Public License or the GNU Library General Public dnl License but which still want to provide support for the GNU gettext dnl functionality. dnl Please note that the actual code of the GNU gettext library is covered dnl by the GNU Library General Public License, and the rest of the GNU dnl gettext package is covered by the GNU General Public License. dnl They are *not* in the public domain. dnl Authors: dnl Ulrich Drepper , 1995-2000. dnl Bruno Haible , 2000-2003. AC_PREREQ([2.60]) dnl Checks for all prerequisites of the po subdirectory. AC_DEFUN([AM_PO_SUBDIRS], [ AC_REQUIRE([AC_PROG_MAKE_SET])dnl AC_REQUIRE([AC_PROG_INSTALL])dnl AC_REQUIRE([AC_PROG_MKDIR_P])dnl AC_REQUIRE([AC_PROG_SED])dnl AC_REQUIRE([AM_NLS])dnl dnl Release version of the gettext macros. This is used to ensure that dnl the gettext macros and po/Makefile.in.in are in sync. AC_SUBST([GETTEXT_MACRO_VERSION], [0.19]) dnl Perform the following tests also if --disable-nls has been given, dnl because they are needed for "make dist" to work. dnl Search for GNU msgfmt in the PATH. 
dnl The first test excludes Solaris msgfmt and early GNU msgfmt versions. dnl The second test excludes FreeBSD msgfmt. AM_PATH_PROG_WITH_TEST(MSGFMT, msgfmt, [$ac_dir/$ac_word --statistics /dev/null >&]AS_MESSAGE_LOG_FD[ 2>&1 && (if $ac_dir/$ac_word --statistics /dev/null 2>&1 >/dev/null | grep usage >/dev/null; then exit 1; else exit 0; fi)], :) AC_PATH_PROG([GMSGFMT], [gmsgfmt], [$MSGFMT]) dnl Test whether it is GNU msgfmt >= 0.15. changequote(,)dnl case `$MSGFMT --version | sed 1q | sed -e 's,^[^0-9]*,,'` in '' | 0.[0-9] | 0.[0-9].* | 0.1[0-4] | 0.1[0-4].*) MSGFMT_015=: ;; *) MSGFMT_015=$MSGFMT ;; esac changequote([,])dnl AC_SUBST([MSGFMT_015]) changequote(,)dnl case `$GMSGFMT --version | sed 1q | sed -e 's,^[^0-9]*,,'` in '' | 0.[0-9] | 0.[0-9].* | 0.1[0-4] | 0.1[0-4].*) GMSGFMT_015=: ;; *) GMSGFMT_015=$GMSGFMT ;; esac changequote([,])dnl AC_SUBST([GMSGFMT_015]) dnl Search for GNU xgettext 0.12 or newer in the PATH. dnl The first test excludes Solaris xgettext and early GNU xgettext versions. dnl The second test excludes FreeBSD xgettext. AM_PATH_PROG_WITH_TEST(XGETTEXT, xgettext, [$ac_dir/$ac_word --omit-header --copyright-holder= --msgid-bugs-address= /dev/null >&]AS_MESSAGE_LOG_FD[ 2>&1 && (if $ac_dir/$ac_word --omit-header --copyright-holder= --msgid-bugs-address= /dev/null 2>&1 >/dev/null | grep usage >/dev/null; then exit 1; else exit 0; fi)], :) dnl Remove leftover from FreeBSD xgettext call. rm -f messages.po dnl Test whether it is GNU xgettext >= 0.15. changequote(,)dnl case `$XGETTEXT --version | sed 1q | sed -e 's,^[^0-9]*,,'` in '' | 0.[0-9] | 0.[0-9].* | 0.1[0-4] | 0.1[0-4].*) XGETTEXT_015=: ;; *) XGETTEXT_015=$XGETTEXT ;; esac changequote([,])dnl AC_SUBST([XGETTEXT_015]) dnl Search for GNU msgmerge 0.11 or newer in the PATH. AM_PATH_PROG_WITH_TEST(MSGMERGE, msgmerge, [$ac_dir/$ac_word --update -q /dev/null /dev/null >&]AS_MESSAGE_LOG_FD[ 2>&1], :) dnl Installation directories. dnl Autoconf >= 2.60 defines localedir. 
For older versions of autoconf, we dnl have to define it here, so that it can be used in po/Makefile. test -n "$localedir" || localedir='${datadir}/locale' AC_SUBST([localedir]) dnl Support for AM_XGETTEXT_OPTION. test -n "${XGETTEXT_EXTRA_OPTIONS+set}" || XGETTEXT_EXTRA_OPTIONS= AC_SUBST([XGETTEXT_EXTRA_OPTIONS]) AC_CONFIG_COMMANDS([po-directories], [[ for ac_file in $CONFIG_FILES; do # Support "outfile[:infile[:infile...]]" case "$ac_file" in *:*) ac_file=`echo "$ac_file"|sed 's%:.*%%'` ;; esac # PO directories have a Makefile.in generated from Makefile.in.in. case "$ac_file" in */Makefile.in) # Adjust a relative srcdir. ac_dir=`echo "$ac_file"|sed 's%/[^/][^/]*$%%'` ac_dir_suffix=/`echo "$ac_dir"|sed 's%^\./%%'` ac_dots=`echo "$ac_dir_suffix"|sed 's%/[^/]*%../%g'` # In autoconf-2.13 it is called $ac_given_srcdir. # In autoconf-2.50 it is called $srcdir. test -n "$ac_given_srcdir" || ac_given_srcdir="$srcdir" case "$ac_given_srcdir" in .) top_srcdir=`echo $ac_dots|sed 's%/$%%'` ;; /*) top_srcdir="$ac_given_srcdir" ;; *) top_srcdir="$ac_dots$ac_given_srcdir" ;; esac # Treat a directory as a PO directory if and only if it has a # POTFILES.in file. This allows packages to have multiple PO # directories under different names or in different locations. if test -f "$ac_given_srcdir/$ac_dir/POTFILES.in"; then rm -f "$ac_dir/POTFILES" test -n "$as_me" && echo "$as_me: creating $ac_dir/POTFILES" || echo "creating $ac_dir/POTFILES" gt_tab=`printf '\t'` cat "$ac_given_srcdir/$ac_dir/POTFILES.in" | sed -e "/^#/d" -e "/^[ ${gt_tab}]*\$/d" -e "s,.*, $top_srcdir/& \\\\," | sed -e "\$s/\(.*\) \\\\/\1/" > "$ac_dir/POTFILES" POMAKEFILEDEPS="POTFILES.in" # ALL_LINGUAS, POFILES, UPDATEPOFILES, DUMMYPOFILES, GMOFILES depend # on $ac_dir but don't depend on user-specified configuration # parameters. if test -f "$ac_given_srcdir/$ac_dir/LINGUAS"; then # The LINGUAS file contains the set of available languages. 
if test -n "$OBSOLETE_ALL_LINGUAS"; then test -n "$as_me" && echo "$as_me: setting ALL_LINGUAS in configure.in is obsolete" || echo "setting ALL_LINGUAS in configure.in is obsolete" fi ALL_LINGUAS_=`sed -e "/^#/d" -e "s/#.*//" "$ac_given_srcdir/$ac_dir/LINGUAS"` # Hide the ALL_LINGUAS assignment from automake < 1.5. eval 'ALL_LINGUAS''=$ALL_LINGUAS_' POMAKEFILEDEPS="$POMAKEFILEDEPS LINGUAS" else # The set of available languages was given in configure.in. # Hide the ALL_LINGUAS assignment from automake < 1.5. eval 'ALL_LINGUAS''=$OBSOLETE_ALL_LINGUAS' fi # Compute POFILES # as $(foreach lang, $(ALL_LINGUAS), $(srcdir)/$(lang).po) # Compute UPDATEPOFILES # as $(foreach lang, $(ALL_LINGUAS), $(lang).po-update) # Compute DUMMYPOFILES # as $(foreach lang, $(ALL_LINGUAS), $(lang).nop) # Compute GMOFILES # as $(foreach lang, $(ALL_LINGUAS), $(srcdir)/$(lang).gmo) case "$ac_given_srcdir" in .) srcdirpre= ;; *) srcdirpre='$(srcdir)/' ;; esac POFILES= UPDATEPOFILES= DUMMYPOFILES= GMOFILES= for lang in $ALL_LINGUAS; do POFILES="$POFILES $srcdirpre$lang.po" UPDATEPOFILES="$UPDATEPOFILES $lang.po-update" DUMMYPOFILES="$DUMMYPOFILES $lang.nop" GMOFILES="$GMOFILES $srcdirpre$lang.gmo" done # CATALOGS depends on both $ac_dir and the user's LINGUAS # environment variable. INST_LINGUAS= if test -n "$ALL_LINGUAS"; then for presentlang in $ALL_LINGUAS; do useit=no if test "%UNSET%" != "$LINGUAS"; then desiredlanguages="$LINGUAS" else desiredlanguages="$ALL_LINGUAS" fi for desiredlang in $desiredlanguages; do # Use the presentlang catalog if desiredlang is # a. equal to presentlang, or # b. a variant of presentlang (because in this case, # presentlang can be used as a fallback for messages # which are not translated in the desiredlang catalog). 
case "$desiredlang" in "$presentlang"*) useit=yes;; esac done if test $useit = yes; then INST_LINGUAS="$INST_LINGUAS $presentlang" fi done fi CATALOGS= if test -n "$INST_LINGUAS"; then for lang in $INST_LINGUAS; do CATALOGS="$CATALOGS $lang.gmo" done fi test -n "$as_me" && echo "$as_me: creating $ac_dir/Makefile" || echo "creating $ac_dir/Makefile" sed -e "/^POTFILES =/r $ac_dir/POTFILES" -e "/^# Makevars/r $ac_given_srcdir/$ac_dir/Makevars" -e "s|@POFILES@|$POFILES|g" -e "s|@UPDATEPOFILES@|$UPDATEPOFILES|g" -e "s|@DUMMYPOFILES@|$DUMMYPOFILES|g" -e "s|@GMOFILES@|$GMOFILES|g" -e "s|@CATALOGS@|$CATALOGS|g" -e "s|@POMAKEFILEDEPS@|$POMAKEFILEDEPS|g" "$ac_dir/Makefile.in" > "$ac_dir/Makefile" for f in "$ac_given_srcdir/$ac_dir"/Rules-*; do if test -f "$f"; then case "$f" in *.orig | *.bak | *~) ;; *) cat "$f" >> "$ac_dir/Makefile" ;; esac fi done fi ;; esac done]], [# Capture the value of obsolete ALL_LINGUAS because we need it to compute # POFILES, UPDATEPOFILES, DUMMYPOFILES, GMOFILES, CATALOGS. But hide it # from automake < 1.5. eval 'OBSOLETE_ALL_LINGUAS''="$ALL_LINGUAS"' # Capture the value of LINGUAS because we need it to compute CATALOGS. LINGUAS="${LINGUAS-%UNSET%}" ]) ]) dnl Postprocesses a Makefile in a directory containing PO files. AC_DEFUN([AM_POSTPROCESS_PO_MAKEFILE], [ # When this code is run, in config.status, two variables have already been # set: # - OBSOLETE_ALL_LINGUAS is the value of LINGUAS set in configure.in, # - LINGUAS is the value of the environment variable LINGUAS at configure # time. changequote(,)dnl # Adjust a relative srcdir. ac_dir=`echo "$ac_file"|sed 's%/[^/][^/]*$%%'` ac_dir_suffix=/`echo "$ac_dir"|sed 's%^\./%%'` ac_dots=`echo "$ac_dir_suffix"|sed 's%/[^/]*%../%g'` # In autoconf-2.13 it is called $ac_given_srcdir. # In autoconf-2.50 it is called $srcdir. test -n "$ac_given_srcdir" || ac_given_srcdir="$srcdir" case "$ac_given_srcdir" in .) 
top_srcdir=`echo $ac_dots|sed 's%/$%%'` ;; /*) top_srcdir="$ac_given_srcdir" ;; *) top_srcdir="$ac_dots$ac_given_srcdir" ;; esac # Find a way to echo strings without interpreting backslash. if test "X`(echo '\t') 2>/dev/null`" = 'X\t'; then gt_echo='echo' else if test "X`(printf '%s\n' '\t') 2>/dev/null`" = 'X\t'; then gt_echo='printf %s\n' else echo_func () { cat < "$ac_file.tmp" tab=`printf '\t'` if grep -l '@TCLCATALOGS@' "$ac_file" > /dev/null; then # Add dependencies that cannot be formulated as a simple suffix rule. for lang in $ALL_LINGUAS; do frobbedlang=`echo $lang | sed -e 's/\..*$//' -e 'y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/'` cat >> "$ac_file.tmp" < /dev/null; then # Add dependencies that cannot be formulated as a simple suffix rule. for lang in $ALL_LINGUAS; do frobbedlang=`echo $lang | sed -e 's/_/-/g' -e 's/^sr-CS/sr-SP/' -e 's/@latin$/-Latn/' -e 's/@cyrillic$/-Cyrl/' -e 's/^sr-SP$/sr-SP-Latn/' -e 's/^uz-UZ$/uz-UZ-Latn/'` cat >> "$ac_file.tmp" <> "$ac_file.tmp" < * gettext.m4: New file, from gettext-0.19.8.1. * iconv.m4: New file, from gettext-0.19.8.1. * lib-ld.m4: New file, from gettext-0.19.8.1. * lib-link.m4: New file, from gettext-0.19.8.1. * lib-prefix.m4: New file, from gettext-0.19.8.1. * nls.m4: New file, from gettext-0.19.8.1. * po.m4: New file, from gettext-0.19.8.1. * progtest.m4: New file, from gettext-0.19.8.1. dar-2.6.8/m4/gettext.m40000644000175000017520000003676313617552277011524 00000000000000# gettext.m4 serial 68 (gettext-0.19.8) dnl Copyright (C) 1995-2014, 2016 Free Software Foundation, Inc. dnl This file is free software; the Free Software Foundation dnl gives unlimited permission to copy and/or distribute it, dnl with or without modifications, as long as this notice is preserved. dnl dnl This file can be used in projects which are not available under dnl the GNU General Public License or the GNU Library General Public dnl License but which still want to provide support for the GNU gettext dnl functionality. 
dnl Please note that the actual code of the GNU gettext library is covered dnl by the GNU Library General Public License, and the rest of the GNU dnl gettext package is covered by the GNU General Public License. dnl They are *not* in the public domain. dnl Authors: dnl Ulrich Drepper , 1995-2000. dnl Bruno Haible , 2000-2006, 2008-2010. dnl Macro to add for using GNU gettext. dnl Usage: AM_GNU_GETTEXT([INTLSYMBOL], [NEEDSYMBOL], [INTLDIR]). dnl INTLSYMBOL can be one of 'external', 'no-libtool', 'use-libtool'. The dnl default (if it is not specified or empty) is 'no-libtool'. dnl INTLSYMBOL should be 'external' for packages with no intl directory, dnl and 'no-libtool' or 'use-libtool' for packages with an intl directory. dnl If INTLSYMBOL is 'use-libtool', then a libtool library dnl $(top_builddir)/intl/libintl.la will be created (shared and/or static, dnl depending on --{enable,disable}-{shared,static} and on the presence of dnl AM-DISABLE-SHARED). If INTLSYMBOL is 'no-libtool', a static library dnl $(top_builddir)/intl/libintl.a will be created. dnl If NEEDSYMBOL is specified and is 'need-ngettext', then GNU gettext dnl implementations (in libc or libintl) without the ngettext() function dnl will be ignored. If NEEDSYMBOL is specified and is dnl 'need-formatstring-macros', then GNU gettext implementations that don't dnl support the ISO C 99 formatstring macros will be ignored. dnl INTLDIR is used to find the intl libraries. If empty, dnl the value '$(top_builddir)/intl/' is used. dnl dnl The result of the configuration is one of three cases: dnl 1) GNU gettext, as included in the intl subdirectory, will be compiled dnl and used. dnl Catalog format: GNU --> install in $(datadir) dnl Catalog extension: .mo after installation, .gmo in source tree dnl 2) GNU gettext has been found in the system's C library. 
dnl Catalog format: GNU --> install in $(datadir) dnl Catalog extension: .mo after installation, .gmo in source tree dnl 3) No internationalization, always use English msgid. dnl Catalog format: none dnl Catalog extension: none dnl If INTLSYMBOL is 'external', only cases 2 and 3 can occur. dnl The use of .gmo is historical (it was needed to avoid overwriting the dnl GNU format catalogs when building on a platform with an X/Open gettext), dnl but we keep it in order not to force irrelevant filename changes on the dnl maintainers. dnl AC_DEFUN([AM_GNU_GETTEXT], [ dnl Argument checking. ifelse([$1], [], , [ifelse([$1], [external], , [ifelse([$1], [no-libtool], , [ifelse([$1], [use-libtool], , [errprint([ERROR: invalid first argument to AM_GNU_GETTEXT ])])])])]) ifelse(ifelse([$1], [], [old])[]ifelse([$1], [no-libtool], [old]), [old], [AC_DIAGNOSE([obsolete], [Use of AM_GNU_GETTEXT without [external] argument is deprecated.])]) ifelse([$2], [], , [ifelse([$2], [need-ngettext], , [ifelse([$2], [need-formatstring-macros], , [errprint([ERROR: invalid second argument to AM_GNU_GETTEXT ])])])]) define([gt_included_intl], ifelse([$1], [external], ifdef([AM_GNU_GETTEXT_][INTL_SUBDIR], [yes], [no]), [yes])) define([gt_libtool_suffix_prefix], ifelse([$1], [use-libtool], [l], [])) gt_NEEDS_INIT AM_GNU_GETTEXT_NEED([$2]) AC_REQUIRE([AM_PO_SUBDIRS])dnl ifelse(gt_included_intl, yes, [ AC_REQUIRE([AM_INTL_SUBDIR])dnl ]) dnl Prerequisites of AC_LIB_LINKFLAGS_BODY. AC_REQUIRE([AC_LIB_PREPARE_PREFIX]) AC_REQUIRE([AC_LIB_RPATH]) dnl Sometimes libintl requires libiconv, so first search for libiconv. dnl Ideally we would do this search only after the dnl if test "$USE_NLS" = "yes"; then dnl if { eval "gt_val=\$$gt_func_gnugettext_libc"; test "$gt_val" != "yes"; }; then dnl tests. But if configure.in invokes AM_ICONV after AM_GNU_GETTEXT dnl the configure script would need to contain the same shell code dnl again, outside any 'if'. 
There are two solutions: dnl - Invoke AM_ICONV_LINKFLAGS_BODY here, outside any 'if'. dnl - Control the expansions in more detail using AC_PROVIDE_IFELSE. dnl Since AC_PROVIDE_IFELSE is only in autoconf >= 2.52 and not dnl documented, we avoid it. ifelse(gt_included_intl, yes, , [ AC_REQUIRE([AM_ICONV_LINKFLAGS_BODY]) ]) dnl Sometimes, on Mac OS X, libintl requires linking with CoreFoundation. gt_INTL_MACOSX dnl Set USE_NLS. AC_REQUIRE([AM_NLS]) ifelse(gt_included_intl, yes, [ BUILD_INCLUDED_LIBINTL=no USE_INCLUDED_LIBINTL=no ]) LIBINTL= LTLIBINTL= POSUB= dnl Add a version number to the cache macros. case " $gt_needs " in *" need-formatstring-macros "*) gt_api_version=3 ;; *" need-ngettext "*) gt_api_version=2 ;; *) gt_api_version=1 ;; esac gt_func_gnugettext_libc="gt_cv_func_gnugettext${gt_api_version}_libc" gt_func_gnugettext_libintl="gt_cv_func_gnugettext${gt_api_version}_libintl" dnl If we use NLS figure out what method if test "$USE_NLS" = "yes"; then gt_use_preinstalled_gnugettext=no ifelse(gt_included_intl, yes, [ AC_MSG_CHECKING([whether included gettext is requested]) AC_ARG_WITH([included-gettext], [ --with-included-gettext use the GNU gettext library included here], nls_cv_force_use_gnu_gettext=$withval, nls_cv_force_use_gnu_gettext=no) AC_MSG_RESULT([$nls_cv_force_use_gnu_gettext]) nls_cv_use_gnu_gettext="$nls_cv_force_use_gnu_gettext" if test "$nls_cv_force_use_gnu_gettext" != "yes"; then ]) dnl User does not insist on using GNU NLS library. Figure out what dnl to use. If GNU gettext is available we use this. Else we have dnl to fall back to GNU NLS library. if test $gt_api_version -ge 3; then gt_revision_test_code=' #ifndef __GNU_GETTEXT_SUPPORTED_REVISION #define __GNU_GETTEXT_SUPPORTED_REVISION(major) ((major) == 0 ? 
0 : -1) #endif changequote(,)dnl typedef int array [2 * (__GNU_GETTEXT_SUPPORTED_REVISION(0) >= 1) - 1]; changequote([,])dnl ' else gt_revision_test_code= fi if test $gt_api_version -ge 2; then gt_expression_test_code=' + * ngettext ("", "", 0)' else gt_expression_test_code= fi AC_CACHE_CHECK([for GNU gettext in libc], [$gt_func_gnugettext_libc], [AC_LINK_IFELSE( [AC_LANG_PROGRAM( [[ #include #ifndef __GNU_GETTEXT_SUPPORTED_REVISION extern int _nl_msg_cat_cntr; extern int *_nl_domain_bindings; #define __GNU_GETTEXT_SYMBOL_EXPRESSION (_nl_msg_cat_cntr + *_nl_domain_bindings) #else #define __GNU_GETTEXT_SYMBOL_EXPRESSION 0 #endif $gt_revision_test_code ]], [[ bindtextdomain ("", ""); return * gettext ("")$gt_expression_test_code + __GNU_GETTEXT_SYMBOL_EXPRESSION ]])], [eval "$gt_func_gnugettext_libc=yes"], [eval "$gt_func_gnugettext_libc=no"])]) if { eval "gt_val=\$$gt_func_gnugettext_libc"; test "$gt_val" != "yes"; }; then dnl Sometimes libintl requires libiconv, so first search for libiconv. ifelse(gt_included_intl, yes, , [ AM_ICONV_LINK ]) dnl Search for libintl and define LIBINTL, LTLIBINTL and INCINTL dnl accordingly. Don't use AC_LIB_LINKFLAGS_BODY([intl],[iconv]) dnl because that would add "-liconv" to LIBINTL and LTLIBINTL dnl even if libiconv doesn't exist. AC_LIB_LINKFLAGS_BODY([intl]) AC_CACHE_CHECK([for GNU gettext in libintl], [$gt_func_gnugettext_libintl], [gt_save_CPPFLAGS="$CPPFLAGS" CPPFLAGS="$CPPFLAGS $INCINTL" gt_save_LIBS="$LIBS" LIBS="$LIBS $LIBINTL" dnl Now see whether libintl exists and does not depend on libiconv. 
AC_LINK_IFELSE( [AC_LANG_PROGRAM( [[ #include #ifndef __GNU_GETTEXT_SUPPORTED_REVISION extern int _nl_msg_cat_cntr; extern #ifdef __cplusplus "C" #endif const char *_nl_expand_alias (const char *); #define __GNU_GETTEXT_SYMBOL_EXPRESSION (_nl_msg_cat_cntr + *_nl_expand_alias ("")) #else #define __GNU_GETTEXT_SYMBOL_EXPRESSION 0 #endif $gt_revision_test_code ]], [[ bindtextdomain ("", ""); return * gettext ("")$gt_expression_test_code + __GNU_GETTEXT_SYMBOL_EXPRESSION ]])], [eval "$gt_func_gnugettext_libintl=yes"], [eval "$gt_func_gnugettext_libintl=no"]) dnl Now see whether libintl exists and depends on libiconv. if { eval "gt_val=\$$gt_func_gnugettext_libintl"; test "$gt_val" != yes; } && test -n "$LIBICONV"; then LIBS="$LIBS $LIBICONV" AC_LINK_IFELSE( [AC_LANG_PROGRAM( [[ #include #ifndef __GNU_GETTEXT_SUPPORTED_REVISION extern int _nl_msg_cat_cntr; extern #ifdef __cplusplus "C" #endif const char *_nl_expand_alias (const char *); #define __GNU_GETTEXT_SYMBOL_EXPRESSION (_nl_msg_cat_cntr + *_nl_expand_alias ("")) #else #define __GNU_GETTEXT_SYMBOL_EXPRESSION 0 #endif $gt_revision_test_code ]], [[ bindtextdomain ("", ""); return * gettext ("")$gt_expression_test_code + __GNU_GETTEXT_SYMBOL_EXPRESSION ]])], [LIBINTL="$LIBINTL $LIBICONV" LTLIBINTL="$LTLIBINTL $LTLIBICONV" eval "$gt_func_gnugettext_libintl=yes" ]) fi CPPFLAGS="$gt_save_CPPFLAGS" LIBS="$gt_save_LIBS"]) fi dnl If an already present or preinstalled GNU gettext() is found, dnl use it. But if this macro is used in GNU gettext, and GNU dnl gettext is already preinstalled in libintl, we update this dnl libintl. (Cf. the install rule in intl/Makefile.in.) if { eval "gt_val=\$$gt_func_gnugettext_libc"; test "$gt_val" = "yes"; } \ || { { eval "gt_val=\$$gt_func_gnugettext_libintl"; test "$gt_val" = "yes"; } \ && test "$PACKAGE" != gettext-runtime \ && test "$PACKAGE" != gettext-tools; }; then gt_use_preinstalled_gnugettext=yes else dnl Reset the values set by searching for libintl. 
LIBINTL= LTLIBINTL= INCINTL= fi ifelse(gt_included_intl, yes, [ if test "$gt_use_preinstalled_gnugettext" != "yes"; then dnl GNU gettext is not found in the C library. dnl Fall back on included GNU gettext library. nls_cv_use_gnu_gettext=yes fi fi if test "$nls_cv_use_gnu_gettext" = "yes"; then dnl Mark actions used to generate GNU NLS library. BUILD_INCLUDED_LIBINTL=yes USE_INCLUDED_LIBINTL=yes LIBINTL="ifelse([$3],[],\${top_builddir}/intl,[$3])/libintl.[]gt_libtool_suffix_prefix[]a $LIBICONV $LIBTHREAD" LTLIBINTL="ifelse([$3],[],\${top_builddir}/intl,[$3])/libintl.[]gt_libtool_suffix_prefix[]a $LTLIBICONV $LTLIBTHREAD" LIBS=`echo " $LIBS " | sed -e 's/ -lintl / /' -e 's/^ //' -e 's/ $//'` fi CATOBJEXT= if test "$gt_use_preinstalled_gnugettext" = "yes" \ || test "$nls_cv_use_gnu_gettext" = "yes"; then dnl Mark actions to use GNU gettext tools. CATOBJEXT=.gmo fi ]) if test -n "$INTL_MACOSX_LIBS"; then if test "$gt_use_preinstalled_gnugettext" = "yes" \ || test "$nls_cv_use_gnu_gettext" = "yes"; then dnl Some extra flags are needed during linking. 
LIBINTL="$LIBINTL $INTL_MACOSX_LIBS" LTLIBINTL="$LTLIBINTL $INTL_MACOSX_LIBS" fi fi if test "$gt_use_preinstalled_gnugettext" = "yes" \ || test "$nls_cv_use_gnu_gettext" = "yes"; then AC_DEFINE([ENABLE_NLS], [1], [Define to 1 if translation of program messages to the user's native language is requested.]) else USE_NLS=no fi fi AC_MSG_CHECKING([whether to use NLS]) AC_MSG_RESULT([$USE_NLS]) if test "$USE_NLS" = "yes"; then AC_MSG_CHECKING([where the gettext function comes from]) if test "$gt_use_preinstalled_gnugettext" = "yes"; then if { eval "gt_val=\$$gt_func_gnugettext_libintl"; test "$gt_val" = "yes"; }; then gt_source="external libintl" else gt_source="libc" fi else gt_source="included intl directory" fi AC_MSG_RESULT([$gt_source]) fi if test "$USE_NLS" = "yes"; then if test "$gt_use_preinstalled_gnugettext" = "yes"; then if { eval "gt_val=\$$gt_func_gnugettext_libintl"; test "$gt_val" = "yes"; }; then AC_MSG_CHECKING([how to link with libintl]) AC_MSG_RESULT([$LIBINTL]) AC_LIB_APPENDTOVAR([CPPFLAGS], [$INCINTL]) fi dnl For backward compatibility. Some packages may be using this. AC_DEFINE([HAVE_GETTEXT], [1], [Define if the GNU gettext() function is already present or preinstalled.]) AC_DEFINE([HAVE_DCGETTEXT], [1], [Define if the GNU dcgettext() function is already present or preinstalled.]) fi dnl We need to process the po/ directory. POSUB=po fi ifelse(gt_included_intl, yes, [ dnl If this is used in GNU gettext we have to set BUILD_INCLUDED_LIBINTL dnl to 'yes' because some of the testsuite requires it. if test "$PACKAGE" = gettext-runtime || test "$PACKAGE" = gettext-tools; then BUILD_INCLUDED_LIBINTL=yes fi dnl Make all variables we use known to autoconf. AC_SUBST([BUILD_INCLUDED_LIBINTL]) AC_SUBST([USE_INCLUDED_LIBINTL]) AC_SUBST([CATOBJEXT]) dnl For backward compatibility. Some configure.ins may be using this. nls_cv_header_intl= nls_cv_header_libgt= dnl For backward compatibility. Some Makefiles may be using this. 
DATADIRNAME=share AC_SUBST([DATADIRNAME]) dnl For backward compatibility. Some Makefiles may be using this. INSTOBJEXT=.mo AC_SUBST([INSTOBJEXT]) dnl For backward compatibility. Some Makefiles may be using this. GENCAT=gencat AC_SUBST([GENCAT]) dnl For backward compatibility. Some Makefiles may be using this. INTLOBJS= if test "$USE_INCLUDED_LIBINTL" = yes; then INTLOBJS="\$(GETTOBJS)" fi AC_SUBST([INTLOBJS]) dnl Enable libtool support if the surrounding package wishes it. INTL_LIBTOOL_SUFFIX_PREFIX=gt_libtool_suffix_prefix AC_SUBST([INTL_LIBTOOL_SUFFIX_PREFIX]) ]) dnl For backward compatibility. Some Makefiles may be using this. INTLLIBS="$LIBINTL" AC_SUBST([INTLLIBS]) dnl Make all documented variables known to autoconf. AC_SUBST([LIBINTL]) AC_SUBST([LTLIBINTL]) AC_SUBST([POSUB]) ]) dnl gt_NEEDS_INIT ensures that the gt_needs variable is initialized. m4_define([gt_NEEDS_INIT], [ m4_divert_text([DEFAULTS], [gt_needs=]) m4_define([gt_NEEDS_INIT], []) ]) dnl Usage: AM_GNU_GETTEXT_NEED([NEEDSYMBOL]) AC_DEFUN([AM_GNU_GETTEXT_NEED], [ m4_divert_text([INIT_PREPARE], [gt_needs="$gt_needs $1"]) ]) dnl Usage: AM_GNU_GETTEXT_VERSION([gettext-version]) AC_DEFUN([AM_GNU_GETTEXT_VERSION], []) dnl Usage: AM_GNU_GETTEXT_REQUIRE_VERSION([gettext-version]) AC_DEFUN([AM_GNU_GETTEXT_REQUIRE_VERSION], []) dar-2.6.8/m4/lib-prefix.m40000644000175000017520000002042213617552277012062 00000000000000# lib-prefix.m4 serial 7 (gettext-0.18) dnl Copyright (C) 2001-2005, 2008-2016 Free Software Foundation, Inc. dnl This file is free software; the Free Software Foundation dnl gives unlimited permission to copy and/or distribute it, dnl with or without modifications, as long as this notice is preserved. dnl From Bruno Haible. dnl AC_LIB_ARG_WITH is synonymous to AC_ARG_WITH in autoconf-2.13, and dnl similar to AC_ARG_WITH in autoconf 2.52...2.57 except that is doesn't dnl require excessive bracketing. 
ifdef([AC_HELP_STRING], [AC_DEFUN([AC_LIB_ARG_WITH], [AC_ARG_WITH([$1],[[$2]],[$3],[$4])])], [AC_DEFUN([AC_][LIB_ARG_WITH], [AC_ARG_WITH([$1],[$2],[$3],[$4])])]) dnl AC_LIB_PREFIX adds to the CPPFLAGS and LDFLAGS the flags that are needed dnl to access previously installed libraries. The basic assumption is that dnl a user will want packages to use other packages he previously installed dnl with the same --prefix option. dnl This macro is not needed if only AC_LIB_LINKFLAGS is used to locate dnl libraries, but is otherwise very convenient. AC_DEFUN([AC_LIB_PREFIX], [ AC_BEFORE([$0], [AC_LIB_LINKFLAGS]) AC_REQUIRE([AC_PROG_CC]) AC_REQUIRE([AC_CANONICAL_HOST]) AC_REQUIRE([AC_LIB_PREPARE_MULTILIB]) AC_REQUIRE([AC_LIB_PREPARE_PREFIX]) dnl By default, look in $includedir and $libdir. use_additional=yes AC_LIB_WITH_FINAL_PREFIX([ eval additional_includedir=\"$includedir\" eval additional_libdir=\"$libdir\" ]) AC_LIB_ARG_WITH([lib-prefix], [ --with-lib-prefix[=DIR] search for libraries in DIR/include and DIR/lib --without-lib-prefix don't search for libraries in includedir and libdir], [ if test "X$withval" = "Xno"; then use_additional=no else if test "X$withval" = "X"; then AC_LIB_WITH_FINAL_PREFIX([ eval additional_includedir=\"$includedir\" eval additional_libdir=\"$libdir\" ]) else additional_includedir="$withval/include" additional_libdir="$withval/$acl_libdirstem" fi fi ]) if test $use_additional = yes; then dnl Potentially add $additional_includedir to $CPPFLAGS. dnl But don't add it dnl 1. if it's the standard /usr/include, dnl 2. if it's already present in $CPPFLAGS, dnl 3. if it's /usr/local/include and we are using GCC on Linux, dnl 4. if it doesn't exist as a directory. 
if test "X$additional_includedir" != "X/usr/include"; then haveit= for x in $CPPFLAGS; do AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) if test "X$x" = "X-I$additional_includedir"; then haveit=yes break fi done if test -z "$haveit"; then if test "X$additional_includedir" = "X/usr/local/include"; then if test -n "$GCC"; then case $host_os in linux* | gnu* | k*bsd*-gnu) haveit=yes;; esac fi fi if test -z "$haveit"; then if test -d "$additional_includedir"; then dnl Really add $additional_includedir to $CPPFLAGS. CPPFLAGS="${CPPFLAGS}${CPPFLAGS:+ }-I$additional_includedir" fi fi fi fi dnl Potentially add $additional_libdir to $LDFLAGS. dnl But don't add it dnl 1. if it's the standard /usr/lib, dnl 2. if it's already present in $LDFLAGS, dnl 3. if it's /usr/local/lib and we are using GCC on Linux, dnl 4. if it doesn't exist as a directory. if test "X$additional_libdir" != "X/usr/$acl_libdirstem"; then haveit= for x in $LDFLAGS; do AC_LIB_WITH_FINAL_PREFIX([eval x=\"$x\"]) if test "X$x" = "X-L$additional_libdir"; then haveit=yes break fi done if test -z "$haveit"; then if test "X$additional_libdir" = "X/usr/local/$acl_libdirstem"; then if test -n "$GCC"; then case $host_os in linux*) haveit=yes;; esac fi fi if test -z "$haveit"; then if test -d "$additional_libdir"; then dnl Really add $additional_libdir to $LDFLAGS. LDFLAGS="${LDFLAGS}${LDFLAGS:+ }-L$additional_libdir" fi fi fi fi fi ]) dnl AC_LIB_PREPARE_PREFIX creates variables acl_final_prefix, dnl acl_final_exec_prefix, containing the values to which $prefix and dnl $exec_prefix will expand at the end of the configure script. AC_DEFUN([AC_LIB_PREPARE_PREFIX], [ dnl Unfortunately, prefix and exec_prefix get only finally determined dnl at the end of configure. 
if test "X$prefix" = "XNONE"; then acl_final_prefix="$ac_default_prefix" else acl_final_prefix="$prefix" fi if test "X$exec_prefix" = "XNONE"; then acl_final_exec_prefix='${prefix}' else acl_final_exec_prefix="$exec_prefix" fi acl_save_prefix="$prefix" prefix="$acl_final_prefix" eval acl_final_exec_prefix=\"$acl_final_exec_prefix\" prefix="$acl_save_prefix" ]) dnl AC_LIB_WITH_FINAL_PREFIX([statement]) evaluates statement, with the dnl variables prefix and exec_prefix bound to the values they will have dnl at the end of the configure script. AC_DEFUN([AC_LIB_WITH_FINAL_PREFIX], [ acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" $1 exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" ]) dnl AC_LIB_PREPARE_MULTILIB creates dnl - a variable acl_libdirstem, containing the basename of the libdir, either dnl "lib" or "lib64" or "lib/64", dnl - a variable acl_libdirstem2, as a secondary possible value for dnl acl_libdirstem, either the same as acl_libdirstem or "lib/sparcv9" or dnl "lib/amd64". AC_DEFUN([AC_LIB_PREPARE_MULTILIB], [ dnl There is no formal standard regarding lib and lib64. dnl On glibc systems, the current practice is that on a system supporting dnl 32-bit and 64-bit instruction sets or ABIs, 64-bit libraries go under dnl $prefix/lib64 and 32-bit libraries go under $prefix/lib. We determine dnl the compiler's default mode by looking at the compiler's library search dnl path. If at least one of its elements ends in /lib64 or points to a dnl directory whose absolute pathname ends in /lib64, we assume a 64-bit ABI. dnl Otherwise we use the default, namely "lib". dnl On Solaris systems, the current practice is that on a system supporting dnl 32-bit and 64-bit instruction sets or ABIs, 64-bit libraries go under dnl $prefix/lib/64 (which is a symlink to either $prefix/lib/sparcv9 or dnl $prefix/lib/amd64) and 32-bit libraries go under $prefix/lib. 
AC_REQUIRE([AC_CANONICAL_HOST]) acl_libdirstem=lib acl_libdirstem2= case "$host_os" in solaris*) dnl See Solaris 10 Software Developer Collection > Solaris 64-bit Developer's Guide > The Development Environment dnl . dnl "Portable Makefiles should refer to any library directories using the 64 symbolic link." dnl But we want to recognize the sparcv9 or amd64 subdirectory also if the dnl symlink is missing, so we set acl_libdirstem2 too. AC_CACHE_CHECK([for 64-bit host], [gl_cv_solaris_64bit], [AC_EGREP_CPP([sixtyfour bits], [ #ifdef _LP64 sixtyfour bits #endif ], [gl_cv_solaris_64bit=yes], [gl_cv_solaris_64bit=no]) ]) if test $gl_cv_solaris_64bit = yes; then acl_libdirstem=lib/64 case "$host_cpu" in sparc*) acl_libdirstem2=lib/sparcv9 ;; i*86 | x86_64) acl_libdirstem2=lib/amd64 ;; esac fi ;; *) searchpath=`(LC_ALL=C $CC -print-search-dirs) 2>/dev/null | sed -n -e 's,^libraries: ,,p' | sed -e 's,^=,,'` if test -n "$searchpath"; then acl_save_IFS="${IFS= }"; IFS=":" for searchdir in $searchpath; do if test -d "$searchdir"; then case "$searchdir" in */lib64/ | */lib64 ) acl_libdirstem=lib64 ;; */../ | */.. ) # Better ignore directories of this form. They are misleading. ;; *) searchdir=`cd "$searchdir" && pwd` case "$searchdir" in */lib64 ) acl_libdirstem=lib64 ;; esac ;; esac fi done IFS="$acl_save_IFS" fi ;; esac test -n "$acl_libdirstem2" || acl_libdirstem2="$acl_libdirstem" ]) dar-2.6.8/m4/iconv.m40000644000175000017520000002207213617552277011142 00000000000000# iconv.m4 serial 19 (gettext-0.18.2) dnl Copyright (C) 2000-2002, 2007-2014, 2016 Free Software Foundation, Inc. dnl This file is free software; the Free Software Foundation dnl gives unlimited permission to copy and/or distribute it, dnl with or without modifications, as long as this notice is preserved. dnl From Bruno Haible. AC_DEFUN([AM_ICONV_LINKFLAGS_BODY], [ dnl Prerequisites of AC_LIB_LINKFLAGS_BODY. 
AC_REQUIRE([AC_LIB_PREPARE_PREFIX]) AC_REQUIRE([AC_LIB_RPATH]) dnl Search for libiconv and define LIBICONV, LTLIBICONV and INCICONV dnl accordingly. AC_LIB_LINKFLAGS_BODY([iconv]) ]) AC_DEFUN([AM_ICONV_LINK], [ dnl Some systems have iconv in libc, some have it in libiconv (OSF/1 and dnl those with the standalone portable GNU libiconv installed). AC_REQUIRE([AC_CANONICAL_HOST]) dnl for cross-compiles dnl Search for libiconv and define LIBICONV, LTLIBICONV and INCICONV dnl accordingly. AC_REQUIRE([AM_ICONV_LINKFLAGS_BODY]) dnl Add $INCICONV to CPPFLAGS before performing the following checks, dnl because if the user has installed libiconv and not disabled its use dnl via --without-libiconv-prefix, he wants to use it. The first dnl AC_LINK_IFELSE will then fail, the second AC_LINK_IFELSE will succeed. am_save_CPPFLAGS="$CPPFLAGS" AC_LIB_APPENDTOVAR([CPPFLAGS], [$INCICONV]) AC_CACHE_CHECK([for iconv], [am_cv_func_iconv], [ am_cv_func_iconv="no, consider installing GNU libiconv" am_cv_lib_iconv=no AC_LINK_IFELSE( [AC_LANG_PROGRAM( [[ #include #include ]], [[iconv_t cd = iconv_open("",""); iconv(cd,NULL,NULL,NULL,NULL); iconv_close(cd);]])], [am_cv_func_iconv=yes]) if test "$am_cv_func_iconv" != yes; then am_save_LIBS="$LIBS" LIBS="$LIBS $LIBICONV" AC_LINK_IFELSE( [AC_LANG_PROGRAM( [[ #include #include ]], [[iconv_t cd = iconv_open("",""); iconv(cd,NULL,NULL,NULL,NULL); iconv_close(cd);]])], [am_cv_lib_iconv=yes] [am_cv_func_iconv=yes]) LIBS="$am_save_LIBS" fi ]) if test "$am_cv_func_iconv" = yes; then AC_CACHE_CHECK([for working iconv], [am_cv_func_iconv_works], [ dnl This tests against bugs in AIX 5.1, AIX 6.1..7.1, HP-UX 11.11, dnl Solaris 10. 
am_save_LIBS="$LIBS" if test $am_cv_lib_iconv = yes; then LIBS="$LIBS $LIBICONV" fi am_cv_func_iconv_works=no for ac_iconv_const in '' 'const'; do AC_RUN_IFELSE( [AC_LANG_PROGRAM( [[ #include #include #ifndef ICONV_CONST # define ICONV_CONST $ac_iconv_const #endif ]], [[int result = 0; /* Test against AIX 5.1 bug: Failures are not distinguishable from successful returns. */ { iconv_t cd_utf8_to_88591 = iconv_open ("ISO8859-1", "UTF-8"); if (cd_utf8_to_88591 != (iconv_t)(-1)) { static ICONV_CONST char input[] = "\342\202\254"; /* EURO SIGN */ char buf[10]; ICONV_CONST char *inptr = input; size_t inbytesleft = strlen (input); char *outptr = buf; size_t outbytesleft = sizeof (buf); size_t res = iconv (cd_utf8_to_88591, &inptr, &inbytesleft, &outptr, &outbytesleft); if (res == 0) result |= 1; iconv_close (cd_utf8_to_88591); } } /* Test against Solaris 10 bug: Failures are not distinguishable from successful returns. */ { iconv_t cd_ascii_to_88591 = iconv_open ("ISO8859-1", "646"); if (cd_ascii_to_88591 != (iconv_t)(-1)) { static ICONV_CONST char input[] = "\263"; char buf[10]; ICONV_CONST char *inptr = input; size_t inbytesleft = strlen (input); char *outptr = buf; size_t outbytesleft = sizeof (buf); size_t res = iconv (cd_ascii_to_88591, &inptr, &inbytesleft, &outptr, &outbytesleft); if (res == 0) result |= 2; iconv_close (cd_ascii_to_88591); } } /* Test against AIX 6.1..7.1 bug: Buffer overrun. */ { iconv_t cd_88591_to_utf8 = iconv_open ("UTF-8", "ISO-8859-1"); if (cd_88591_to_utf8 != (iconv_t)(-1)) { static ICONV_CONST char input[] = "\304"; static char buf[2] = { (char)0xDE, (char)0xAD }; ICONV_CONST char *inptr = input; size_t inbytesleft = 1; char *outptr = buf; size_t outbytesleft = 1; size_t res = iconv (cd_88591_to_utf8, &inptr, &inbytesleft, &outptr, &outbytesleft); if (res != (size_t)(-1) || outptr - buf > 1 || buf[1] != (char)0xAD) result |= 4; iconv_close (cd_88591_to_utf8); } } #if 0 /* This bug could be worked around by the caller. 
*/ /* Test against HP-UX 11.11 bug: Positive return value instead of 0. */ { iconv_t cd_88591_to_utf8 = iconv_open ("utf8", "iso88591"); if (cd_88591_to_utf8 != (iconv_t)(-1)) { static ICONV_CONST char input[] = "\304rger mit b\366sen B\374bchen ohne Augenma\337"; char buf[50]; ICONV_CONST char *inptr = input; size_t inbytesleft = strlen (input); char *outptr = buf; size_t outbytesleft = sizeof (buf); size_t res = iconv (cd_88591_to_utf8, &inptr, &inbytesleft, &outptr, &outbytesleft); if ((int)res > 0) result |= 8; iconv_close (cd_88591_to_utf8); } } #endif /* Test against HP-UX 11.11 bug: No converter from EUC-JP to UTF-8 is provided. */ if (/* Try standardized names. */ iconv_open ("UTF-8", "EUC-JP") == (iconv_t)(-1) /* Try IRIX, OSF/1 names. */ && iconv_open ("UTF-8", "eucJP") == (iconv_t)(-1) /* Try AIX names. */ && iconv_open ("UTF-8", "IBM-eucJP") == (iconv_t)(-1) /* Try HP-UX names. */ && iconv_open ("utf8", "eucJP") == (iconv_t)(-1)) result |= 16; return result; ]])], [am_cv_func_iconv_works=yes], , [case "$host_os" in aix* | hpux*) am_cv_func_iconv_works="guessing no" ;; *) am_cv_func_iconv_works="guessing yes" ;; esac]) test "$am_cv_func_iconv_works" = no || break done LIBS="$am_save_LIBS" ]) case "$am_cv_func_iconv_works" in *no) am_func_iconv=no am_cv_lib_iconv=no ;; *) am_func_iconv=yes ;; esac else am_func_iconv=no am_cv_lib_iconv=no fi if test "$am_func_iconv" = yes; then AC_DEFINE([HAVE_ICONV], [1], [Define if you have the iconv() function and it works.]) fi if test "$am_cv_lib_iconv" = yes; then AC_MSG_CHECKING([how to link with libiconv]) AC_MSG_RESULT([$LIBICONV]) else dnl If $LIBICONV didn't lead to a usable library, we don't need $INCICONV dnl either. CPPFLAGS="$am_save_CPPFLAGS" LIBICONV= LTLIBICONV= fi AC_SUBST([LIBICONV]) AC_SUBST([LTLIBICONV]) ]) dnl Define AM_ICONV using AC_DEFUN_ONCE for Autoconf >= 2.64, in order to dnl avoid warnings like dnl "warning: AC_REQUIRE: `AM_ICONV' was expanded before it was required". 
dnl This is tricky because of the way 'aclocal' is implemented: dnl - It requires defining an auxiliary macro whose name ends in AC_DEFUN. dnl Otherwise aclocal's initial scan pass would miss the macro definition. dnl - It requires a line break inside the AC_DEFUN_ONCE and AC_DEFUN expansions. dnl Otherwise aclocal would emit many "Use of uninitialized value $1" dnl warnings. m4_define([gl_iconv_AC_DEFUN], m4_version_prereq([2.64], [[AC_DEFUN_ONCE( [$1], [$2])]], [m4_ifdef([gl_00GNULIB], [[AC_DEFUN_ONCE( [$1], [$2])]], [[AC_DEFUN( [$1], [$2])]])])) gl_iconv_AC_DEFUN([AM_ICONV], [ AM_ICONV_LINK if test "$am_cv_func_iconv" = yes; then AC_MSG_CHECKING([for iconv declaration]) AC_CACHE_VAL([am_cv_proto_iconv], [ AC_COMPILE_IFELSE( [AC_LANG_PROGRAM( [[ #include #include extern #ifdef __cplusplus "C" #endif #if defined(__STDC__) || defined(_MSC_VER) || defined(__cplusplus) size_t iconv (iconv_t cd, char * *inbuf, size_t *inbytesleft, char * *outbuf, size_t *outbytesleft); #else size_t iconv(); #endif ]], [[]])], [am_cv_proto_iconv_arg1=""], [am_cv_proto_iconv_arg1="const"]) am_cv_proto_iconv="extern size_t iconv (iconv_t cd, $am_cv_proto_iconv_arg1 char * *inbuf, size_t *inbytesleft, char * *outbuf, size_t *outbytesleft);"]) am_cv_proto_iconv=`echo "[$]am_cv_proto_iconv" | tr -s ' ' | sed -e 's/( /(/'` AC_MSG_RESULT([ $am_cv_proto_iconv]) AC_DEFINE_UNQUOTED([ICONV_CONST], [$am_cv_proto_iconv_arg1], [Define as const if the declaration of iconv() needs const.]) dnl Also substitute ICONV_CONST in the gnulib generated . 
m4_ifdef([gl_ICONV_H_DEFAULTS], [AC_REQUIRE([gl_ICONV_H_DEFAULTS]) if test -n "$am_cv_proto_iconv_arg1"; then ICONV_CONST="const" fi ]) fi ]) dar-2.6.8/Makefile.am0000444000175000017520000000224413617552300011256 00000000000000####################################################################### # dar - disk archive - a backup/restoration program # Copyright (C) 2002-2020 Denis Corbin # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # to contact the author : http://dar.linux.free.fr/email.html ####################################################################### SUBDIRS = man src doc misc po dist_noinst_DATA = INSTALL README THANKS TODO AUTHORS COPYING ChangeLog NEWS ABOUT-NLS ACLOCAL_AMFLAGS = -I m4 EXTRA_DIST = config.rpath m4/ChangeLog dar-2.6.8/THANKS0000644000175000017520000005060313617551261010146 00000000000000version 2.7.8 -------------- Thanks to Andy Capo for feedback and bug reports version 2.6.7 -------------- Thanks to John Bayh for feedback and bug report, thanks again to Vladimir Mosgalin for bug report. version 2.6.6 -------------- Thanks to Ivan Savcic for bug report. Thanks to Richard Gray for bug report and clear scenario to reproduce it. Thanks to Andy Capo for feedback. 
version 2.6.5 -------------- Thanks to Adam at sourceforge for bug report and clear scenario to reproduce it version 2.6.4 -------------- Thanks to Gulikosa for feedback and bug report and documentation fix/feedback version 2.6.3 -------------- Thanks to Graham Cobb for his hint about mathematical approximations of square and cube roots functions. Thanks to Andy for his feature improvement ideas and testings of binary delta feature (more precisely delta signatures). version 2.6.2 -------------- Thanks to Mike on dar-support mailing list for bug report. version 2.6.1 -------------- Thanks to David Niklas for contribution upgrading the /etc/darrc list of file types that do not worth compressing. Big thank to Gulikosa for feedback, bug reports, remarks and improvement, suggestions and discussions. Thanks to Ryan Schmidt the maintainer of dar in MacPorts for his feedback. Last but not least, thanks to Tobias Specht providing gdar software and support for years now and for having helped fixing the inconsistencies in libdar API that avoided compiling gdar against the previous release. version 2.6.0 -------------- Many Thanks to Gulikoza who helped troubleshooting around the librsync library which I found poorly documented. Thanks to "Lademann" at sourceforge for the proposed patch adapted and integrated in this release (--ignored-as-symlink new feature). A big thank to Zach Adams for feedback about security and encryption related possible improvements. Thanks to Tobias Specht for feedback and for providing gdar a graphical frontend to libdar. Note that gdar will not yet compile with dar 2.6.0, I will fix that with Tobias's help with a next dar/libdar 2.6.x releases in a few weeks. version 2.5.22 -------------- Thanks to Jeremy (jjfaith@sourcefroge) for bug report, Thanks to David Niklas for feedback version 2.5.21 -------------- Thanks to Mike on dar-support mailing-list for bug report and clear scenario to be able to reproduce it. 
version 2.5.19 -------------- Thanks to Tobias Specht for feedback version 2.5.18 -------------- Thanks again to Christoph Anton Mitterer for feedback. Thanks to Michele for feedback about EA restoration possible improvement. version 2.5.17 -------------- Thanks to Calestyo for bug report and feedbacks, thanks to Mateusz Konieczny for feedback. Thanks to Felix Leimbach for bug report. Thanks to Blandon Ray for reporting compilation problem under Solaris. Thanks to Andrea Vai for feedback. Thanks to David Kidd for bug report. version 2.5.16 -------------- Thanks to Steve Bergman for bug report and to Mihai Moldovan for his help troubleshooting this issue. version 2.5.15 -------------- Thanks to Marco Falda for bug report. version 2.5.14 -------------- Double thanks to Eric Junkermann for bug feedback. Thanks to Volker Wysk for feedback. Thanks to Tsukasa for feedback, bug report and help improving the dar_split command. version 2.5.13 -------------- Thanks to Volker Wysk for bug report, thanks to Neil Darlow for help in dar-support mailing list and to Ralph Slooten for point me to missing email I missed after having been unsubscribed from the list. version 2.5.12 -------------- Thanks once again to Andrew Loftus for help solving a bug, complex and difficult to reproduce. Thanks to Ron Lockwood-Childs for investigations, feedback and suggestion about dar behavior with negative dates. version 2.5.11 -------------- Thanks to dcb314 at sourceforge for bug report. Thanks to Jer Fort for bug report. Thanks to Andrew Loftus for bug report. version 2.5.10 -------------- Thanks to Jim Avera once again for bug report. version 2.5.9 -------------- Thanks to Gabor (aka HawajRambo2) for bug report and script to reproduce it and once again, thanks to Johan de Jong for bug report. Thanks to Giovanni Mariani at Rosa Linux ( http://wiki.rosalab.ru/en/ ) for bug report. version 2.5.8 -------------- Thanks to James F. for feedback and bug report. 
Thanks to hawajrambo2 for bug report and scenario to reproduce it. version 2.5.7 -------------- Thanks to Jim Avera for bug report with clear scenario to reproduce it version 2.5.6 -------------- Thanks to Johan de Jong for bug report. version 2.5.5 -------------- Thanks to Georg Sauthoff for feedback version 2.5.4 -------------- Thanks to Tobias Specht for bug report. Thanks to Jared Jennings for bug report and for the time saving troubleshooting done. Thanks to bulwersator for feedback. Thanks Ian Stakenvicius and Lars Wendler at Gentoo for feedback and patches. version 2.5.3 -------------- Thanks to Neil Darlow for feedback. Thanks to xloem at sourceforge for bugs report, troubleshooting and fixes, great job! Thanks to xloem again for the proposed patches. Thanks to Matteo Sasso for bug report. Thanks to Gulikoza for improvement proposal and patch. version 2.5.2 -------------- Thanks to Peter Colberg for feedback and bug report version 2.5.0 -------------- Thanks to Gour for help in testing dar_split with multi-volume dar archive on tapes. version 2.4.22 -------------- Thanks to Piviul for bug report. version 2.4.21 -------------- Thanks to Alexandre DENIS and Peter Schillinger for helping figuring out the problem met with dar and FUSE filesystems. Thanks for J. Roeleveld for feedback. Thanks to orogor and bulwersator at sourceforge for feedback. Thanks to Chan Wilson for bug report. version 2.4.20 -------------- Thanks to Mike P for bug report version 2.4.19 -------------- Thanks to Ivan Krylov for bug analysis and Gour for feedback. Thanks to Dave Serls for feedback and bug report. version 2.4.18 -------------- Thanks to Scott DeRousse for feedback and patch to have libdar working when libgcrypt is used in FIPS mode. version 2.4.17 -------------- version 2.4.16 -------------- Thanks to Georg Sauthoff for clear bug report and to Oleksandr Chumachenko for patches against bashisms in doc/sample scripts. 
version 2.4.15 -------------- Thanks to Ryan Schmidt for feedback and typo corrections. Thanks to Jer Fort for bug report and again to Neil Darlow for providing access to a FreeBSD system. version 2.4.14 -------------- Thanks to Neil Darlow again for help with FreeBSD systems. version 2.4.13 -------------- Thanks to Neil Darlow for providing access to his FreeBSD server in order to solve compilation issues using clang and compilation warning generated by libgcrypt included headers. Thanks to lopezibanez, alexander-a (aka pansa) and Mike Lenzen for bugs report. version 2.4.12 -------------- Thanks to Mike Lenzenn for bug report. Thanks to Misty de Meo, Fabian Stanke, Ryan Schmidt and Jan Gossmann for their feedback and patches about compilation and message display problems under Mac OS X. version 2.4.11 -------------- Thanks to Dan on dar-support mailing-list for feedback. Thanks to Aymeric Petit for feedback. Thanks to Alexey Dubinin for bug report. Last, Thanks to Thomas Jarosch for his patches generated using cppcheck tool. version 2.4.10 -------------- Thanks to Philippe Naudin for bug report and help. Thanks to Jim Avera for bug report. version 2.4.9 -------------- Thanks to "JBThiel" for bug report on dar-support mailing-list. Thanks to Kevin Wormington for feedback and tests. Thanks to Neil Darlow for feedbacks and patch about syntax error in configure script. Thanks to Aleksey Naumov for feedback, bug report and help. Thanks to Martin Zuther and Marcin Garski for bug report with provided test scenario to reproduce the bug. Thanks to Brian May at Debian and Graham Cobb for feedback and proposal for a modified behavior of dar_manager. version 2.4.8 -------------- Thanks to Mario for help investigating the reason of strange messages reported by libdar whe virtual memory is exhausted. Thanks to "Fredquerty2003" (aka Richard) and Jeremy Faith for bugs report, and to Mario Metzler for bug report and help resolving the reported bug. 
Last, thanks again to Philippe Naudin this time for improvement suggestion. version 2.4.7 -------------- Thanks to Vladimir Mosgalin (once again) for bug report and proposed fix. Thanks to Gilles Hamel and David Palmer for bug report. version 2.4.6 -------------- Thanks to Per Grahn for bug report and problem analysis. Thanks to Brian May I forgot so many times to underline his constribution by porting dar/libdar to Debian for that many years. version 2.4.5 -------------- Thanks to Piotr Domanski and Paolo Sala for bug report. Thanks to Andreas Wolff for feedback and bug fix. A Big Thank to Neil Darlow who provided a great support in solving a bug in libdar that only expressed on ArchLinux and dynamic binary and infinint mode when used at the same time. version 2.4.4 -------------- Thanks again to Jurgen Nagler-Ihlein for bug report. version 2.4.3 -------------- Thanks to Jurgen Nagler-Ihlein for bug report. Thanks to Stefan J. Betz, Philippe Naudin, Vladimir Mosgalin and Andrea Vai for help in troubleshooting dar's memory usage. version 2.4.2 -------------- Thanks to Philippe Naudin for feedback and tests. Thanks to Jurgen Nagler-Ihlein for reports and compilation fix. Thanks to Hauke Laging for feedback. version 2.4.1 -------------- Thanks again to Peter Ladgren for Swedish translation. Thanks to Giuseppe Borzi for feedback. Thanks to Jurgen Nagler-Ihlein for feedback and help in troubleshooting. version 2.4.0 -------------- Thanks to Wiebe Cazemier for feedback and patch for documentation. Thanks to Dave Vasilevsky for bug fix and feedback. Thanks to Erik Wasser for patch. Thanks to Sony Electronics Inc. Thanks to T. Leske for his proposal of using libgcrypt and patch against 2.3.x versions. Thanks to Vladimir Mosgalin for feedback and testing on Solaris and several other Unix systems. Thanks again to Peter Landgren for Swedish translation. version 2.3.12 -------------- Thanks to Angela Kahealani for feedback. 
version 2.3.11 -------------- Thanks to Douglas for bug report and feedback. Thanks to Jean-Christophe Arnu and Jonathan (via Sourceforge) for bug reports. version 2.3.10 -------------- Thanks to Patrick G. for feedback and help solving the bug [SF 2922417], and to Jan-Pascal van Best for his patch. Thanks to TONG for feedback and bug report. version 2.3.9 -------------- Thanks to Jan-Pascal van Best for bug report and patch. version 2.3.8 -------------- Thanks to smartyxt for bug report. A big thank you to Szymon Juraszczyk for double bug report and detailed scenario to reproduce them. version 2.3.7 -------------- Thanks to Don and Andy Shellam for feedback. B0mber for bug report. Thanks to timkoder for bug report about documentation. version 2.3.6 -------------- Thanks to Marco Atzeri for feedback. Thanks to lucatrv for feedback. Thanks once again to Jo (aka Jo - ex-bart) for bug feedback. Thanks to Dorphell for bug report. version 2.3.5 -------------- Thanks to Sonni Norlov for optimization patch. Thanks again to Peter Landgren for Swedish translation and bug report. Thanks also to David Fries for patch. version 2.3.4 -------------- Thanks once again to Jo (aka Jo - ex-bart) for feedback and clean scenario to reproduce an unexpected behavior. Thanks to Norman J Davis for bug and patch feedback. Johnathan Burchill for feedback. Thanks to Dwayne C. Litzenberger for feedback and patch. version 2.3.3 -------------- Thanks to Mike P. for bug feedback. Thanks to "Jo - ex-bart" for feedback and very useful scenario to reproduce the reported bugs. Thanks to "Marius" for feedback. Thanks to Veysel Ozer for feedback and patch. Thanks to tianmiao for feedback and bug report. Thanks to Oliver Winker for feedback and investigations. Thanks to Andrea Palazzi for patch. version 2.3.2 -------------- Thanks to Thomas Jacob for feedback and patch about bug in Native Language Support and also for feedback and investigations about the --recent bug. 
Thanks to Dave Vasilevsky for bug report and design considerations. Thanks to Richard Fish for feedback, bug report and patch. Thanks to Stephan for feedback about Guus Jansman's Midnight Commander's plugin for Dar. Thanks also to Guus Jansman for his work. Thanks to Piotr Oh for feedback. version 2.3.1 -------------- Thanks to Adam Fuksa for bug feedback. Thanks to Nick Alcock and to Wiebe Cazemier for feedback and patch. Thanks to Nebojsa for feedback. Thanks to Brian May (@Debian) and Alexei Sheplyakov for feedback. Thanks also to Marko Lavikainen for feedback. version 2.3.0 -------------- Thanks to Wesley Leggette again for his feedback and for his many proposed patches for new features. Thanks to Dave Vasilevsky for his patch opening support for EA and file forks under MacOS X. Thanks to "ex-bart" for feedback and very interesting feature request. Thanks to Uwe Freese for feedback. Thanks to Aaron D. Marasco for feedback. And as always, thanks to Johnathan Burchill (author of kdar) for appreciated feedback remarks and proposals. Thanks to Michael Roitzsch for bug fix. A special Thanks goes to Michael Roitzsch (again him ;-)), Rodd Zurcher, Helmut Jarausch and Markus Kamp (again him ;-)) for helping make this release stable thanks to their feedback and participation to the pre-release phase. version 2.2.7 ------------- Thanks to Markus Kamp for German Translation version 2.2.6 ------------- Thanks to Gerald Britton and Halfgaar (Wiebe Cazemier) for reporting bug problem. version 2.2.5 -------------- Thanks to "Steffe" for feedback and suggestion, to Andrey Yasniy and to Nick Alcock for feedback and patches. version 2.2.4 -------------- Thanks to Martin Ehmsen for feedback and investigations. Thanks to Roi Rodriguez Mendez & Mauro Silvosa Rivera for sharing their backup script. version 2.2.3 -------------- Thanks to Andrew(ziem) for feedback. Thanks to Bob Barry for his script giving a raw estimation of the memory requirement for dar. 
Thanks again to Wesley Leggette for feedback. Thanks to Jakub Holy for feedback and providing some helper scripts for dar. Thanks also to Brian May (Debian maintainer for dar) for transmitting me feedback and patches. Thanks to Martin Jost for patch and feedback about the API tutorial. Thanks to Jeremy C. Reed and to HansS713 for feedback. version 2.2.2 -------------- Thanks to Ryan Vietri for feedback. Thanks to Ralph Slooten for feedback, and tutorial spelling fix. Thanks to Juergen Menden for feedback and patch and to Peter Landgren for feedback and Swedish translation, thanks to Olive for feedback and patience ;-). Last, a Very Big Thank to Wesley Leggette again for his many feedbacks, patches and constructive remarks. version 2.2.1 -------------- Thanks to Steve Evans for feedback and solution about bug #30. A big thank to P-O Yliniemi (alias Peo) for feedback and for providing me the necessary means to investigate the problem of the "stack overflow" (compiler problem). Thanks to Karl W. Weigel for reporting bug #31 and for his useful feedback. A Big Thanks to Thomas Bettler for feedback about bug #32 which was difficult to reproduce and fix. Thanks to Matthias Keller for reporting bug #33 and to Joe Harvell for reporting bug #34. version 2.2.0 -------------- Thanks to "Gandalf" for his feedback and the "cache" implementation to decrease context switches when dar is run through ssh for differential backup. Thanks to Lars H. Rohwedder for a very interesting discussion feedback and suggestion for the -aSI/-abinary options, thanks also to him for the information provided and for the implementation suggestions about strong encryption. Thanks to "nedkonz" for his idea of the "on-fly" isolation. Thanks to Holger Hoffstaette for feedback, Thanks to Uwe Freese for feedback about dar_manager, and a big Thanks to Tristan Rhodes who helps me answering support requests. A special thanks to Matthew Caron for his sense of humor and interesting script. 
Thanks also to Brian Daniels for feedback about a compilation problem under x86_64 and to Todd Vierling for giving feedback and sending patches to make dar possible to compile on Interix system. Thanks to David Gervasoni for having translated to Italian Grzegorz Adam Hankiewicz's mini-howto. Thanks also to Christian Neumann for API design suggestions and once again to Johnathan Burchill (the author of KDAR) for API suggestions, ideas and feedback. version 2.1.5 ------------- Thanks to Peter Landgren and Johnathan Burchill for reporting bug #29. Thanks to Brian May and to Travis (Tilley ?) for feedback. version 2.1.4 ------------- Thanks to Marius Tomaschewski, for reporting investigating and fix proposal for bug #28. version 2.1.3 ------------- Jean-Louis Liagre for feedback and help for porting dar to solaris 9. Thanks to Peter Kupec for feedback and for having found bug #26. Thanks to Wesley Leggette for having found bug #25. version 2.1.2 ------------- Many thanks to Omer Enbar for his patience, perseverance, and the patch he proposed that fixed the bug #24. Thanks again to Arcady Genkin for reporting bug #23 version 2.1.1 ------------- Bill P. for feedback. Thanks to Arcady Genkin for reporting bug #21, #22, #23 version 2.1.0 ------------- Thanks to Shanon Denize for its suggestion about -wa option, to Lars Schmidt-Thieme for his suggestion about -as option. And, thanks to "nedkonz" on the historical forum, for feedback, and Nick Alcock for his "tiny" patch and feedback about GNU Coding Standards. Thanks to Tami King for feedback and help. Thanks to Patrick Drechsler for typo error feedback version 2.0.4 -------------- Thanks to Matthew Valites and "nedkonz" for feedback about large file support. 
version 2.0.3 -------------- Thanks to "Gandalf" on the historical forum for having reported bug #20 version 2.0.2 -------------- Thanks to Ruslan Hristov for reporting bug #18 (Sourceforge bug 850279), and thanks to Joshua Neal for reporting bug #19 (Sourceforge bug 851320). version 2.0.1 -------------- Thanks to David Baldwin for reporting bug #17 (Sourceforge bug 837735). version 2.0.0 -------------- Thanks to Jules for his suggestion about alternative memory allocation when a lot of small blocks are asked and are only released at the end of the program, this is the base of the "special allocation". Thanks to Adam Sherman for presenting me Parchive. Thanks to Johnathan Burchill (author of kdar) for his feedback about the libdar API, and help in fixing bugs in pre-release 1, and in release 1.3.0 bug #15. Thanks to Sergei Suyev for feedback. Thanks to Grzegorz Adam Hankiewicz for his mini-howto. Thanks to Peter Kupec for having found and reported bug #16, and for his attention to documentation coherence, and his intensive testing. version 1.3.0 -------------- Thanks to David Rose for having found, identified and proposed a patch for bug #12. Memory leakages in general are difficult to find, thus congratulations ! Thanks to Oliver Brendel for the great help and patience to my many asked tests to solve the bug #13, a bug I could not reproduce. Thanks to John Reynolds for feedback. Thanks also to Rgbtxus for the --flat option need expression. Thanks to Sergio Borghese for reporting bug #11. Nicola Larosa for his suggestions about UPX (which will be considered for dar 2.x.x series), Dietrich Rothe for the --nodump feature request, and many others for feedback and encouragements. 
version 1.2.1 --------------- Thanks to Axel Kohlmeyer for his patch about RPMS, Dietrich Rothe for its patch on compression level, Brian May for his patches to comply with Debian distro, Ulrich Hagen D.A.M Revok, Jahn Wells, Juhana Sadeharju, for their feedback and John Little for compiling the FAQ and man pages on the web. version 1.2.0 --------------- Thanks to Chris Martin, Jerome Zago, Tim Doerzbacher, Sebastien Robart, Kurt Palmer, Bernd Storsberg, Egor Cheshkov, Richard Bratt for feedback and suggestions. I must add a special Thanks to Chris Martin who realized a simple and efficient tool that generates the usage C++ code from files written in XML syntax, making automatic word wrapping and indentation. This power tool named "dar-help" is licensed as part of DAR, but has been totally designed and written by Chris. version 1.1.0 --------------- Thanks to Moritz Franosch for intensive testing, for finding several bugs and proposing patches, to John Little for testing dar and for providing a forum area, to Ulrich Hagen, Francois Botha, Tobias Jahn for feedback, and to Andreas Gruenbacher for its Extended Attributes software, and its useful answers to my questions. Thanks to the many other people that just encouraged me by their remarks and compliments. dar-2.6.8/TODO0000644000175000017520000000066313606427316007725 00000000000000TODO (order is not significant) =========== List of things TODO is now kept at sourceforge http://sourceforge.net/tracker/?group_id=65612&atid=511615 There you can monitor the feature(s) you are interested in and get automatic feedback when they are under work, under test, completely implementation or released. Bugs report should still go to the Bug tracker and support request to the dar-support mailing-list dar-2.6.8/ABOUT-NLS0000644000175000017520000026747413617552277010512 000000000000001 Notes on the Free Translation Project *************************************** Free software is going international! 
The Free Translation Project is a way to get maintainers of free software, translators, and users all together, so that free software will gradually become able to speak many languages. A few packages already provide translations for their messages. If you found this 'ABOUT-NLS' file inside a distribution, you may assume that the distributed package does use GNU 'gettext' internally, itself available at your nearest GNU archive site. But you do _not_ need to install GNU 'gettext' prior to configuring, installing or using this package with messages translated. Installers will find here some useful hints. These notes also explain how users should proceed for getting the programs to use the available translations. They tell how people wanting to contribute and work on translations can contact the appropriate team. 1.1 INSTALL Matters =================== Some packages are "localizable" when properly installed; the programs they contain can be made to speak your own native language. Most such packages use GNU 'gettext'. Other packages have their own ways to internationalization, predating GNU 'gettext'. By default, this package will be installed to allow translation of messages. It will automatically detect whether the system already provides the GNU 'gettext' functions. Installers may use special options at configuration time for changing the default behaviour. The command: ./configure --disable-nls will _totally_ disable translation of messages. When you already have GNU 'gettext' installed on your system and run configure without an option for your new package, 'configure' will probably detect the previously built and installed 'libintl' library and will decide to use it. If not, you may have to to use the '--with-libintl-prefix' option to tell 'configure' where to look for it. Internationalized packages usually have many 'po/LL.po' files, where LL gives an ISO 639 two-letter code identifying the language. 
Unless translations have been forbidden at 'configure' time by using the '--disable-nls' switch, all available translations are installed together with the package. However, the environment variable 'LINGUAS' may be set, prior to configuration, to limit the installed set. 'LINGUAS' should then contain a space separated list of two-letter codes, stating which languages are allowed. 1.2 Using This Package ====================== As a user, if your language has been installed for this package, you only have to set the 'LANG' environment variable to the appropriate 'LL_CC' combination. If you happen to have the 'LC_ALL' or some other 'LC_xxx' environment variables set, you should unset them before setting 'LANG', otherwise the setting of 'LANG' will not have the desired effect. Here 'LL' is an ISO 639 two-letter language code, and 'CC' is an ISO 3166 two-letter country code. For example, let's suppose that you speak German and live in Germany. At the shell prompt, merely execute 'setenv LANG de_DE' (in 'csh'), 'export LANG; LANG=de_DE' (in 'sh') or 'export LANG=de_DE' (in 'bash'). This can be done from your '.login' or '.profile' file, once and for all. You might think that the country code specification is redundant. But in fact, some languages have dialects in different countries. For example, 'de_AT' is used for Austria, and 'pt_BR' for Brazil. The country code serves to distinguish the dialects. The locale naming convention of 'LL_CC', with 'LL' denoting the language and 'CC' denoting the country, is the one use on systems based on GNU libc. On other systems, some variations of this scheme are used, such as 'LL' or 'LL_CC.ENCODING'. You can get the list of locales supported by your system for your language by running the command 'locale -a | grep '^LL''. Not all programs have translations for all languages. By default, an English message is shown in place of a nonexistent translation. If you understand other languages, you can set up a priority list of languages. 
This is done through a different environment variable, called 'LANGUAGE'. GNU 'gettext' gives preference to 'LANGUAGE' over 'LANG' for the purpose of message handling, but you still need to have 'LANG' set to the primary language; this is required by other parts of the system libraries. For example, some Swedish users who would rather read translations in German than English for when Swedish is not available, set 'LANGUAGE' to 'sv:de' while leaving 'LANG' to 'sv_SE'. Special advice for Norwegian users: The language code for Norwegian bokma*l changed from 'no' to 'nb' recently (in 2003). During the transition period, while some message catalogs for this language are installed under 'nb' and some older ones under 'no', it's recommended for Norwegian users to set 'LANGUAGE' to 'nb:no' so that both newer and older translations are used. In the 'LANGUAGE' environment variable, but not in the 'LANG' environment variable, 'LL_CC' combinations can be abbreviated as 'LL' to denote the language's main dialect. For example, 'de' is equivalent to 'de_DE' (German as spoken in Germany), and 'pt' to 'pt_PT' (Portuguese as spoken in Portugal) in this context. 1.3 Translating Teams ===================== For the Free Translation Project to be a success, we need interested people who like their own language and write it well, and who are also able to synergize with other translators speaking the same language. Each translation team has its own mailing list. The up-to-date list of teams can be found at the Free Translation Project's homepage, 'http://translationproject.org/', in the "Teams" area. If you'd like to volunteer to _work_ at translating messages, you should become a member of the translating team for your own language. The subscribing address is _not_ the same as the list itself, it has '-request' appended. 
For example, speakers of Swedish can send a message to 'sv-request@li.org', having this message body: subscribe Keep in mind that team members are expected to participate _actively_ in translations, or at solving translational difficulties, rather than merely lurking around. If your team does not exist yet and you want to start one, or if you are unsure about what to do or how to get started, please write to 'coordinator@translationproject.org' to reach the coordinator for all translator teams. The English team is special. It works at improving and uniformizing the terminology in use. Proven linguistic skills are praised more than programming skills, here. 1.4 Available Packages ====================== Languages are not equally supported in all packages. The following matrix shows the current state of internationalization, as of Jun 2014. The matrix shows, in regard of each package, for which languages PO files have been submitted to translation coordination, with a translation percentage of at least 50%. 
Ready PO files af am an ar as ast az be bg bn bn_IN bs ca crh cs +---------------------------------------------------+ a2ps | [] [] [] | aegis | | anubis | | aspell | [] [] [] | bash | [] [] [] | bfd | | binutils | [] | bison | | bison-runtime | [] | buzztrax | [] | ccd2cue | | ccide | | cflow | | clisp | | coreutils | [] [] | cpio | | cppi | | cpplib | [] | cryptsetup | [] | datamash | | denemo | [] [] | dfarc | [] | dialog | [] [] [] | dico | | diffutils | [] | dink | [] | direvent | | doodle | [] | dos2unix | | dos2unix-man | | e2fsprogs | [] [] | enscript | [] | exif | [] | fetchmail | [] [] | findutils | [] | flex | [] | freedink | [] [] | fusionforge | | gas | | gawk | [] | gcal | [] | gcc | | gdbm | | gettext-examples | [] [] [] [] [] | gettext-runtime | [] [] [] | gettext-tools | [] [] | gjay | | glunarclock | [] [] [] | gnubiff | [] | gnubik | [] | gnucash | () () [] | gnuchess | | gnulib | [] | gnunet | | gnunet-gtk | | gold | | gphoto2 | [] | gprof | [] | gramadoir | | grep | [] [] [] | grub | [] | gsasl | | gss | | gst-plugins-bad | [] [] | gst-plugins-base | [] [] [] | gst-plugins-good | [] [] [] | gst-plugins-ugly | [] [] [] | gstreamer | [] [] [] [] | gtick | [] | gtkam | [] [] | gtkspell | [] [] [] [] [] | guix | | guix-packages | | gutenprint | [] | hello | [] | help2man | | help2man-texi | | hylafax | | idutils | | iso_15924 | [] | iso_3166 | [] [] [] [] [] [] [] [] [] [] | iso_3166_2 | | iso_4217 | [] | iso_639 | [] [] [] [] [] [] [] [] [] | iso_639_3 | [] [] | iso_639_5 | | jwhois | | kbd | [] | klavaro | [] [] [] [] [] | ld | [] | leafpad | [] [] [] [] | libc | [] [] [] | libexif | () | libextractor | | libgnutls | [] | libgphoto2 | [] | libgphoto2_port | [] | libgsasl | | libiconv | [] [] | libidn | [] | liferea | [] [] [] [] | lilypond | [] [] | lordsawar | [] | lprng | | lynx | [] [] | m4 | [] | mailfromd | | mailutils | | make | [] | man-db | [] [] | man-db-manpages | | midi-instruments | [] [] [] | minicom | [] | mkisofs | [] | myserver | 
[] | nano | [] [] [] | opcodes | | parted | [] | pies | | pnmixer | | popt | [] | procps-ng | | procps-ng-man | | psmisc | [] | pspp | [] | pushover | [] | pwdutils | | pyspread | | radius | [] | recode | [] [] [] | recutils | | rpm | | rush | | sarg | | sed | [] [] [] [] | sharutils | [] | shishi | | skribilo | | solfege | [] [] | solfege-manual | | spotmachine | | sudo | [] [] | sudoers | [] [] | sysstat | [] | tar | [] [] [] | texinfo | [] [] | texinfo_document | [] [] | tigervnc | [] | tin | | tin-man | | tracgoogleappsa... | | trader | | util-linux | [] | ve | | vice | | vmm | | vorbis-tools | [] | wastesedge | | wcd | | wcd-man | | wdiff | [] [] | wget | [] | wyslij-po | | xboard | | xdg-user-dirs | [] [] [] [] [] [] [] [] [] [] | xkeyboard-config | [] [] [] | +---------------------------------------------------+ af am an ar as ast az be bg bn bn_IN bs ca crh cs 4 0 2 5 3 11 0 8 25 3 3 1 55 4 74 da de el en en_GB en_ZA eo es et eu fa fi fr +--------------------------------------------------+ a2ps | [] [] [] [] [] [] [] [] [] | aegis | [] [] [] [] | anubis | [] [] [] [] [] | aspell | [] [] [] [] [] [] [] | bash | [] [] [] | bfd | [] [] [] [] | binutils | [] [] [] | bison | [] [] [] [] [] [] [] [] | bison-runtime | [] [] [] [] [] [] [] [] | buzztrax | [] [] [] [] | ccd2cue | [] [] [] [] | ccide | [] [] [] [] [] [] | cflow | [] [] [] [] [] | clisp | [] [] [] [] [] | coreutils | [] [] [] [] [] | cpio | [] [] [] [] [] | cppi | [] [] [] [] [] | cpplib | [] [] [] [] [] [] | cryptsetup | [] [] [] [] [] | datamash | [] [] [] [] | denemo | [] | dfarc | [] [] [] [] [] [] | dialog | [] [] [] [] [] [] [] [] [] | dico | [] [] [] [] | diffutils | [] [] [] [] [] [] | dink | [] [] [] [] [] [] | direvent | [] [] [] [] | doodle | [] [] [] [] | dos2unix | [] [] [] [] [] | dos2unix-man | [] [] [] | e2fsprogs | [] [] [] [] [] | enscript | [] [] [] [] [] [] | exif | [] [] [] [] [] [] | fetchmail | [] () [] [] [] [] [] | findutils | [] [] [] [] [] [] [] [] | flex | [] [] [] [] [] [] 
| freedink | [] [] [] [] [] [] [] [] | fusionforge | [] [] [] | gas | [] [] [] | gawk | [] [] [] [] [] | gcal | [] [] [] [] | gcc | [] | gdbm | [] [] [] [] [] | gettext-examples | [] [] [] [] [] [] [] | gettext-runtime | [] [] [] [] [] [] | gettext-tools | [] [] [] [] [] | gjay | [] [] [] [] | glunarclock | [] [] [] [] [] | gnubiff | () [] [] () | gnubik | [] [] [] [] [] | gnucash | [] () () () () () () | gnuchess | [] [] [] [] | gnulib | [] [] [] [] [] [] [] | gnunet | [] | gnunet-gtk | [] | gold | [] [] [] | gphoto2 | [] () [] [] | gprof | [] [] [] [] [] [] | gramadoir | [] [] [] [] [] | grep | [] [] [] [] [] [] [] | grub | [] [] [] [] [] | gsasl | [] [] [] [] [] | gss | [] [] [] [] [] | gst-plugins-bad | [] [] [] | gst-plugins-base | [] [] [] [] [] [] | gst-plugins-good | [] [] [] [] [] [] [] | gst-plugins-ugly | [] [] [] [] [] [] [] [] | gstreamer | [] [] [] [] [] [] [] | gtick | [] () [] [] [] | gtkam | [] () [] [] [] [] | gtkspell | [] [] [] [] [] [] [] [] | guix | [] [] | guix-packages | | gutenprint | [] [] [] [] | hello | [] [] [] [] [] [] [] [] | help2man | [] [] [] [] [] [] [] | help2man-texi | [] [] [] | hylafax | [] [] | idutils | [] [] [] [] [] | iso_15924 | [] () [] [] () [] () | iso_3166 | [] () [] [] [] [] () [] () | iso_3166_2 | [] () () () | iso_4217 | [] () [] [] [] () [] () | iso_639 | [] () [] [] () [] () | iso_639_3 | () () () | iso_639_5 | () () () | jwhois | [] [] [] [] [] | kbd | [] [] [] [] [] [] | klavaro | [] [] [] [] [] [] [] | ld | [] [] [] [] | leafpad | [] [] [] [] [] [] [] [] | libc | [] [] [] [] [] | libexif | [] [] () [] [] | libextractor | [] | libgnutls | [] [] [] [] | libgphoto2 | [] () [] | libgphoto2_port | [] () [] [] [] [] | libgsasl | [] [] [] [] [] | libiconv | [] [] [] [] [] [] [] | libidn | [] [] [] [] [] | liferea | [] () [] [] [] [] [] | lilypond | [] [] [] [] [] [] | lordsawar | [] [] | lprng | | lynx | [] [] [] [] [] [] | m4 | [] [] [] [] [] [] | mailfromd | [] | mailutils | [] [] [] [] | make | [] [] [] [] [] | 
man-db | [] [] [] [] | man-db-manpages | [] [] | midi-instruments | [] [] [] [] [] [] [] [] [] | minicom | [] [] [] [] [] | mkisofs | [] [] [] | myserver | [] [] [] [] | nano | [] [] [] [] [] [] [] | opcodes | [] [] [] [] [] | parted | [] [] [] | pies | [] | pnmixer | [] [] | popt | [] [] [] [] [] [] | procps-ng | [] [] | procps-ng-man | [] [] | psmisc | [] [] [] [] [] [] [] | pspp | [] [] [] | pushover | () [] [] [] | pwdutils | [] [] [] | pyspread | [] [] [] | radius | [] [] | recode | [] [] [] [] [] [] [] | recutils | [] [] [] [] | rpm | [] [] [] [] [] | rush | [] [] [] | sarg | [] [] | sed | [] [] [] [] [] [] [] [] | sharutils | [] [] [] [] | shishi | [] [] [] | skribilo | [] [] [] | solfege | [] [] [] [] [] [] [] [] | solfege-manual | [] [] [] [] [] | spotmachine | [] [] [] [] [] | sudo | [] [] [] [] [] [] | sudoers | [] [] [] [] [] [] | sysstat | [] [] [] [] [] [] | tar | [] [] [] [] [] [] [] | texinfo | [] [] [] [] [] | texinfo_document | [] [] [] [] | tigervnc | [] [] [] [] [] [] | tin | [] [] [] [] | tin-man | [] | tracgoogleappsa... 
| [] [] [] [] [] | trader | [] [] [] [] [] [] | util-linux | [] [] [] [] | ve | [] [] [] [] [] | vice | () () () | vmm | [] [] | vorbis-tools | [] [] [] [] | wastesedge | [] | wcd | [] [] [] [] | wcd-man | [] | wdiff | [] [] [] [] [] [] [] | wget | [] [] [] [] [] [] | wyslij-po | [] [] [] [] | xboard | [] [] [] [] | xdg-user-dirs | [] [] [] [] [] [] [] [] [] [] | xkeyboard-config | [] [] [] [] [] [] [] | +--------------------------------------------------+ da de el en en_GB en_ZA eo es et eu fa fi fr 119 131 32 1 6 0 94 95 22 13 4 102 139 ga gd gl gu he hi hr hu hy ia id is it ja ka kk +-------------------------------------------------+ a2ps | [] [] [] [] | aegis | [] | anubis | [] [] [] [] | aspell | [] [] [] [] [] | bash | [] [] [] [] | bfd | [] [] | binutils | [] [] [] | bison | [] | bison-runtime | [] [] [] [] [] [] [] [] | buzztrax | | ccd2cue | [] | ccide | [] [] | cflow | [] [] [] | clisp | | coreutils | [] [] | cpio | [] [] [] [] [] [] | cppi | [] [] [] [] [] | cpplib | [] [] | cryptsetup | [] | datamash | | denemo | [] | dfarc | [] [] [] | dialog | [] [] [] [] [] [] [] [] [] [] | dico | | diffutils | [] [] [] [] | dink | [] | direvent | [] | doodle | [] [] | dos2unix | [] [] | dos2unix-man | | e2fsprogs | [] [] | enscript | [] [] [] | exif | [] [] [] [] [] [] | fetchmail | [] [] [] | findutils | [] [] [] [] [] [] [] | flex | [] | freedink | [] [] [] [] | fusionforge | | gas | [] | gawk | [] () [] | gcal | | gcc | | gdbm | | gettext-examples | [] [] [] [] [] [] [] | gettext-runtime | [] [] [] [] [] [] [] | gettext-tools | [] [] [] | gjay | [] | glunarclock | [] [] [] [] [] [] | gnubiff | [] [] () | gnubik | [] [] [] | gnucash | () () () () () | gnuchess | | gnulib | [] [] [] [] [] | gnunet | | gnunet-gtk | | gold | [] [] | gphoto2 | [] [] [] [] | gprof | [] [] [] [] | gramadoir | [] [] [] | grep | [] [] [] [] [] [] [] | grub | [] [] [] | gsasl | [] [] [] [] [] | gss | [] [] [] [] [] | gst-plugins-bad | [] [] [] | gst-plugins-base | [] [] [] [] | 
gst-plugins-good | [] [] [] [] [] [] | gst-plugins-ugly | [] [] [] [] [] [] | gstreamer | [] [] [] [] [] | gtick | [] [] [] [] [] | gtkam | [] [] [] [] [] | gtkspell | [] [] [] [] [] [] [] [] [] [] | guix | | guix-packages | | gutenprint | [] [] [] | hello | [] [] [] [] [] | help2man | [] [] [] | help2man-texi | | hylafax | [] | idutils | [] [] | iso_15924 | [] [] [] [] [] [] | iso_3166 | [] [] [] [] [] [] [] [] [] [] [] [] [] | iso_3166_2 | [] [] | iso_4217 | [] [] [] [] [] [] | iso_639 | [] [] [] [] [] [] [] [] [] | iso_639_3 | [] [] | iso_639_5 | | jwhois | [] [] [] [] | kbd | [] [] [] | klavaro | [] [] [] [] [] | ld | [] [] [] [] | leafpad | [] [] [] [] [] [] [] () | libc | [] [] [] [] [] | libexif | [] | libextractor | | libgnutls | [] | libgphoto2 | [] [] | libgphoto2_port | [] [] | libgsasl | [] [] [] [] | libiconv | [] [] [] [] [] [] [] | libidn | [] [] [] [] | liferea | [] [] [] [] [] | lilypond | [] | lordsawar | | lprng | [] | lynx | [] [] [] [] | m4 | [] [] [] [] [] | mailfromd | | mailutils | | make | [] [] [] [] | man-db | [] [] | man-db-manpages | [] [] | midi-instruments | [] [] [] [] [] [] [] [] [] | minicom | [] [] [] | mkisofs | [] [] | myserver | [] | nano | [] [] [] [] [] [] | opcodes | [] [] [] | parted | [] [] [] [] [] | pies | | pnmixer | [] [] | popt | [] [] [] [] [] [] [] [] [] [] | procps-ng | | procps-ng-man | | psmisc | [] [] [] [] | pspp | [] [] | pushover | [] | pwdutils | [] | pyspread | | radius | [] | recode | [] [] [] [] [] [] [] | recutils | | rpm | [] | rush | [] | sarg | | sed | [] [] [] [] [] [] [] | sharutils | | shishi | | skribilo | [] | solfege | [] [] | solfege-manual | | spotmachine | | sudo | [] [] [] [] | sudoers | [] [] [] | sysstat | [] [] [] [] | tar | [] [] [] [] [] [] | texinfo | [] [] [] | texinfo_document | [] [] [] | tigervnc | | tin | | tin-man | | tracgoogleappsa... 
| [] [] [] [] | trader | [] [] | util-linux | [] | ve | [] | vice | () () | vmm | | vorbis-tools | [] [] | wastesedge | [] | wcd | | wcd-man | | wdiff | [] [] [] | wget | [] [] [] [] | wyslij-po | [] [] [] | xboard | | xdg-user-dirs | [] [] [] [] [] [] [] [] [] [] [] [] [] [] | xkeyboard-config | [] [] [] [] [] [] | +-------------------------------------------------+ ga gd gl gu he hi hr hu hy ia id is it ja ka kk 35 2 47 4 8 2 60 71 2 6 81 11 87 57 0 3 kn ko ku ky lg lt lv mk ml mn mr ms mt nb ne nl +--------------------------------------------------+ a2ps | [] [] | aegis | [] | anubis | [] [] [] | aspell | [] [] | bash | [] [] | bfd | | binutils | | bison | [] | bison-runtime | [] [] [] [] [] [] | buzztrax | | ccd2cue | | ccide | [] [] | cflow | [] | clisp | [] | coreutils | [] [] | cpio | [] | cppi | | cpplib | [] | cryptsetup | [] | datamash | [] [] | denemo | | dfarc | [] [] | dialog | [] [] [] [] [] [] | dico | | diffutils | [] [] [] | dink | [] | direvent | [] | doodle | [] | dos2unix | [] [] | dos2unix-man | [] | e2fsprogs | [] | enscript | [] | exif | [] [] [] | fetchmail | [] | findutils | [] [] | flex | [] | freedink | [] [] | fusionforge | | gas | | gawk | [] | gcal | | gcc | | gdbm | | gettext-examples | [] [] [] [] [] [] | gettext-runtime | [] [] [] | gettext-tools | [] | gjay | | glunarclock | [] [] | gnubiff | [] | gnubik | [] [] | gnucash | () () () () () () () [] | gnuchess | [] [] | gnulib | [] | gnunet | | gnunet-gtk | | gold | | gphoto2 | [] | gprof | [] [] | gramadoir | [] | grep | [] [] | grub | [] [] [] | gsasl | [] | gss | | gst-plugins-bad | [] [] [] | gst-plugins-base | [] [] [] | gst-plugins-good | [] [] [] [] | gst-plugins-ugly | [] [] [] [] [] | gstreamer | [] [] [] | gtick | [] | gtkam | [] [] | gtkspell | [] [] [] [] [] [] [] | guix | | guix-packages | | gutenprint | [] | hello | [] [] [] | help2man | [] | help2man-texi | | hylafax | [] | idutils | [] | iso_15924 | () [] [] | iso_3166 | [] [] [] () [] [] [] [] [] [] | iso_3166_2 | () 
[] | iso_4217 | () [] [] [] | iso_639 | [] [] () [] [] [] [] | iso_639_3 | [] () [] | iso_639_5 | () | jwhois | [] [] | kbd | [] | klavaro | [] [] | ld | | leafpad | [] [] [] [] [] | libc | [] [] | libexif | [] | libextractor | [] | libgnutls | [] [] | libgphoto2 | [] | libgphoto2_port | [] | libgsasl | [] | libiconv | [] [] | libidn | [] | liferea | [] [] [] | lilypond | [] | lordsawar | | lprng | | lynx | [] | m4 | [] | mailfromd | | mailutils | | make | [] [] | man-db | [] | man-db-manpages | [] | midi-instruments | [] [] [] [] [] [] [] | minicom | [] | mkisofs | [] | myserver | | nano | [] [] [] | opcodes | [] | parted | [] [] | pies | | pnmixer | [] | popt | [] [] [] [] [] | procps-ng | | procps-ng-man | | psmisc | [] | pspp | [] [] | pushover | | pwdutils | [] | pyspread | | radius | [] | recode | [] [] | recutils | [] | rpm | [] | rush | [] | sarg | | sed | [] [] | sharutils | [] | shishi | | skribilo | | solfege | [] [] | solfege-manual | [] | spotmachine | [] | sudo | [] [] [] | sudoers | [] [] [] | sysstat | [] [] | tar | [] [] [] | texinfo | [] | texinfo_document | [] | tigervnc | [] | tin | | tin-man | | tracgoogleappsa... 
| [] [] [] | trader | [] | util-linux | [] | ve | [] | vice | [] | vmm | [] | vorbis-tools | [] | wastesedge | [] | wcd | [] | wcd-man | [] | wdiff | [] | wget | [] [] | wyslij-po | [] | xboard | [] | xdg-user-dirs | [] [] [] [] [] [] [] [] [] [] [] | xkeyboard-config | [] [] [] | +--------------------------------------------------+ kn ko ku ky lg lt lv mk ml mn mr ms mt nb ne nl 5 15 4 6 0 13 23 3 3 3 4 11 2 42 1 125 nn or pa pl ps pt pt_BR ro ru rw sk sl sq sr +------------------------------------------------+ a2ps | [] [] [] [] [] [] [] | aegis | [] [] | anubis | [] [] [] | aspell | [] [] [] [] [] [] [] | bash | [] [] [] [] [] [] | bfd | [] [] | binutils | [] [] | bison | [] [] [] | bison-runtime | [] [] [] [] [] [] [] [] | buzztrax | [] | ccd2cue | [] [] | ccide | [] [] [] | cflow | [] [] [] | clisp | [] | coreutils | [] [] [] [] | cpio | [] [] [] | cppi | [] [] [] | cpplib | [] [] [] | cryptsetup | [] [] [] | datamash | [] [] | denemo | | dfarc | [] [] [] | dialog | [] [] [] [] [] [] [] | dico | [] | diffutils | [] [] [] | dink | | direvent | [] [] [] | doodle | [] [] | dos2unix | [] [] [] [] | dos2unix-man | [] [] | e2fsprogs | [] | enscript | [] [] [] [] [] [] | exif | [] [] [] [] [] [] | fetchmail | [] [] [] | findutils | [] [] [] [] [] [] | flex | [] [] [] [] [] | freedink | [] [] [] [] [] | fusionforge | | gas | | gawk | [] | gcal | | gcc | | gdbm | [] [] [] | gettext-examples | [] [] [] [] [] [] [] [] | gettext-runtime | [] [] [] [] [] [] [] [] [] | gettext-tools | [] [] [] [] [] [] [] | gjay | [] | glunarclock | [] [] [] [] [] [] | gnubiff | [] | gnubik | [] [] [] [] | gnucash | () () () () () [] | gnuchess | [] [] | gnulib | [] [] [] [] [] | gnunet | | gnunet-gtk | | gold | | gphoto2 | [] [] [] [] [] | gprof | [] [] [] [] | gramadoir | [] [] | grep | [] [] [] [] [] [] | grub | [] [] [] [] [] | gsasl | [] [] [] | gss | [] [] [] [] | gst-plugins-bad | [] [] [] [] [] | gst-plugins-base | [] [] [] [] [] [] | gst-plugins-good | [] [] [] [] [] [] [] | 
gst-plugins-ugly | [] [] [] [] [] [] [] | gstreamer | [] [] [] [] [] [] [] | gtick | [] [] [] [] [] | gtkam | [] [] [] [] [] [] | gtkspell | [] [] [] [] [] [] [] [] [] | guix | | guix-packages | | gutenprint | [] [] | hello | [] [] [] [] [] [] | help2man | [] [] [] [] | help2man-texi | [] | hylafax | | idutils | [] [] [] | iso_15924 | [] () [] [] [] [] | iso_3166 | [] [] [] [] () [] [] [] [] [] [] [] [] | iso_3166_2 | [] () [] | iso_4217 | [] [] () [] [] [] [] [] | iso_639 | [] [] [] () [] [] [] [] [] [] | iso_639_3 | [] () | iso_639_5 | () [] | jwhois | [] [] [] [] | kbd | [] [] | klavaro | [] [] [] [] [] | ld | | leafpad | [] [] [] [] [] [] [] [] | libc | [] [] [] | libexif | [] () [] | libextractor | [] | libgnutls | [] | libgphoto2 | [] | libgphoto2_port | [] [] [] [] [] | libgsasl | [] [] [] [] | libiconv | [] [] [] [] [] | libidn | [] [] [] | liferea | [] [] [] [] () [] [] | lilypond | | lordsawar | | lprng | [] | lynx | [] [] | m4 | [] [] [] [] [] | mailfromd | [] | mailutils | [] | make | [] [] [] | man-db | [] [] [] | man-db-manpages | [] [] [] | midi-instruments | [] [] [] [] [] [] [] [] | minicom | [] [] [] [] | mkisofs | [] [] [] | myserver | [] [] | nano | [] [] [] [] [] [] | opcodes | | parted | [] [] [] [] [] [] | pies | [] | pnmixer | [] | popt | [] [] [] [] [] [] | procps-ng | [] | procps-ng-man | [] | psmisc | [] [] [] [] | pspp | [] [] | pushover | | pwdutils | [] | pyspread | [] [] | radius | [] [] | recode | [] [] [] [] [] [] [] [] | recutils | [] [] | rpm | [] | rush | [] [] [] | sarg | [] [] | sed | [] [] [] [] [] [] [] [] | sharutils | [] [] [] | shishi | [] [] | skribilo | [] | solfege | [] [] [] | solfege-manual | [] [] | spotmachine | [] [] | sudo | [] [] [] [] [] [] | sudoers | [] [] [] [] | sysstat | [] [] [] [] [] | tar | [] [] [] [] [] | texinfo | [] [] [] | texinfo_document | [] [] | tigervnc | [] [] [] | tin | [] | tin-man | | tracgoogleappsa... 
| [] [] [] [] | trader | [] [] | util-linux | [] [] | ve | [] [] [] | vice | | vmm | | vorbis-tools | [] [] [] | wastesedge | | wcd | | wcd-man | | wdiff | [] [] [] [] [] | wget | [] [] [] [] [] | wyslij-po | [] [] [] [] | xboard | [] [] [] | xdg-user-dirs | [] [] [] [] [] [] [] [] [] [] [] [] [] | xkeyboard-config | [] [] [] [] | +------------------------------------------------+ nn or pa pl ps pt pt_BR ro ru rw sk sl sq sr 7 3 6 114 1 12 88 32 82 3 40 45 7 101 sv sw ta te tg th tr uk ur vi wa wo zh_CN +----------------------------------------------+ a2ps | [] [] [] [] [] | aegis | [] | anubis | [] [] [] [] | aspell | [] [] [] [] [] | bash | [] [] [] [] | bfd | [] [] [] | binutils | [] [] [] | bison | [] [] [] [] | bison-runtime | [] [] [] [] [] [] | buzztrax | [] [] [] | ccd2cue | [] [] [] | ccide | [] [] [] [] | cflow | [] [] [] [] | clisp | | coreutils | [] [] [] | cpio | [] [] [] [] [] | cppi | [] [] [] [] | cpplib | [] [] [] [] [] | cryptsetup | [] [] [] | datamash | [] [] [] | denemo | [] | dfarc | [] [] | dialog | [] [] [] [] [] [] | dico | [] | diffutils | [] [] [] [] [] | dink | [] | direvent | [] [] | doodle | [] [] | dos2unix | [] [] [] [] | dos2unix-man | [] [] [] | e2fsprogs | [] [] [] [] | enscript | [] [] [] [] | exif | [] [] [] [] [] | fetchmail | [] [] [] [] | findutils | [] [] [] [] [] | flex | [] [] [] [] | freedink | [] [] [] | fusionforge | | gas | [] | gawk | [] [] [] | gcal | [] [] [] | gcc | [] | gdbm | [] [] | gettext-examples | [] [] [] [] [] | gettext-runtime | [] [] [] [] [] | gettext-tools | [] [] [] [] [] | gjay | [] [] [] | glunarclock | [] [] [] [] | gnubiff | [] [] | gnubik | [] [] [] [] | gnucash | () () () () [] | gnuchess | [] [] [] | gnulib | [] [] [] [] | gnunet | | gnunet-gtk | | gold | [] [] | gphoto2 | [] [] [] [] | gprof | [] [] [] [] | gramadoir | [] [] [] | grep | [] [] [] [] [] | grub | [] [] [] [] | gsasl | [] [] [] [] | gss | [] [] [] | gst-plugins-bad | [] [] [] [] [] | gst-plugins-base | [] [] [] [] [] | 
gst-plugins-good | [] [] [] [] [] | gst-plugins-ugly | [] [] [] [] [] | gstreamer | [] [] [] [] [] | gtick | [] [] [] | gtkam | [] [] [] [] | gtkspell | [] [] [] [] [] [] [] | guix | | guix-packages | | gutenprint | [] [] [] [] | hello | [] [] [] [] [] [] | help2man | [] [] [] | help2man-texi | [] | hylafax | [] | idutils | [] [] [] | iso_15924 | [] () [] [] () [] | iso_3166 | [] [] () [] [] () [] [] | iso_3166_2 | () [] [] () [] | iso_4217 | [] () [] [] () [] | iso_639 | [] [] [] () [] [] () [] [] | iso_639_3 | [] () [] [] () | iso_639_5 | () [] () | jwhois | [] [] [] [] | kbd | [] [] [] [] | klavaro | [] [] [] [] [] [] | ld | [] [] [] [] [] | leafpad | [] [] [] [] [] [] | libc | [] [] [] [] [] | libexif | [] [] () | libextractor | [] [] | libgnutls | [] [] [] [] | libgphoto2 | [] [] [] | libgphoto2_port | [] [] [] [] | libgsasl | [] [] [] [] | libiconv | [] [] [] [] [] | libidn | () [] [] [] | liferea | [] [] [] [] [] | lilypond | [] | lordsawar | | lprng | [] | lynx | [] [] [] [] | m4 | [] [] [] | mailfromd | [] [] | mailutils | [] | make | [] [] [] [] | man-db | [] [] [] | man-db-manpages | [] [] | midi-instruments | [] [] [] [] [] [] | minicom | [] [] | mkisofs | [] [] [] | myserver | [] | nano | [] [] [] [] | opcodes | [] [] [] | parted | [] [] [] [] [] | pies | [] [] | pnmixer | [] [] [] | popt | [] [] [] [] [] [] [] | procps-ng | [] [] | procps-ng-man | [] | psmisc | [] [] [] [] | pspp | [] [] [] | pushover | [] | pwdutils | [] [] | pyspread | [] | radius | [] [] | recode | [] [] [] [] | recutils | [] [] [] | rpm | [] [] [] [] | rush | [] [] | sarg | | sed | [] [] [] [] [] | sharutils | [] [] [] [] | shishi | [] [] | skribilo | [] [] | solfege | [] [] [] [] | solfege-manual | [] | spotmachine | [] [] [] | sudo | [] [] [] [] [] | sudoers | [] [] [] [] | sysstat | [] [] [] [] [] | tar | [] [] [] [] [] | texinfo | [] [] [] | texinfo_document | [] | tigervnc | [] [] [] | tin | [] | tin-man | | tracgoogleappsa... 
| [] [] [] [] [] | trader | [] | util-linux | [] [] [] [] | ve | [] [] [] [] | vice | () () | vmm | | vorbis-tools | [] [] | wastesedge | | wcd | [] [] [] | wcd-man | [] | wdiff | [] [] [] [] | wget | [] [] [] | wyslij-po | [] [] | xboard | [] [] | xdg-user-dirs | [] [] [] [] [] [] [] [] | xkeyboard-config | [] [] [] [] | +----------------------------------------------+ sv sw ta te tg th tr uk ur vi wa wo zh_CN 106 1 4 3 0 13 51 115 1 125 7 1 100 zh_HK zh_TW +-------------+ a2ps | | 30 aegis | | 9 anubis | | 19 aspell | | 29 bash | [] | 23 bfd | | 11 binutils | | 12 bison | [] | 18 bison-runtime | [] | 38 buzztrax | | 9 ccd2cue | | 10 ccide | | 17 cflow | | 16 clisp | | 10 coreutils | | 18 cpio | | 20 cppi | | 17 cpplib | [] | 19 cryptsetup | | 14 datamash | | 11 denemo | | 5 dfarc | | 17 dialog | [] | 42 dico | | 6 diffutils | | 22 dink | | 10 direvent | | 11 doodle | | 12 dos2unix | [] | 18 dos2unix-man | | 9 e2fsprogs | | 15 enscript | | 21 exif | | 27 fetchmail | | 19 findutils | | 29 flex | [] | 19 freedink | | 24 fusionforge | | 3 gas | | 5 gawk | | 13 gcal | | 8 gcc | | 2 gdbm | | 10 gettext-examples | [] [] | 40 gettext-runtime | [] [] | 35 gettext-tools | [] | 24 gjay | | 9 glunarclock | [] | 27 gnubiff | | 9 gnubik | | 19 gnucash | () | 6 gnuchess | | 11 gnulib | | 23 gnunet | | 1 gnunet-gtk | | 1 gold | | 7 gphoto2 | [] | 19 gprof | | 21 gramadoir | | 14 grep | [] | 31 grub | | 21 gsasl | [] | 19 gss | | 17 gst-plugins-bad | | 21 gst-plugins-base | | 27 gst-plugins-good | | 32 gst-plugins-ugly | | 34 gstreamer | [] | 32 gtick | | 19 gtkam | | 24 gtkspell | [] [] | 48 guix | | 2 guix-packages | | 0 gutenprint | | 15 hello | [] | 30 help2man | | 18 help2man-texi | | 5 hylafax | | 5 idutils | | 14 iso_15924 | [] | 23 iso_3166 | [] [] | 58 iso_3166_2 | | 9 iso_4217 | [] [] | 28 iso_639 | [] [] | 46 iso_639_3 | | 10 iso_639_5 | | 2 jwhois | [] | 20 kbd | | 17 klavaro | | 30 ld | [] | 15 leafpad | [] | 39 libc | [] | 24 libexif | | 10 libextractor | | 5 
libgnutls | | 13 libgphoto2 | | 10 libgphoto2_port | [] | 19 libgsasl | | 18 libiconv | [] | 29 libidn | | 17 liferea | | 29 lilypond | | 11 lordsawar | | 3 lprng | | 3 lynx | | 19 m4 | [] | 22 mailfromd | | 4 mailutils | | 6 make | | 19 man-db | | 15 man-db-manpages | | 10 midi-instruments | [] | 43 minicom | [] | 17 mkisofs | | 13 myserver | | 9 nano | [] | 30 opcodes | | 12 parted | [] | 23 pies | | 4 pnmixer | | 9 popt | [] | 36 procps-ng | | 5 procps-ng-man | | 4 psmisc | [] | 22 pspp | | 13 pushover | | 6 pwdutils | | 8 pyspread | | 6 radius | | 9 recode | | 31 recutils | | 10 rpm | [] | 13 rush | | 10 sarg | | 4 sed | [] | 35 sharutils | | 13 shishi | | 7 skribilo | | 7 solfege | | 21 solfege-manual | | 9 spotmachine | | 11 sudo | | 26 sudoers | | 22 sysstat | | 23 tar | [] | 30 texinfo | | 17 texinfo_document | | 13 tigervnc | | 14 tin | [] | 7 tin-man | | 1 tracgoogleappsa... | [] | 22 trader | | 12 util-linux | | 13 ve | | 14 vice | | 1 vmm | | 3 vorbis-tools | | 13 wastesedge | | 3 wcd | | 8 wcd-man | | 3 wdiff | [] | 23 wget | | 21 wyslij-po | | 14 xboard | | 10 xdg-user-dirs | [] [] | 68 xkeyboard-config | [] | 28 +-------------+ 89 teams zh_HK zh_TW 166 domains 7 42 2809 Some counters in the preceding matrix are higher than the number of visible blocks let us expect. This is because a few extra PO files are used for implementing regional variants of languages, or language dialects. For a PO file in the matrix above to be effective, the package to which it applies should also have been internationalized and distributed as such by its maintainer. There might be an observable lag between the mere existence a PO file and its wide availability in a distribution. If Jun 2014 seems to be old, you may fetch a more recent copy of this 'ABOUT-NLS' file on most GNU archive sites. The most up-to-date matrix with full percentage details can be found at 'http://translationproject.org/extra/matrix.html'. 
1.5 Using 'gettext' in new packages =================================== If you are writing a freely available program and want to internationalize it you are welcome to use GNU 'gettext' in your package. Of course you have to respect the GNU Lesser General Public License which covers the use of the GNU 'gettext' library. This means in particular that even non-free programs can use 'libintl' as a shared library, whereas only free software can use 'libintl' as a static library or use modified versions of 'libintl'. Once the sources are changed appropriately and the setup can handle the use of 'gettext' the only thing missing are the translations. The Free Translation Project is also available for packages which are not developed inside the GNU project. Therefore the information given above applies also for every other Free Software Project. Contact 'coordinator@translationproject.org' to make the '.pot' files available to the translation teams. dar-2.6.8/config.guess0000755000175000017520000012564413011674454011561 00000000000000#! /bin/sh # Attempt to guess a canonical system name. # Copyright 1992-2016 Free Software Foundation, Inc. timestamp='2016-10-02' # This file is free software; you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, see . 
# # As a special exception to the GNU General Public License, if you # distribute this file as part of a program that contains a # configuration script generated by Autoconf, you may include it under # the same distribution terms that you use for the rest of that # program. This Exception is an additional permission under section 7 # of the GNU General Public License, version 3 ("GPLv3"). # # Originally written by Per Bothner; maintained since 2000 by Ben Elliston. # # You can get the latest version of this script from: # http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.guess # # Please send patches to . me=`echo "$0" | sed -e 's,.*/,,'` usage="\ Usage: $0 [OPTION] Output the configuration name of the system \`$me' is run on. Operation modes: -h, --help print this help, then exit -t, --time-stamp print date of last modification, then exit -v, --version print version number, then exit Report bugs and patches to ." version="\ GNU config.guess ($timestamp) Originally written by Per Bothner. Copyright 1992-2016 Free Software Foundation, Inc. This is free software; see the source for copying conditions. There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." help=" Try \`$me --help' for more information." # Parse command line while test $# -gt 0 ; do case $1 in --time-stamp | --time* | -t ) echo "$timestamp" ; exit ;; --version | -v ) echo "$version" ; exit ;; --help | --h* | -h ) echo "$usage"; exit ;; -- ) # Stop option processing shift; break ;; - ) # Use stdin as input. break ;; -* ) echo "$me: invalid option $1$help" >&2 exit 1 ;; * ) break ;; esac done if test $# != 0; then echo "$me: too many arguments$help" >&2 exit 1 fi trap 'exit 1' 1 2 15 # CC_FOR_BUILD -- compiler used by this script. Note that the use of a # compiler to aid in system detection is discouraged as it requires # temporary files to be created and, as you can see below, it is a # headache to deal with in a portable fashion. 
# Historically, `CC_FOR_BUILD' used to be named `HOST_CC'. We still # use `HOST_CC' if defined, but it is deprecated. # Portable tmp directory creation inspired by the Autoconf team. set_cc_for_build=' trap "exitcode=\$?; (rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null) && exit \$exitcode" 0 ; trap "rm -f \$tmpfiles 2>/dev/null; rmdir \$tmp 2>/dev/null; exit 1" 1 2 13 15 ; : ${TMPDIR=/tmp} ; { tmp=`(umask 077 && mktemp -d "$TMPDIR/cgXXXXXX") 2>/dev/null` && test -n "$tmp" && test -d "$tmp" ; } || { test -n "$RANDOM" && tmp=$TMPDIR/cg$$-$RANDOM && (umask 077 && mkdir $tmp) ; } || { tmp=$TMPDIR/cg-$$ && (umask 077 && mkdir $tmp) && echo "Warning: creating insecure temp directory" >&2 ; } || { echo "$me: cannot create a temporary directory in $TMPDIR" >&2 ; exit 1 ; } ; dummy=$tmp/dummy ; tmpfiles="$dummy.c $dummy.o $dummy.rel $dummy" ; case $CC_FOR_BUILD,$HOST_CC,$CC in ,,) echo "int x;" > $dummy.c ; for c in cc gcc c89 c99 ; do if ($c -c -o $dummy.o $dummy.c) >/dev/null 2>&1 ; then CC_FOR_BUILD="$c"; break ; fi ; done ; if test x"$CC_FOR_BUILD" = x ; then CC_FOR_BUILD=no_compiler_found ; fi ;; ,,*) CC_FOR_BUILD=$CC ;; ,*,*) CC_FOR_BUILD=$HOST_CC ;; esac ; set_cc_for_build= ;' # This is needed to find uname on a Pyramid OSx when run in the BSD universe. # (ghazi@noc.rutgers.edu 1994-08-24) if (test -f /.attbin/uname) >/dev/null 2>&1 ; then PATH=$PATH:/.attbin ; export PATH fi UNAME_MACHINE=`(uname -m) 2>/dev/null` || UNAME_MACHINE=unknown UNAME_RELEASE=`(uname -r) 2>/dev/null` || UNAME_RELEASE=unknown UNAME_SYSTEM=`(uname -s) 2>/dev/null` || UNAME_SYSTEM=unknown UNAME_VERSION=`(uname -v) 2>/dev/null` || UNAME_VERSION=unknown case "${UNAME_SYSTEM}" in Linux|GNU|GNU/*) # If the system lacks a compiler, then just pick glibc. # We could probably try harder. 
LIBC=gnu eval $set_cc_for_build cat <<-EOF > $dummy.c #include #if defined(__UCLIBC__) LIBC=uclibc #elif defined(__dietlibc__) LIBC=dietlibc #else LIBC=gnu #endif EOF eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep '^LIBC' | sed 's, ,,g'` ;; esac # Note: order is significant - the case branches are not exclusive. case "${UNAME_MACHINE}:${UNAME_SYSTEM}:${UNAME_RELEASE}:${UNAME_VERSION}" in *:NetBSD:*:*) # NetBSD (nbsd) targets should (where applicable) match one or # more of the tuples: *-*-netbsdelf*, *-*-netbsdaout*, # *-*-netbsdecoff* and *-*-netbsd*. For targets that recently # switched to ELF, *-*-netbsd* would select the old # object file format. This provides both forward # compatibility and a consistent mechanism for selecting the # object file format. # # Note: NetBSD doesn't particularly care about the vendor # portion of the name. We always set it to "unknown". sysctl="sysctl -n hw.machine_arch" UNAME_MACHINE_ARCH=`(uname -p 2>/dev/null || \ /sbin/$sysctl 2>/dev/null || \ /usr/sbin/$sysctl 2>/dev/null || \ echo unknown)` case "${UNAME_MACHINE_ARCH}" in armeb) machine=armeb-unknown ;; arm*) machine=arm-unknown ;; sh3el) machine=shl-unknown ;; sh3eb) machine=sh-unknown ;; sh5el) machine=sh5le-unknown ;; earmv*) arch=`echo ${UNAME_MACHINE_ARCH} | sed -e 's,^e\(armv[0-9]\).*$,\1,'` endian=`echo ${UNAME_MACHINE_ARCH} | sed -ne 's,^.*\(eb\)$,\1,p'` machine=${arch}${endian}-unknown ;; *) machine=${UNAME_MACHINE_ARCH}-unknown ;; esac # The Operating System including object format, if it has switched # to ELF recently (or will in the future) and ABI. case "${UNAME_MACHINE_ARCH}" in earm*) os=netbsdelf ;; arm*|i386|m68k|ns32k|sh3*|sparc|vax) eval $set_cc_for_build if echo __ELF__ | $CC_FOR_BUILD -E - 2>/dev/null \ | grep -q __ELF__ then # Once all utilities can be ECOFF (netbsdecoff) or a.out (netbsdaout). # Return netbsd for either. FIX? os=netbsd else os=netbsdelf fi ;; *) os=netbsd ;; esac # Determine ABI tags. 
case "${UNAME_MACHINE_ARCH}" in earm*) expr='s/^earmv[0-9]/-eabi/;s/eb$//' abi=`echo ${UNAME_MACHINE_ARCH} | sed -e "$expr"` ;; esac # The OS release # Debian GNU/NetBSD machines have a different userland, and # thus, need a distinct triplet. However, they do not need # kernel version information, so it can be replaced with a # suitable tag, in the style of linux-gnu. case "${UNAME_VERSION}" in Debian*) release='-gnu' ;; *) release=`echo ${UNAME_RELEASE} | sed -e 's/[-_].*//' | cut -d. -f1,2` ;; esac # Since CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM: # contains redundant information, the shorter form: # CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM is used. echo "${machine}-${os}${release}${abi}" exit ;; *:Bitrig:*:*) UNAME_MACHINE_ARCH=`arch | sed 's/Bitrig.//'` echo ${UNAME_MACHINE_ARCH}-unknown-bitrig${UNAME_RELEASE} exit ;; *:OpenBSD:*:*) UNAME_MACHINE_ARCH=`arch | sed 's/OpenBSD.//'` echo ${UNAME_MACHINE_ARCH}-unknown-openbsd${UNAME_RELEASE} exit ;; *:LibertyBSD:*:*) UNAME_MACHINE_ARCH=`arch | sed 's/^.*BSD\.//'` echo ${UNAME_MACHINE_ARCH}-unknown-libertybsd${UNAME_RELEASE} exit ;; *:ekkoBSD:*:*) echo ${UNAME_MACHINE}-unknown-ekkobsd${UNAME_RELEASE} exit ;; *:SolidBSD:*:*) echo ${UNAME_MACHINE}-unknown-solidbsd${UNAME_RELEASE} exit ;; macppc:MirBSD:*:*) echo powerpc-unknown-mirbsd${UNAME_RELEASE} exit ;; *:MirBSD:*:*) echo ${UNAME_MACHINE}-unknown-mirbsd${UNAME_RELEASE} exit ;; *:Sortix:*:*) echo ${UNAME_MACHINE}-unknown-sortix exit ;; alpha:OSF1:*:*) case $UNAME_RELEASE in *4.0) UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $3}'` ;; *5.*) UNAME_RELEASE=`/usr/sbin/sizer -v | awk '{print $4}'` ;; esac # According to Compaq, /usr/sbin/psrinfo has been available on # OSF/1 and Tru64 systems produced since 1995. I hope that # covers most systems running today. This code pipes the CPU # types through head -n 1, so we only detect the type of CPU 0. 
ALPHA_CPU_TYPE=`/usr/sbin/psrinfo -v | sed -n -e 's/^ The alpha \(.*\) processor.*$/\1/p' | head -n 1` case "$ALPHA_CPU_TYPE" in "EV4 (21064)") UNAME_MACHINE=alpha ;; "EV4.5 (21064)") UNAME_MACHINE=alpha ;; "LCA4 (21066/21068)") UNAME_MACHINE=alpha ;; "EV5 (21164)") UNAME_MACHINE=alphaev5 ;; "EV5.6 (21164A)") UNAME_MACHINE=alphaev56 ;; "EV5.6 (21164PC)") UNAME_MACHINE=alphapca56 ;; "EV5.7 (21164PC)") UNAME_MACHINE=alphapca57 ;; "EV6 (21264)") UNAME_MACHINE=alphaev6 ;; "EV6.7 (21264A)") UNAME_MACHINE=alphaev67 ;; "EV6.8CB (21264C)") UNAME_MACHINE=alphaev68 ;; "EV6.8AL (21264B)") UNAME_MACHINE=alphaev68 ;; "EV6.8CX (21264D)") UNAME_MACHINE=alphaev68 ;; "EV6.9A (21264/EV69A)") UNAME_MACHINE=alphaev69 ;; "EV7 (21364)") UNAME_MACHINE=alphaev7 ;; "EV7.9 (21364A)") UNAME_MACHINE=alphaev79 ;; esac # A Pn.n version is a patched version. # A Vn.n version is a released version. # A Tn.n version is a released field test version. # A Xn.n version is an unreleased experimental baselevel. # 1.2 uses "1.2" for uname -r. echo ${UNAME_MACHINE}-dec-osf`echo ${UNAME_RELEASE} | sed -e 's/^[PVTX]//' | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz` # Reset EXIT trap before exiting to avoid spurious non-zero exit code. exitcode=$? trap '' 0 exit $exitcode ;; Alpha\ *:Windows_NT*:*) # How do we know it's Interix rather than the generic POSIX subsystem? # Should we change UNAME_MACHINE based on the output of uname instead # of the specific Alpha model? 
echo alpha-pc-interix exit ;; 21064:Windows_NT:50:3) echo alpha-dec-winnt3.5 exit ;; Amiga*:UNIX_System_V:4.0:*) echo m68k-unknown-sysv4 exit ;; *:[Aa]miga[Oo][Ss]:*:*) echo ${UNAME_MACHINE}-unknown-amigaos exit ;; *:[Mm]orph[Oo][Ss]:*:*) echo ${UNAME_MACHINE}-unknown-morphos exit ;; *:OS/390:*:*) echo i370-ibm-openedition exit ;; *:z/VM:*:*) echo s390-ibm-zvmoe exit ;; *:OS400:*:*) echo powerpc-ibm-os400 exit ;; arm:RISC*:1.[012]*:*|arm:riscix:1.[012]*:*) echo arm-acorn-riscix${UNAME_RELEASE} exit ;; arm*:riscos:*:*|arm*:RISCOS:*:*) echo arm-unknown-riscos exit ;; SR2?01:HI-UX/MPP:*:* | SR8000:HI-UX/MPP:*:*) echo hppa1.1-hitachi-hiuxmpp exit ;; Pyramid*:OSx*:*:* | MIS*:OSx*:*:* | MIS*:SMP_DC-OSx*:*:*) # akee@wpdis03.wpafb.af.mil (Earle F. Ake) contributed MIS and NILE. if test "`(/bin/universe) 2>/dev/null`" = att ; then echo pyramid-pyramid-sysv3 else echo pyramid-pyramid-bsd fi exit ;; NILE*:*:*:dcosx) echo pyramid-pyramid-svr4 exit ;; DRS?6000:unix:4.0:6*) echo sparc-icl-nx6 exit ;; DRS?6000:UNIX_SV:4.2*:7* | DRS?6000:isis:4.2*:7*) case `/usr/bin/uname -p` in sparc) echo sparc-icl-nx7; exit ;; esac ;; s390x:SunOS:*:*) echo ${UNAME_MACHINE}-ibm-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` exit ;; sun4H:SunOS:5.*:*) echo sparc-hal-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` exit ;; sun4*:SunOS:5.*:* | tadpole*:SunOS:5.*:*) echo sparc-sun-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` exit ;; i86pc:AuroraUX:5.*:* | i86xen:AuroraUX:5.*:*) echo i386-pc-auroraux${UNAME_RELEASE} exit ;; i86pc:SunOS:5.*:* | i86xen:SunOS:5.*:*) eval $set_cc_for_build SUN_ARCH=i386 # If there is a compiler, see if it is configured for 64-bit objects. # Note that the Sun cc does not turn __LP64__ into 1 like gcc does. # This test works for both compilers. 
if [ "$CC_FOR_BUILD" != no_compiler_found ]; then if (echo '#ifdef __amd64'; echo IS_64BIT_ARCH; echo '#endif') | \ (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | \ grep IS_64BIT_ARCH >/dev/null then SUN_ARCH=x86_64 fi fi echo ${SUN_ARCH}-pc-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` exit ;; sun4*:SunOS:6*:*) # According to config.sub, this is the proper way to canonicalize # SunOS6. Hard to guess exactly what SunOS6 will be like, but # it's likely to be more like Solaris than SunOS4. echo sparc-sun-solaris3`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` exit ;; sun4*:SunOS:*:*) case "`/usr/bin/arch -k`" in Series*|S4*) UNAME_RELEASE=`uname -v` ;; esac # Japanese Language versions have a version number like `4.1.3-JL'. echo sparc-sun-sunos`echo ${UNAME_RELEASE}|sed -e 's/-/_/'` exit ;; sun3*:SunOS:*:*) echo m68k-sun-sunos${UNAME_RELEASE} exit ;; sun*:*:4.2BSD:*) UNAME_RELEASE=`(sed 1q /etc/motd | awk '{print substr($5,1,3)}') 2>/dev/null` test "x${UNAME_RELEASE}" = x && UNAME_RELEASE=3 case "`/bin/arch`" in sun3) echo m68k-sun-sunos${UNAME_RELEASE} ;; sun4) echo sparc-sun-sunos${UNAME_RELEASE} ;; esac exit ;; aushp:SunOS:*:*) echo sparc-auspex-sunos${UNAME_RELEASE} exit ;; # The situation for MiNT is a little confusing. The machine name # can be virtually everything (everything which is not # "atarist" or "atariste" at least should have a processor # > m68000). The system name ranges from "MiNT" over "FreeMiNT" # to the lowercase version "mint" (or "freemint"). Finally # the system name "TOS" denotes a system which is actually not # MiNT. But MiNT is downward compatible to TOS, so this should # be no problem. 
atarist[e]:*MiNT:*:* | atarist[e]:*mint:*:* | atarist[e]:*TOS:*:*) echo m68k-atari-mint${UNAME_RELEASE} exit ;; atari*:*MiNT:*:* | atari*:*mint:*:* | atarist[e]:*TOS:*:*) echo m68k-atari-mint${UNAME_RELEASE} exit ;; *falcon*:*MiNT:*:* | *falcon*:*mint:*:* | *falcon*:*TOS:*:*) echo m68k-atari-mint${UNAME_RELEASE} exit ;; milan*:*MiNT:*:* | milan*:*mint:*:* | *milan*:*TOS:*:*) echo m68k-milan-mint${UNAME_RELEASE} exit ;; hades*:*MiNT:*:* | hades*:*mint:*:* | *hades*:*TOS:*:*) echo m68k-hades-mint${UNAME_RELEASE} exit ;; *:*MiNT:*:* | *:*mint:*:* | *:*TOS:*:*) echo m68k-unknown-mint${UNAME_RELEASE} exit ;; m68k:machten:*:*) echo m68k-apple-machten${UNAME_RELEASE} exit ;; powerpc:machten:*:*) echo powerpc-apple-machten${UNAME_RELEASE} exit ;; RISC*:Mach:*:*) echo mips-dec-mach_bsd4.3 exit ;; RISC*:ULTRIX:*:*) echo mips-dec-ultrix${UNAME_RELEASE} exit ;; VAX*:ULTRIX*:*:*) echo vax-dec-ultrix${UNAME_RELEASE} exit ;; 2020:CLIX:*:* | 2430:CLIX:*:*) echo clipper-intergraph-clix${UNAME_RELEASE} exit ;; mips:*:*:UMIPS | mips:*:*:RISCos) eval $set_cc_for_build sed 's/^ //' << EOF >$dummy.c #ifdef __cplusplus #include /* for printf() prototype */ int main (int argc, char *argv[]) { #else int main (argc, argv) int argc; char *argv[]; { #endif #if defined (host_mips) && defined (MIPSEB) #if defined (SYSTYPE_SYSV) printf ("mips-mips-riscos%ssysv\n", argv[1]); exit (0); #endif #if defined (SYSTYPE_SVR4) printf ("mips-mips-riscos%ssvr4\n", argv[1]); exit (0); #endif #if defined (SYSTYPE_BSD43) || defined(SYSTYPE_BSD) printf ("mips-mips-riscos%sbsd\n", argv[1]); exit (0); #endif #endif exit (-1); } EOF $CC_FOR_BUILD -o $dummy $dummy.c && dummyarg=`echo "${UNAME_RELEASE}" | sed -n 's/\([0-9]*\).*/\1/p'` && SYSTEM_NAME=`$dummy $dummyarg` && { echo "$SYSTEM_NAME"; exit; } echo mips-mips-riscos${UNAME_RELEASE} exit ;; Motorola:PowerMAX_OS:*:*) echo powerpc-motorola-powermax exit ;; Motorola:*:4.3:PL8-*) echo powerpc-harris-powermax exit ;; Night_Hawk:*:*:PowerMAX_OS | 
Synergy:PowerMAX_OS:*:*) echo powerpc-harris-powermax exit ;; Night_Hawk:Power_UNIX:*:*) echo powerpc-harris-powerunix exit ;; m88k:CX/UX:7*:*) echo m88k-harris-cxux7 exit ;; m88k:*:4*:R4*) echo m88k-motorola-sysv4 exit ;; m88k:*:3*:R3*) echo m88k-motorola-sysv3 exit ;; AViiON:dgux:*:*) # DG/UX returns AViiON for all architectures UNAME_PROCESSOR=`/usr/bin/uname -p` if [ $UNAME_PROCESSOR = mc88100 ] || [ $UNAME_PROCESSOR = mc88110 ] then if [ ${TARGET_BINARY_INTERFACE}x = m88kdguxelfx ] || \ [ ${TARGET_BINARY_INTERFACE}x = x ] then echo m88k-dg-dgux${UNAME_RELEASE} else echo m88k-dg-dguxbcs${UNAME_RELEASE} fi else echo i586-dg-dgux${UNAME_RELEASE} fi exit ;; M88*:DolphinOS:*:*) # DolphinOS (SVR3) echo m88k-dolphin-sysv3 exit ;; M88*:*:R3*:*) # Delta 88k system running SVR3 echo m88k-motorola-sysv3 exit ;; XD88*:*:*:*) # Tektronix XD88 system running UTekV (SVR3) echo m88k-tektronix-sysv3 exit ;; Tek43[0-9][0-9]:UTek:*:*) # Tektronix 4300 system running UTek (BSD) echo m68k-tektronix-bsd exit ;; *:IRIX*:*:*) echo mips-sgi-irix`echo ${UNAME_RELEASE}|sed -e 's/-/_/g'` exit ;; ????????:AIX?:[12].1:2) # AIX 2.2.1 or AIX 2.1.1 is RT/PC AIX. 
echo romp-ibm-aix # uname -m gives an 8 hex-code CPU id exit ;; # Note that: echo "'`uname -s`'" gives 'AIX ' i*86:AIX:*:*) echo i386-ibm-aix exit ;; ia64:AIX:*:*) if [ -x /usr/bin/oslevel ] ; then IBM_REV=`/usr/bin/oslevel` else IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE} fi echo ${UNAME_MACHINE}-ibm-aix${IBM_REV} exit ;; *:AIX:2:3) if grep bos325 /usr/include/stdio.h >/dev/null 2>&1; then eval $set_cc_for_build sed 's/^ //' << EOF >$dummy.c #include main() { if (!__power_pc()) exit(1); puts("powerpc-ibm-aix3.2.5"); exit(0); } EOF if $CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy` then echo "$SYSTEM_NAME" else echo rs6000-ibm-aix3.2.5 fi elif grep bos324 /usr/include/stdio.h >/dev/null 2>&1; then echo rs6000-ibm-aix3.2.4 else echo rs6000-ibm-aix3.2 fi exit ;; *:AIX:*:[4567]) IBM_CPU_ID=`/usr/sbin/lsdev -C -c processor -S available | sed 1q | awk '{ print $1 }'` if /usr/sbin/lsattr -El ${IBM_CPU_ID} | grep ' POWER' >/dev/null 2>&1; then IBM_ARCH=rs6000 else IBM_ARCH=powerpc fi if [ -x /usr/bin/lslpp ] ; then IBM_REV=`/usr/bin/lslpp -Lqc bos.rte.libc | awk -F: '{ print $3 }' | sed s/[0-9]*$/0/` else IBM_REV=${UNAME_VERSION}.${UNAME_RELEASE} fi echo ${IBM_ARCH}-ibm-aix${IBM_REV} exit ;; *:AIX:*:*) echo rs6000-ibm-aix exit ;; ibmrt:4.4BSD:*|romp-ibm:BSD:*) echo romp-ibm-bsd4.4 exit ;; ibmrt:*BSD:*|romp-ibm:BSD:*) # covers RT/PC BSD and echo romp-ibm-bsd${UNAME_RELEASE} # 4.3 with uname added to exit ;; # report: romp-ibm BSD 4.3 *:BOSX:*:*) echo rs6000-bull-bosx exit ;; DPX/2?00:B.O.S.:*:*) echo m68k-bull-sysv3 exit ;; 9000/[34]??:4.3bsd:1.*:*) echo m68k-hp-bsd exit ;; hp300:4.4BSD:*:* | 9000/[34]??:4.3bsd:2.*:*) echo m68k-hp-bsd4.4 exit ;; 9000/[34678]??:HP-UX:*:*) HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'` case "${UNAME_MACHINE}" in 9000/31? ) HP_ARCH=m68000 ;; 9000/[34]?? 
) HP_ARCH=m68k ;; 9000/[678][0-9][0-9]) if [ -x /usr/bin/getconf ]; then sc_cpu_version=`/usr/bin/getconf SC_CPU_VERSION 2>/dev/null` sc_kernel_bits=`/usr/bin/getconf SC_KERNEL_BITS 2>/dev/null` case "${sc_cpu_version}" in 523) HP_ARCH=hppa1.0 ;; # CPU_PA_RISC1_0 528) HP_ARCH=hppa1.1 ;; # CPU_PA_RISC1_1 532) # CPU_PA_RISC2_0 case "${sc_kernel_bits}" in 32) HP_ARCH=hppa2.0n ;; 64) HP_ARCH=hppa2.0w ;; '') HP_ARCH=hppa2.0 ;; # HP-UX 10.20 esac ;; esac fi if [ "${HP_ARCH}" = "" ]; then eval $set_cc_for_build sed 's/^ //' << EOF >$dummy.c #define _HPUX_SOURCE #include #include int main () { #if defined(_SC_KERNEL_BITS) long bits = sysconf(_SC_KERNEL_BITS); #endif long cpu = sysconf (_SC_CPU_VERSION); switch (cpu) { case CPU_PA_RISC1_0: puts ("hppa1.0"); break; case CPU_PA_RISC1_1: puts ("hppa1.1"); break; case CPU_PA_RISC2_0: #if defined(_SC_KERNEL_BITS) switch (bits) { case 64: puts ("hppa2.0w"); break; case 32: puts ("hppa2.0n"); break; default: puts ("hppa2.0"); break; } break; #else /* !defined(_SC_KERNEL_BITS) */ puts ("hppa2.0"); break; #endif default: puts ("hppa1.0"); break; } exit (0); } EOF (CCOPTS="" $CC_FOR_BUILD -o $dummy $dummy.c 2>/dev/null) && HP_ARCH=`$dummy` test -z "$HP_ARCH" && HP_ARCH=hppa fi ;; esac if [ ${HP_ARCH} = hppa2.0w ] then eval $set_cc_for_build # hppa2.0w-hp-hpux* has a 64-bit kernel and a compiler generating # 32-bit code. hppa64-hp-hpux* has the same kernel and a compiler # generating 64-bit code. 
GNU and HP use different nomenclature: # # $ CC_FOR_BUILD=cc ./config.guess # => hppa2.0w-hp-hpux11.23 # $ CC_FOR_BUILD="cc +DA2.0w" ./config.guess # => hppa64-hp-hpux11.23 if echo __LP64__ | (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | grep -q __LP64__ then HP_ARCH=hppa2.0w else HP_ARCH=hppa64 fi fi echo ${HP_ARCH}-hp-hpux${HPUX_REV} exit ;; ia64:HP-UX:*:*) HPUX_REV=`echo ${UNAME_RELEASE}|sed -e 's/[^.]*.[0B]*//'` echo ia64-hp-hpux${HPUX_REV} exit ;; 3050*:HI-UX:*:*) eval $set_cc_for_build sed 's/^ //' << EOF >$dummy.c #include int main () { long cpu = sysconf (_SC_CPU_VERSION); /* The order matters, because CPU_IS_HP_MC68K erroneously returns true for CPU_PA_RISC1_0. CPU_IS_PA_RISC returns correct results, however. */ if (CPU_IS_PA_RISC (cpu)) { switch (cpu) { case CPU_PA_RISC1_0: puts ("hppa1.0-hitachi-hiuxwe2"); break; case CPU_PA_RISC1_1: puts ("hppa1.1-hitachi-hiuxwe2"); break; case CPU_PA_RISC2_0: puts ("hppa2.0-hitachi-hiuxwe2"); break; default: puts ("hppa-hitachi-hiuxwe2"); break; } } else if (CPU_IS_HP_MC68K (cpu)) puts ("m68k-hitachi-hiuxwe2"); else puts ("unknown-hitachi-hiuxwe2"); exit (0); } EOF $CC_FOR_BUILD -o $dummy $dummy.c && SYSTEM_NAME=`$dummy` && { echo "$SYSTEM_NAME"; exit; } echo unknown-hitachi-hiuxwe2 exit ;; 9000/7??:4.3bsd:*:* | 9000/8?[79]:4.3bsd:*:* ) echo hppa1.1-hp-bsd exit ;; 9000/8??:4.3bsd:*:*) echo hppa1.0-hp-bsd exit ;; *9??*:MPE/iX:*:* | *3000*:MPE/iX:*:*) echo hppa1.0-hp-mpeix exit ;; hp7??:OSF1:*:* | hp8?[79]:OSF1:*:* ) echo hppa1.1-hp-osf exit ;; hp8??:OSF1:*:*) echo hppa1.0-hp-osf exit ;; i*86:OSF1:*:*) if [ -x /usr/sbin/sysversion ] ; then echo ${UNAME_MACHINE}-unknown-osf1mk else echo ${UNAME_MACHINE}-unknown-osf1 fi exit ;; parisc*:Lites*:*:*) echo hppa1.1-hp-lites exit ;; C1*:ConvexOS:*:* | convex:ConvexOS:C1*:*) echo c1-convex-bsd exit ;; C2*:ConvexOS:*:* | convex:ConvexOS:C2*:*) if getsysinfo -f scalar_acc then echo c32-convex-bsd else echo c2-convex-bsd fi exit ;; C34*:ConvexOS:*:* | convex:ConvexOS:C34*:*) echo 
c34-convex-bsd exit ;; C38*:ConvexOS:*:* | convex:ConvexOS:C38*:*) echo c38-convex-bsd exit ;; C4*:ConvexOS:*:* | convex:ConvexOS:C4*:*) echo c4-convex-bsd exit ;; CRAY*Y-MP:*:*:*) echo ymp-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' exit ;; CRAY*[A-Z]90:*:*:*) echo ${UNAME_MACHINE}-cray-unicos${UNAME_RELEASE} \ | sed -e 's/CRAY.*\([A-Z]90\)/\1/' \ -e y/ABCDEFGHIJKLMNOPQRSTUVWXYZ/abcdefghijklmnopqrstuvwxyz/ \ -e 's/\.[^.]*$/.X/' exit ;; CRAY*TS:*:*:*) echo t90-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' exit ;; CRAY*T3E:*:*:*) echo alphaev5-cray-unicosmk${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' exit ;; CRAY*SV1:*:*:*) echo sv1-cray-unicos${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' exit ;; *:UNICOS/mp:*:*) echo craynv-cray-unicosmp${UNAME_RELEASE} | sed -e 's/\.[^.]*$/.X/' exit ;; F30[01]:UNIX_System_V:*:* | F700:UNIX_System_V:*:*) FUJITSU_PROC=`uname -m | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz` FUJITSU_SYS=`uname -p | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/\///'` FUJITSU_REL=`echo ${UNAME_RELEASE} | sed -e 's/ /_/'` echo "${FUJITSU_PROC}-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}" exit ;; 5000:UNIX_System_V:4.*:*) FUJITSU_SYS=`uname -p | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/\///'` FUJITSU_REL=`echo ${UNAME_RELEASE} | tr ABCDEFGHIJKLMNOPQRSTUVWXYZ abcdefghijklmnopqrstuvwxyz | sed -e 's/ /_/'` echo "sparc-fujitsu-${FUJITSU_SYS}${FUJITSU_REL}" exit ;; i*86:BSD/386:*:* | i*86:BSD/OS:*:* | *:Ascend\ Embedded/OS:*:*) echo ${UNAME_MACHINE}-pc-bsdi${UNAME_RELEASE} exit ;; sparc*:BSD/OS:*:*) echo sparc-unknown-bsdi${UNAME_RELEASE} exit ;; *:BSD/OS:*:*) echo ${UNAME_MACHINE}-unknown-bsdi${UNAME_RELEASE} exit ;; *:FreeBSD:*:*) UNAME_PROCESSOR=`/usr/bin/uname -p` case ${UNAME_PROCESSOR} in amd64) echo x86_64-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;; *) echo ${UNAME_PROCESSOR}-unknown-freebsd`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` ;; esac exit ;; i*:CYGWIN*:*) 
echo ${UNAME_MACHINE}-pc-cygwin exit ;; *:MINGW64*:*) echo ${UNAME_MACHINE}-pc-mingw64 exit ;; *:MINGW*:*) echo ${UNAME_MACHINE}-pc-mingw32 exit ;; *:MSYS*:*) echo ${UNAME_MACHINE}-pc-msys exit ;; i*:windows32*:*) # uname -m includes "-pc" on this system. echo ${UNAME_MACHINE}-mingw32 exit ;; i*:PW*:*) echo ${UNAME_MACHINE}-pc-pw32 exit ;; *:Interix*:*) case ${UNAME_MACHINE} in x86) echo i586-pc-interix${UNAME_RELEASE} exit ;; authenticamd | genuineintel | EM64T) echo x86_64-unknown-interix${UNAME_RELEASE} exit ;; IA64) echo ia64-unknown-interix${UNAME_RELEASE} exit ;; esac ;; [345]86:Windows_95:* | [345]86:Windows_98:* | [345]86:Windows_NT:*) echo i${UNAME_MACHINE}-pc-mks exit ;; 8664:Windows_NT:*) echo x86_64-pc-mks exit ;; i*:Windows_NT*:* | Pentium*:Windows_NT*:*) # How do we know it's Interix rather than the generic POSIX subsystem? # It also conflicts with pre-2.0 versions of AT&T UWIN. Should we # UNAME_MACHINE based on the output of uname instead of i386? echo i586-pc-interix exit ;; i*:UWIN*:*) echo ${UNAME_MACHINE}-pc-uwin exit ;; amd64:CYGWIN*:*:* | x86_64:CYGWIN*:*:*) echo x86_64-unknown-cygwin exit ;; p*:CYGWIN*:*) echo powerpcle-unknown-cygwin exit ;; prep*:SunOS:5.*:*) echo powerpcle-unknown-solaris2`echo ${UNAME_RELEASE}|sed -e 's/[^.]*//'` exit ;; *:GNU:*:*) # the GNU system echo `echo ${UNAME_MACHINE}|sed -e 's,[-/].*$,,'`-unknown-${LIBC}`echo ${UNAME_RELEASE}|sed -e 's,/.*$,,'` exit ;; *:GNU/*:*:*) # other systems with GNU libc and userland echo ${UNAME_MACHINE}-unknown-`echo ${UNAME_SYSTEM} | sed 's,^[^/]*/,,' | tr "[:upper:]" "[:lower:]"``echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'`-${LIBC} exit ;; i*86:Minix:*:*) echo ${UNAME_MACHINE}-pc-minix exit ;; aarch64:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; aarch64_be:Linux:*:*) UNAME_MACHINE=aarch64_be echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; alpha:Linux:*:*) case `sed -n '/^cpu model/s/^.*: \(.*\)/\1/p' < /proc/cpuinfo` in EV5) UNAME_MACHINE=alphaev5 ;; EV56) 
UNAME_MACHINE=alphaev56 ;; PCA56) UNAME_MACHINE=alphapca56 ;; PCA57) UNAME_MACHINE=alphapca56 ;; EV6) UNAME_MACHINE=alphaev6 ;; EV67) UNAME_MACHINE=alphaev67 ;; EV68*) UNAME_MACHINE=alphaev68 ;; esac objdump --private-headers /bin/sh | grep -q ld.so.1 if test "$?" = 0 ; then LIBC=gnulibc1 ; fi echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; arc:Linux:*:* | arceb:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; arm*:Linux:*:*) eval $set_cc_for_build if echo __ARM_EABI__ | $CC_FOR_BUILD -E - 2>/dev/null \ | grep -q __ARM_EABI__ then echo ${UNAME_MACHINE}-unknown-linux-${LIBC} else if echo __ARM_PCS_VFP | $CC_FOR_BUILD -E - 2>/dev/null \ | grep -q __ARM_PCS_VFP then echo ${UNAME_MACHINE}-unknown-linux-${LIBC}eabi else echo ${UNAME_MACHINE}-unknown-linux-${LIBC}eabihf fi fi exit ;; avr32*:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; cris:Linux:*:*) echo ${UNAME_MACHINE}-axis-linux-${LIBC} exit ;; crisv32:Linux:*:*) echo ${UNAME_MACHINE}-axis-linux-${LIBC} exit ;; e2k:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; frv:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; hexagon:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; i*86:Linux:*:*) echo ${UNAME_MACHINE}-pc-linux-${LIBC} exit ;; ia64:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; k1om:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; m32r*:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; m68*:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; mips:Linux:*:* | mips64:Linux:*:*) eval $set_cc_for_build sed 's/^ //' << EOF >$dummy.c #undef CPU #undef ${UNAME_MACHINE} #undef ${UNAME_MACHINE}el #if defined(__MIPSEL__) || defined(__MIPSEL) || defined(_MIPSEL) || defined(MIPSEL) CPU=${UNAME_MACHINE}el #else #if defined(__MIPSEB__) || defined(__MIPSEB) || defined(_MIPSEB) || defined(MIPSEB) CPU=${UNAME_MACHINE} #else CPU= #endif #endif EOF eval `$CC_FOR_BUILD -E $dummy.c 2>/dev/null | grep 
'^CPU'` test x"${CPU}" != x && { echo "${CPU}-unknown-linux-${LIBC}"; exit; } ;; mips64el:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; openrisc*:Linux:*:*) echo or1k-unknown-linux-${LIBC} exit ;; or32:Linux:*:* | or1k*:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; padre:Linux:*:*) echo sparc-unknown-linux-${LIBC} exit ;; parisc64:Linux:*:* | hppa64:Linux:*:*) echo hppa64-unknown-linux-${LIBC} exit ;; parisc:Linux:*:* | hppa:Linux:*:*) # Look for CPU level case `grep '^cpu[^a-z]*:' /proc/cpuinfo 2>/dev/null | cut -d' ' -f2` in PA7*) echo hppa1.1-unknown-linux-${LIBC} ;; PA8*) echo hppa2.0-unknown-linux-${LIBC} ;; *) echo hppa-unknown-linux-${LIBC} ;; esac exit ;; ppc64:Linux:*:*) echo powerpc64-unknown-linux-${LIBC} exit ;; ppc:Linux:*:*) echo powerpc-unknown-linux-${LIBC} exit ;; ppc64le:Linux:*:*) echo powerpc64le-unknown-linux-${LIBC} exit ;; ppcle:Linux:*:*) echo powerpcle-unknown-linux-${LIBC} exit ;; riscv32:Linux:*:* | riscv64:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; s390:Linux:*:* | s390x:Linux:*:*) echo ${UNAME_MACHINE}-ibm-linux-${LIBC} exit ;; sh64*:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; sh*:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; sparc:Linux:*:* | sparc64:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; tile*:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; vax:Linux:*:*) echo ${UNAME_MACHINE}-dec-linux-${LIBC} exit ;; x86_64:Linux:*:*) echo ${UNAME_MACHINE}-pc-linux-${LIBC} exit ;; xtensa*:Linux:*:*) echo ${UNAME_MACHINE}-unknown-linux-${LIBC} exit ;; i*86:DYNIX/ptx:4*:*) # ptx 4.0 does uname -s correctly, with DYNIX/ptx in there. # earlier versions are messed up and put the nodename in both # sysname and nodename. echo i386-sequent-sysv4 exit ;; i*86:UNIX_SV:4.2MP:2.*) # Unixware is an offshoot of SVR4, but it has its own version # number series starting with 2... 
# I am not positive that other SVR4 systems won't match this, # I just have to hope. -- rms. # Use sysv4.2uw... so that sysv4* matches it. echo ${UNAME_MACHINE}-pc-sysv4.2uw${UNAME_VERSION} exit ;; i*86:OS/2:*:*) # If we were able to find `uname', then EMX Unix compatibility # is probably installed. echo ${UNAME_MACHINE}-pc-os2-emx exit ;; i*86:XTS-300:*:STOP) echo ${UNAME_MACHINE}-unknown-stop exit ;; i*86:atheos:*:*) echo ${UNAME_MACHINE}-unknown-atheos exit ;; i*86:syllable:*:*) echo ${UNAME_MACHINE}-pc-syllable exit ;; i*86:LynxOS:2.*:* | i*86:LynxOS:3.[01]*:* | i*86:LynxOS:4.[02]*:*) echo i386-unknown-lynxos${UNAME_RELEASE} exit ;; i*86:*DOS:*:*) echo ${UNAME_MACHINE}-pc-msdosdjgpp exit ;; i*86:*:4.*:* | i*86:SYSTEM_V:4.*:*) UNAME_REL=`echo ${UNAME_RELEASE} | sed 's/\/MP$//'` if grep Novell /usr/include/link.h >/dev/null 2>/dev/null; then echo ${UNAME_MACHINE}-univel-sysv${UNAME_REL} else echo ${UNAME_MACHINE}-pc-sysv${UNAME_REL} fi exit ;; i*86:*:5:[678]*) # UnixWare 7.x, OpenUNIX and OpenServer 6. case `/bin/uname -X | grep "^Machine"` in *486*) UNAME_MACHINE=i486 ;; *Pentium) UNAME_MACHINE=i586 ;; *Pent*|*Celeron) UNAME_MACHINE=i686 ;; esac echo ${UNAME_MACHINE}-unknown-sysv${UNAME_RELEASE}${UNAME_SYSTEM}${UNAME_VERSION} exit ;; i*86:*:3.2:*) if test -f /usr/options/cb.name; then UNAME_REL=`sed -n 's/.*Version //p' /dev/null >/dev/null ; then UNAME_REL=`(/bin/uname -X|grep Release|sed -e 's/.*= //')` (/bin/uname -X|grep i80486 >/dev/null) && UNAME_MACHINE=i486 (/bin/uname -X|grep '^Machine.*Pentium' >/dev/null) \ && UNAME_MACHINE=i586 (/bin/uname -X|grep '^Machine.*Pent *II' >/dev/null) \ && UNAME_MACHINE=i686 (/bin/uname -X|grep '^Machine.*Pentium Pro' >/dev/null) \ && UNAME_MACHINE=i686 echo ${UNAME_MACHINE}-pc-sco$UNAME_REL else echo ${UNAME_MACHINE}-pc-sysv32 fi exit ;; pc:*:*:*) # Left here for compatibility: # uname -m prints for DJGPP always 'pc', but it prints nothing about # the processor, so we play safe by assuming i586. 
# Note: whatever this is, it MUST be the same as what config.sub # prints for the "djgpp" host, or else GDB configure will decide that # this is a cross-build. echo i586-pc-msdosdjgpp exit ;; Intel:Mach:3*:*) echo i386-pc-mach3 exit ;; paragon:*:*:*) echo i860-intel-osf1 exit ;; i860:*:4.*:*) # i860-SVR4 if grep Stardent /usr/include/sys/uadmin.h >/dev/null 2>&1 ; then echo i860-stardent-sysv${UNAME_RELEASE} # Stardent Vistra i860-SVR4 else # Add other i860-SVR4 vendors below as they are discovered. echo i860-unknown-sysv${UNAME_RELEASE} # Unknown i860-SVR4 fi exit ;; mini*:CTIX:SYS*5:*) # "miniframe" echo m68010-convergent-sysv exit ;; mc68k:UNIX:SYSTEM5:3.51m) echo m68k-convergent-sysv exit ;; M680?0:D-NIX:5.3:*) echo m68k-diab-dnix exit ;; M68*:*:R3V[5678]*:*) test -r /sysV68 && { echo 'm68k-motorola-sysv'; exit; } ;; 3[345]??:*:4.0:3.0 | 3[34]??A:*:4.0:3.0 | 3[34]??,*:*:4.0:3.0 | 3[34]??/*:*:4.0:3.0 | 4400:*:4.0:3.0 | 4850:*:4.0:3.0 | SKA40:*:4.0:3.0 | SDS2:*:4.0:3.0 | SHG2:*:4.0:3.0 | S7501*:*:4.0:3.0) OS_REL='' test -r /etc/.relid \ && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid` /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ && { echo i486-ncr-sysv4.3${OS_REL}; exit; } /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \ && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;; 3[34]??:*:4.0:* | 3[34]??,*:*:4.0:*) /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ && { echo i486-ncr-sysv4; exit; } ;; NCR*:*:4.2:* | MPRAS*:*:4.2:*) OS_REL='.3' test -r /etc/.relid \ && OS_REL=.`sed -n 's/[^ ]* [^ ]* \([0-9][0-9]\).*/\1/p' < /etc/.relid` /bin/uname -p 2>/dev/null | grep 86 >/dev/null \ && { echo i486-ncr-sysv4.3${OS_REL}; exit; } /bin/uname -p 2>/dev/null | /bin/grep entium >/dev/null \ && { echo i586-ncr-sysv4.3${OS_REL}; exit; } /bin/uname -p 2>/dev/null | /bin/grep pteron >/dev/null \ && { echo i586-ncr-sysv4.3${OS_REL}; exit; } ;; m68*:LynxOS:2.*:* | m68*:LynxOS:3.0*:*) echo m68k-unknown-lynxos${UNAME_RELEASE} exit ;; 
mc68030:UNIX_System_V:4.*:*) echo m68k-atari-sysv4 exit ;; TSUNAMI:LynxOS:2.*:*) echo sparc-unknown-lynxos${UNAME_RELEASE} exit ;; rs6000:LynxOS:2.*:*) echo rs6000-unknown-lynxos${UNAME_RELEASE} exit ;; PowerPC:LynxOS:2.*:* | PowerPC:LynxOS:3.[01]*:* | PowerPC:LynxOS:4.[02]*:*) echo powerpc-unknown-lynxos${UNAME_RELEASE} exit ;; SM[BE]S:UNIX_SV:*:*) echo mips-dde-sysv${UNAME_RELEASE} exit ;; RM*:ReliantUNIX-*:*:*) echo mips-sni-sysv4 exit ;; RM*:SINIX-*:*:*) echo mips-sni-sysv4 exit ;; *:SINIX-*:*:*) if uname -p 2>/dev/null >/dev/null ; then UNAME_MACHINE=`(uname -p) 2>/dev/null` echo ${UNAME_MACHINE}-sni-sysv4 else echo ns32k-sni-sysv fi exit ;; PENTIUM:*:4.0*:*) # Unisys `ClearPath HMP IX 4000' SVR4/MP effort # says echo i586-unisys-sysv4 exit ;; *:UNIX_System_V:4*:FTX*) # From Gerald Hewes . # How about differentiating between stratus architectures? -djm echo hppa1.1-stratus-sysv4 exit ;; *:*:*:FTX*) # From seanf@swdc.stratus.com. echo i860-stratus-sysv4 exit ;; i*86:VOS:*:*) # From Paul.Green@stratus.com. echo ${UNAME_MACHINE}-stratus-vos exit ;; *:VOS:*:*) # From Paul.Green@stratus.com. echo hppa1.1-stratus-vos exit ;; mc68*:A/UX:*:*) echo m68k-apple-aux${UNAME_RELEASE} exit ;; news*:NEWS-OS:6*:*) echo mips-sony-newsos6 exit ;; R[34]000:*System_V*:*:* | R4000:UNIX_SYSV:*:* | R*000:UNIX_SV:*:*) if [ -d /usr/nec ]; then echo mips-nec-sysv${UNAME_RELEASE} else echo mips-unknown-sysv${UNAME_RELEASE} fi exit ;; BeBox:BeOS:*:*) # BeOS running on hardware made by Be, PPC only. echo powerpc-be-beos exit ;; BeMac:BeOS:*:*) # BeOS running on Mac or Mac clone, PPC only. echo powerpc-apple-beos exit ;; BePC:BeOS:*:*) # BeOS running on Intel PC compatible. echo i586-pc-beos exit ;; BePC:Haiku:*:*) # Haiku running on Intel PC compatible. 
echo i586-pc-haiku exit ;; x86_64:Haiku:*:*) echo x86_64-unknown-haiku exit ;; SX-4:SUPER-UX:*:*) echo sx4-nec-superux${UNAME_RELEASE} exit ;; SX-5:SUPER-UX:*:*) echo sx5-nec-superux${UNAME_RELEASE} exit ;; SX-6:SUPER-UX:*:*) echo sx6-nec-superux${UNAME_RELEASE} exit ;; SX-7:SUPER-UX:*:*) echo sx7-nec-superux${UNAME_RELEASE} exit ;; SX-8:SUPER-UX:*:*) echo sx8-nec-superux${UNAME_RELEASE} exit ;; SX-8R:SUPER-UX:*:*) echo sx8r-nec-superux${UNAME_RELEASE} exit ;; SX-ACE:SUPER-UX:*:*) echo sxace-nec-superux${UNAME_RELEASE} exit ;; Power*:Rhapsody:*:*) echo powerpc-apple-rhapsody${UNAME_RELEASE} exit ;; *:Rhapsody:*:*) echo ${UNAME_MACHINE}-apple-rhapsody${UNAME_RELEASE} exit ;; *:Darwin:*:*) UNAME_PROCESSOR=`uname -p` || UNAME_PROCESSOR=unknown eval $set_cc_for_build if test "$UNAME_PROCESSOR" = unknown ; then UNAME_PROCESSOR=powerpc fi if test `echo "$UNAME_RELEASE" | sed -e 's/\..*//'` -le 10 ; then if [ "$CC_FOR_BUILD" != no_compiler_found ]; then if (echo '#ifdef __LP64__'; echo IS_64BIT_ARCH; echo '#endif') | \ (CCOPTS="" $CC_FOR_BUILD -E - 2>/dev/null) | \ grep IS_64BIT_ARCH >/dev/null then case $UNAME_PROCESSOR in i386) UNAME_PROCESSOR=x86_64 ;; powerpc) UNAME_PROCESSOR=powerpc64 ;; esac fi fi elif test "$UNAME_PROCESSOR" = i386 ; then # Avoid executing cc on OS X 10.9, as it ships with a stub # that puts up a graphical alert prompting to install # developer tools. Any system running Mac OS X 10.7 or # later (Darwin 11 and later) is required to have a 64-bit # processor. This is not true of the ARM version of Darwin # that Apple uses in portable devices. 
UNAME_PROCESSOR=x86_64 fi echo ${UNAME_PROCESSOR}-apple-darwin${UNAME_RELEASE} exit ;; *:procnto*:*:* | *:QNX:[0123456789]*:*) UNAME_PROCESSOR=`uname -p` if test "$UNAME_PROCESSOR" = x86; then UNAME_PROCESSOR=i386 UNAME_MACHINE=pc fi echo ${UNAME_PROCESSOR}-${UNAME_MACHINE}-nto-qnx${UNAME_RELEASE} exit ;; *:QNX:*:4*) echo i386-pc-qnx exit ;; NEO-?:NONSTOP_KERNEL:*:*) echo neo-tandem-nsk${UNAME_RELEASE} exit ;; NSE-*:NONSTOP_KERNEL:*:*) echo nse-tandem-nsk${UNAME_RELEASE} exit ;; NSR-?:NONSTOP_KERNEL:*:*) echo nsr-tandem-nsk${UNAME_RELEASE} exit ;; *:NonStop-UX:*:*) echo mips-compaq-nonstopux exit ;; BS2000:POSIX*:*:*) echo bs2000-siemens-sysv exit ;; DS/*:UNIX_System_V:*:*) echo ${UNAME_MACHINE}-${UNAME_SYSTEM}-${UNAME_RELEASE} exit ;; *:Plan9:*:*) # "uname -m" is not consistent, so use $cputype instead. 386 # is converted to i386 for consistency with other x86 # operating systems. if test "$cputype" = 386; then UNAME_MACHINE=i386 else UNAME_MACHINE="$cputype" fi echo ${UNAME_MACHINE}-unknown-plan9 exit ;; *:TOPS-10:*:*) echo pdp10-unknown-tops10 exit ;; *:TENEX:*:*) echo pdp10-unknown-tenex exit ;; KS10:TOPS-20:*:* | KL10:TOPS-20:*:* | TYPE4:TOPS-20:*:*) echo pdp10-dec-tops20 exit ;; XKL-1:TOPS-20:*:* | TYPE5:TOPS-20:*:*) echo pdp10-xkl-tops20 exit ;; *:TOPS-20:*:*) echo pdp10-unknown-tops20 exit ;; *:ITS:*:*) echo pdp10-unknown-its exit ;; SEI:*:*:SEIUX) echo mips-sei-seiux${UNAME_RELEASE} exit ;; *:DragonFly:*:*) echo ${UNAME_MACHINE}-unknown-dragonfly`echo ${UNAME_RELEASE}|sed -e 's/[-(].*//'` exit ;; *:*VMS:*:*) UNAME_MACHINE=`(uname -p) 2>/dev/null` case "${UNAME_MACHINE}" in A*) echo alpha-dec-vms ; exit ;; I*) echo ia64-dec-vms ; exit ;; V*) echo vax-dec-vms ; exit ;; esac ;; *:XENIX:*:SysV) echo i386-pc-xenix exit ;; i*86:skyos:*:*) echo ${UNAME_MACHINE}-pc-skyos`echo ${UNAME_RELEASE} | sed -e 's/ .*$//'` exit ;; i*86:rdos:*:*) echo ${UNAME_MACHINE}-pc-rdos exit ;; i*86:AROS:*:*) echo ${UNAME_MACHINE}-pc-aros exit ;; x86_64:VMkernel:*:*) echo 
${UNAME_MACHINE}-unknown-esx exit ;; amd64:Isilon\ OneFS:*:*) echo x86_64-unknown-onefs exit ;; esac cat >&2 </dev/null || echo unknown` uname -r = `(uname -r) 2>/dev/null || echo unknown` uname -s = `(uname -s) 2>/dev/null || echo unknown` uname -v = `(uname -v) 2>/dev/null || echo unknown` /usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null` /bin/uname -X = `(/bin/uname -X) 2>/dev/null` hostinfo = `(hostinfo) 2>/dev/null` /bin/universe = `(/bin/universe) 2>/dev/null` /usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null` /bin/arch = `(/bin/arch) 2>/dev/null` /usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null` /usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null` UNAME_MACHINE = ${UNAME_MACHINE} UNAME_RELEASE = ${UNAME_RELEASE} UNAME_SYSTEM = ${UNAME_SYSTEM} UNAME_VERSION = ${UNAME_VERSION} EOF exit 1 # Local variables: # eval: (add-hook 'write-file-hooks 'time-stamp) # time-stamp-start: "timestamp='" # time-stamp-format: "%:y-%02m-%02d" # time-stamp-end: "'" # End: dar-2.6.8/depcomp0000755000175000017520000005601613042165456010613 00000000000000#! /bin/sh # depcomp - compile a program generating dependencies as side-effects scriptversion=2013-05-30.07; # UTC # Copyright (C) 1999-2014 Free Software Foundation, Inc. # This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2, or (at your option) # any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . 
# As a special exception to the GNU General Public License, if you # distribute this file as part of a program that contains a # configuration script generated by Autoconf, you may include it under # the same distribution terms that you use for the rest of that program. # Originally written by Alexandre Oliva . case $1 in '') echo "$0: No command. Try '$0 --help' for more information." 1>&2 exit 1; ;; -h | --h*) cat <<\EOF Usage: depcomp [--help] [--version] PROGRAM [ARGS] Run PROGRAMS ARGS to compile a file, generating dependencies as side-effects. Environment variables: depmode Dependency tracking mode. source Source file read by 'PROGRAMS ARGS'. object Object file output by 'PROGRAMS ARGS'. DEPDIR directory where to store dependencies. depfile Dependency file to output. tmpdepfile Temporary file to use when outputting dependencies. libtool Whether libtool is used (yes/no). Report bugs to . EOF exit $? ;; -v | --v*) echo "depcomp $scriptversion" exit $? ;; esac # Get the directory component of the given path, and save it in the # global variables '$dir'. Note that this directory component will # be either empty or ending with a '/' character. This is deliberate. set_dir_from () { case $1 in */*) dir=`echo "$1" | sed -e 's|/[^/]*$|/|'`;; *) dir=;; esac } # Get the suffix-stripped basename of the given path, and save it the # global variable '$base'. set_base_from () { base=`echo "$1" | sed -e 's|^.*/||' -e 's/\.[^.]*$//'` } # If no dependency file was actually created by the compiler invocation, # we still have to create a dummy depfile, to avoid errors with the # Makefile "include basename.Plo" scheme. make_dummy_depfile () { echo "#dummy" > "$depfile" } # Factor out some common post-processing of the generated depfile. # Requires the auxiliary global variable '$tmpdepfile' to be set. aix_post_process_depfile () { # If the compiler actually managed to produce a dependency file, # post-process it. 
if test -f "$tmpdepfile"; then # Each line is of the form 'foo.o: dependency.h'. # Do two passes, one to just change these to # $object: dependency.h # and one to simply output # dependency.h: # which is needed to avoid the deleted-header problem. { sed -e "s,^.*\.[$lower]*:,$object:," < "$tmpdepfile" sed -e "s,^.*\.[$lower]*:[$tab ]*,," -e 's,$,:,' < "$tmpdepfile" } > "$depfile" rm -f "$tmpdepfile" else make_dummy_depfile fi } # A tabulation character. tab=' ' # A newline character. nl=' ' # Character ranges might be problematic outside the C locale. # These definitions help. upper=ABCDEFGHIJKLMNOPQRSTUVWXYZ lower=abcdefghijklmnopqrstuvwxyz digits=0123456789 alpha=${upper}${lower} if test -z "$depmode" || test -z "$source" || test -z "$object"; then echo "depcomp: Variables source, object and depmode must be set" 1>&2 exit 1 fi # Dependencies for sub/bar.o or sub/bar.obj go into sub/.deps/bar.Po. depfile=${depfile-`echo "$object" | sed 's|[^\\/]*$|'${DEPDIR-.deps}'/&|;s|\.\([^.]*\)$|.P\1|;s|Pobj$|Po|'`} tmpdepfile=${tmpdepfile-`echo "$depfile" | sed 's/\.\([^.]*\)$/.T\1/'`} rm -f "$tmpdepfile" # Avoid interferences from the environment. gccflag= dashmflag= # Some modes work just like other modes, but use different flags. We # parameterize here, but still list the modes in the big case below, # to make depend.m4 easier to write. Note that we *cannot* use a case # here, because this file can only contain one case statement. if test "$depmode" = hp; then # HP compiler uses -M and no extra arg. gccflag=-M depmode=gcc fi if test "$depmode" = dashXmstdout; then # This is just like dashmstdout with a different argument. dashmflag=-xM depmode=dashmstdout fi cygpath_u="cygpath -u -f -" if test "$depmode" = msvcmsys; then # This is just like msvisualcpp but w/o cygpath translation. 
# Just convert the backslash-escaped backslashes to single forward # slashes to satisfy depend.m4 cygpath_u='sed s,\\\\,/,g' depmode=msvisualcpp fi if test "$depmode" = msvc7msys; then # This is just like msvc7 but w/o cygpath translation. # Just convert the backslash-escaped backslashes to single forward # slashes to satisfy depend.m4 cygpath_u='sed s,\\\\,/,g' depmode=msvc7 fi if test "$depmode" = xlc; then # IBM C/C++ Compilers xlc/xlC can output gcc-like dependency information. gccflag=-qmakedep=gcc,-MF depmode=gcc fi case "$depmode" in gcc3) ## gcc 3 implements dependency tracking that does exactly what ## we want. Yay! Note: for some reason libtool 1.4 doesn't like ## it if -MD -MP comes after the -MF stuff. Hmm. ## Unfortunately, FreeBSD c89 acceptance of flags depends upon ## the command line argument order; so add the flags where they ## appear in depend2.am. Note that the slowdown incurred here ## affects only configure: in makefiles, %FASTDEP% shortcuts this. for arg do case $arg in -c) set fnord "$@" -MT "$object" -MD -MP -MF "$tmpdepfile" "$arg" ;; *) set fnord "$@" "$arg" ;; esac shift # fnord shift # $arg done "$@" stat=$? if test $stat -ne 0; then rm -f "$tmpdepfile" exit $stat fi mv "$tmpdepfile" "$depfile" ;; gcc) ## Note that this doesn't just cater to obsosete pre-3.x GCC compilers. ## but also to in-use compilers like IMB xlc/xlC and the HP C compiler. ## (see the conditional assignment to $gccflag above). ## There are various ways to get dependency output from gcc. Here's ## why we pick this rather obscure method: ## - Don't want to use -MD because we'd like the dependencies to end ## up in a subdir. Having to rename by hand is ugly. ## (We might end up doing this anyway to support other compilers.) ## - The DEPENDENCIES_OUTPUT environment variable makes gcc act like ## -MM, not -M (despite what the docs say). Also, it might not be ## supported by the other compilers which use the 'gcc' depmode. 
## - Using -M directly means running the compiler twice (even worse ## than renaming). if test -z "$gccflag"; then gccflag=-MD, fi "$@" -Wp,"$gccflag$tmpdepfile" stat=$? if test $stat -ne 0; then rm -f "$tmpdepfile" exit $stat fi rm -f "$depfile" echo "$object : \\" > "$depfile" # The second -e expression handles DOS-style file names with drive # letters. sed -e 's/^[^:]*: / /' \ -e 's/^['$alpha']:\/[^:]*: / /' < "$tmpdepfile" >> "$depfile" ## This next piece of magic avoids the "deleted header file" problem. ## The problem is that when a header file which appears in a .P file ## is deleted, the dependency causes make to die (because there is ## typically no way to rebuild the header). We avoid this by adding ## dummy dependencies for each header file. Too bad gcc doesn't do ## this for us directly. ## Some versions of gcc put a space before the ':'. On the theory ## that the space means something, we add a space to the output as ## well. hp depmode also adds that space, but also prefixes the VPATH ## to the object. Take care to not repeat it in the output. ## Some versions of the HPUX 10.20 sed can't process this invocation ## correctly. Breaking it into two sed invocations is a workaround. tr ' ' "$nl" < "$tmpdepfile" \ | sed -e 's/^\\$//' -e '/^$/d' -e "s|.*$object$||" -e '/:$/d' \ | sed -e 's/$/ :/' >> "$depfile" rm -f "$tmpdepfile" ;; hp) # This case exists only to let depend.m4 do its work. It works by # looking at the text of this script. This case will never be run, # since it is checked for above. exit 1 ;; sgi) if test "$libtool" = yes; then "$@" "-Wp,-MDupdate,$tmpdepfile" else "$@" -MDupdate "$tmpdepfile" fi stat=$? if test $stat -ne 0; then rm -f "$tmpdepfile" exit $stat fi rm -f "$depfile" if test -f "$tmpdepfile"; then # yes, the sourcefile depend on other files echo "$object : \\" > "$depfile" # Clip off the initial element (the dependent). 
Don't try to be # clever and replace this with sed code, as IRIX sed won't handle # lines with more than a fixed number of characters (4096 in # IRIX 6.2 sed, 8192 in IRIX 6.5). We also remove comment lines; # the IRIX cc adds comments like '#:fec' to the end of the # dependency line. tr ' ' "$nl" < "$tmpdepfile" \ | sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' \ | tr "$nl" ' ' >> "$depfile" echo >> "$depfile" # The second pass generates a dummy entry for each header file. tr ' ' "$nl" < "$tmpdepfile" \ | sed -e 's/^.*\.o://' -e 's/#.*$//' -e '/^$/ d' -e 's/$/:/' \ >> "$depfile" else make_dummy_depfile fi rm -f "$tmpdepfile" ;; xlc) # This case exists only to let depend.m4 do its work. It works by # looking at the text of this script. This case will never be run, # since it is checked for above. exit 1 ;; aix) # The C for AIX Compiler uses -M and outputs the dependencies # in a .u file. In older versions, this file always lives in the # current directory. Also, the AIX compiler puts '$object:' at the # start of each line; $object doesn't have directory information. # Version 6 uses the directory in both cases. set_dir_from "$object" set_base_from "$object" if test "$libtool" = yes; then tmpdepfile1=$dir$base.u tmpdepfile2=$base.u tmpdepfile3=$dir.libs/$base.u "$@" -Wc,-M else tmpdepfile1=$dir$base.u tmpdepfile2=$dir$base.u tmpdepfile3=$dir$base.u "$@" -M fi stat=$? if test $stat -ne 0; then rm -f "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3" exit $stat fi for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3" do test -f "$tmpdepfile" && break done aix_post_process_depfile ;; tcc) # tcc (Tiny C Compiler) understand '-MD -MF file' since version 0.9.26 # FIXME: That version still under development at the moment of writing. # Make that this statement remains true also for stable, released # versions. 
# It will wrap lines (doesn't matter whether long or short) with a # trailing '\', as in: # # foo.o : \ # foo.c \ # foo.h \ # # It will put a trailing '\' even on the last line, and will use leading # spaces rather than leading tabs (at least since its commit 0394caf7 # "Emit spaces for -MD"). "$@" -MD -MF "$tmpdepfile" stat=$? if test $stat -ne 0; then rm -f "$tmpdepfile" exit $stat fi rm -f "$depfile" # Each non-empty line is of the form 'foo.o : \' or ' dep.h \'. # We have to change lines of the first kind to '$object: \'. sed -e "s|.*:|$object :|" < "$tmpdepfile" > "$depfile" # And for each line of the second kind, we have to emit a 'dep.h:' # dummy dependency, to avoid the deleted-header problem. sed -n -e 's|^ *\(.*\) *\\$|\1:|p' < "$tmpdepfile" >> "$depfile" rm -f "$tmpdepfile" ;; ## The order of this option in the case statement is important, since the ## shell code in configure will try each of these formats in the order ## listed in this file. A plain '-MD' option would be understood by many ## compilers, so we must ensure this comes after the gcc and icc options. pgcc) # Portland's C compiler understands '-MD'. # Will always output deps to 'file.d' where file is the root name of the # source file under compilation, even if file resides in a subdirectory. # The object file name does not affect the name of the '.d' file. # pgcc 10.2 will output # foo.o: sub/foo.c sub/foo.h # and will wrap long lines using '\' : # foo.o: sub/foo.c ... \ # sub/foo.h ... \ # ... set_dir_from "$object" # Use the source, not the object, to determine the base name, since # that's sadly what pgcc will do too. set_base_from "$source" tmpdepfile=$base.d # For projects that build the same source file twice into different object # files, the pgcc approach of using the *source* file root name can cause # problems in parallel builds. Use a locking strategy to avoid stomping on # the same $tmpdepfile. lockdir=$base.d-lock trap " echo '$0: caught signal, cleaning up...' 
>&2 rmdir '$lockdir' exit 1 " 1 2 13 15 numtries=100 i=$numtries while test $i -gt 0; do # mkdir is a portable test-and-set. if mkdir "$lockdir" 2>/dev/null; then # This process acquired the lock. "$@" -MD stat=$? # Release the lock. rmdir "$lockdir" break else # If the lock is being held by a different process, wait # until the winning process is done or we timeout. while test -d "$lockdir" && test $i -gt 0; do sleep 1 i=`expr $i - 1` done fi i=`expr $i - 1` done trap - 1 2 13 15 if test $i -le 0; then echo "$0: failed to acquire lock after $numtries attempts" >&2 echo "$0: check lockdir '$lockdir'" >&2 exit 1 fi if test $stat -ne 0; then rm -f "$tmpdepfile" exit $stat fi rm -f "$depfile" # Each line is of the form `foo.o: dependent.h', # or `foo.o: dep1.h dep2.h \', or ` dep3.h dep4.h \'. # Do two passes, one to just change these to # `$object: dependent.h' and one to simply `dependent.h:'. sed "s,^[^:]*:,$object :," < "$tmpdepfile" > "$depfile" # Some versions of the HPUX 10.20 sed can't process this invocation # correctly. Breaking it into two sed invocations is a workaround. sed 's,^[^:]*: \(.*\)$,\1,;s/^\\$//;/^$/d;/:$/d' < "$tmpdepfile" \ | sed -e 's/$/ :/' >> "$depfile" rm -f "$tmpdepfile" ;; hp2) # The "hp" stanza above does not work with aCC (C++) and HP's ia64 # compilers, which have integrated preprocessors. The correct option # to use with these is +Maked; it writes dependencies to a file named # 'foo.d', which lands next to the object file, wherever that # happens to be. # Much of this is similar to the tru64 case; see comments there. set_dir_from "$object" set_base_from "$object" if test "$libtool" = yes; then tmpdepfile1=$dir$base.d tmpdepfile2=$dir.libs/$base.d "$@" -Wc,+Maked else tmpdepfile1=$dir$base.d tmpdepfile2=$dir$base.d "$@" +Maked fi stat=$? 
if test $stat -ne 0; then rm -f "$tmpdepfile1" "$tmpdepfile2" exit $stat fi for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2" do test -f "$tmpdepfile" && break done if test -f "$tmpdepfile"; then sed -e "s,^.*\.[$lower]*:,$object:," "$tmpdepfile" > "$depfile" # Add 'dependent.h:' lines. sed -ne '2,${ s/^ *// s/ \\*$// s/$/:/ p }' "$tmpdepfile" >> "$depfile" else make_dummy_depfile fi rm -f "$tmpdepfile" "$tmpdepfile2" ;; tru64) # The Tru64 compiler uses -MD to generate dependencies as a side # effect. 'cc -MD -o foo.o ...' puts the dependencies into 'foo.o.d'. # At least on Alpha/Redhat 6.1, Compaq CCC V6.2-504 seems to put # dependencies in 'foo.d' instead, so we check for that too. # Subdirectories are respected. set_dir_from "$object" set_base_from "$object" if test "$libtool" = yes; then # Libtool generates 2 separate objects for the 2 libraries. These # two compilations output dependencies in $dir.libs/$base.o.d and # in $dir$base.o.d. We have to check for both files, because # one of the two compilations can be disabled. We should prefer # $dir$base.o.d over $dir.libs/$base.o.d because the latter is # automatically cleaned when .libs/ is deleted, while ignoring # the former would cause a distcleancheck panic. tmpdepfile1=$dir$base.o.d # libtool 1.5 tmpdepfile2=$dir.libs/$base.o.d # Likewise. tmpdepfile3=$dir.libs/$base.d # Compaq CCC V6.2-504 "$@" -Wc,-MD else tmpdepfile1=$dir$base.d tmpdepfile2=$dir$base.d tmpdepfile3=$dir$base.d "$@" -MD fi stat=$? if test $stat -ne 0; then rm -f "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3" exit $stat fi for tmpdepfile in "$tmpdepfile1" "$tmpdepfile2" "$tmpdepfile3" do test -f "$tmpdepfile" && break done # Same post-processing that is required for AIX mode. aix_post_process_depfile ;; msvc7) if test "$libtool" = yes; then showIncludes=-Wc,-showIncludes else showIncludes=-showIncludes fi "$@" $showIncludes > "$tmpdepfile" stat=$? 
grep -v '^Note: including file: ' "$tmpdepfile" if test $stat -ne 0; then rm -f "$tmpdepfile" exit $stat fi rm -f "$depfile" echo "$object : \\" > "$depfile" # The first sed program below extracts the file names and escapes # backslashes for cygpath. The second sed program outputs the file # name when reading, but also accumulates all include files in the # hold buffer in order to output them again at the end. This only # works with sed implementations that can handle large buffers. sed < "$tmpdepfile" -n ' /^Note: including file: *\(.*\)/ { s//\1/ s/\\/\\\\/g p }' | $cygpath_u | sort -u | sed -n ' s/ /\\ /g s/\(.*\)/'"$tab"'\1 \\/p s/.\(.*\) \\/\1:/ H $ { s/.*/'"$tab"'/ G p }' >> "$depfile" echo >> "$depfile" # make sure the fragment doesn't end with a backslash rm -f "$tmpdepfile" ;; msvc7msys) # This case exists only to let depend.m4 do its work. It works by # looking at the text of this script. This case will never be run, # since it is checked for above. exit 1 ;; #nosideeffect) # This comment above is used by automake to tell side-effect # dependency tracking mechanisms from slower ones. dashmstdout) # Important note: in order to support this mode, a compiler *must* # always write the preprocessed file to stdout, regardless of -o. "$@" || exit $? # Remove the call to Libtool. if test "$libtool" = yes; then while test "X$1" != 'X--mode=compile'; do shift done shift fi # Remove '-o $object'. IFS=" " for arg do case $arg in -o) shift ;; $object) shift ;; *) set fnord "$@" "$arg" shift # fnord shift # $arg ;; esac done test -z "$dashmflag" && dashmflag=-M # Require at least two characters before searching for ':' # in the target name. This is to cope with DOS-style filenames: # a dependency such as 'c:/foo/bar' could be seen as target 'c' otherwise. 
"$@" $dashmflag | sed "s|^[$tab ]*[^:$tab ][^:][^:]*:[$tab ]*|$object: |" > "$tmpdepfile" rm -f "$depfile" cat < "$tmpdepfile" > "$depfile" # Some versions of the HPUX 10.20 sed can't process this sed invocation # correctly. Breaking it into two sed invocations is a workaround. tr ' ' "$nl" < "$tmpdepfile" \ | sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' \ | sed -e 's/$/ :/' >> "$depfile" rm -f "$tmpdepfile" ;; dashXmstdout) # This case only exists to satisfy depend.m4. It is never actually # run, as this mode is specially recognized in the preamble. exit 1 ;; makedepend) "$@" || exit $? # Remove any Libtool call if test "$libtool" = yes; then while test "X$1" != 'X--mode=compile'; do shift done shift fi # X makedepend shift cleared=no eat=no for arg do case $cleared in no) set ""; shift cleared=yes ;; esac if test $eat = yes; then eat=no continue fi case "$arg" in -D*|-I*) set fnord "$@" "$arg"; shift ;; # Strip any option that makedepend may not understand. Remove # the object too, otherwise makedepend will parse it as a source file. -arch) eat=yes ;; -*|$object) ;; *) set fnord "$@" "$arg"; shift ;; esac done obj_suffix=`echo "$object" | sed 's/^.*\././'` touch "$tmpdepfile" ${MAKEDEPEND-makedepend} -o"$obj_suffix" -f"$tmpdepfile" "$@" rm -f "$depfile" # makedepend may prepend the VPATH from the source file name to the object. # No need to regex-escape $object, excess matching of '.' is harmless. sed "s|^.*\($object *:\)|\1|" "$tmpdepfile" > "$depfile" # Some versions of the HPUX 10.20 sed can't process the last invocation # correctly. Breaking it into two sed invocations is a workaround. sed '1,2d' "$tmpdepfile" \ | tr ' ' "$nl" \ | sed -e 's/^\\$//' -e '/^$/d' -e '/:$/d' \ | sed -e 's/$/ :/' >> "$depfile" rm -f "$tmpdepfile" "$tmpdepfile".bak ;; cpp) # Important note: in order to support this mode, a compiler *must* # always write the preprocessed file to stdout. "$@" || exit $? # Remove the call to Libtool. 
if test "$libtool" = yes; then while test "X$1" != 'X--mode=compile'; do shift done shift fi # Remove '-o $object'. IFS=" " for arg do case $arg in -o) shift ;; $object) shift ;; *) set fnord "$@" "$arg" shift # fnord shift # $arg ;; esac done "$@" -E \ | sed -n -e '/^# [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' \ -e '/^#line [0-9][0-9]* "\([^"]*\)".*/ s:: \1 \\:p' \ | sed '$ s: \\$::' > "$tmpdepfile" rm -f "$depfile" echo "$object : \\" > "$depfile" cat < "$tmpdepfile" >> "$depfile" sed < "$tmpdepfile" '/^$/d;s/^ //;s/ \\$//;s/$/ :/' >> "$depfile" rm -f "$tmpdepfile" ;; msvisualcpp) # Important note: in order to support this mode, a compiler *must* # always write the preprocessed file to stdout. "$@" || exit $? # Remove the call to Libtool. if test "$libtool" = yes; then while test "X$1" != 'X--mode=compile'; do shift done shift fi IFS=" " for arg do case "$arg" in -o) shift ;; $object) shift ;; "-Gm"|"/Gm"|"-Gi"|"/Gi"|"-ZI"|"/ZI") set fnord "$@" shift shift ;; *) set fnord "$@" "$arg" shift shift ;; esac done "$@" -E 2>/dev/null | sed -n '/^#line [0-9][0-9]* "\([^"]*\)"/ s::\1:p' | $cygpath_u | sort -u > "$tmpdepfile" rm -f "$depfile" echo "$object : \\" > "$depfile" sed < "$tmpdepfile" -n -e 's% %\\ %g' -e '/^\(.*\)$/ s::'"$tab"'\1 \\:p' >> "$depfile" echo "$tab" >> "$depfile" sed < "$tmpdepfile" -n -e 's% %\\ %g' -e '/^\(.*\)$/ s::\1\::p' >> "$depfile" rm -f "$tmpdepfile" ;; msvcmsys) # This case exists only to let depend.m4 do its work. It works by # looking at the text of this script. This case will never be run, # since it is checked for above. 
exit 1 ;; none) exec "$@" ;; *) echo "Unknown depmode $depmode" 1>&2 exit 1 ;; esac exit 0 # Local Variables: # mode: shell-script # sh-indentation: 2 # eval: (add-hook 'write-file-hooks 'time-stamp) # time-stamp-start: "scriptversion=" # time-stamp-format: "%:y-%02m-%02d.%02H" # time-stamp-time-zone: "UTC" # time-stamp-end: "; # UTC" # End: dar-2.6.8/config.sub0000755000175000017520000010676313011674454011225 00000000000000#! /bin/sh # Configuration validation subroutine script. # Copyright 1992-2016 Free Software Foundation, Inc. timestamp='2016-11-04' # This file is free software; you can redistribute it and/or modify it # under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 3 of the License, or # (at your option) any later version. # # This program is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, see . # # As a special exception to the GNU General Public License, if you # distribute this file as part of a program that contains a # configuration script generated by Autoconf, you may include it under # the same distribution terms that you use for the rest of that # program. This Exception is an additional permission under section 7 # of the GNU General Public License, version 3 ("GPLv3"). # Please send patches to . # # Configuration subroutine to validate and canonicalize a configuration type. # Supply the specified configuration type as an argument. # If it is invalid, we print an error message on stderr and exit with code 1. # Otherwise, we print the canonical config type on stdout and succeed. 
# You can get the latest version of this script from: # http://git.savannah.gnu.org/gitweb/?p=config.git;a=blob_plain;f=config.sub # This file is supposed to be the same for all GNU packages # and recognize all the CPU types, system types and aliases # that are meaningful with *any* GNU software. # Each package is responsible for reporting which valid configurations # it does not support. The user should be able to distinguish # a failure to support a valid configuration from a meaningless # configuration. # The goal of this file is to map all the various variations of a given # machine specification into a single specification in the form: # CPU_TYPE-MANUFACTURER-OPERATING_SYSTEM # or in some cases, the newer four-part form: # CPU_TYPE-MANUFACTURER-KERNEL-OPERATING_SYSTEM # It is wrong to echo any other type of specification. me=`echo "$0" | sed -e 's,.*/,,'` usage="\ Usage: $0 [OPTION] CPU-MFR-OPSYS or ALIAS Canonicalize a configuration name. Operation modes: -h, --help print this help, then exit -t, --time-stamp print date of last modification, then exit -v, --version print version number, then exit Report bugs and patches to ." version="\ GNU config.sub ($timestamp) Copyright 1992-2016 Free Software Foundation, Inc. This is free software; see the source for copying conditions. There is NO warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE." help=" Try \`$me --help' for more information." # Parse command line while test $# -gt 0 ; do case $1 in --time-stamp | --time* | -t ) echo "$timestamp" ; exit ;; --version | -v ) echo "$version" ; exit ;; --help | --h* | -h ) echo "$usage"; exit ;; -- ) # Stop option processing shift; break ;; - ) # Use stdin as input. break ;; -* ) echo "$me: invalid option $1$help" exit 1 ;; *local*) # First pass through any local machine types. 
echo $1 exit ;; * ) break ;; esac done case $# in 0) echo "$me: missing argument$help" >&2 exit 1;; 1) ;; *) echo "$me: too many arguments$help" >&2 exit 1;; esac # Separate what the user gave into CPU-COMPANY and OS or KERNEL-OS (if any). # Here we must recognize all the valid KERNEL-OS combinations. maybe_os=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\2/'` case $maybe_os in nto-qnx* | linux-gnu* | linux-android* | linux-dietlibc | linux-newlib* | \ linux-musl* | linux-uclibc* | uclinux-uclibc* | uclinux-gnu* | kfreebsd*-gnu* | \ knetbsd*-gnu* | netbsd*-gnu* | netbsd*-eabi* | \ kopensolaris*-gnu* | cloudabi*-eabi* | \ storm-chaos* | os2-emx* | rtmk-nova*) os=-$maybe_os basic_machine=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'` ;; android-linux) os=-linux-android basic_machine=`echo $1 | sed 's/^\(.*\)-\([^-]*-[^-]*\)$/\1/'`-unknown ;; *) basic_machine=`echo $1 | sed 's/-[^-]*$//'` if [ $basic_machine != $1 ] then os=`echo $1 | sed 's/.*-/-/'` else os=; fi ;; esac ### Let's recognize common machines as not being operating systems so ### that things like config.sub decstation-3100 work. We also ### recognize some manufacturers as not being operating systems, so we ### can provide default operating systems below. case $os in -sun*os*) # Prevent following clause from handling this invalid input. 
;; -dec* | -mips* | -sequent* | -encore* | -pc532* | -sgi* | -sony* | \ -att* | -7300* | -3300* | -delta* | -motorola* | -sun[234]* | \ -unicom* | -ibm* | -next | -hp | -isi* | -apollo | -altos* | \ -convergent* | -ncr* | -news | -32* | -3600* | -3100* | -hitachi* |\ -c[123]* | -convex* | -sun | -crds | -omron* | -dg | -ultra | -tti* | \ -harris | -dolphin | -highlevel | -gould | -cbm | -ns | -masscomp | \ -apple | -axis | -knuth | -cray | -microblaze*) os= basic_machine=$1 ;; -bluegene*) os=-cnk ;; -sim | -cisco | -oki | -wec | -winbond) os= basic_machine=$1 ;; -scout) ;; -wrs) os=-vxworks basic_machine=$1 ;; -chorusos*) os=-chorusos basic_machine=$1 ;; -chorusrdb) os=-chorusrdb basic_machine=$1 ;; -hiux*) os=-hiuxwe2 ;; -sco6) os=-sco5v6 basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -sco5) os=-sco3.2v5 basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -sco4) os=-sco3.2v4 basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -sco3.2.[4-9]*) os=`echo $os | sed -e 's/sco3.2./sco3.2v/'` basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -sco3.2v[4-9]*) # Don't forget version if it is 3.2v4 or newer. basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -sco5v6*) # Don't forget version if it is 3.2v4 or newer. basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -sco*) os=-sco3.2v2 basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -udk*) basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -isc) os=-isc2.2 basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -clix*) basic_machine=clipper-intergraph ;; -isc*) basic_machine=`echo $1 | sed -e 's/86-.*/86-pc/'` ;; -lynx*178) os=-lynxos178 ;; -lynx*5) os=-lynxos5 ;; -lynx*) os=-lynxos ;; -ptx*) basic_machine=`echo $1 | sed -e 's/86-.*/86-sequent/'` ;; -windowsnt*) os=`echo $os | sed -e 's/windowsnt/winnt/'` ;; -psos*) os=-psos ;; -mint | -mint[0-9]*) basic_machine=m68k-atari os=-mint ;; esac # Decode aliases for certain CPU-COMPANY combinations. 
case $basic_machine in # Recognize the basic CPU types without company name. # Some are omitted here because they have special meanings below. 1750a | 580 \ | a29k \ | aarch64 | aarch64_be \ | alpha | alphaev[4-8] | alphaev56 | alphaev6[78] | alphapca5[67] \ | alpha64 | alpha64ev[4-8] | alpha64ev56 | alpha64ev6[78] | alpha64pca5[67] \ | am33_2.0 \ | arc | arceb \ | arm | arm[bl]e | arme[lb] | armv[2-8] | armv[3-8][lb] | armv7[arm] \ | avr | avr32 \ | ba \ | be32 | be64 \ | bfin \ | c4x | c8051 | clipper \ | d10v | d30v | dlx | dsp16xx \ | e2k | epiphany \ | fido | fr30 | frv | ft32 \ | h8300 | h8500 | hppa | hppa1.[01] | hppa2.0 | hppa2.0[nw] | hppa64 \ | hexagon \ | i370 | i860 | i960 | ia64 \ | ip2k | iq2000 \ | k1om \ | le32 | le64 \ | lm32 \ | m32c | m32r | m32rle | m68000 | m68k | m88k \ | maxq | mb | microblaze | microblazeel | mcore | mep | metag \ | mips | mipsbe | mipseb | mipsel | mipsle \ | mips16 \ | mips64 | mips64el \ | mips64octeon | mips64octeonel \ | mips64orion | mips64orionel \ | mips64r5900 | mips64r5900el \ | mips64vr | mips64vrel \ | mips64vr4100 | mips64vr4100el \ | mips64vr4300 | mips64vr4300el \ | mips64vr5000 | mips64vr5000el \ | mips64vr5900 | mips64vr5900el \ | mipsisa32 | mipsisa32el \ | mipsisa32r2 | mipsisa32r2el \ | mipsisa32r6 | mipsisa32r6el \ | mipsisa64 | mipsisa64el \ | mipsisa64r2 | mipsisa64r2el \ | mipsisa64r6 | mipsisa64r6el \ | mipsisa64sb1 | mipsisa64sb1el \ | mipsisa64sr71k | mipsisa64sr71kel \ | mipsr5900 | mipsr5900el \ | mipstx39 | mipstx39el \ | mn10200 | mn10300 \ | moxie \ | mt \ | msp430 \ | nds32 | nds32le | nds32be \ | nios | nios2 | nios2eb | nios2el \ | ns16k | ns32k \ | open8 | or1k | or1knd | or32 \ | pdp10 | pdp11 | pj | pjl \ | powerpc | powerpc64 | powerpc64le | powerpcle \ | pru \ | pyramid \ | riscv32 | riscv64 \ | rl78 | rx \ | score \ | sh | sh[1234] | sh[24]a | sh[24]aeb | sh[23]e | sh[234]eb | sheb | shbe | shle | sh[1234]le | sh3ele \ | sh64 | sh64le \ | sparc | sparc64 | sparc64b | sparc64v | 
sparc86x | sparclet | sparclite \ | sparcv8 | sparcv9 | sparcv9b | sparcv9v \ | spu \ | tahoe | tic4x | tic54x | tic55x | tic6x | tic80 | tron \ | ubicom32 \ | v850 | v850e | v850e1 | v850e2 | v850es | v850e2v3 \ | visium \ | we32k \ | x86 | xc16x | xstormy16 | xtensa \ | z8k | z80) basic_machine=$basic_machine-unknown ;; c54x) basic_machine=tic54x-unknown ;; c55x) basic_machine=tic55x-unknown ;; c6x) basic_machine=tic6x-unknown ;; leon|leon[3-9]) basic_machine=sparc-$basic_machine ;; m6811 | m68hc11 | m6812 | m68hc12 | m68hcs12x | nvptx | picochip) basic_machine=$basic_machine-unknown os=-none ;; m88110 | m680[12346]0 | m683?2 | m68360 | m5200 | v70 | w65 | z8k) ;; ms1) basic_machine=mt-unknown ;; strongarm | thumb | xscale) basic_machine=arm-unknown ;; xgate) basic_machine=$basic_machine-unknown os=-none ;; xscaleeb) basic_machine=armeb-unknown ;; xscaleel) basic_machine=armel-unknown ;; # We use `pc' rather than `unknown' # because (1) that's what they normally are, and # (2) the word "unknown" tends to confuse beginning users. i*86 | x86_64) basic_machine=$basic_machine-pc ;; # Object if more than one company name word. *-*-*) echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2 exit 1 ;; # Recognize the basic CPU types with company name. 
580-* \ | a29k-* \ | aarch64-* | aarch64_be-* \ | alpha-* | alphaev[4-8]-* | alphaev56-* | alphaev6[78]-* \ | alpha64-* | alpha64ev[4-8]-* | alpha64ev56-* | alpha64ev6[78]-* \ | alphapca5[67]-* | alpha64pca5[67]-* | arc-* | arceb-* \ | arm-* | armbe-* | armle-* | armeb-* | armv*-* \ | avr-* | avr32-* \ | ba-* \ | be32-* | be64-* \ | bfin-* | bs2000-* \ | c[123]* | c30-* | [cjt]90-* | c4x-* \ | c8051-* | clipper-* | craynv-* | cydra-* \ | d10v-* | d30v-* | dlx-* \ | e2k-* | elxsi-* \ | f30[01]-* | f700-* | fido-* | fr30-* | frv-* | fx80-* \ | h8300-* | h8500-* \ | hppa-* | hppa1.[01]-* | hppa2.0-* | hppa2.0[nw]-* | hppa64-* \ | hexagon-* \ | i*86-* | i860-* | i960-* | ia64-* \ | ip2k-* | iq2000-* \ | k1om-* \ | le32-* | le64-* \ | lm32-* \ | m32c-* | m32r-* | m32rle-* \ | m68000-* | m680[012346]0-* | m68360-* | m683?2-* | m68k-* \ | m88110-* | m88k-* | maxq-* | mcore-* | metag-* \ | microblaze-* | microblazeel-* \ | mips-* | mipsbe-* | mipseb-* | mipsel-* | mipsle-* \ | mips16-* \ | mips64-* | mips64el-* \ | mips64octeon-* | mips64octeonel-* \ | mips64orion-* | mips64orionel-* \ | mips64r5900-* | mips64r5900el-* \ | mips64vr-* | mips64vrel-* \ | mips64vr4100-* | mips64vr4100el-* \ | mips64vr4300-* | mips64vr4300el-* \ | mips64vr5000-* | mips64vr5000el-* \ | mips64vr5900-* | mips64vr5900el-* \ | mipsisa32-* | mipsisa32el-* \ | mipsisa32r2-* | mipsisa32r2el-* \ | mipsisa32r6-* | mipsisa32r6el-* \ | mipsisa64-* | mipsisa64el-* \ | mipsisa64r2-* | mipsisa64r2el-* \ | mipsisa64r6-* | mipsisa64r6el-* \ | mipsisa64sb1-* | mipsisa64sb1el-* \ | mipsisa64sr71k-* | mipsisa64sr71kel-* \ | mipsr5900-* | mipsr5900el-* \ | mipstx39-* | mipstx39el-* \ | mmix-* \ | mt-* \ | msp430-* \ | nds32-* | nds32le-* | nds32be-* \ | nios-* | nios2-* | nios2eb-* | nios2el-* \ | none-* | np1-* | ns16k-* | ns32k-* \ | open8-* \ | or1k*-* \ | orion-* \ | pdp10-* | pdp11-* | pj-* | pjl-* | pn-* | power-* \ | powerpc-* | powerpc64-* | powerpc64le-* | powerpcle-* \ | pru-* \ | pyramid-* \ | riscv32-* 
| riscv64-* \ | rl78-* | romp-* | rs6000-* | rx-* \ | sh-* | sh[1234]-* | sh[24]a-* | sh[24]aeb-* | sh[23]e-* | sh[34]eb-* | sheb-* | shbe-* \ | shle-* | sh[1234]le-* | sh3ele-* | sh64-* | sh64le-* \ | sparc-* | sparc64-* | sparc64b-* | sparc64v-* | sparc86x-* | sparclet-* \ | sparclite-* \ | sparcv8-* | sparcv9-* | sparcv9b-* | sparcv9v-* | sv1-* | sx*-* \ | tahoe-* \ | tic30-* | tic4x-* | tic54x-* | tic55x-* | tic6x-* | tic80-* \ | tile*-* \ | tron-* \ | ubicom32-* \ | v850-* | v850e-* | v850e1-* | v850es-* | v850e2-* | v850e2v3-* \ | vax-* \ | visium-* \ | we32k-* \ | x86-* | x86_64-* | xc16x-* | xps100-* \ | xstormy16-* | xtensa*-* \ | ymp-* \ | z8k-* | z80-*) ;; # Recognize the basic CPU types without company name, with glob match. xtensa*) basic_machine=$basic_machine-unknown ;; # Recognize the various machine names and aliases which stand # for a CPU type and a company and sometimes even an OS. 386bsd) basic_machine=i386-unknown os=-bsd ;; 3b1 | 7300 | 7300-att | att-7300 | pc7300 | safari | unixpc) basic_machine=m68000-att ;; 3b*) basic_machine=we32k-att ;; a29khif) basic_machine=a29k-amd os=-udi ;; abacus) basic_machine=abacus-unknown ;; adobe68k) basic_machine=m68010-adobe os=-scout ;; alliant | fx80) basic_machine=fx80-alliant ;; altos | altos3068) basic_machine=m68k-altos ;; am29k) basic_machine=a29k-none os=-bsd ;; amd64) basic_machine=x86_64-pc ;; amd64-*) basic_machine=x86_64-`echo $basic_machine | sed 's/^[^-]*-//'` ;; amdahl) basic_machine=580-amdahl os=-sysv ;; amiga | amiga-*) basic_machine=m68k-unknown ;; amigaos | amigados) basic_machine=m68k-unknown os=-amigaos ;; amigaunix | amix) basic_machine=m68k-unknown os=-sysv4 ;; apollo68) basic_machine=m68k-apollo os=-sysv ;; apollo68bsd) basic_machine=m68k-apollo os=-bsd ;; aros) basic_machine=i386-pc os=-aros ;; asmjs) basic_machine=asmjs-unknown ;; aux) basic_machine=m68k-apple os=-aux ;; balance) basic_machine=ns32k-sequent os=-dynix ;; blackfin) basic_machine=bfin-unknown os=-linux ;; blackfin-*) 
basic_machine=bfin-`echo $basic_machine | sed 's/^[^-]*-//'` os=-linux ;; bluegene*) basic_machine=powerpc-ibm os=-cnk ;; c54x-*) basic_machine=tic54x-`echo $basic_machine | sed 's/^[^-]*-//'` ;; c55x-*) basic_machine=tic55x-`echo $basic_machine | sed 's/^[^-]*-//'` ;; c6x-*) basic_machine=tic6x-`echo $basic_machine | sed 's/^[^-]*-//'` ;; c90) basic_machine=c90-cray os=-unicos ;; cegcc) basic_machine=arm-unknown os=-cegcc ;; convex-c1) basic_machine=c1-convex os=-bsd ;; convex-c2) basic_machine=c2-convex os=-bsd ;; convex-c32) basic_machine=c32-convex os=-bsd ;; convex-c34) basic_machine=c34-convex os=-bsd ;; convex-c38) basic_machine=c38-convex os=-bsd ;; cray | j90) basic_machine=j90-cray os=-unicos ;; craynv) basic_machine=craynv-cray os=-unicosmp ;; cr16 | cr16-*) basic_machine=cr16-unknown os=-elf ;; crds | unos) basic_machine=m68k-crds ;; crisv32 | crisv32-* | etraxfs*) basic_machine=crisv32-axis ;; cris | cris-* | etrax*) basic_machine=cris-axis ;; crx) basic_machine=crx-unknown os=-elf ;; da30 | da30-*) basic_machine=m68k-da30 ;; decstation | decstation-3100 | pmax | pmax-* | pmin | dec3100 | decstatn) basic_machine=mips-dec ;; decsystem10* | dec10*) basic_machine=pdp10-dec os=-tops10 ;; decsystem20* | dec20*) basic_machine=pdp10-dec os=-tops20 ;; delta | 3300 | motorola-3300 | motorola-delta \ | 3300-motorola | delta-motorola) basic_machine=m68k-motorola ;; delta88) basic_machine=m88k-motorola os=-sysv3 ;; dicos) basic_machine=i686-pc os=-dicos ;; djgpp) basic_machine=i586-pc os=-msdosdjgpp ;; dpx20 | dpx20-*) basic_machine=rs6000-bull os=-bosx ;; dpx2* | dpx2*-bull) basic_machine=m68k-bull os=-sysv3 ;; e500v[12]) basic_machine=powerpc-unknown os=$os"spe" ;; e500v[12]-*) basic_machine=powerpc-`echo $basic_machine | sed 's/^[^-]*-//'` os=$os"spe" ;; ebmon29k) basic_machine=a29k-amd os=-ebmon ;; elxsi) basic_machine=elxsi-elxsi os=-bsd ;; encore | umax | mmax) basic_machine=ns32k-encore ;; es1800 | OSE68k | ose68k | ose | OSE) basic_machine=m68k-ericsson 
os=-ose ;; fx2800) basic_machine=i860-alliant ;; genix) basic_machine=ns32k-ns ;; gmicro) basic_machine=tron-gmicro os=-sysv ;; go32) basic_machine=i386-pc os=-go32 ;; h3050r* | hiux*) basic_machine=hppa1.1-hitachi os=-hiuxwe2 ;; h8300hms) basic_machine=h8300-hitachi os=-hms ;; h8300xray) basic_machine=h8300-hitachi os=-xray ;; h8500hms) basic_machine=h8500-hitachi os=-hms ;; harris) basic_machine=m88k-harris os=-sysv3 ;; hp300-*) basic_machine=m68k-hp ;; hp300bsd) basic_machine=m68k-hp os=-bsd ;; hp300hpux) basic_machine=m68k-hp os=-hpux ;; hp3k9[0-9][0-9] | hp9[0-9][0-9]) basic_machine=hppa1.0-hp ;; hp9k2[0-9][0-9] | hp9k31[0-9]) basic_machine=m68000-hp ;; hp9k3[2-9][0-9]) basic_machine=m68k-hp ;; hp9k6[0-9][0-9] | hp6[0-9][0-9]) basic_machine=hppa1.0-hp ;; hp9k7[0-79][0-9] | hp7[0-79][0-9]) basic_machine=hppa1.1-hp ;; hp9k78[0-9] | hp78[0-9]) # FIXME: really hppa2.0-hp basic_machine=hppa1.1-hp ;; hp9k8[67]1 | hp8[67]1 | hp9k80[24] | hp80[24] | hp9k8[78]9 | hp8[78]9 | hp9k893 | hp893) # FIXME: really hppa2.0-hp basic_machine=hppa1.1-hp ;; hp9k8[0-9][13679] | hp8[0-9][13679]) basic_machine=hppa1.1-hp ;; hp9k8[0-9][0-9] | hp8[0-9][0-9]) basic_machine=hppa1.0-hp ;; hppa-next) os=-nextstep3 ;; hppaosf) basic_machine=hppa1.1-hp os=-osf ;; hppro) basic_machine=hppa1.1-hp os=-proelf ;; i370-ibm* | ibm*) basic_machine=i370-ibm ;; i*86v32) basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` os=-sysv32 ;; i*86v4*) basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` os=-sysv4 ;; i*86v) basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` os=-sysv ;; i*86sol2) basic_machine=`echo $1 | sed -e 's/86.*/86-pc/'` os=-solaris2 ;; i386mach) basic_machine=i386-mach os=-mach ;; i386-vsta | vsta) basic_machine=i386-unknown os=-vsta ;; iris | iris4d) basic_machine=mips-sgi case $os in -irix*) ;; *) os=-irix4 ;; esac ;; isi68 | isi) basic_machine=m68k-isi os=-sysv ;; leon-*|leon[3-9]-*) basic_machine=sparc-`echo $basic_machine | sed 's/-.*//'` ;; m68knommu) basic_machine=m68k-unknown os=-linux ;; 
m68knommu-*) basic_machine=m68k-`echo $basic_machine | sed 's/^[^-]*-//'` os=-linux ;; m88k-omron*) basic_machine=m88k-omron ;; magnum | m3230) basic_machine=mips-mips os=-sysv ;; merlin) basic_machine=ns32k-utek os=-sysv ;; microblaze*) basic_machine=microblaze-xilinx ;; mingw64) basic_machine=x86_64-pc os=-mingw64 ;; mingw32) basic_machine=i686-pc os=-mingw32 ;; mingw32ce) basic_machine=arm-unknown os=-mingw32ce ;; miniframe) basic_machine=m68000-convergent ;; *mint | -mint[0-9]* | *MiNT | *MiNT[0-9]*) basic_machine=m68k-atari os=-mint ;; mips3*-*) basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'` ;; mips3*) basic_machine=`echo $basic_machine | sed -e 's/mips3/mips64/'`-unknown ;; monitor) basic_machine=m68k-rom68k os=-coff ;; morphos) basic_machine=powerpc-unknown os=-morphos ;; moxiebox) basic_machine=moxie-unknown os=-moxiebox ;; msdos) basic_machine=i386-pc os=-msdos ;; ms1-*) basic_machine=`echo $basic_machine | sed -e 's/ms1-/mt-/'` ;; msys) basic_machine=i686-pc os=-msys ;; mvs) basic_machine=i370-ibm os=-mvs ;; nacl) basic_machine=le32-unknown os=-nacl ;; ncr3000) basic_machine=i486-ncr os=-sysv4 ;; netbsd386) basic_machine=i386-unknown os=-netbsd ;; netwinder) basic_machine=armv4l-rebel os=-linux ;; news | news700 | news800 | news900) basic_machine=m68k-sony os=-newsos ;; news1000) basic_machine=m68030-sony os=-newsos ;; news-3600 | risc-news) basic_machine=mips-sony os=-newsos ;; necv70) basic_machine=v70-nec os=-sysv ;; next | m*-next ) basic_machine=m68k-next case $os in -nextstep* ) ;; -ns2*) os=-nextstep2 ;; *) os=-nextstep3 ;; esac ;; nh3000) basic_machine=m68k-harris os=-cxux ;; nh[45]000) basic_machine=m88k-harris os=-cxux ;; nindy960) basic_machine=i960-intel os=-nindy ;; mon960) basic_machine=i960-intel os=-mon960 ;; nonstopux) basic_machine=mips-compaq os=-nonstopux ;; np1) basic_machine=np1-gould ;; neo-tandem) basic_machine=neo-tandem ;; nse-tandem) basic_machine=nse-tandem ;; nsr-tandem) basic_machine=nsr-tandem ;; op50n-* | 
op60c-*) basic_machine=hppa1.1-oki os=-proelf ;; openrisc | openrisc-*) basic_machine=or32-unknown ;; os400) basic_machine=powerpc-ibm os=-os400 ;; OSE68000 | ose68000) basic_machine=m68000-ericsson os=-ose ;; os68k) basic_machine=m68k-none os=-os68k ;; pa-hitachi) basic_machine=hppa1.1-hitachi os=-hiuxwe2 ;; paragon) basic_machine=i860-intel os=-osf ;; parisc) basic_machine=hppa-unknown os=-linux ;; parisc-*) basic_machine=hppa-`echo $basic_machine | sed 's/^[^-]*-//'` os=-linux ;; pbd) basic_machine=sparc-tti ;; pbb) basic_machine=m68k-tti ;; pc532 | pc532-*) basic_machine=ns32k-pc532 ;; pc98) basic_machine=i386-pc ;; pc98-*) basic_machine=i386-`echo $basic_machine | sed 's/^[^-]*-//'` ;; pentium | p5 | k5 | k6 | nexgen | viac3) basic_machine=i586-pc ;; pentiumpro | p6 | 6x86 | athlon | athlon_*) basic_machine=i686-pc ;; pentiumii | pentium2 | pentiumiii | pentium3) basic_machine=i686-pc ;; pentium4) basic_machine=i786-pc ;; pentium-* | p5-* | k5-* | k6-* | nexgen-* | viac3-*) basic_machine=i586-`echo $basic_machine | sed 's/^[^-]*-//'` ;; pentiumpro-* | p6-* | 6x86-* | athlon-*) basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'` ;; pentiumii-* | pentium2-* | pentiumiii-* | pentium3-*) basic_machine=i686-`echo $basic_machine | sed 's/^[^-]*-//'` ;; pentium4-*) basic_machine=i786-`echo $basic_machine | sed 's/^[^-]*-//'` ;; pn) basic_machine=pn-gould ;; power) basic_machine=power-ibm ;; ppc | ppcbe) basic_machine=powerpc-unknown ;; ppc-* | ppcbe-*) basic_machine=powerpc-`echo $basic_machine | sed 's/^[^-]*-//'` ;; ppcle | powerpclittle) basic_machine=powerpcle-unknown ;; ppcle-* | powerpclittle-*) basic_machine=powerpcle-`echo $basic_machine | sed 's/^[^-]*-//'` ;; ppc64) basic_machine=powerpc64-unknown ;; ppc64-*) basic_machine=powerpc64-`echo $basic_machine | sed 's/^[^-]*-//'` ;; ppc64le | powerpc64little) basic_machine=powerpc64le-unknown ;; ppc64le-* | powerpc64little-*) basic_machine=powerpc64le-`echo $basic_machine | sed 's/^[^-]*-//'` ;; ps2) 
basic_machine=i386-ibm ;; pw32) basic_machine=i586-unknown os=-pw32 ;; rdos | rdos64) basic_machine=x86_64-pc os=-rdos ;; rdos32) basic_machine=i386-pc os=-rdos ;; rom68k) basic_machine=m68k-rom68k os=-coff ;; rm[46]00) basic_machine=mips-siemens ;; rtpc | rtpc-*) basic_machine=romp-ibm ;; s390 | s390-*) basic_machine=s390-ibm ;; s390x | s390x-*) basic_machine=s390x-ibm ;; sa29200) basic_machine=a29k-amd os=-udi ;; sb1) basic_machine=mipsisa64sb1-unknown ;; sb1el) basic_machine=mipsisa64sb1el-unknown ;; sde) basic_machine=mipsisa32-sde os=-elf ;; sei) basic_machine=mips-sei os=-seiux ;; sequent) basic_machine=i386-sequent ;; sh) basic_machine=sh-hitachi os=-hms ;; sh5el) basic_machine=sh5le-unknown ;; sh64) basic_machine=sh64-unknown ;; sparclite-wrs | simso-wrs) basic_machine=sparclite-wrs os=-vxworks ;; sps7) basic_machine=m68k-bull os=-sysv2 ;; spur) basic_machine=spur-unknown ;; st2000) basic_machine=m68k-tandem ;; stratus) basic_machine=i860-stratus os=-sysv4 ;; strongarm-* | thumb-*) basic_machine=arm-`echo $basic_machine | sed 's/^[^-]*-//'` ;; sun2) basic_machine=m68000-sun ;; sun2os3) basic_machine=m68000-sun os=-sunos3 ;; sun2os4) basic_machine=m68000-sun os=-sunos4 ;; sun3os3) basic_machine=m68k-sun os=-sunos3 ;; sun3os4) basic_machine=m68k-sun os=-sunos4 ;; sun4os3) basic_machine=sparc-sun os=-sunos3 ;; sun4os4) basic_machine=sparc-sun os=-sunos4 ;; sun4sol2) basic_machine=sparc-sun os=-solaris2 ;; sun3 | sun3-*) basic_machine=m68k-sun ;; sun4) basic_machine=sparc-sun ;; sun386 | sun386i | roadrunner) basic_machine=i386-sun ;; sv1) basic_machine=sv1-cray os=-unicos ;; symmetry) basic_machine=i386-sequent os=-dynix ;; t3e) basic_machine=alphaev5-cray os=-unicos ;; t90) basic_machine=t90-cray os=-unicos ;; tile*) basic_machine=$basic_machine-unknown os=-linux-gnu ;; tx39) basic_machine=mipstx39-unknown ;; tx39el) basic_machine=mipstx39el-unknown ;; toad1) basic_machine=pdp10-xkl os=-tops20 ;; tower | tower-32) basic_machine=m68k-ncr ;; tpf) 
basic_machine=s390x-ibm os=-tpf ;; udi29k) basic_machine=a29k-amd os=-udi ;; ultra3) basic_machine=a29k-nyu os=-sym1 ;; v810 | necv810) basic_machine=v810-nec os=-none ;; vaxv) basic_machine=vax-dec os=-sysv ;; vms) basic_machine=vax-dec os=-vms ;; vpp*|vx|vx-*) basic_machine=f301-fujitsu ;; vxworks960) basic_machine=i960-wrs os=-vxworks ;; vxworks68) basic_machine=m68k-wrs os=-vxworks ;; vxworks29k) basic_machine=a29k-wrs os=-vxworks ;; w65*) basic_machine=w65-wdc os=-none ;; w89k-*) basic_machine=hppa1.1-winbond os=-proelf ;; xbox) basic_machine=i686-pc os=-mingw32 ;; xps | xps100) basic_machine=xps100-honeywell ;; xscale-* | xscalee[bl]-*) basic_machine=`echo $basic_machine | sed 's/^xscale/arm/'` ;; ymp) basic_machine=ymp-cray os=-unicos ;; z8k-*-coff) basic_machine=z8k-unknown os=-sim ;; z80-*-coff) basic_machine=z80-unknown os=-sim ;; none) basic_machine=none-none os=-none ;; # Here we handle the default manufacturer of certain CPU types. It is in # some cases the only manufacturer, in others, it is the most popular. w89k) basic_machine=hppa1.1-winbond ;; op50n) basic_machine=hppa1.1-oki ;; op60c) basic_machine=hppa1.1-oki ;; romp) basic_machine=romp-ibm ;; mmix) basic_machine=mmix-knuth ;; rs6000) basic_machine=rs6000-ibm ;; vax) basic_machine=vax-dec ;; pdp10) # there are many clones, so DEC is not a safe bet basic_machine=pdp10-unknown ;; pdp11) basic_machine=pdp11-dec ;; we32k) basic_machine=we32k-att ;; sh[1234] | sh[24]a | sh[24]aeb | sh[34]eb | sh[1234]le | sh[23]ele) basic_machine=sh-unknown ;; sparc | sparcv8 | sparcv9 | sparcv9b | sparcv9v) basic_machine=sparc-sun ;; cydra) basic_machine=cydra-cydrome ;; orion) basic_machine=orion-highlevel ;; orion105) basic_machine=clipper-highlevel ;; mac | mpw | mac-mpw) basic_machine=m68k-apple ;; pmac | pmac-mpw) basic_machine=powerpc-apple ;; *-unknown) # Make sure to match an already-canonicalized machine name. 
;; *) echo Invalid configuration \`$1\': machine \`$basic_machine\' not recognized 1>&2 exit 1 ;; esac # Here we canonicalize certain aliases for manufacturers. case $basic_machine in *-digital*) basic_machine=`echo $basic_machine | sed 's/digital.*/dec/'` ;; *-commodore*) basic_machine=`echo $basic_machine | sed 's/commodore.*/cbm/'` ;; *) ;; esac # Decode manufacturer-specific aliases for certain operating systems. if [ x"$os" != x"" ] then case $os in # First match some system type aliases # that might get confused with valid system types. # -solaris* is a basic system type, with this one exception. -auroraux) os=-auroraux ;; -solaris1 | -solaris1.*) os=`echo $os | sed -e 's|solaris1|sunos4|'` ;; -solaris) os=-solaris2 ;; -svr4*) os=-sysv4 ;; -unixware*) os=-sysv4.2uw ;; -gnu/linux*) os=`echo $os | sed -e 's|gnu/linux|linux-gnu|'` ;; # First accept the basic system types. # The portable systems comes first. # Each alternative MUST END IN A *, to match a version number. # -sysv* is not here because it comes later, after sysvr4. 
-gnu* | -bsd* | -mach* | -minix* | -genix* | -ultrix* | -irix* \ | -*vms* | -sco* | -esix* | -isc* | -aix* | -cnk* | -sunos | -sunos[34]*\ | -hpux* | -unos* | -osf* | -luna* | -dgux* | -auroraux* | -solaris* \ | -sym* | -kopensolaris* | -plan9* \ | -amigaos* | -amigados* | -msdos* | -newsos* | -unicos* | -aof* \ | -aos* | -aros* | -cloudabi* | -sortix* \ | -nindy* | -vxsim* | -vxworks* | -ebmon* | -hms* | -mvs* \ | -clix* | -riscos* | -uniplus* | -iris* | -rtu* | -xenix* \ | -hiux* | -386bsd* | -knetbsd* | -mirbsd* | -netbsd* \ | -bitrig* | -openbsd* | -solidbsd* | -libertybsd* \ | -ekkobsd* | -kfreebsd* | -freebsd* | -riscix* | -lynxos* \ | -bosx* | -nextstep* | -cxux* | -aout* | -elf* | -oabi* \ | -ptx* | -coff* | -ecoff* | -winnt* | -domain* | -vsta* \ | -udi* | -eabi* | -lites* | -ieee* | -go32* | -aux* \ | -chorusos* | -chorusrdb* | -cegcc* \ | -cygwin* | -msys* | -pe* | -psos* | -moss* | -proelf* | -rtems* \ | -midipix* | -mingw32* | -mingw64* | -linux-gnu* | -linux-android* \ | -linux-newlib* | -linux-musl* | -linux-uclibc* \ | -uxpv* | -beos* | -mpeix* | -udk* | -moxiebox* \ | -interix* | -uwin* | -mks* | -rhapsody* | -darwin* | -opened* \ | -openstep* | -oskit* | -conix* | -pw32* | -nonstopux* \ | -storm-chaos* | -tops10* | -tenex* | -tops20* | -its* \ | -os2* | -vos* | -palmos* | -uclinux* | -nucleus* \ | -morphos* | -superux* | -rtmk* | -rtmk-nova* | -windiss* \ | -powermax* | -dnix* | -nx6 | -nx7 | -sei* | -dragonfly* \ | -skyos* | -haiku* | -rdos* | -toppers* | -drops* | -es* \ | -onefs* | -tirtos* | -phoenix* | -fuchsia*) # Remember, each alternative MUST END IN *, to match a version number. 
;; -qnx*) case $basic_machine in x86-* | i*86-*) ;; *) os=-nto$os ;; esac ;; -nto-qnx*) ;; -nto*) os=`echo $os | sed -e 's|nto|nto-qnx|'` ;; -sim | -es1800* | -hms* | -xray | -os68k* | -none* | -v88r* \ | -windows* | -osx | -abug | -netware* | -os9* | -beos* | -haiku* \ | -macos* | -mpw* | -magic* | -mmixware* | -mon960* | -lnews*) ;; -mac*) os=`echo $os | sed -e 's|mac|macos|'` ;; -linux-dietlibc) os=-linux-dietlibc ;; -linux*) os=`echo $os | sed -e 's|linux|linux-gnu|'` ;; -sunos5*) os=`echo $os | sed -e 's|sunos5|solaris2|'` ;; -sunos6*) os=`echo $os | sed -e 's|sunos6|solaris3|'` ;; -opened*) os=-openedition ;; -os400*) os=-os400 ;; -wince*) os=-wince ;; -osfrose*) os=-osfrose ;; -osf*) os=-osf ;; -utek*) os=-bsd ;; -dynix*) os=-bsd ;; -acis*) os=-aos ;; -atheos*) os=-atheos ;; -syllable*) os=-syllable ;; -386bsd) os=-bsd ;; -ctix* | -uts*) os=-sysv ;; -nova*) os=-rtmk-nova ;; -ns2 ) os=-nextstep2 ;; -nsk*) os=-nsk ;; # Preserve the version number of sinix5. -sinix5.*) os=`echo $os | sed -e 's|sinix|sysv|'` ;; -sinix*) os=-sysv4 ;; -tpf*) os=-tpf ;; -triton*) os=-sysv3 ;; -oss*) os=-sysv3 ;; -svr4) os=-sysv4 ;; -svr3) os=-sysv3 ;; -sysvr4) os=-sysv4 ;; # This must come after -sysvr4. -sysv*) ;; -ose*) os=-ose ;; -es1800*) os=-ose ;; -xenix) os=-xenix ;; -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*) os=-mint ;; -aros*) os=-aros ;; -zvmoe) os=-zvmoe ;; -dicos*) os=-dicos ;; -nacl*) ;; -ios) ;; -none) ;; *) # Get rid of the `-' at the beginning of $os. os=`echo $os | sed 's/[^-]*-//'` echo Invalid configuration \`$1\': system \`$os\' not recognized 1>&2 exit 1 ;; esac else # Here we handle the default operating systems that come with various machines. # The value should be what the vendor currently ships out the door with their # machine or put another way, the most popular os provided with the machine. 
# Note that if you're going to try to match "-MANUFACTURER" here (say, # "-sun"), then you have to tell the case statement up towards the top # that MANUFACTURER isn't an operating system. Otherwise, code above # will signal an error saying that MANUFACTURER isn't an operating # system, and we'll never get to this point. case $basic_machine in score-*) os=-elf ;; spu-*) os=-elf ;; *-acorn) os=-riscix1.2 ;; arm*-rebel) os=-linux ;; arm*-semi) os=-aout ;; c4x-* | tic4x-*) os=-coff ;; c8051-*) os=-elf ;; hexagon-*) os=-elf ;; tic54x-*) os=-coff ;; tic55x-*) os=-coff ;; tic6x-*) os=-coff ;; # This must come before the *-dec entry. pdp10-*) os=-tops20 ;; pdp11-*) os=-none ;; *-dec | vax-*) os=-ultrix4.2 ;; m68*-apollo) os=-domain ;; i386-sun) os=-sunos4.0.2 ;; m68000-sun) os=-sunos3 ;; m68*-cisco) os=-aout ;; mep-*) os=-elf ;; mips*-cisco) os=-elf ;; mips*-*) os=-elf ;; or32-*) os=-coff ;; *-tti) # must be before sparc entry or we get the wrong os. os=-sysv3 ;; sparc-* | *-sun) os=-sunos4.1.1 ;; *-be) os=-beos ;; *-haiku) os=-haiku ;; *-ibm) os=-aix ;; *-knuth) os=-mmixware ;; *-wec) os=-proelf ;; *-winbond) os=-proelf ;; *-oki) os=-proelf ;; *-hp) os=-hpux ;; *-hitachi) os=-hiux ;; i860-* | *-att | *-ncr | *-altos | *-motorola | *-convergent) os=-sysv ;; *-cbm) os=-amigaos ;; *-dg) os=-dgux ;; *-dolphin) os=-sysv3 ;; m68k-ccur) os=-rtu ;; m88k-omron*) os=-luna ;; *-next ) os=-nextstep ;; *-sequent) os=-ptx ;; *-crds) os=-unos ;; *-ns) os=-genix ;; i370-*) os=-mvs ;; *-next) os=-nextstep3 ;; *-gould) os=-sysv ;; *-highlevel) os=-bsd ;; *-encore) os=-bsd ;; *-sgi) os=-irix ;; *-siemens) os=-sysv4 ;; *-masscomp) os=-rtu ;; f30[01]-fujitsu | f700-fujitsu) os=-uxpv ;; *-rom68k) os=-coff ;; *-*bug) os=-coff ;; *-apple) os=-macos ;; *-atari*) os=-mint ;; *) os=-none ;; esac fi # Here we handle the case where we know the os, and the CPU type, but not the # manufacturer. We pick the logical manufacturer. 
vendor=unknown case $basic_machine in *-unknown) case $os in -riscix*) vendor=acorn ;; -sunos*) vendor=sun ;; -cnk*|-aix*) vendor=ibm ;; -beos*) vendor=be ;; -hpux*) vendor=hp ;; -mpeix*) vendor=hp ;; -hiux*) vendor=hitachi ;; -unos*) vendor=crds ;; -dgux*) vendor=dg ;; -luna*) vendor=omron ;; -genix*) vendor=ns ;; -mvs* | -opened*) vendor=ibm ;; -os400*) vendor=ibm ;; -ptx*) vendor=sequent ;; -tpf*) vendor=ibm ;; -vxsim* | -vxworks* | -windiss*) vendor=wrs ;; -aux*) vendor=apple ;; -hms*) vendor=hitachi ;; -mpw* | -macos*) vendor=apple ;; -*mint | -mint[0-9]* | -*MiNT | -MiNT[0-9]*) vendor=atari ;; -vos*) vendor=stratus ;; esac basic_machine=`echo $basic_machine | sed "s/unknown/$vendor/"` ;; esac echo $basic_machine$os exit # Local variables: # eval: (add-hook 'write-file-hooks 'time-stamp) # time-stamp-start: "timestamp='" # time-stamp-format: "%:y-%02m-%02d" # time-stamp-end: "'" # End: dar-2.6.8/README0000644000175000017520000000230513606427316010110 00000000000000 D i s k A R c h i v e PRESENTATION ================ dar is a shell command, that makes backup of a directory tree and files. It has been tested under Linux, Windows (95, 2000, NT, XP, 7, 8, 10), OpenSolaris, FreeBSD and NetBSD, and has been reported as working under Mac OS X 10.3 and more recent (where it takes care of file forks but not of the ACL). External GUI are possible and exist thanks to the libdar library and its API Actually dar is a set of six commands over libdar library: dar dar_xform dar_slave dar_manager dar_cp dar_split with in addition: dar_static (statically linked version of dar, not all system support static linking) This library is part of dar and licensed under the GNU General Public License (GPL in the following), like all the source code of the Dar suite programs. This imply that you can link libdar to your programs at the condition that your programs are also covered by the GPL, which says in particular that you must provide source code of your programs. 
See COPYING file for details. DOCUMENTATION ============== For the documentation, see doc/README INSTALLATION ============== see INSTALL file dar-2.6.8/aclocal.m40000644000175000017520000140412313617552302011071 00000000000000# generated automatically by aclocal 1.15 -*- Autoconf -*- # Copyright (C) 1996-2014 Free Software Foundation, Inc. # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. m4_ifndef([AC_CONFIG_MACRO_DIRS], [m4_defun([_AM_CONFIG_MACRO_DIRS], [])m4_defun([AC_CONFIG_MACRO_DIRS], [_AM_CONFIG_MACRO_DIRS($@)])]) m4_ifndef([AC_AUTOCONF_VERSION], [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl m4_if(m4_defn([AC_AUTOCONF_VERSION]), [2.69],, [m4_warning([this file was generated for autoconf 2.69. You have another version of autoconf. It may work, but is not guaranteed to. If you have problems, you may need to regenerate the build system entirely. To do so, use the procedure documented by the package, typically 'autoreconf'.])]) # gpgme.m4 - autoconf macro to detect GPGME. # Copyright (C) 2002, 2003, 2004, 2014 g10 Code GmbH # # This file is free software; as a special exception the author gives # unlimited permission to copy and/or distribute it, with or without # modifications, as long as this notice is preserved. # # This file is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY, to the extent permitted by law; without even the # implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
# # Last-changed: 2014-10-02 AC_DEFUN([_AM_PATH_GPGME_CONFIG], [ AC_ARG_WITH(gpgme-prefix, AC_HELP_STRING([--with-gpgme-prefix=PFX], [prefix where GPGME is installed (optional)]), gpgme_config_prefix="$withval", gpgme_config_prefix="") if test x"${GPGME_CONFIG}" = x ; then if test x"${gpgme_config_prefix}" != x ; then GPGME_CONFIG="${gpgme_config_prefix}/bin/gpgme-config" else case "${SYSROOT}" in /*) if test -x "${SYSROOT}/bin/gpgme-config" ; then GPGME_CONFIG="${SYSROOT}/bin/gpgme-config" fi ;; '') ;; *) AC_MSG_WARN([Ignoring \$SYSROOT as it is not an absolute path.]) ;; esac fi fi AC_PATH_PROG(GPGME_CONFIG, gpgme-config, no) if test "$GPGME_CONFIG" != "no" ; then gpgme_version=`$GPGME_CONFIG --version` fi gpgme_version_major=`echo $gpgme_version | \ sed 's/\([[0-9]]*\)\.\([[0-9]]*\)\.\([[0-9]]*\).*/\1/'` gpgme_version_minor=`echo $gpgme_version | \ sed 's/\([[0-9]]*\)\.\([[0-9]]*\)\.\([[0-9]]*\).*/\2/'` gpgme_version_micro=`echo $gpgme_version | \ sed 's/\([[0-9]]*\)\.\([[0-9]]*\)\.\([[0-9]]*\).*/\3/'` ]) AC_DEFUN([_AM_PATH_GPGME_CONFIG_HOST_CHECK], [ gpgme_config_host=`$GPGME_CONFIG --host 2>/dev/null || echo none` if test x"$gpgme_config_host" != xnone ; then if test x"$gpgme_config_host" != x"$host" ; then AC_MSG_WARN([[ *** *** The config script $GPGME_CONFIG was *** built for $gpgme_config_host and thus may not match the *** used host $host. *** You may want to use the configure option --with-gpgme-prefix *** to specify a matching config script or use \$SYSROOT. ***]]) gpg_config_script_warn="$gpg_config_script_warn gpgme" fi fi ]) dnl AM_PATH_GPGME([MINIMUM-VERSION, dnl [ACTION-IF-FOUND [, ACTION-IF-NOT-FOUND ]]]) dnl Test for libgpgme and define GPGME_CFLAGS and GPGME_LIBS. dnl dnl If a prefix option is not used, the config script is first dnl searched in $SYSROOT/bin and then along $PATH. If the used dnl config script does not match the host specification the script dnl is added to the gpg_config_script_warn variable. 
dnl AC_DEFUN([AM_PATH_GPGME], [ AC_REQUIRE([_AM_PATH_GPGME_CONFIG])dnl tmp=ifelse([$1], ,1:0.4.2,$1) if echo "$tmp" | grep ':' >/dev/null 2>/dev/null ; then req_gpgme_api=`echo "$tmp" | sed 's/\(.*\):\(.*\)/\1/'` min_gpgme_version=`echo "$tmp" | sed 's/\(.*\):\(.*\)/\2/'` else req_gpgme_api=0 min_gpgme_version="$tmp" fi AC_MSG_CHECKING(for GPGME - version >= $min_gpgme_version) ok=no if test "$GPGME_CONFIG" != "no" ; then req_major=`echo $min_gpgme_version | \ sed 's/\([[0-9]]*\)\.\([[0-9]]*\)\.\([[0-9]]*\)/\1/'` req_minor=`echo $min_gpgme_version | \ sed 's/\([[0-9]]*\)\.\([[0-9]]*\)\.\([[0-9]]*\)/\2/'` req_micro=`echo $min_gpgme_version | \ sed 's/\([[0-9]]*\)\.\([[0-9]]*\)\.\([[0-9]]*\)/\3/'` if test "$gpgme_version_major" -gt "$req_major"; then ok=yes else if test "$gpgme_version_major" -eq "$req_major"; then if test "$gpgme_version_minor" -gt "$req_minor"; then ok=yes else if test "$gpgme_version_minor" -eq "$req_minor"; then if test "$gpgme_version_micro" -ge "$req_micro"; then ok=yes fi fi fi fi fi fi if test $ok = yes; then # If we have a recent GPGME, we should also check that the # API is compatible. if test "$req_gpgme_api" -gt 0 ; then tmp=`$GPGME_CONFIG --api-version 2>/dev/null || echo 0` if test "$tmp" -gt 0 ; then if test "$req_gpgme_api" -ne "$tmp" ; then ok=no fi fi fi fi if test $ok = yes; then GPGME_CFLAGS=`$GPGME_CONFIG --cflags` GPGME_LIBS=`$GPGME_CONFIG --libs` AC_MSG_RESULT(yes) ifelse([$2], , :, [$2]) _AM_PATH_GPGME_CONFIG_HOST_CHECK else GPGME_CFLAGS="" GPGME_LIBS="" AC_MSG_RESULT(no) ifelse([$3], , :, [$3]) fi AC_SUBST(GPGME_CFLAGS) AC_SUBST(GPGME_LIBS) ]) dnl AM_PATH_GPGME_PTHREAD([MINIMUM-VERSION, dnl [ACTION-IF-FOUND [, ACTION-IF-NOT-FOUND ]]]) dnl Test for libgpgme and define GPGME_PTHREAD_CFLAGS dnl and GPGME_PTHREAD_LIBS. 
dnl AC_DEFUN([AM_PATH_GPGME_PTHREAD], [ AC_REQUIRE([_AM_PATH_GPGME_CONFIG])dnl tmp=ifelse([$1], ,1:0.4.2,$1) if echo "$tmp" | grep ':' >/dev/null 2>/dev/null ; then req_gpgme_api=`echo "$tmp" | sed 's/\(.*\):\(.*\)/\1/'` min_gpgme_version=`echo "$tmp" | sed 's/\(.*\):\(.*\)/\2/'` else req_gpgme_api=0 min_gpgme_version="$tmp" fi AC_MSG_CHECKING(for GPGME pthread - version >= $min_gpgme_version) ok=no if test "$GPGME_CONFIG" != "no" ; then if `$GPGME_CONFIG --thread=pthread 2> /dev/null` ; then req_major=`echo $min_gpgme_version | \ sed 's/\([[0-9]]*\)\.\([[0-9]]*\)\.\([[0-9]]*\)/\1/'` req_minor=`echo $min_gpgme_version | \ sed 's/\([[0-9]]*\)\.\([[0-9]]*\)\.\([[0-9]]*\)/\2/'` req_micro=`echo $min_gpgme_version | \ sed 's/\([[0-9]]*\)\.\([[0-9]]*\)\.\([[0-9]]*\)/\3/'` if test "$gpgme_version_major" -gt "$req_major"; then ok=yes else if test "$gpgme_version_major" -eq "$req_major"; then if test "$gpgme_version_minor" -gt "$req_minor"; then ok=yes else if test "$gpgme_version_minor" -eq "$req_minor"; then if test "$gpgme_version_micro" -ge "$req_micro"; then ok=yes fi fi fi fi fi fi fi if test $ok = yes; then # If we have a recent GPGME, we should also check that the # API is compatible. if test "$req_gpgme_api" -gt 0 ; then tmp=`$GPGME_CONFIG --api-version 2>/dev/null || echo 0` if test "$tmp" -gt 0 ; then if test "$req_gpgme_api" -ne "$tmp" ; then ok=no fi fi fi fi if test $ok = yes; then GPGME_PTHREAD_CFLAGS=`$GPGME_CONFIG --thread=pthread --cflags` GPGME_PTHREAD_LIBS=`$GPGME_CONFIG --thread=pthread --libs` AC_MSG_RESULT(yes) ifelse([$2], , :, [$2]) _AM_PATH_GPGME_CONFIG_HOST_CHECK else GPGME_PTHREAD_CFLAGS="" GPGME_PTHREAD_LIBS="" AC_MSG_RESULT(no) ifelse([$3], , :, [$3]) fi AC_SUBST(GPGME_PTHREAD_CFLAGS) AC_SUBST(GPGME_PTHREAD_LIBS) ]) dnl AM_PATH_GPGME_GLIB([MINIMUM-VERSION, dnl [ACTION-IF-FOUND [, ACTION-IF-NOT-FOUND ]]]) dnl Test for libgpgme-glib and define GPGME_GLIB_CFLAGS and GPGME_GLIB_LIBS. 
dnl AC_DEFUN([AM_PATH_GPGME_GLIB], [ AC_REQUIRE([_AM_PATH_GPGME_CONFIG])dnl tmp=ifelse([$1], ,1:0.4.2,$1) if echo "$tmp" | grep ':' >/dev/null 2>/dev/null ; then req_gpgme_api=`echo "$tmp" | sed 's/\(.*\):\(.*\)/\1/'` min_gpgme_version=`echo "$tmp" | sed 's/\(.*\):\(.*\)/\2/'` else req_gpgme_api=0 min_gpgme_version="$tmp" fi AC_MSG_CHECKING(for GPGME - version >= $min_gpgme_version) ok=no if test "$GPGME_CONFIG" != "no" ; then req_major=`echo $min_gpgme_version | \ sed 's/\([[0-9]]*\)\.\([[0-9]]*\)\.\([[0-9]]*\)/\1/'` req_minor=`echo $min_gpgme_version | \ sed 's/\([[0-9]]*\)\.\([[0-9]]*\)\.\([[0-9]]*\)/\2/'` req_micro=`echo $min_gpgme_version | \ sed 's/\([[0-9]]*\)\.\([[0-9]]*\)\.\([[0-9]]*\)/\3/'` if test "$gpgme_version_major" -gt "$req_major"; then ok=yes else if test "$gpgme_version_major" -eq "$req_major"; then if test "$gpgme_version_minor" -gt "$req_minor"; then ok=yes else if test "$gpgme_version_minor" -eq "$req_minor"; then if test "$gpgme_version_micro" -ge "$req_micro"; then ok=yes fi fi fi fi fi fi if test $ok = yes; then # If we have a recent GPGME, we should also check that the # API is compatible. if test "$req_gpgme_api" -gt 0 ; then tmp=`$GPGME_CONFIG --api-version 2>/dev/null || echo 0` if test "$tmp" -gt 0 ; then if test "$req_gpgme_api" -ne "$tmp" ; then ok=no fi fi fi fi if test $ok = yes; then GPGME_GLIB_CFLAGS=`$GPGME_CONFIG --glib --cflags` GPGME_GLIB_LIBS=`$GPGME_CONFIG --glib --libs` AC_MSG_RESULT(yes) ifelse([$2], , :, [$2]) _AM_PATH_GPGME_CONFIG_HOST_CHECK else GPGME_GLIB_CFLAGS="" GPGME_GLIB_LIBS="" AC_MSG_RESULT(no) ifelse([$3], , :, [$3]) fi AC_SUBST(GPGME_GLIB_CFLAGS) AC_SUBST(GPGME_GLIB_LIBS) ]) # intlmacosx.m4 serial 5 (gettext-0.18.2) dnl Copyright (C) 2004-2014, 2016 Free Software Foundation, Inc. dnl This file is free software; the Free Software Foundation dnl gives unlimited permission to copy and/or distribute it, dnl with or without modifications, as long as this notice is preserved. 
dnl dnl This file can be used in projects which are not available under dnl the GNU General Public License or the GNU Library General Public dnl License but which still want to provide support for the GNU gettext dnl functionality. dnl Please note that the actual code of the GNU gettext library is covered dnl by the GNU Library General Public License, and the rest of the GNU dnl gettext package is covered by the GNU General Public License. dnl They are *not* in the public domain. dnl Checks for special options needed on Mac OS X. dnl Defines INTL_MACOSX_LIBS. AC_DEFUN([gt_INTL_MACOSX], [ dnl Check for API introduced in Mac OS X 10.2. AC_CACHE_CHECK([for CFPreferencesCopyAppValue], [gt_cv_func_CFPreferencesCopyAppValue], [gt_save_LIBS="$LIBS" LIBS="$LIBS -Wl,-framework -Wl,CoreFoundation" AC_LINK_IFELSE( [AC_LANG_PROGRAM( [[#include ]], [[CFPreferencesCopyAppValue(NULL, NULL)]])], [gt_cv_func_CFPreferencesCopyAppValue=yes], [gt_cv_func_CFPreferencesCopyAppValue=no]) LIBS="$gt_save_LIBS"]) if test $gt_cv_func_CFPreferencesCopyAppValue = yes; then AC_DEFINE([HAVE_CFPREFERENCESCOPYAPPVALUE], [1], [Define to 1 if you have the Mac OS X function CFPreferencesCopyAppValue in the CoreFoundation framework.]) fi dnl Check for API introduced in Mac OS X 10.3. 
AC_CACHE_CHECK([for CFLocaleCopyCurrent], [gt_cv_func_CFLocaleCopyCurrent], [gt_save_LIBS="$LIBS" LIBS="$LIBS -Wl,-framework -Wl,CoreFoundation" AC_LINK_IFELSE( [AC_LANG_PROGRAM( [[#include ]], [[CFLocaleCopyCurrent();]])], [gt_cv_func_CFLocaleCopyCurrent=yes], [gt_cv_func_CFLocaleCopyCurrent=no]) LIBS="$gt_save_LIBS"]) if test $gt_cv_func_CFLocaleCopyCurrent = yes; then AC_DEFINE([HAVE_CFLOCALECOPYCURRENT], [1], [Define to 1 if you have the Mac OS X function CFLocaleCopyCurrent in the CoreFoundation framework.]) fi INTL_MACOSX_LIBS= if test $gt_cv_func_CFPreferencesCopyAppValue = yes || test $gt_cv_func_CFLocaleCopyCurrent = yes; then INTL_MACOSX_LIBS="-Wl,-framework -Wl,CoreFoundation" fi AC_SUBST([INTL_MACOSX_LIBS]) ]) # libtool.m4 - Configure libtool for the host system. -*-Autoconf-*- # # Copyright (C) 1996-2001, 2003-2015 Free Software Foundation, Inc. # Written by Gordon Matzigkeit, 1996 # # This file is free software; the Free Software Foundation gives # unlimited permission to copy and/or distribute it, with or without # modifications, as long as this notice is preserved. m4_define([_LT_COPYING], [dnl # Copyright (C) 2014 Free Software Foundation, Inc. # This is free software; see the source for copying conditions. There is NO # warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # GNU Libtool is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of of the License, or # (at your option) any later version. # # As a special exception to the GNU General Public License, if you # distribute this file as part of a program or library that is built # using GNU Libtool, you may include this file under the same # distribution terms that you use for the rest of that program. 
# # GNU Libtool is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see . ]) # serial 58 LT_INIT # LT_PREREQ(VERSION) # ------------------ # Complain and exit if this libtool version is less that VERSION. m4_defun([LT_PREREQ], [m4_if(m4_version_compare(m4_defn([LT_PACKAGE_VERSION]), [$1]), -1, [m4_default([$3], [m4_fatal([Libtool version $1 or higher is required], 63)])], [$2])]) # _LT_CHECK_BUILDDIR # ------------------ # Complain if the absolute build directory name contains unusual characters m4_defun([_LT_CHECK_BUILDDIR], [case `pwd` in *\ * | *\ *) AC_MSG_WARN([Libtool does not cope well with whitespace in `pwd`]) ;; esac ]) # LT_INIT([OPTIONS]) # ------------------ AC_DEFUN([LT_INIT], [AC_PREREQ([2.62])dnl We use AC_PATH_PROGS_FEATURE_CHECK AC_REQUIRE([AC_CONFIG_AUX_DIR_DEFAULT])dnl AC_BEFORE([$0], [LT_LANG])dnl AC_BEFORE([$0], [LT_OUTPUT])dnl AC_BEFORE([$0], [LTDL_INIT])dnl m4_require([_LT_CHECK_BUILDDIR])dnl dnl Autoconf doesn't catch unexpanded LT_ macros by default: m4_pattern_forbid([^_?LT_[A-Z_]+$])dnl m4_pattern_allow([^(_LT_EOF|LT_DLGLOBAL|LT_DLLAZY_OR_NOW|LT_MULTI_MODULE)$])dnl dnl aclocal doesn't pull ltoptions.m4, ltsugar.m4, or ltversion.m4 dnl unless we require an AC_DEFUNed macro: AC_REQUIRE([LTOPTIONS_VERSION])dnl AC_REQUIRE([LTSUGAR_VERSION])dnl AC_REQUIRE([LTVERSION_VERSION])dnl AC_REQUIRE([LTOBSOLETE_VERSION])dnl m4_require([_LT_PROG_LTMAIN])dnl _LT_SHELL_INIT([SHELL=${CONFIG_SHELL-/bin/sh}]) dnl Parse OPTIONS _LT_SET_OPTIONS([$0], [$1]) # This can be used to rebuild libtool when needed LIBTOOL_DEPS=$ltmain # Always use our own libtool. 
LIBTOOL='$(SHELL) $(top_builddir)/libtool' AC_SUBST(LIBTOOL)dnl _LT_SETUP # Only expand once: m4_define([LT_INIT]) ])# LT_INIT # Old names: AU_ALIAS([AC_PROG_LIBTOOL], [LT_INIT]) AU_ALIAS([AM_PROG_LIBTOOL], [LT_INIT]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_PROG_LIBTOOL], []) dnl AC_DEFUN([AM_PROG_LIBTOOL], []) # _LT_PREPARE_CC_BASENAME # ----------------------- m4_defun([_LT_PREPARE_CC_BASENAME], [ # Calculate cc_basename. Skip known compiler wrappers and cross-prefix. func_cc_basename () { for cc_temp in @S|@*""; do case $cc_temp in compile | *[[\\/]]compile | ccache | *[[\\/]]ccache ) ;; distcc | *[[\\/]]distcc | purify | *[[\\/]]purify ) ;; \-*) ;; *) break;; esac done func_cc_basename_result=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"` } ])# _LT_PREPARE_CC_BASENAME # _LT_CC_BASENAME(CC) # ------------------- # It would be clearer to call AC_REQUIREs from _LT_PREPARE_CC_BASENAME, # but that macro is also expanded into generated libtool script, which # arranges for $SED and $ECHO to be set by different means. m4_defun([_LT_CC_BASENAME], [m4_require([_LT_PREPARE_CC_BASENAME])dnl AC_REQUIRE([_LT_DECL_SED])dnl AC_REQUIRE([_LT_PROG_ECHO_BACKSLASH])dnl func_cc_basename $1 cc_basename=$func_cc_basename_result ]) # _LT_FILEUTILS_DEFAULTS # ---------------------- # It is okay to use these file commands and assume they have been set # sensibly after 'm4_require([_LT_FILEUTILS_DEFAULTS])'. 
m4_defun([_LT_FILEUTILS_DEFAULTS], [: ${CP="cp -f"} : ${MV="mv -f"} : ${RM="rm -f"} ])# _LT_FILEUTILS_DEFAULTS # _LT_SETUP # --------- m4_defun([_LT_SETUP], [AC_REQUIRE([AC_CANONICAL_HOST])dnl AC_REQUIRE([AC_CANONICAL_BUILD])dnl AC_REQUIRE([_LT_PREPARE_SED_QUOTE_VARS])dnl AC_REQUIRE([_LT_PROG_ECHO_BACKSLASH])dnl _LT_DECL([], [PATH_SEPARATOR], [1], [The PATH separator for the build system])dnl dnl _LT_DECL([], [host_alias], [0], [The host system])dnl _LT_DECL([], [host], [0])dnl _LT_DECL([], [host_os], [0])dnl dnl _LT_DECL([], [build_alias], [0], [The build system])dnl _LT_DECL([], [build], [0])dnl _LT_DECL([], [build_os], [0])dnl dnl AC_REQUIRE([AC_PROG_CC])dnl AC_REQUIRE([LT_PATH_LD])dnl AC_REQUIRE([LT_PATH_NM])dnl dnl AC_REQUIRE([AC_PROG_LN_S])dnl test -z "$LN_S" && LN_S="ln -s" _LT_DECL([], [LN_S], [1], [Whether we need soft or hard links])dnl dnl AC_REQUIRE([LT_CMD_MAX_LEN])dnl _LT_DECL([objext], [ac_objext], [0], [Object file suffix (normally "o")])dnl _LT_DECL([], [exeext], [0], [Executable file suffix (normally "")])dnl dnl m4_require([_LT_FILEUTILS_DEFAULTS])dnl m4_require([_LT_CHECK_SHELL_FEATURES])dnl m4_require([_LT_PATH_CONVERSION_FUNCTIONS])dnl m4_require([_LT_CMD_RELOAD])dnl m4_require([_LT_CHECK_MAGIC_METHOD])dnl m4_require([_LT_CHECK_SHAREDLIB_FROM_LINKLIB])dnl m4_require([_LT_CMD_OLD_ARCHIVE])dnl m4_require([_LT_CMD_GLOBAL_SYMBOLS])dnl m4_require([_LT_WITH_SYSROOT])dnl m4_require([_LT_CMD_TRUNCATE])dnl _LT_CONFIG_LIBTOOL_INIT([ # See if we are running on zsh, and set the options that allow our # commands through without removal of \ escapes INIT. if test -n "\${ZSH_VERSION+set}"; then setopt NO_GLOB_SUBST fi ]) if test -n "${ZSH_VERSION+set}"; then setopt NO_GLOB_SUBST fi _LT_CHECK_OBJDIR m4_require([_LT_TAG_COMPILER])dnl case $host_os in aix3*) # AIX sometimes has problems with the GCC collect2 program. For some # reason, if we set the COLLECT_NAMES environment variable, the problems # vanish in a puff of smoke. 
if test set != "${COLLECT_NAMES+set}"; then COLLECT_NAMES= export COLLECT_NAMES fi ;; esac # Global variables: ofile=libtool can_build_shared=yes # All known linkers require a '.a' archive for static linking (except MSVC, # which needs '.lib'). libext=a with_gnu_ld=$lt_cv_prog_gnu_ld old_CC=$CC old_CFLAGS=$CFLAGS # Set sane defaults for various variables test -z "$CC" && CC=cc test -z "$LTCC" && LTCC=$CC test -z "$LTCFLAGS" && LTCFLAGS=$CFLAGS test -z "$LD" && LD=ld test -z "$ac_objext" && ac_objext=o _LT_CC_BASENAME([$compiler]) # Only perform the check for file, if the check method requires it test -z "$MAGIC_CMD" && MAGIC_CMD=file case $deplibs_check_method in file_magic*) if test "$file_magic_cmd" = '$MAGIC_CMD'; then _LT_PATH_MAGIC fi ;; esac # Use C for the default configuration in the libtool script LT_SUPPORTED_TAG([CC]) _LT_LANG_C_CONFIG _LT_LANG_DEFAULT_CONFIG _LT_CONFIG_COMMANDS ])# _LT_SETUP # _LT_PREPARE_SED_QUOTE_VARS # -------------------------- # Define a few sed substitution that help us do robust quoting. m4_defun([_LT_PREPARE_SED_QUOTE_VARS], [# Backslashify metacharacters that are still active within # double-quoted strings. sed_quote_subst='s/\([["`$\\]]\)/\\\1/g' # Same as above, but do not quote variable references. double_quote_subst='s/\([["`\\]]\)/\\\1/g' # Sed substitution to delay expansion of an escaped shell variable in a # double_quote_subst'ed string. delay_variable_subst='s/\\\\\\\\\\\$/\\\\\\$/g' # Sed substitution to delay expansion of an escaped single quote. delay_single_quote_subst='s/'\''/'\'\\\\\\\'\''/g' # Sed substitution to avoid accidental globbing in evaled expressions no_glob_subst='s/\*/\\\*/g' ]) # _LT_PROG_LTMAIN # --------------- # Note that this code is called both from 'configure', and 'config.status' # now that we use AC_CONFIG_COMMANDS to generate libtool. Notably, # 'config.status' has no value for ac_aux_dir unless we are using Automake, # so we pass a copy along to make sure it has a sensible value anyway. 
m4_defun([_LT_PROG_LTMAIN], [m4_ifdef([AC_REQUIRE_AUX_FILE], [AC_REQUIRE_AUX_FILE([ltmain.sh])])dnl _LT_CONFIG_LIBTOOL_INIT([ac_aux_dir='$ac_aux_dir']) ltmain=$ac_aux_dir/ltmain.sh ])# _LT_PROG_LTMAIN # So that we can recreate a full libtool script including additional # tags, we accumulate the chunks of code to send to AC_CONFIG_COMMANDS # in macros and then make a single call at the end using the 'libtool' # label. # _LT_CONFIG_LIBTOOL_INIT([INIT-COMMANDS]) # ---------------------------------------- # Register INIT-COMMANDS to be passed to AC_CONFIG_COMMANDS later. m4_define([_LT_CONFIG_LIBTOOL_INIT], [m4_ifval([$1], [m4_append([_LT_OUTPUT_LIBTOOL_INIT], [$1 ])])]) # Initialize. m4_define([_LT_OUTPUT_LIBTOOL_INIT]) # _LT_CONFIG_LIBTOOL([COMMANDS]) # ------------------------------ # Register COMMANDS to be passed to AC_CONFIG_COMMANDS later. m4_define([_LT_CONFIG_LIBTOOL], [m4_ifval([$1], [m4_append([_LT_OUTPUT_LIBTOOL_COMMANDS], [$1 ])])]) # Initialize. m4_define([_LT_OUTPUT_LIBTOOL_COMMANDS]) # _LT_CONFIG_SAVE_COMMANDS([COMMANDS], [INIT_COMMANDS]) # ----------------------------------------------------- m4_defun([_LT_CONFIG_SAVE_COMMANDS], [_LT_CONFIG_LIBTOOL([$1]) _LT_CONFIG_LIBTOOL_INIT([$2]) ]) # _LT_FORMAT_COMMENT([COMMENT]) # ----------------------------- # Add leading comment marks to the start of each line, and a trailing # full-stop to the whole comment if one is not present already. m4_define([_LT_FORMAT_COMMENT], [m4_ifval([$1], [ m4_bpatsubst([m4_bpatsubst([$1], [^ *], [# ])], [['`$\]], [\\\&])]m4_bmatch([$1], [[!?.]$], [], [.]) )]) # _LT_DECL([CONFIGNAME], VARNAME, VALUE, [DESCRIPTION], [IS-TAGGED?]) # ------------------------------------------------------------------- # CONFIGNAME is the name given to the value in the libtool script. # VARNAME is the (base) name used in the configure script. # VALUE may be 0, 1 or 2 for a computed quote escaped value based on # VARNAME. Any other value will be used directly. 
m4_define([_LT_DECL], [lt_if_append_uniq([lt_decl_varnames], [$2], [, ], [lt_dict_add_subkey([lt_decl_dict], [$2], [libtool_name], [m4_ifval([$1], [$1], [$2])]) lt_dict_add_subkey([lt_decl_dict], [$2], [value], [$3]) m4_ifval([$4], [lt_dict_add_subkey([lt_decl_dict], [$2], [description], [$4])]) lt_dict_add_subkey([lt_decl_dict], [$2], [tagged?], [m4_ifval([$5], [yes], [no])])]) ]) # _LT_TAGDECL([CONFIGNAME], VARNAME, VALUE, [DESCRIPTION]) # -------------------------------------------------------- m4_define([_LT_TAGDECL], [_LT_DECL([$1], [$2], [$3], [$4], [yes])]) # lt_decl_tag_varnames([SEPARATOR], [VARNAME1...]) # ------------------------------------------------ m4_define([lt_decl_tag_varnames], [_lt_decl_filter([tagged?], [yes], $@)]) # _lt_decl_filter(SUBKEY, VALUE, [SEPARATOR], [VARNAME1..]) # --------------------------------------------------------- m4_define([_lt_decl_filter], [m4_case([$#], [0], [m4_fatal([$0: too few arguments: $#])], [1], [m4_fatal([$0: too few arguments: $#: $1])], [2], [lt_dict_filter([lt_decl_dict], [$1], [$2], [], lt_decl_varnames)], [3], [lt_dict_filter([lt_decl_dict], [$1], [$2], [$3], lt_decl_varnames)], [lt_dict_filter([lt_decl_dict], $@)])[]dnl ]) # lt_decl_quote_varnames([SEPARATOR], [VARNAME1...]) # -------------------------------------------------- m4_define([lt_decl_quote_varnames], [_lt_decl_filter([value], [1], $@)]) # lt_decl_dquote_varnames([SEPARATOR], [VARNAME1...]) # --------------------------------------------------- m4_define([lt_decl_dquote_varnames], [_lt_decl_filter([value], [2], $@)]) # lt_decl_varnames_tagged([SEPARATOR], [VARNAME1...]) # --------------------------------------------------- m4_define([lt_decl_varnames_tagged], [m4_assert([$# <= 2])dnl _$0(m4_quote(m4_default([$1], [[, ]])), m4_ifval([$2], [[$2]], [m4_dquote(lt_decl_tag_varnames)]), m4_split(m4_normalize(m4_quote(_LT_TAGS)), [ ]))]) m4_define([_lt_decl_varnames_tagged], [m4_ifval([$3], [lt_combine([$1], [$2], [_], $3)])]) # 
lt_decl_all_varnames([SEPARATOR], [VARNAME1...]) # ------------------------------------------------ m4_define([lt_decl_all_varnames], [_$0(m4_quote(m4_default([$1], [[, ]])), m4_if([$2], [], m4_quote(lt_decl_varnames), m4_quote(m4_shift($@))))[]dnl ]) m4_define([_lt_decl_all_varnames], [lt_join($@, lt_decl_varnames_tagged([$1], lt_decl_tag_varnames([[, ]], m4_shift($@))))dnl ]) # _LT_CONFIG_STATUS_DECLARE([VARNAME]) # ------------------------------------ # Quote a variable value, and forward it to 'config.status' so that its # declaration there will have the same value as in 'configure'. VARNAME # must have a single quote delimited value for this to work. m4_define([_LT_CONFIG_STATUS_DECLARE], [$1='`$ECHO "$][$1" | $SED "$delay_single_quote_subst"`']) # _LT_CONFIG_STATUS_DECLARATIONS # ------------------------------ # We delimit libtool config variables with single quotes, so when # we write them to config.status, we have to be sure to quote all # embedded single quotes properly. In configure, this macro expands # each variable declared with _LT_DECL (and _LT_TAGDECL) into: # # ='`$ECHO "$" | $SED "$delay_single_quote_subst"`' m4_defun([_LT_CONFIG_STATUS_DECLARATIONS], [m4_foreach([_lt_var], m4_quote(lt_decl_all_varnames), [m4_n([_LT_CONFIG_STATUS_DECLARE(_lt_var)])])]) # _LT_LIBTOOL_TAGS # ---------------- # Output comment and list of tags supported by the script m4_defun([_LT_LIBTOOL_TAGS], [_LT_FORMAT_COMMENT([The names of the tagged configurations supported by this script])dnl available_tags='_LT_TAGS'dnl ]) # _LT_LIBTOOL_DECLARE(VARNAME, [TAG]) # ----------------------------------- # Extract the dictionary values for VARNAME (optionally with TAG) and # expand to a commented shell variable setting: # # # Some comment about what VAR is for. 
# visible_name=$lt_internal_name m4_define([_LT_LIBTOOL_DECLARE], [_LT_FORMAT_COMMENT(m4_quote(lt_dict_fetch([lt_decl_dict], [$1], [description])))[]dnl m4_pushdef([_libtool_name], m4_quote(lt_dict_fetch([lt_decl_dict], [$1], [libtool_name])))[]dnl m4_case(m4_quote(lt_dict_fetch([lt_decl_dict], [$1], [value])), [0], [_libtool_name=[$]$1], [1], [_libtool_name=$lt_[]$1], [2], [_libtool_name=$lt_[]$1], [_libtool_name=lt_dict_fetch([lt_decl_dict], [$1], [value])])[]dnl m4_ifval([$2], [_$2])[]m4_popdef([_libtool_name])[]dnl ]) # _LT_LIBTOOL_CONFIG_VARS # ----------------------- # Produce commented declarations of non-tagged libtool config variables # suitable for insertion in the LIBTOOL CONFIG section of the 'libtool' # script. Tagged libtool config variables (even for the LIBTOOL CONFIG # section) are produced by _LT_LIBTOOL_TAG_VARS. m4_defun([_LT_LIBTOOL_CONFIG_VARS], [m4_foreach([_lt_var], m4_quote(_lt_decl_filter([tagged?], [no], [], lt_decl_varnames)), [m4_n([_LT_LIBTOOL_DECLARE(_lt_var)])])]) # _LT_LIBTOOL_TAG_VARS(TAG) # ------------------------- m4_define([_LT_LIBTOOL_TAG_VARS], [m4_foreach([_lt_var], m4_quote(lt_decl_tag_varnames), [m4_n([_LT_LIBTOOL_DECLARE(_lt_var, [$1])])])]) # _LT_TAGVAR(VARNAME, [TAGNAME]) # ------------------------------ m4_define([_LT_TAGVAR], [m4_ifval([$2], [$1_$2], [$1])]) # _LT_CONFIG_COMMANDS # ------------------- # Send accumulated output to $CONFIG_STATUS. Thanks to the lists of # variables for single and double quote escaping we saved from calls # to _LT_DECL, we can put quote escaped variables declarations # into 'config.status', and then the shell code to quote escape them in # for loops in 'config.status'. Finally, any additional code accumulated # from calls to _LT_CONFIG_LIBTOOL_INIT is expanded. 
m4_defun([_LT_CONFIG_COMMANDS], [AC_PROVIDE_IFELSE([LT_OUTPUT], dnl If the libtool generation code has been placed in $CONFIG_LT, dnl instead of duplicating it all over again into config.status, dnl then we will have config.status run $CONFIG_LT later, so it dnl needs to know what name is stored there: [AC_CONFIG_COMMANDS([libtool], [$SHELL $CONFIG_LT || AS_EXIT(1)], [CONFIG_LT='$CONFIG_LT'])], dnl If the libtool generation code is destined for config.status, dnl expand the accumulated commands and init code now: [AC_CONFIG_COMMANDS([libtool], [_LT_OUTPUT_LIBTOOL_COMMANDS], [_LT_OUTPUT_LIBTOOL_COMMANDS_INIT])]) ])#_LT_CONFIG_COMMANDS # Initialize. m4_define([_LT_OUTPUT_LIBTOOL_COMMANDS_INIT], [ # The HP-UX ksh and POSIX shell print the target directory to stdout # if CDPATH is set. (unset CDPATH) >/dev/null 2>&1 && unset CDPATH sed_quote_subst='$sed_quote_subst' double_quote_subst='$double_quote_subst' delay_variable_subst='$delay_variable_subst' _LT_CONFIG_STATUS_DECLARATIONS LTCC='$LTCC' LTCFLAGS='$LTCFLAGS' compiler='$compiler_DEFAULT' # A function that is used when there is no print builtin or printf. func_fallback_echo () { eval 'cat <<_LTECHO_EOF \$[]1 _LTECHO_EOF' } # Quote evaled strings. for var in lt_decl_all_varnames([[ \ ]], lt_decl_quote_varnames); do case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in *[[\\\\\\\`\\"\\\$]]*) eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED \\"\\\$sed_quote_subst\\"\\\`\\\\\\"" ## exclude from sc_prohibit_nested_quotes ;; *) eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" ;; esac done # Double-quote double-evaled strings. 
for var in lt_decl_all_varnames([[ \ ]], lt_decl_dquote_varnames); do case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in *[[\\\\\\\`\\"\\\$]]*) eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\"" ## exclude from sc_prohibit_nested_quotes ;; *) eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" ;; esac done _LT_OUTPUT_LIBTOOL_INIT ]) # _LT_GENERATED_FILE_INIT(FILE, [COMMENT]) # ------------------------------------ # Generate a child script FILE with all initialization necessary to # reuse the environment learned by the parent script, and make the # file executable. If COMMENT is supplied, it is inserted after the # '#!' sequence but before initialization text begins. After this # macro, additional text can be appended to FILE to form the body of # the child script. The macro ends with non-zero status if the # file could not be fully written (such as if the disk is full). m4_ifdef([AS_INIT_GENERATED], [m4_defun([_LT_GENERATED_FILE_INIT],[AS_INIT_GENERATED($@)])], [m4_defun([_LT_GENERATED_FILE_INIT], [m4_require([AS_PREPARE])]dnl [m4_pushdef([AS_MESSAGE_LOG_FD])]dnl [lt_write_fail=0 cat >$1 <<_ASEOF || lt_write_fail=1 #! $SHELL # Generated by $as_me. $2 SHELL=\${CONFIG_SHELL-$SHELL} export SHELL _ASEOF cat >>$1 <<\_ASEOF || lt_write_fail=1 AS_SHELL_SANITIZE _AS_PREPARE exec AS_MESSAGE_FD>&1 _ASEOF test 0 = "$lt_write_fail" && chmod +x $1[]dnl m4_popdef([AS_MESSAGE_LOG_FD])])])# _LT_GENERATED_FILE_INIT # LT_OUTPUT # --------- # This macro allows early generation of the libtool script (before # AC_OUTPUT is called), incase it is used in configure for compilation # tests. 
AC_DEFUN([LT_OUTPUT], [: ${CONFIG_LT=./config.lt} AC_MSG_NOTICE([creating $CONFIG_LT]) _LT_GENERATED_FILE_INIT(["$CONFIG_LT"], [# Run this file to recreate a libtool stub with the current configuration.]) cat >>"$CONFIG_LT" <<\_LTEOF lt_cl_silent=false exec AS_MESSAGE_LOG_FD>>config.log { echo AS_BOX([Running $as_me.]) } >&AS_MESSAGE_LOG_FD lt_cl_help="\ '$as_me' creates a local libtool stub from the current configuration, for use in further configure time tests before the real libtool is generated. Usage: $[0] [[OPTIONS]] -h, --help print this help, then exit -V, --version print version number, then exit -q, --quiet do not print progress messages -d, --debug don't remove temporary files Report bugs to ." lt_cl_version="\ m4_ifset([AC_PACKAGE_NAME], [AC_PACKAGE_NAME ])config.lt[]dnl m4_ifset([AC_PACKAGE_VERSION], [ AC_PACKAGE_VERSION]) configured by $[0], generated by m4_PACKAGE_STRING. Copyright (C) 2011 Free Software Foundation, Inc. This config.lt script is free software; the Free Software Foundation gives unlimited permision to copy, distribute and modify it." while test 0 != $[#] do case $[1] in --version | --v* | -V ) echo "$lt_cl_version"; exit 0 ;; --help | --h* | -h ) echo "$lt_cl_help"; exit 0 ;; --debug | --d* | -d ) debug=: ;; --quiet | --q* | --silent | --s* | -q ) lt_cl_silent=: ;; -*) AC_MSG_ERROR([unrecognized option: $[1] Try '$[0] --help' for more information.]) ;; *) AC_MSG_ERROR([unrecognized argument: $[1] Try '$[0] --help' for more information.]) ;; esac shift done if $lt_cl_silent; then exec AS_MESSAGE_FD>/dev/null fi _LTEOF cat >>"$CONFIG_LT" <<_LTEOF _LT_OUTPUT_LIBTOOL_COMMANDS_INIT _LTEOF cat >>"$CONFIG_LT" <<\_LTEOF AC_MSG_NOTICE([creating $ofile]) _LT_OUTPUT_LIBTOOL_COMMANDS AS_EXIT(0) _LTEOF chmod +x "$CONFIG_LT" # configure is writing to config.log, but config.lt does its own redirection, # appending to config.log, which fails on DOS, as config.log is still kept # open by configure. 
Here we exec the FD to /dev/null, effectively closing # config.log, so it can be properly (re)opened and appended to by config.lt. lt_cl_success=: test yes = "$silent" && lt_config_lt_args="$lt_config_lt_args --quiet" exec AS_MESSAGE_LOG_FD>/dev/null $SHELL "$CONFIG_LT" $lt_config_lt_args || lt_cl_success=false exec AS_MESSAGE_LOG_FD>>config.log $lt_cl_success || AS_EXIT(1) ])# LT_OUTPUT # _LT_CONFIG(TAG) # --------------- # If TAG is the built-in tag, create an initial libtool script with a # default configuration from the untagged config vars. Otherwise add code # to config.status for appending the configuration named by TAG from the # matching tagged config vars. m4_defun([_LT_CONFIG], [m4_require([_LT_FILEUTILS_DEFAULTS])dnl _LT_CONFIG_SAVE_COMMANDS([ m4_define([_LT_TAG], m4_if([$1], [], [C], [$1]))dnl m4_if(_LT_TAG, [C], [ # See if we are running on zsh, and set the options that allow our # commands through without removal of \ escapes. if test -n "${ZSH_VERSION+set}"; then setopt NO_GLOB_SUBST fi cfgfile=${ofile}T trap "$RM \"$cfgfile\"; exit 1" 1 2 15 $RM "$cfgfile" cat <<_LT_EOF >> "$cfgfile" #! $SHELL # Generated automatically by $as_me ($PACKAGE) $VERSION # NOTE: Changes made to this file will be lost: look at ltmain.sh. # Provide generalized library-building support services. # Written by Gordon Matzigkeit, 1996 _LT_COPYING _LT_LIBTOOL_TAGS # Configured defaults for sys_lib_dlsearch_path munging. : \${LT_SYS_LIBRARY_PATH="$configure_time_lt_sys_library_path"} # ### BEGIN LIBTOOL CONFIG _LT_LIBTOOL_CONFIG_VARS _LT_LIBTOOL_TAG_VARS # ### END LIBTOOL CONFIG _LT_EOF cat <<'_LT_EOF' >> "$cfgfile" # ### BEGIN FUNCTIONS SHARED WITH CONFIGURE _LT_PREPARE_MUNGE_PATH_LIST _LT_PREPARE_CC_BASENAME # ### END FUNCTIONS SHARED WITH CONFIGURE _LT_EOF case $host_os in aix3*) cat <<\_LT_EOF >> "$cfgfile" # AIX sometimes has problems with the GCC collect2 program. For some # reason, if we set the COLLECT_NAMES environment variable, the problems # vanish in a puff of smoke. 
if test set != "${COLLECT_NAMES+set}"; then COLLECT_NAMES= export COLLECT_NAMES fi _LT_EOF ;; esac _LT_PROG_LTMAIN # We use sed instead of cat because bash on DJGPP gets confused if # if finds mixed CR/LF and LF-only lines. Since sed operates in # text mode, it properly converts lines to CR/LF. This bash problem # is reportedly fixed, but why not run on old versions too? sed '$q' "$ltmain" >> "$cfgfile" \ || (rm -f "$cfgfile"; exit 1) mv -f "$cfgfile" "$ofile" || (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") chmod +x "$ofile" ], [cat <<_LT_EOF >> "$ofile" dnl Unfortunately we have to use $1 here, since _LT_TAG is not expanded dnl in a comment (ie after a #). # ### BEGIN LIBTOOL TAG CONFIG: $1 _LT_LIBTOOL_TAG_VARS(_LT_TAG) # ### END LIBTOOL TAG CONFIG: $1 _LT_EOF ])dnl /m4_if ], [m4_if([$1], [], [ PACKAGE='$PACKAGE' VERSION='$VERSION' RM='$RM' ofile='$ofile'], []) ])dnl /_LT_CONFIG_SAVE_COMMANDS ])# _LT_CONFIG # LT_SUPPORTED_TAG(TAG) # --------------------- # Trace this macro to discover what tags are supported by the libtool # --tag option, using: # autoconf --trace 'LT_SUPPORTED_TAG:$1' AC_DEFUN([LT_SUPPORTED_TAG], []) # C support is built-in for now m4_define([_LT_LANG_C_enabled], []) m4_define([_LT_TAGS], []) # LT_LANG(LANG) # ------------- # Enable libtool support for the given language if not already enabled. 
AC_DEFUN([LT_LANG], [AC_BEFORE([$0], [LT_OUTPUT])dnl m4_case([$1], [C], [_LT_LANG(C)], [C++], [_LT_LANG(CXX)], [Go], [_LT_LANG(GO)], [Java], [_LT_LANG(GCJ)], [Fortran 77], [_LT_LANG(F77)], [Fortran], [_LT_LANG(FC)], [Windows Resource], [_LT_LANG(RC)], [m4_ifdef([_LT_LANG_]$1[_CONFIG], [_LT_LANG($1)], [m4_fatal([$0: unsupported language: "$1"])])])dnl ])# LT_LANG # _LT_LANG(LANGNAME) # ------------------ m4_defun([_LT_LANG], [m4_ifdef([_LT_LANG_]$1[_enabled], [], [LT_SUPPORTED_TAG([$1])dnl m4_append([_LT_TAGS], [$1 ])dnl m4_define([_LT_LANG_]$1[_enabled], [])dnl _LT_LANG_$1_CONFIG($1)])dnl ])# _LT_LANG m4_ifndef([AC_PROG_GO], [ # NOTE: This macro has been submitted for inclusion into # # GNU Autoconf as AC_PROG_GO. When it is available in # # a released version of Autoconf we should remove this # # macro and use it instead. # m4_defun([AC_PROG_GO], [AC_LANG_PUSH(Go)dnl AC_ARG_VAR([GOC], [Go compiler command])dnl AC_ARG_VAR([GOFLAGS], [Go compiler flags])dnl _AC_ARG_VAR_LDFLAGS()dnl AC_CHECK_TOOL(GOC, gccgo) if test -z "$GOC"; then if test -n "$ac_tool_prefix"; then AC_CHECK_PROG(GOC, [${ac_tool_prefix}gccgo], [${ac_tool_prefix}gccgo]) fi fi if test -z "$GOC"; then AC_CHECK_PROG(GOC, gccgo, gccgo, false) fi ])#m4_defun ])#m4_ifndef # _LT_LANG_DEFAULT_CONFIG # ----------------------- m4_defun([_LT_LANG_DEFAULT_CONFIG], [AC_PROVIDE_IFELSE([AC_PROG_CXX], [LT_LANG(CXX)], [m4_define([AC_PROG_CXX], defn([AC_PROG_CXX])[LT_LANG(CXX)])]) AC_PROVIDE_IFELSE([AC_PROG_F77], [LT_LANG(F77)], [m4_define([AC_PROG_F77], defn([AC_PROG_F77])[LT_LANG(F77)])]) AC_PROVIDE_IFELSE([AC_PROG_FC], [LT_LANG(FC)], [m4_define([AC_PROG_FC], defn([AC_PROG_FC])[LT_LANG(FC)])]) dnl The call to [A][M_PROG_GCJ] is quoted like that to stop aclocal dnl pulling things in needlessly. 
AC_PROVIDE_IFELSE([AC_PROG_GCJ], [LT_LANG(GCJ)], [AC_PROVIDE_IFELSE([A][M_PROG_GCJ], [LT_LANG(GCJ)], [AC_PROVIDE_IFELSE([LT_PROG_GCJ], [LT_LANG(GCJ)], [m4_ifdef([AC_PROG_GCJ], [m4_define([AC_PROG_GCJ], defn([AC_PROG_GCJ])[LT_LANG(GCJ)])]) m4_ifdef([A][M_PROG_GCJ], [m4_define([A][M_PROG_GCJ], defn([A][M_PROG_GCJ])[LT_LANG(GCJ)])]) m4_ifdef([LT_PROG_GCJ], [m4_define([LT_PROG_GCJ], defn([LT_PROG_GCJ])[LT_LANG(GCJ)])])])])]) AC_PROVIDE_IFELSE([AC_PROG_GO], [LT_LANG(GO)], [m4_define([AC_PROG_GO], defn([AC_PROG_GO])[LT_LANG(GO)])]) AC_PROVIDE_IFELSE([LT_PROG_RC], [LT_LANG(RC)], [m4_define([LT_PROG_RC], defn([LT_PROG_RC])[LT_LANG(RC)])]) ])# _LT_LANG_DEFAULT_CONFIG # Obsolete macros: AU_DEFUN([AC_LIBTOOL_CXX], [LT_LANG(C++)]) AU_DEFUN([AC_LIBTOOL_F77], [LT_LANG(Fortran 77)]) AU_DEFUN([AC_LIBTOOL_FC], [LT_LANG(Fortran)]) AU_DEFUN([AC_LIBTOOL_GCJ], [LT_LANG(Java)]) AU_DEFUN([AC_LIBTOOL_RC], [LT_LANG(Windows Resource)]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_LIBTOOL_CXX], []) dnl AC_DEFUN([AC_LIBTOOL_F77], []) dnl AC_DEFUN([AC_LIBTOOL_FC], []) dnl AC_DEFUN([AC_LIBTOOL_GCJ], []) dnl AC_DEFUN([AC_LIBTOOL_RC], []) # _LT_TAG_COMPILER # ---------------- m4_defun([_LT_TAG_COMPILER], [AC_REQUIRE([AC_PROG_CC])dnl _LT_DECL([LTCC], [CC], [1], [A C compiler])dnl _LT_DECL([LTCFLAGS], [CFLAGS], [1], [LTCC compiler flags])dnl _LT_TAGDECL([CC], [compiler], [1], [A language specific compiler])dnl _LT_TAGDECL([with_gcc], [GCC], [0], [Is the compiler the GNU compiler?])dnl # If no C compiler was specified, use CC. LTCC=${LTCC-"$CC"} # If no C compiler flags were specified, use CFLAGS. LTCFLAGS=${LTCFLAGS-"$CFLAGS"} # Allow CC to be a program name with arguments. compiler=$CC ])# _LT_TAG_COMPILER # _LT_COMPILER_BOILERPLATE # ------------------------ # Check for compiler boilerplate output or warnings with # the simple compiler test code. 
m4_defun([_LT_COMPILER_BOILERPLATE], [m4_require([_LT_DECL_SED])dnl ac_outfile=conftest.$ac_objext echo "$lt_simple_compile_test_code" >conftest.$ac_ext eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err _lt_compiler_boilerplate=`cat conftest.err` $RM conftest* ])# _LT_COMPILER_BOILERPLATE # _LT_LINKER_BOILERPLATE # ---------------------- # Check for linker boilerplate output or warnings with # the simple link test code. m4_defun([_LT_LINKER_BOILERPLATE], [m4_require([_LT_DECL_SED])dnl ac_outfile=conftest.$ac_objext echo "$lt_simple_link_test_code" >conftest.$ac_ext eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err _lt_linker_boilerplate=`cat conftest.err` $RM -r conftest* ])# _LT_LINKER_BOILERPLATE # _LT_REQUIRED_DARWIN_CHECKS # ------------------------- m4_defun_once([_LT_REQUIRED_DARWIN_CHECKS],[ case $host_os in rhapsody* | darwin*) AC_CHECK_TOOL([DSYMUTIL], [dsymutil], [:]) AC_CHECK_TOOL([NMEDIT], [nmedit], [:]) AC_CHECK_TOOL([LIPO], [lipo], [:]) AC_CHECK_TOOL([OTOOL], [otool], [:]) AC_CHECK_TOOL([OTOOL64], [otool64], [:]) _LT_DECL([], [DSYMUTIL], [1], [Tool to manipulate archived DWARF debug symbol files on Mac OS X]) _LT_DECL([], [NMEDIT], [1], [Tool to change global to local symbols on Mac OS X]) _LT_DECL([], [LIPO], [1], [Tool to manipulate fat objects and archives on Mac OS X]) _LT_DECL([], [OTOOL], [1], [ldd/readelf like tool for Mach-O binaries on Mac OS X]) _LT_DECL([], [OTOOL64], [1], [ldd/readelf like tool for 64 bit Mach-O binaries on Mac OS X 10.4]) AC_CACHE_CHECK([for -single_module linker flag],[lt_cv_apple_cc_single_mod], [lt_cv_apple_cc_single_mod=no if test -z "$LT_MULTI_MODULE"; then # By default we will add the -single_module flag. You can override # by either setting the environment variable LT_MULTI_MODULE # non-empty at configure time, or by adding -multi_module to the # link flags. 
rm -rf libconftest.dylib* echo "int foo(void){return 1;}" > conftest.c echo "$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \ -dynamiclib -Wl,-single_module conftest.c" >&AS_MESSAGE_LOG_FD $LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \ -dynamiclib -Wl,-single_module conftest.c 2>conftest.err _lt_result=$? # If there is a non-empty error log, and "single_module" # appears in it, assume the flag caused a linker warning if test -s conftest.err && $GREP single_module conftest.err; then cat conftest.err >&AS_MESSAGE_LOG_FD # Otherwise, if the output was created with a 0 exit code from # the compiler, it worked. elif test -f libconftest.dylib && test 0 = "$_lt_result"; then lt_cv_apple_cc_single_mod=yes else cat conftest.err >&AS_MESSAGE_LOG_FD fi rm -rf libconftest.dylib* rm -f conftest.* fi]) AC_CACHE_CHECK([for -exported_symbols_list linker flag], [lt_cv_ld_exported_symbols_list], [lt_cv_ld_exported_symbols_list=no save_LDFLAGS=$LDFLAGS echo "_main" > conftest.sym LDFLAGS="$LDFLAGS -Wl,-exported_symbols_list,conftest.sym" AC_LINK_IFELSE([AC_LANG_PROGRAM([],[])], [lt_cv_ld_exported_symbols_list=yes], [lt_cv_ld_exported_symbols_list=no]) LDFLAGS=$save_LDFLAGS ]) AC_CACHE_CHECK([for -force_load linker flag],[lt_cv_ld_force_load], [lt_cv_ld_force_load=no cat > conftest.c << _LT_EOF int forced_loaded() { return 2;} _LT_EOF echo "$LTCC $LTCFLAGS -c -o conftest.o conftest.c" >&AS_MESSAGE_LOG_FD $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&AS_MESSAGE_LOG_FD echo "$AR cru libconftest.a conftest.o" >&AS_MESSAGE_LOG_FD $AR cru libconftest.a conftest.o 2>&AS_MESSAGE_LOG_FD echo "$RANLIB libconftest.a" >&AS_MESSAGE_LOG_FD $RANLIB libconftest.a 2>&AS_MESSAGE_LOG_FD cat > conftest.c << _LT_EOF int main() { return 0;} _LT_EOF echo "$LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a" >&AS_MESSAGE_LOG_FD $LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a 2>conftest.err _lt_result=$? 
if test -s conftest.err && $GREP force_load conftest.err; then cat conftest.err >&AS_MESSAGE_LOG_FD elif test -f conftest && test 0 = "$_lt_result" && $GREP forced_load conftest >/dev/null 2>&1; then lt_cv_ld_force_load=yes else cat conftest.err >&AS_MESSAGE_LOG_FD fi rm -f conftest.err libconftest.a conftest conftest.c rm -rf conftest.dSYM ]) case $host_os in rhapsody* | darwin1.[[012]]) _lt_dar_allow_undefined='$wl-undefined ${wl}suppress' ;; darwin1.*) _lt_dar_allow_undefined='$wl-flat_namespace $wl-undefined ${wl}suppress' ;; darwin*) # darwin 5.x on # if running on 10.5 or later, the deployment target defaults # to the OS version, if on x86, and 10.4, the deployment # target defaults to 10.4. Don't you love it? case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in 10.0,*86*-darwin8*|10.0,*-darwin[[91]]*) _lt_dar_allow_undefined='$wl-undefined ${wl}dynamic_lookup' ;; 10.[[012]][[,.]]*) _lt_dar_allow_undefined='$wl-flat_namespace $wl-undefined ${wl}suppress' ;; 10.*) _lt_dar_allow_undefined='$wl-undefined ${wl}dynamic_lookup' ;; esac ;; esac if test yes = "$lt_cv_apple_cc_single_mod"; then _lt_dar_single_mod='$single_module' fi if test yes = "$lt_cv_ld_exported_symbols_list"; then _lt_dar_export_syms=' $wl-exported_symbols_list,$output_objdir/$libname-symbols.expsym' else _lt_dar_export_syms='~$NMEDIT -s $output_objdir/$libname-symbols.expsym $lib' fi if test : != "$DSYMUTIL" && test no = "$lt_cv_ld_force_load"; then _lt_dsymutil='~$DSYMUTIL $lib || :' else _lt_dsymutil= fi ;; esac ]) # _LT_DARWIN_LINKER_FEATURES([TAG]) # --------------------------------- # Checks for linker and compiler features on darwin m4_defun([_LT_DARWIN_LINKER_FEATURES], [ m4_require([_LT_REQUIRED_DARWIN_CHECKS]) _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_automatic, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported if test yes = "$lt_cv_ld_force_load"; then _LT_TAGVAR(whole_archive_flag_spec, $1)='`for conv in $convenience\"\"; do test 
-n \"$conv\" && new_convenience=\"$new_convenience $wl-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`' m4_case([$1], [F77], [_LT_TAGVAR(compiler_needs_object, $1)=yes], [FC], [_LT_TAGVAR(compiler_needs_object, $1)=yes]) else _LT_TAGVAR(whole_archive_flag_spec, $1)='' fi _LT_TAGVAR(link_all_deplibs, $1)=yes _LT_TAGVAR(allow_undefined_flag, $1)=$_lt_dar_allow_undefined case $cc_basename in ifort*|nagfor*) _lt_dar_can_shared=yes ;; *) _lt_dar_can_shared=$GCC ;; esac if test yes = "$_lt_dar_can_shared"; then output_verbose_link_cmd=func_echo_all _LT_TAGVAR(archive_cmds, $1)="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod$_lt_dsymutil" _LT_TAGVAR(module_cmds, $1)="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags$_lt_dsymutil" _LT_TAGVAR(archive_expsym_cmds, $1)="sed 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod$_lt_dar_export_syms$_lt_dsymutil" _LT_TAGVAR(module_expsym_cmds, $1)="sed -e 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags$_lt_dar_export_syms$_lt_dsymutil" m4_if([$1], [CXX], [ if test yes != "$lt_cv_apple_cc_single_mod"; then _LT_TAGVAR(archive_cmds, $1)="\$CC -r -keep_private_externs -nostdlib -o \$lib-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$lib-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring$_lt_dsymutil" _LT_TAGVAR(archive_expsym_cmds, $1)="sed 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC -r -keep_private_externs -nostdlib -o \$lib-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$lib-master.o \$deplibs \$compiler_flags 
-install_name \$rpath/\$soname \$verstring$_lt_dar_export_syms$_lt_dsymutil" fi ],[]) else _LT_TAGVAR(ld_shlibs, $1)=no fi ]) # _LT_SYS_MODULE_PATH_AIX([TAGNAME]) # ---------------------------------- # Links a minimal program and checks the executable # for the system default hardcoded library path. In most cases, # this is /usr/lib:/lib, but when the MPI compilers are used # the location of the communication and MPI libs are included too. # If we don't find anything, use the default library path according # to the aix ld manual. # Store the results from the different compilers for each TAGNAME. # Allow to override them for all tags through lt_cv_aix_libpath. m4_defun([_LT_SYS_MODULE_PATH_AIX], [m4_require([_LT_DECL_SED])dnl if test set = "${lt_cv_aix_libpath+set}"; then aix_libpath=$lt_cv_aix_libpath else AC_CACHE_VAL([_LT_TAGVAR([lt_cv_aix_libpath_], [$1])], [AC_LINK_IFELSE([AC_LANG_PROGRAM],[ lt_aix_libpath_sed='[ /Import File Strings/,/^$/ { /^0/ { s/^0 *\([^ ]*\) *$/\1/ p } }]' _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` # Check for a 64-bit object if we didn't find anything. if test -z "$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])"; then _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` fi],[]) if test -z "$_LT_TAGVAR([lt_cv_aix_libpath_], [$1])"; then _LT_TAGVAR([lt_cv_aix_libpath_], [$1])=/usr/lib:/lib fi ]) aix_libpath=$_LT_TAGVAR([lt_cv_aix_libpath_], [$1]) fi ])# _LT_SYS_MODULE_PATH_AIX # _LT_SHELL_INIT(ARG) # ------------------- m4_define([_LT_SHELL_INIT], [m4_divert_text([M4SH-INIT], [$1 ])])# _LT_SHELL_INIT # _LT_PROG_ECHO_BACKSLASH # ----------------------- # Find how we can fake an echo command that does not interpret backslash. 
# In particular, with Autoconf 2.60 or later we add some code to the start # of the generated configure script that will find a shell with a builtin # printf (that we can use as an echo command). m4_defun([_LT_PROG_ECHO_BACKSLASH], [ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO AC_MSG_CHECKING([how to print strings]) # Test print first, because it will be a builtin if present. if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \ test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then ECHO='print -r --' elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then ECHO='printf %s\n' else # Use this function as a fallback that always works. func_fallback_echo () { eval 'cat <<_LTECHO_EOF $[]1 _LTECHO_EOF' } ECHO='func_fallback_echo' fi # func_echo_all arg... # Invoke $ECHO with all args, space-separated. func_echo_all () { $ECHO "$*" } case $ECHO in printf*) AC_MSG_RESULT([printf]) ;; print*) AC_MSG_RESULT([print -r]) ;; *) AC_MSG_RESULT([cat]) ;; esac m4_ifdef([_AS_DETECT_SUGGESTED], [_AS_DETECT_SUGGESTED([ test -n "${ZSH_VERSION+set}${BASH_VERSION+set}" || ( ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO PATH=/empty FPATH=/empty; export PATH FPATH test "X`printf %s $ECHO`" = "X$ECHO" \ || test "X`print -r -- $ECHO`" = "X$ECHO" )])]) _LT_DECL([], [SHELL], [1], [Shell to use when invoking shell scripts]) _LT_DECL([], [ECHO], [1], [An echo program that protects backslashes]) ])# _LT_PROG_ECHO_BACKSLASH # _LT_WITH_SYSROOT # ---------------- AC_DEFUN([_LT_WITH_SYSROOT], [AC_MSG_CHECKING([for sysroot]) AC_ARG_WITH([sysroot], [AS_HELP_STRING([--with-sysroot@<:@=DIR@:>@], [Search for dependent libraries within DIR (or the compiler's sysroot if not specified).])], [], [with_sysroot=no]) dnl lt_sysroot will always 
be passed unquoted. We quote it here dnl in case the user passed a directory name. lt_sysroot= case $with_sysroot in #( yes) if test yes = "$GCC"; then lt_sysroot=`$CC --print-sysroot 2>/dev/null` fi ;; #( /*) lt_sysroot=`echo "$with_sysroot" | sed -e "$sed_quote_subst"` ;; #( no|'') ;; #( *) AC_MSG_RESULT([$with_sysroot]) AC_MSG_ERROR([The sysroot must be an absolute path.]) ;; esac AC_MSG_RESULT([${lt_sysroot:-no}]) _LT_DECL([], [lt_sysroot], [0], [The root where to search for ]dnl [dependent libraries, and where our libraries should be installed.])]) # _LT_ENABLE_LOCK # --------------- m4_defun([_LT_ENABLE_LOCK], [AC_ARG_ENABLE([libtool-lock], [AS_HELP_STRING([--disable-libtool-lock], [avoid locking (might break parallel builds)])]) test no = "$enable_libtool_lock" || enable_libtool_lock=yes # Some flags need to be propagated to the compiler or linker for good # libtool support. case $host in ia64-*-hpux*) # Find out what ABI is being produced by ac_compile, and set mode # options accordingly. echo 'int i;' > conftest.$ac_ext if AC_TRY_EVAL(ac_compile); then case `/usr/bin/file conftest.$ac_objext` in *ELF-32*) HPUX_IA64_MODE=32 ;; *ELF-64*) HPUX_IA64_MODE=64 ;; esac fi rm -rf conftest* ;; *-*-irix6*) # Find out what ABI is being produced by ac_compile, and set linker # options accordingly. echo '[#]line '$LINENO' "configure"' > conftest.$ac_ext if AC_TRY_EVAL(ac_compile); then if test yes = "$lt_cv_prog_gnu_ld"; then case `/usr/bin/file conftest.$ac_objext` in *32-bit*) LD="${LD-ld} -melf32bsmip" ;; *N32*) LD="${LD-ld} -melf32bmipn32" ;; *64-bit*) LD="${LD-ld} -melf64bmip" ;; esac else case `/usr/bin/file conftest.$ac_objext` in *32-bit*) LD="${LD-ld} -32" ;; *N32*) LD="${LD-ld} -n32" ;; *64-bit*) LD="${LD-ld} -64" ;; esac fi fi rm -rf conftest* ;; mips64*-*linux*) # Find out what ABI is being produced by ac_compile, and set linker # options accordingly. 
echo '[#]line '$LINENO' "configure"' > conftest.$ac_ext if AC_TRY_EVAL(ac_compile); then emul=elf case `/usr/bin/file conftest.$ac_objext` in *32-bit*) emul="${emul}32" ;; *64-bit*) emul="${emul}64" ;; esac case `/usr/bin/file conftest.$ac_objext` in *MSB*) emul="${emul}btsmip" ;; *LSB*) emul="${emul}ltsmip" ;; esac case `/usr/bin/file conftest.$ac_objext` in *N32*) emul="${emul}n32" ;; esac LD="${LD-ld} -m $emul" fi rm -rf conftest* ;; x86_64-*kfreebsd*-gnu|x86_64-*linux*|powerpc*-*linux*| \ s390*-*linux*|s390*-*tpf*|sparc*-*linux*) # Find out what ABI is being produced by ac_compile, and set linker # options accordingly. Note that the listed cases only cover the # situations where additional linker options are needed (such as when # doing 32-bit compilation for a host where ld defaults to 64-bit, or # vice versa); the common cases where no linker options are needed do # not appear in the list. echo 'int i;' > conftest.$ac_ext if AC_TRY_EVAL(ac_compile); then case `/usr/bin/file conftest.o` in *32-bit*) case $host in x86_64-*kfreebsd*-gnu) LD="${LD-ld} -m elf_i386_fbsd" ;; x86_64-*linux*) case `/usr/bin/file conftest.o` in *x86-64*) LD="${LD-ld} -m elf32_x86_64" ;; *) LD="${LD-ld} -m elf_i386" ;; esac ;; powerpc64le-*linux*) LD="${LD-ld} -m elf32lppclinux" ;; powerpc64-*linux*) LD="${LD-ld} -m elf32ppclinux" ;; s390x-*linux*) LD="${LD-ld} -m elf_s390" ;; sparc64-*linux*) LD="${LD-ld} -m elf32_sparc" ;; esac ;; *64-bit*) case $host in x86_64-*kfreebsd*-gnu) LD="${LD-ld} -m elf_x86_64_fbsd" ;; x86_64-*linux*) LD="${LD-ld} -m elf_x86_64" ;; powerpcle-*linux*) LD="${LD-ld} -m elf64lppc" ;; powerpc-*linux*) LD="${LD-ld} -m elf64ppc" ;; s390*-*linux*|s390*-*tpf*) LD="${LD-ld} -m elf64_s390" ;; sparc*-*linux*) LD="${LD-ld} -m elf64_sparc" ;; esac ;; esac fi rm -rf conftest* ;; *-*-sco3.2v5*) # On SCO OpenServer 5, we need -belf to get full-featured binaries. 
SAVE_CFLAGS=$CFLAGS CFLAGS="$CFLAGS -belf" AC_CACHE_CHECK([whether the C compiler needs -belf], lt_cv_cc_needs_belf, [AC_LANG_PUSH(C) AC_LINK_IFELSE([AC_LANG_PROGRAM([[]],[[]])],[lt_cv_cc_needs_belf=yes],[lt_cv_cc_needs_belf=no]) AC_LANG_POP]) if test yes != "$lt_cv_cc_needs_belf"; then # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf CFLAGS=$SAVE_CFLAGS fi ;; *-*solaris*) # Find out what ABI is being produced by ac_compile, and set linker # options accordingly. echo 'int i;' > conftest.$ac_ext if AC_TRY_EVAL(ac_compile); then case `/usr/bin/file conftest.o` in *64-bit*) case $lt_cv_prog_gnu_ld in yes*) case $host in i?86-*-solaris*|x86_64-*-solaris*) LD="${LD-ld} -m elf_x86_64" ;; sparc*-*-solaris*) LD="${LD-ld} -m elf64_sparc" ;; esac # GNU ld 2.21 introduced _sol2 emulations. Use them if available. if ${LD-ld} -V | grep _sol2 >/dev/null 2>&1; then LD=${LD-ld}_sol2 fi ;; *) if ${LD-ld} -64 -r -o conftest2.o conftest.o >/dev/null 2>&1; then LD="${LD-ld} -64" fi ;; esac ;; esac fi rm -rf conftest* ;; esac need_locks=$enable_libtool_lock ])# _LT_ENABLE_LOCK # _LT_PROG_AR # ----------- m4_defun([_LT_PROG_AR], [AC_CHECK_TOOLS(AR, [ar], false) : ${AR=ar} : ${AR_FLAGS=cru} _LT_DECL([], [AR], [1], [The archiver]) _LT_DECL([], [AR_FLAGS], [1], [Flags to create an archive]) AC_CACHE_CHECK([for archiver @FILE support], [lt_cv_ar_at_file], [lt_cv_ar_at_file=no AC_COMPILE_IFELSE([AC_LANG_PROGRAM], [echo conftest.$ac_objext > conftest.lst lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&AS_MESSAGE_LOG_FD' AC_TRY_EVAL([lt_ar_try]) if test 0 -eq "$ac_status"; then # Ensure the archiver fails upon bogus file names. 
rm -f conftest.$ac_objext libconftest.a AC_TRY_EVAL([lt_ar_try]) if test 0 -ne "$ac_status"; then lt_cv_ar_at_file=@ fi fi rm -f conftest.* libconftest.a ]) ]) if test no = "$lt_cv_ar_at_file"; then archiver_list_spec= else archiver_list_spec=$lt_cv_ar_at_file fi _LT_DECL([], [archiver_list_spec], [1], [How to feed a file listing to the archiver]) ])# _LT_PROG_AR # _LT_CMD_OLD_ARCHIVE # ------------------- m4_defun([_LT_CMD_OLD_ARCHIVE], [_LT_PROG_AR AC_CHECK_TOOL(STRIP, strip, :) test -z "$STRIP" && STRIP=: _LT_DECL([], [STRIP], [1], [A symbol stripping program]) AC_CHECK_TOOL(RANLIB, ranlib, :) test -z "$RANLIB" && RANLIB=: _LT_DECL([], [RANLIB], [1], [Commands used to install an old-style archive]) # Determine commands to create old-style static archives. old_archive_cmds='$AR $AR_FLAGS $oldlib$oldobjs' old_postinstall_cmds='chmod 644 $oldlib' old_postuninstall_cmds= if test -n "$RANLIB"; then case $host_os in bitrig* | openbsd*) old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB -t \$tool_oldlib" ;; *) old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB \$tool_oldlib" ;; esac old_archive_cmds="$old_archive_cmds~\$RANLIB \$tool_oldlib" fi case $host_os in darwin*) lock_old_archive_extraction=yes ;; *) lock_old_archive_extraction=no ;; esac _LT_DECL([], [old_postinstall_cmds], [2]) _LT_DECL([], [old_postuninstall_cmds], [2]) _LT_TAGDECL([], [old_archive_cmds], [2], [Commands used to build an old-style archive]) _LT_DECL([], [lock_old_archive_extraction], [0], [Whether to use a lock for old archive extraction]) ])# _LT_CMD_OLD_ARCHIVE # _LT_COMPILER_OPTION(MESSAGE, VARIABLE-NAME, FLAGS, # [OUTPUT-FILE], [ACTION-SUCCESS], [ACTION-FAILURE]) # ---------------------------------------------------------------- # Check whether the given compiler option works AC_DEFUN([_LT_COMPILER_OPTION], [m4_require([_LT_FILEUTILS_DEFAULTS])dnl m4_require([_LT_DECL_SED])dnl AC_CACHE_CHECK([$1], [$2], [$2=no m4_if([$4], , [ac_outfile=conftest.$ac_objext], [ac_outfile=$4]) echo 
"$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="$3" ## exclude from sc_useless_quotes_in_assignment # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. # The option is referenced via a variable to avoid confusing sed. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [[^ ]]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&AS_MESSAGE_LOG_FD) (eval "$lt_compile" 2>conftest.err) ac_status=$? cat conftest.err >&AS_MESSAGE_LOG_FD echo "$as_me:$LINENO: \$? = $ac_status" >&AS_MESSAGE_LOG_FD if (exit $ac_status) && test -s "$ac_outfile"; then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings other than the usual output. $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 if test ! 
-s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then $2=yes fi fi $RM conftest* ]) if test yes = "[$]$2"; then m4_if([$5], , :, [$5]) else m4_if([$6], , :, [$6]) fi ])# _LT_COMPILER_OPTION # Old name: AU_ALIAS([AC_LIBTOOL_COMPILER_OPTION], [_LT_COMPILER_OPTION]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_LIBTOOL_COMPILER_OPTION], []) # _LT_LINKER_OPTION(MESSAGE, VARIABLE-NAME, FLAGS, # [ACTION-SUCCESS], [ACTION-FAILURE]) # ---------------------------------------------------- # Check whether the given linker option works AC_DEFUN([_LT_LINKER_OPTION], [m4_require([_LT_FILEUTILS_DEFAULTS])dnl m4_require([_LT_DECL_SED])dnl AC_CACHE_CHECK([$1], [$2], [$2=no save_LDFLAGS=$LDFLAGS LDFLAGS="$LDFLAGS $3" echo "$lt_simple_link_test_code" > conftest.$ac_ext if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then # The linker can only warn and ignore the option if not recognized # So say no if there are warnings if test -s conftest.err; then # Append any errors to the config.log. 
cat conftest.err 1>&AS_MESSAGE_LOG_FD $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 if diff conftest.exp conftest.er2 >/dev/null; then $2=yes fi else $2=yes fi fi $RM -r conftest* LDFLAGS=$save_LDFLAGS ]) if test yes = "[$]$2"; then m4_if([$4], , :, [$4]) else m4_if([$5], , :, [$5]) fi ])# _LT_LINKER_OPTION # Old name: AU_ALIAS([AC_LIBTOOL_LINKER_OPTION], [_LT_LINKER_OPTION]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_LIBTOOL_LINKER_OPTION], []) # LT_CMD_MAX_LEN #--------------- AC_DEFUN([LT_CMD_MAX_LEN], [AC_REQUIRE([AC_CANONICAL_HOST])dnl # find the maximum length of command line arguments AC_MSG_CHECKING([the maximum length of command line arguments]) AC_CACHE_VAL([lt_cv_sys_max_cmd_len], [dnl i=0 teststring=ABCD case $build_os in msdosdjgpp*) # On DJGPP, this test can blow up pretty badly due to problems in libc # (any single argument exceeding 2000 bytes causes a buffer overrun # during glob expansion). Even if it were fixed, the result of this # check would be larger than it should be. lt_cv_sys_max_cmd_len=12288; # 12K is about right ;; gnu*) # Under GNU Hurd, this test is not required because there is # no limit to the length of command line arguments. # Libtool will interpret -1 as no limit whatsoever lt_cv_sys_max_cmd_len=-1; ;; cygwin* | mingw* | cegcc*) # On Win9x/ME, this test blows up -- it succeeds, but takes # about 5 minutes as the teststring grows exponentially. # Worse, since 9x/ME are not pre-emptively multitasking, # you end up with a "frozen" computer, even though with patience # the test eventually succeeds (with a max line length of 256k). # Instead, let's just punt: use the minimum linelength reported by # all of the supported platforms: 8192 (on NT/2K/XP). lt_cv_sys_max_cmd_len=8192; ;; mint*) # On MiNT this can take a long time and run out of memory. lt_cv_sys_max_cmd_len=8192; ;; amigaos*) # On AmigaOS with pdksh, this test takes hours, literally. 
# So we just punt and use a minimum line length of 8192. lt_cv_sys_max_cmd_len=8192; ;; bitrig* | darwin* | dragonfly* | freebsd* | netbsd* | openbsd*) # This has been around since 386BSD, at least. Likely further. if test -x /sbin/sysctl; then lt_cv_sys_max_cmd_len=`/sbin/sysctl -n kern.argmax` elif test -x /usr/sbin/sysctl; then lt_cv_sys_max_cmd_len=`/usr/sbin/sysctl -n kern.argmax` else lt_cv_sys_max_cmd_len=65536 # usable default for all BSDs fi # And add a safety zone lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4` lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3` ;; interix*) # We know the value 262144 and hardcode it with a safety zone (like BSD) lt_cv_sys_max_cmd_len=196608 ;; os2*) # The test takes a long time on OS/2. lt_cv_sys_max_cmd_len=8192 ;; osf*) # Dr. Hans Ekkehard Plesser reports seeing a kernel panic running configure # due to this test when exec_disable_arg_limit is 1 on Tru64. It is not # nice to cause kernel panics so lets avoid the loop below. # First set a reasonable default. lt_cv_sys_max_cmd_len=16384 # if test -x /sbin/sysconfig; then case `/sbin/sysconfig -q proc exec_disable_arg_limit` in *1*) lt_cv_sys_max_cmd_len=-1 ;; esac fi ;; sco3.2v5*) lt_cv_sys_max_cmd_len=102400 ;; sysv5* | sco5v6* | sysv4.2uw2*) kargmax=`grep ARG_MAX /etc/conf/cf.d/stune 2>/dev/null` if test -n "$kargmax"; then lt_cv_sys_max_cmd_len=`echo $kargmax | sed 's/.*[[ ]]//'` else lt_cv_sys_max_cmd_len=32768 fi ;; *) lt_cv_sys_max_cmd_len=`(getconf ARG_MAX) 2> /dev/null` if test -n "$lt_cv_sys_max_cmd_len" && \ test undefined != "$lt_cv_sys_max_cmd_len"; then lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4` lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3` else # Make teststring a little bigger before we do anything with it. # a 1K string should be a reasonable start. 
for i in 1 2 3 4 5 6 7 8; do teststring=$teststring$teststring done SHELL=${SHELL-${CONFIG_SHELL-/bin/sh}} # If test is not a shell built-in, we'll probably end up computing a # maximum length that is only half of the actual maximum length, but # we can't tell. while { test X`env echo "$teststring$teststring" 2>/dev/null` \ = "X$teststring$teststring"; } >/dev/null 2>&1 && test 17 != "$i" # 1/2 MB should be enough do i=`expr $i + 1` teststring=$teststring$teststring done # Only check the string length outside the loop. lt_cv_sys_max_cmd_len=`expr "X$teststring" : ".*" 2>&1` teststring= # Add a significant safety factor because C++ compilers can tack on # massive amounts of additional arguments before passing them to the # linker. It appears as though 1/2 is a usable value. lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 2` fi ;; esac ]) if test -n "$lt_cv_sys_max_cmd_len"; then AC_MSG_RESULT($lt_cv_sys_max_cmd_len) else AC_MSG_RESULT(none) fi max_cmd_len=$lt_cv_sys_max_cmd_len _LT_DECL([], [max_cmd_len], [0], [What is the maximum length of a command?]) ])# LT_CMD_MAX_LEN # Old name: AU_ALIAS([AC_LIBTOOL_SYS_MAX_CMD_LEN], [LT_CMD_MAX_LEN]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_LIBTOOL_SYS_MAX_CMD_LEN], []) # _LT_HEADER_DLFCN # ---------------- m4_defun([_LT_HEADER_DLFCN], [AC_CHECK_HEADERS([dlfcn.h], [], [], [AC_INCLUDES_DEFAULT])dnl ])# _LT_HEADER_DLFCN # _LT_TRY_DLOPEN_SELF (ACTION-IF-TRUE, ACTION-IF-TRUE-W-USCORE, # ACTION-IF-FALSE, ACTION-IF-CROSS-COMPILING) # ---------------------------------------------------------------- m4_defun([_LT_TRY_DLOPEN_SELF], [m4_require([_LT_HEADER_DLFCN])dnl if test yes = "$cross_compiling"; then : [$4] else lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 lt_status=$lt_dlunknown cat > conftest.$ac_ext <<_LT_EOF [#line $LINENO "configure" #include "confdefs.h" #if HAVE_DLFCN_H #include #endif #include #ifdef RTLD_GLOBAL # define LT_DLGLOBAL RTLD_GLOBAL #else # ifdef DL_GLOBAL # define LT_DLGLOBAL 
DL_GLOBAL # else # define LT_DLGLOBAL 0 # endif #endif /* We may have to define LT_DLLAZY_OR_NOW in the command line if we find out it does not work in some platform. */ #ifndef LT_DLLAZY_OR_NOW # ifdef RTLD_LAZY # define LT_DLLAZY_OR_NOW RTLD_LAZY # else # ifdef DL_LAZY # define LT_DLLAZY_OR_NOW DL_LAZY # else # ifdef RTLD_NOW # define LT_DLLAZY_OR_NOW RTLD_NOW # else # ifdef DL_NOW # define LT_DLLAZY_OR_NOW DL_NOW # else # define LT_DLLAZY_OR_NOW 0 # endif # endif # endif # endif #endif /* When -fvisibility=hidden is used, assume the code has been annotated correspondingly for the symbols needed. */ #if defined __GNUC__ && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) int fnord () __attribute__((visibility("default"))); #endif int fnord () { return 42; } int main () { void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); int status = $lt_dlunknown; if (self) { if (dlsym (self,"fnord")) status = $lt_dlno_uscore; else { if (dlsym( self,"_fnord")) status = $lt_dlneed_uscore; else puts (dlerror ()); } /* dlclose (self); */ } else puts (dlerror ()); return status; }] _LT_EOF if AC_TRY_EVAL(ac_link) && test -s "conftest$ac_exeext" 2>/dev/null; then (./conftest; exit; ) >&AS_MESSAGE_LOG_FD 2>/dev/null lt_status=$? 
case x$lt_status in x$lt_dlno_uscore) $1 ;; x$lt_dlneed_uscore) $2 ;; x$lt_dlunknown|x*) $3 ;; esac else : # compilation failed $3 fi fi rm -fr conftest* ])# _LT_TRY_DLOPEN_SELF # LT_SYS_DLOPEN_SELF # ------------------ AC_DEFUN([LT_SYS_DLOPEN_SELF], [m4_require([_LT_HEADER_DLFCN])dnl if test yes != "$enable_dlopen"; then enable_dlopen=unknown enable_dlopen_self=unknown enable_dlopen_self_static=unknown else lt_cv_dlopen=no lt_cv_dlopen_libs= case $host_os in beos*) lt_cv_dlopen=load_add_on lt_cv_dlopen_libs= lt_cv_dlopen_self=yes ;; mingw* | pw32* | cegcc*) lt_cv_dlopen=LoadLibrary lt_cv_dlopen_libs= ;; cygwin*) lt_cv_dlopen=dlopen lt_cv_dlopen_libs= ;; darwin*) # if libdl is installed we need to link against it AC_CHECK_LIB([dl], [dlopen], [lt_cv_dlopen=dlopen lt_cv_dlopen_libs=-ldl],[ lt_cv_dlopen=dyld lt_cv_dlopen_libs= lt_cv_dlopen_self=yes ]) ;; tpf*) # Don't try to run any link tests for TPF. We know it's impossible # because TPF is a cross-compiler, and we know how we open DSOs. 
lt_cv_dlopen=dlopen lt_cv_dlopen_libs= lt_cv_dlopen_self=no ;; *) AC_CHECK_FUNC([shl_load], [lt_cv_dlopen=shl_load], [AC_CHECK_LIB([dld], [shl_load], [lt_cv_dlopen=shl_load lt_cv_dlopen_libs=-ldld], [AC_CHECK_FUNC([dlopen], [lt_cv_dlopen=dlopen], [AC_CHECK_LIB([dl], [dlopen], [lt_cv_dlopen=dlopen lt_cv_dlopen_libs=-ldl], [AC_CHECK_LIB([svld], [dlopen], [lt_cv_dlopen=dlopen lt_cv_dlopen_libs=-lsvld], [AC_CHECK_LIB([dld], [dld_link], [lt_cv_dlopen=dld_link lt_cv_dlopen_libs=-ldld]) ]) ]) ]) ]) ]) ;; esac if test no = "$lt_cv_dlopen"; then enable_dlopen=no else enable_dlopen=yes fi case $lt_cv_dlopen in dlopen) save_CPPFLAGS=$CPPFLAGS test yes = "$ac_cv_header_dlfcn_h" && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H" save_LDFLAGS=$LDFLAGS wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\" save_LIBS=$LIBS LIBS="$lt_cv_dlopen_libs $LIBS" AC_CACHE_CHECK([whether a program can dlopen itself], lt_cv_dlopen_self, [dnl _LT_TRY_DLOPEN_SELF( lt_cv_dlopen_self=yes, lt_cv_dlopen_self=yes, lt_cv_dlopen_self=no, lt_cv_dlopen_self=cross) ]) if test yes = "$lt_cv_dlopen_self"; then wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $lt_prog_compiler_static\" AC_CACHE_CHECK([whether a statically linked program can dlopen itself], lt_cv_dlopen_self_static, [dnl _LT_TRY_DLOPEN_SELF( lt_cv_dlopen_self_static=yes, lt_cv_dlopen_self_static=yes, lt_cv_dlopen_self_static=no, lt_cv_dlopen_self_static=cross) ]) fi CPPFLAGS=$save_CPPFLAGS LDFLAGS=$save_LDFLAGS LIBS=$save_LIBS ;; esac case $lt_cv_dlopen_self in yes|no) enable_dlopen_self=$lt_cv_dlopen_self ;; *) enable_dlopen_self=unknown ;; esac case $lt_cv_dlopen_self_static in yes|no) enable_dlopen_self_static=$lt_cv_dlopen_self_static ;; *) enable_dlopen_self_static=unknown ;; esac fi _LT_DECL([dlopen_support], [enable_dlopen], [0], [Whether dlopen is supported]) _LT_DECL([dlopen_self], [enable_dlopen_self], [0], [Whether dlopen of programs is supported]) _LT_DECL([dlopen_self_static], [enable_dlopen_self_static], [0], 
[Whether dlopen of statically linked programs is supported]) ])# LT_SYS_DLOPEN_SELF # Old name: AU_ALIAS([AC_LIBTOOL_DLOPEN_SELF], [LT_SYS_DLOPEN_SELF]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_LIBTOOL_DLOPEN_SELF], []) # _LT_COMPILER_C_O([TAGNAME]) # --------------------------- # Check to see if options -c and -o are simultaneously supported by compiler. # This macro does not hard code the compiler like AC_PROG_CC_C_O. m4_defun([_LT_COMPILER_C_O], [m4_require([_LT_DECL_SED])dnl m4_require([_LT_FILEUTILS_DEFAULTS])dnl m4_require([_LT_TAG_COMPILER])dnl AC_CACHE_CHECK([if $compiler supports -c -o file.$ac_objext], [_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)], [_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=no $RM -r conftest 2>/dev/null mkdir conftest cd conftest mkdir out echo "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="-o out/conftest2.$ac_objext" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [[^ ]]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&AS_MESSAGE_LOG_FD) (eval "$lt_compile" 2>out/conftest.err) ac_status=$? cat out/conftest.err >&AS_MESSAGE_LOG_FD echo "$as_me:$LINENO: \$? = $ac_status" >&AS_MESSAGE_LOG_FD if (exit $ac_status) && test -s out/conftest2.$ac_objext then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then _LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=yes fi fi chmod u+w . 
2>&AS_MESSAGE_LOG_FD $RM conftest* # SGI C++ compiler will create directory out/ii_files/ for # template instantiation test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files $RM out/* && rmdir out cd .. $RM -r conftest $RM conftest* ]) _LT_TAGDECL([compiler_c_o], [lt_cv_prog_compiler_c_o], [1], [Does compiler simultaneously support -c and -o options?]) ])# _LT_COMPILER_C_O # _LT_COMPILER_FILE_LOCKS([TAGNAME]) # ---------------------------------- # Check to see if we can do hard links to lock some files if needed m4_defun([_LT_COMPILER_FILE_LOCKS], [m4_require([_LT_ENABLE_LOCK])dnl m4_require([_LT_FILEUTILS_DEFAULTS])dnl _LT_COMPILER_C_O([$1]) hard_links=nottested if test no = "$_LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)" && test no != "$need_locks"; then # do not overwrite the value of need_locks provided by the user AC_MSG_CHECKING([if we can lock with hard links]) hard_links=yes $RM conftest* ln conftest.a conftest.b 2>/dev/null && hard_links=no touch conftest.a ln conftest.a conftest.b 2>&5 || hard_links=no ln conftest.a conftest.b 2>/dev/null && hard_links=no AC_MSG_RESULT([$hard_links]) if test no = "$hard_links"; then AC_MSG_WARN(['$CC' does not support '-c -o', so 'make -j' may be unsafe]) need_locks=warn fi else need_locks=no fi _LT_DECL([], [need_locks], [1], [Must we lock files when doing compilation?]) ])# _LT_COMPILER_FILE_LOCKS # _LT_CHECK_OBJDIR # ---------------- m4_defun([_LT_CHECK_OBJDIR], [AC_CACHE_CHECK([for objdir], [lt_cv_objdir], [rm -f .libs 2>/dev/null mkdir .libs 2>/dev/null if test -d .libs; then lt_cv_objdir=.libs else # MS-DOS does not allow filenames that begin with a dot. 
lt_cv_objdir=_libs fi rmdir .libs 2>/dev/null]) objdir=$lt_cv_objdir _LT_DECL([], [objdir], [0], [The name of the directory that contains temporary libtool files])dnl m4_pattern_allow([LT_OBJDIR])dnl AC_DEFINE_UNQUOTED([LT_OBJDIR], "$lt_cv_objdir/", [Define to the sub-directory where libtool stores uninstalled libraries.]) ])# _LT_CHECK_OBJDIR # _LT_LINKER_HARDCODE_LIBPATH([TAGNAME]) # -------------------------------------- # Check hardcoding attributes. m4_defun([_LT_LINKER_HARDCODE_LIBPATH], [AC_MSG_CHECKING([how to hardcode library paths into programs]) _LT_TAGVAR(hardcode_action, $1)= if test -n "$_LT_TAGVAR(hardcode_libdir_flag_spec, $1)" || test -n "$_LT_TAGVAR(runpath_var, $1)" || test yes = "$_LT_TAGVAR(hardcode_automatic, $1)"; then # We can hardcode non-existent directories. if test no != "$_LT_TAGVAR(hardcode_direct, $1)" && # If the only mechanism to avoid hardcoding is shlibpath_var, we # have to relink, otherwise we might link with an installed library # when we should be linking with a yet-to-be-installed one ## test no != "$_LT_TAGVAR(hardcode_shlibpath_var, $1)" && test no != "$_LT_TAGVAR(hardcode_minus_L, $1)"; then # Linking always hardcodes the temporary library directory. _LT_TAGVAR(hardcode_action, $1)=relink else # We can link without hardcoding, and we can hardcode nonexisting dirs. _LT_TAGVAR(hardcode_action, $1)=immediate fi else # We cannot hardcode anything, or else we can only hardcode existing # directories. 
_LT_TAGVAR(hardcode_action, $1)=unsupported fi AC_MSG_RESULT([$_LT_TAGVAR(hardcode_action, $1)]) if test relink = "$_LT_TAGVAR(hardcode_action, $1)" || test yes = "$_LT_TAGVAR(inherit_rpath, $1)"; then # Fast installation is not supported enable_fast_install=no elif test yes = "$shlibpath_overrides_runpath" || test no = "$enable_shared"; then # Fast installation is not necessary enable_fast_install=needless fi _LT_TAGDECL([], [hardcode_action], [0], [How to hardcode a shared library path into an executable]) ])# _LT_LINKER_HARDCODE_LIBPATH # _LT_CMD_STRIPLIB # ---------------- m4_defun([_LT_CMD_STRIPLIB], [m4_require([_LT_DECL_EGREP]) striplib= old_striplib= AC_MSG_CHECKING([whether stripping libraries is possible]) if test -n "$STRIP" && $STRIP -V 2>&1 | $GREP "GNU strip" >/dev/null; then test -z "$old_striplib" && old_striplib="$STRIP --strip-debug" test -z "$striplib" && striplib="$STRIP --strip-unneeded" AC_MSG_RESULT([yes]) else # FIXME - insert some real tests, host_os isn't really good enough case $host_os in darwin*) if test -n "$STRIP"; then striplib="$STRIP -x" old_striplib="$STRIP -S" AC_MSG_RESULT([yes]) else AC_MSG_RESULT([no]) fi ;; *) AC_MSG_RESULT([no]) ;; esac fi _LT_DECL([], [old_striplib], [1], [Commands to strip libraries]) _LT_DECL([], [striplib], [1]) ])# _LT_CMD_STRIPLIB # _LT_PREPARE_MUNGE_PATH_LIST # --------------------------- # Make sure func_munge_path_list() is defined correctly. 
m4_defun([_LT_PREPARE_MUNGE_PATH_LIST], [[# func_munge_path_list VARIABLE PATH # ----------------------------------- # VARIABLE is name of variable containing _space_ separated list of # directories to be munged by the contents of PATH, which is string # having a format: # "DIR[:DIR]:" # string "DIR[ DIR]" will be prepended to VARIABLE # ":DIR[:DIR]" # string "DIR[ DIR]" will be appended to VARIABLE # "DIRP[:DIRP]::[DIRA:]DIRA" # string "DIRP[ DIRP]" will be prepended to VARIABLE and string # "DIRA[ DIRA]" will be appended to VARIABLE # "DIR[:DIR]" # VARIABLE will be replaced by "DIR[ DIR]" func_munge_path_list () { case x@S|@2 in x) ;; *:) eval @S|@1=\"`$ECHO @S|@2 | $SED 's/:/ /g'` \@S|@@S|@1\" ;; x:*) eval @S|@1=\"\@S|@@S|@1 `$ECHO @S|@2 | $SED 's/:/ /g'`\" ;; *::*) eval @S|@1=\"\@S|@@S|@1\ `$ECHO @S|@2 | $SED -e 's/.*:://' -e 's/:/ /g'`\" eval @S|@1=\"`$ECHO @S|@2 | $SED -e 's/::.*//' -e 's/:/ /g'`\ \@S|@@S|@1\" ;; *) eval @S|@1=\"`$ECHO @S|@2 | $SED 's/:/ /g'`\" ;; esac } ]])# _LT_PREPARE_PATH_LIST # _LT_SYS_DYNAMIC_LINKER([TAG]) # ----------------------------- # PORTME Fill in your ld.so characteristics m4_defun([_LT_SYS_DYNAMIC_LINKER], [AC_REQUIRE([AC_CANONICAL_HOST])dnl m4_require([_LT_DECL_EGREP])dnl m4_require([_LT_FILEUTILS_DEFAULTS])dnl m4_require([_LT_DECL_OBJDUMP])dnl m4_require([_LT_DECL_SED])dnl m4_require([_LT_CHECK_SHELL_FEATURES])dnl m4_require([_LT_PREPARE_MUNGE_PATH_LIST])dnl AC_MSG_CHECKING([dynamic linker characteristics]) m4_if([$1], [], [ if test yes = "$GCC"; then case $host_os in darwin*) lt_awk_arg='/^libraries:/,/LR/' ;; *) lt_awk_arg='/^libraries:/' ;; esac case $host_os in mingw* | cegcc*) lt_sed_strip_eq='s|=\([[A-Za-z]]:\)|\1|g' ;; *) lt_sed_strip_eq='s|=/|/|g' ;; esac lt_search_path_spec=`$CC -print-search-dirs | awk $lt_awk_arg | $SED -e "s/^libraries://" -e $lt_sed_strip_eq` case $lt_search_path_spec in *\;*) # if the path contains ";" then we assume it to be the separator # otherwise default to the standard path separator (i.e. 
":") - it is # assumed that no part of a normal pathname contains ";" but that should # okay in the real world where ";" in dirpaths is itself problematic. lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED 's/;/ /g'` ;; *) lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED "s/$PATH_SEPARATOR/ /g"` ;; esac # Ok, now we have the path, separated by spaces, we can step through it # and add multilib dir if necessary... lt_tmp_lt_search_path_spec= lt_multi_os_dir=/`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null` # ...but if some path component already ends with the multilib dir we assume # that all is fine and trust -print-search-dirs as is (GCC 4.2? or newer). case "$lt_multi_os_dir; $lt_search_path_spec " in "/; "* | "/.; "* | "/./; "* | *"$lt_multi_os_dir "* | *"$lt_multi_os_dir/ "*) lt_multi_os_dir= ;; esac for lt_sys_path in $lt_search_path_spec; do if test -d "$lt_sys_path$lt_multi_os_dir"; then lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path$lt_multi_os_dir" elif test -n "$lt_multi_os_dir"; then test -d "$lt_sys_path" && \ lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path" fi done lt_search_path_spec=`$ECHO "$lt_tmp_lt_search_path_spec" | awk ' BEGIN {RS = " "; FS = "/|\n";} { lt_foo = ""; lt_count = 0; for (lt_i = NF; lt_i > 0; lt_i--) { if ($lt_i != "" && $lt_i != ".") { if ($lt_i == "..") { lt_count++; } else { if (lt_count == 0) { lt_foo = "/" $lt_i lt_foo; } else { lt_count--; } } } } if (lt_foo != "") { lt_freq[[lt_foo]]++; } if (lt_freq[[lt_foo]] == 1) { print lt_foo; } }'` # AWK program above erroneously prepends '/' to C:/dos/paths # for these hosts. 
case $host_os in mingw* | cegcc*) lt_search_path_spec=`$ECHO "$lt_search_path_spec" |\ $SED 's|/\([[A-Za-z]]:\)|\1|g'` ;; esac sys_lib_search_path_spec=`$ECHO "$lt_search_path_spec" | $lt_NL2SP` else sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib" fi]) library_names_spec= libname_spec='lib$name' soname_spec= shrext_cmds=.so postinstall_cmds= postuninstall_cmds= finish_cmds= finish_eval= shlibpath_var= shlibpath_overrides_runpath=unknown version_type=none dynamic_linker="$host_os ld.so" sys_lib_dlsearch_path_spec="/lib /usr/lib" need_lib_prefix=unknown hardcode_into_libs=no # when you set need_version to no, make sure it does not cause -set_version # flags to be left without arguments need_version=unknown AC_ARG_VAR([LT_SYS_LIBRARY_PATH], [User-defined run-time library search path.]) case $host_os in aix3*) version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='$libname$release$shared_ext$versuffix $libname.a' shlibpath_var=LIBPATH # AIX 3 has no versioning support, so we append a major version to the name. soname_spec='$libname$release$shared_ext$major' ;; aix[[4-9]]*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no hardcode_into_libs=yes if test ia64 = "$host_cpu"; then # AIX 5 supports IA64 library_names_spec='$libname$release$shared_ext$major $libname$release$shared_ext$versuffix $libname$shared_ext' shlibpath_var=LD_LIBRARY_PATH else # With GCC up to 2.95.x, collect2 would create an import file # for dependence libraries. The import file would start with # the line '#! .'. This would cause the generated library to # depend on '.', always an invalid library. This was fixed in # development snapshots of GCC prior to 3.0. 
case $host_os in aix4 | aix4.[[01]] | aix4.[[01]].*) if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)' echo ' yes ' echo '#endif'; } | $CC -E - | $GREP yes > /dev/null; then : else can_build_shared=no fi ;; esac # Using Import Files as archive members, it is possible to support # filename-based versioning of shared library archives on AIX. While # this would work for both with and without runtime linking, it will # prevent static linking of such archives. So we do filename-based # shared library versioning with .so extension only, which is used # when both runtime linking and shared linking is enabled. # Unfortunately, runtime linking may impact performance, so we do # not want this to be the default eventually. Also, we use the # versioned .so libs for executables only if there is the -brtl # linker flag in LDFLAGS as well, or --with-aix-soname=svr4 only. # To allow for filename-based versioning support, we need to create # libNAME.so.V as an archive file, containing: # *) an Import File, referring to the versioned filename of the # archive as well as the shared archive member, telling the # bitwidth (32 or 64) of that shared object, and providing the # list of exported symbols of that shared object, eventually # decorated with the 'weak' keyword # *) the shared object with the F_LOADONLY flag set, to really avoid # it being seen by the linker. # At run time we better use the real file rather than another symlink, # but for link time we create the symlink libNAME.so -> libNAME.so.V case $with_aix_soname,$aix_use_runtimelinking in # AIX (on Power*) has no versioning support, so currently we cannot hardcode correct # soname into executable. Probably we can add versioning support to # collect2, so additional links can be useful in future. 
aix,yes) # traditional libtool dynamic_linker='AIX unversionable lib.so' # If using run time linking (on AIX 4.2 or later) use lib.so # instead of lib.a to let people know that these are not # typical AIX shared libraries. library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' ;; aix,no) # traditional AIX only dynamic_linker='AIX lib.a[(]lib.so.V[)]' # We preserve .a as extension for shared libraries through AIX4.2 # and later when we are not doing run time linking. library_names_spec='$libname$release.a $libname.a' soname_spec='$libname$release$shared_ext$major' ;; svr4,*) # full svr4 only dynamic_linker="AIX lib.so.V[(]$shared_archive_member_spec.o[)]" library_names_spec='$libname$release$shared_ext$major $libname$shared_ext' # We do not specify a path in Import Files, so LIBPATH fires. shlibpath_overrides_runpath=yes ;; *,yes) # both, prefer svr4 dynamic_linker="AIX lib.so.V[(]$shared_archive_member_spec.o[)], lib.a[(]lib.so.V[)]" library_names_spec='$libname$release$shared_ext$major $libname$shared_ext' # unpreferred sharedlib libNAME.a needs extra handling postinstall_cmds='test -n "$linkname" || linkname="$realname"~func_stripname "" ".so" "$linkname"~$install_shared_prog "$dir/$func_stripname_result.$libext" "$destdir/$func_stripname_result.$libext"~test -z "$tstripme" || test -z "$striplib" || $striplib "$destdir/$func_stripname_result.$libext"' postuninstall_cmds='for n in $library_names $old_library; do :; done~func_stripname "" ".so" "$n"~test "$func_stripname_result" = "$n" || func_append rmfiles " $odir/$func_stripname_result.$libext"' # We do not specify a path in Import Files, so LIBPATH fires. 
shlibpath_overrides_runpath=yes ;; *,no) # both, prefer aix dynamic_linker="AIX lib.a[(]lib.so.V[)], lib.so.V[(]$shared_archive_member_spec.o[)]" library_names_spec='$libname$release.a $libname.a' soname_spec='$libname$release$shared_ext$major' # unpreferred sharedlib libNAME.so.V and symlink libNAME.so need extra handling postinstall_cmds='test -z "$dlname" || $install_shared_prog $dir/$dlname $destdir/$dlname~test -z "$tstripme" || test -z "$striplib" || $striplib $destdir/$dlname~test -n "$linkname" || linkname=$realname~func_stripname "" ".a" "$linkname"~(cd "$destdir" && $LN_S -f $dlname $func_stripname_result.so)' postuninstall_cmds='test -z "$dlname" || func_append rmfiles " $odir/$dlname"~for n in $old_library $library_names; do :; done~func_stripname "" ".a" "$n"~func_append rmfiles " $odir/$func_stripname_result.so"' ;; esac shlibpath_var=LIBPATH fi ;; amigaos*) case $host_cpu in powerpc) # Since July 2007 AmigaOS4 officially supports .so libraries. # When compiling the executable, add -use-dynld -Lsobjs: to the compileline. library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' ;; m68k) library_names_spec='$libname.ixlibrary $libname.a' # Create ${libname}_ixlibrary.a entries in /sys/libs. 
finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([[^/]]*\)\.ixlibrary$%\1%'\''`; $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' ;; esac ;; beos*) library_names_spec='$libname$shared_ext' dynamic_linker="$host_os ld.so" shlibpath_var=LIBRARY_PATH ;; bsdi[[45]]*) version_type=linux # correct to gnu/linux during the next big refactor need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir' shlibpath_var=LD_LIBRARY_PATH sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib" sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib" # the default ld.so.conf also contains /usr/contrib/lib and # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow # libtool to hard-code these into programs ;; cygwin* | mingw* | pw32* | cegcc*) version_type=windows shrext_cmds=.dll need_version=no need_lib_prefix=no case $GCC,$cc_basename in yes,*) # gcc library_names_spec='$libname.dll.a' # DLL is installed to $(libdir)/../bin by postinstall_cmds postinstall_cmds='base_file=`basename \$file`~ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; echo \$dlname'\''`~ dldir=$destdir/`dirname \$dlpath`~ test -d \$dldir || mkdir -p \$dldir~ $install_prog $dir/$dlname \$dldir/$dlname~ chmod a+x \$dldir/$dlname~ if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then eval '\''$striplib \$dldir/$dlname'\'' || exit \$?; fi' postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. 
$file; echo \$dlname'\''`~ dlpath=$dir/\$dldll~ $RM \$dlpath' shlibpath_overrides_runpath=yes case $host_os in cygwin*) # Cygwin DLLs use 'cyg' prefix rather than 'lib' soname_spec='`echo $libname | sed -e 's/^lib/cyg/'``echo $release | $SED -e 's/[[.]]/-/g'`$versuffix$shared_ext' m4_if([$1], [],[ sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/lib/w32api"]) ;; mingw* | cegcc*) # MinGW DLLs use traditional 'lib' prefix soname_spec='$libname`echo $release | $SED -e 's/[[.]]/-/g'`$versuffix$shared_ext' ;; pw32*) # pw32 DLLs use 'pw' prefix rather than 'lib' library_names_spec='`echo $libname | sed -e 's/^lib/pw/'``echo $release | $SED -e 's/[[.]]/-/g'`$versuffix$shared_ext' ;; esac dynamic_linker='Win32 ld.exe' ;; *,cl*) # Native MSVC libname_spec='$name' soname_spec='$libname`echo $release | $SED -e 's/[[.]]/-/g'`$versuffix$shared_ext' library_names_spec='$libname.dll.lib' case $build_os in mingw*) sys_lib_search_path_spec= lt_save_ifs=$IFS IFS=';' for lt_path in $LIB do IFS=$lt_save_ifs # Let DOS variable expansion print the short 8.3 style file name. lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"` sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path" done IFS=$lt_save_ifs # Convert to MSYS style. sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([[a-zA-Z]]\\):| /\\1|g' -e 's|^ ||'` ;; cygwin*) # Convert to unix form, then to dos form, then back to unix form # but this time dos style (no spaces!) so that the unix form looks # like /cygdrive/c/PROGRA~1:/cygdr... sys_lib_search_path_spec=`cygpath --path --unix "$LIB"` sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null` sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` ;; *) sys_lib_search_path_spec=$LIB if $ECHO "$sys_lib_search_path_spec" | [$GREP ';[c-zC-Z]:/' >/dev/null]; then # It is most probably a Windows format PATH. 
sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` else sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` fi # FIXME: find the short name or the path components, as spaces are # common. (e.g. "Program Files" -> "PROGRA~1") ;; esac # DLL is installed to $(libdir)/../bin by postinstall_cmds postinstall_cmds='base_file=`basename \$file`~ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; echo \$dlname'\''`~ dldir=$destdir/`dirname \$dlpath`~ test -d \$dldir || mkdir -p \$dldir~ $install_prog $dir/$dlname \$dldir/$dlname' postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ dlpath=$dir/\$dldll~ $RM \$dlpath' shlibpath_overrides_runpath=yes dynamic_linker='Win32 link.exe' ;; *) # Assume MSVC wrapper library_names_spec='$libname`echo $release | $SED -e 's/[[.]]/-/g'`$versuffix$shared_ext $libname.lib' dynamic_linker='Win32 ld.exe' ;; esac # FIXME: first we should search . and the directory the executable is in shlibpath_var=PATH ;; darwin* | rhapsody*) dynamic_linker="$host_os dyld" version_type=darwin need_lib_prefix=no need_version=no library_names_spec='$libname$release$major$shared_ext $libname$shared_ext' soname_spec='$libname$release$major$shared_ext' shlibpath_overrides_runpath=yes shlibpath_var=DYLD_LIBRARY_PATH shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`' m4_if([$1], [],[ sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/local/lib"]) sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib' ;; dgux*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH ;; freebsd* | dragonfly*) # DragonFly does not have aout. When/if they implement a new # versioning mechanism, adjust this. 
if test -x /usr/bin/objformat; then objformat=`/usr/bin/objformat` else case $host_os in freebsd[[23]].*) objformat=aout ;; *) objformat=elf ;; esac fi version_type=freebsd-$objformat case $version_type in freebsd-elf*) library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' need_version=no need_lib_prefix=no ;; freebsd-*) library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' need_version=yes ;; esac shlibpath_var=LD_LIBRARY_PATH case $host_os in freebsd2.*) shlibpath_overrides_runpath=yes ;; freebsd3.[[01]]* | freebsdelf3.[[01]]*) shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; freebsd3.[[2-9]]* | freebsdelf3.[[2-9]]* | \ freebsd4.[[0-5]] | freebsdelf4.[[0-5]] | freebsd4.1.1 | freebsdelf4.1.1) shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; *) # from 4.6 on, and DragonFly shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; esac ;; haiku*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no dynamic_linker="$host_os runtime_loader" library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LIBRARY_PATH shlibpath_overrides_runpath=no sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' hardcode_into_libs=yes ;; hpux9* | hpux10* | hpux11*) # Give a soname corresponding to the major version so that dld.sl refuses to # link against other versions. version_type=sunos need_lib_prefix=no need_version=no case $host_cpu in ia64*) shrext_cmds='.so' hardcode_into_libs=yes dynamic_linker="$host_os dld.so" shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. 
library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' if test 32 = "$HPUX_IA64_MODE"; then sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib" sys_lib_dlsearch_path_spec=/usr/lib/hpux32 else sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64" sys_lib_dlsearch_path_spec=/usr/lib/hpux64 fi ;; hppa*64*) shrext_cmds='.sl' hardcode_into_libs=yes dynamic_linker="$host_os dld.sl" shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64" sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec ;; *) shrext_cmds='.sl' dynamic_linker="$host_os dld.sl" shlibpath_var=SHLIB_PATH shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' ;; esac # HP-UX runs *really* slowly unless shared libraries are mode 555, ... 
postinstall_cmds='chmod 555 $lib' # or fails outright, so override atomically: install_override_mode=555 ;; interix[[3-9]]*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; irix5* | irix6* | nonstopux*) case $host_os in nonstopux*) version_type=nonstopux ;; *) if test yes = "$lt_cv_prog_gnu_ld"; then version_type=linux # correct to gnu/linux during the next big refactor else version_type=irix fi ;; esac need_lib_prefix=no need_version=no soname_spec='$libname$release$shared_ext$major' library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$release$shared_ext $libname$shared_ext' case $host_os in irix5* | nonstopux*) libsuff= shlibsuff= ;; *) case $LD in # libtool.m4 will add one of these switches to LD *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ") libsuff= shlibsuff= libmagic=32-bit;; *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ") libsuff=32 shlibsuff=N32 libmagic=N32;; *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ") libsuff=64 shlibsuff=64 libmagic=64-bit;; *) libsuff= shlibsuff= libmagic=never-match;; esac ;; esac shlibpath_var=LD_LIBRARY${shlibsuff}_PATH shlibpath_overrides_runpath=no sys_lib_search_path_spec="/usr/lib$libsuff /lib$libsuff /usr/local/lib$libsuff" sys_lib_dlsearch_path_spec="/usr/lib$libsuff /lib$libsuff" hardcode_into_libs=yes ;; # No shared lib support for Linux oldld, aout, or coff. linux*oldld* | linux*aout* | linux*coff*) dynamic_linker=no ;; linux*android*) version_type=none # Android doesn't support versioned libraries. 
need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext' soname_spec='$libname$release$shared_ext' finish_cmds= shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes # This implies no fast_install, which is unacceptable. # Some rework will be needed to allow for fast_install # before this can be enabled. hardcode_into_libs=yes dynamic_linker='Android linker' # Don't embed -rpath directories since the linker doesn't support them. _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' ;; # This must be glibc/ELF. linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no # Some binutils ld are patched to set DT_RUNPATH AC_CACHE_VAL([lt_cv_shlibpath_overrides_runpath], [lt_cv_shlibpath_overrides_runpath=no save_LDFLAGS=$LDFLAGS save_libdir=$libdir eval "libdir=/foo; wl=\"$_LT_TAGVAR(lt_prog_compiler_wl, $1)\"; \ LDFLAGS=\"\$LDFLAGS $_LT_TAGVAR(hardcode_libdir_flag_spec, $1)\"" AC_LINK_IFELSE([AC_LANG_PROGRAM([],[])], [AS_IF([ ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null], [lt_cv_shlibpath_overrides_runpath=yes])]) LDFLAGS=$save_LDFLAGS libdir=$save_libdir ]) shlibpath_overrides_runpath=$lt_cv_shlibpath_overrides_runpath # This implies no fast_install, which is unacceptable. # Some rework will be needed to allow for fast_install # before this can be enabled. hardcode_into_libs=yes # Ideally, we could use ldconfig to report *all* directores which are # searched for libraries, however this is still not possible. 
Aside from not # being certain /sbin/ldconfig is available, command # 'ldconfig -N -X -v | grep ^/' on 64bit Fedora does not report /usr/lib64, # even though it is searched at run-time. Try to do the best guess by # appending ld.so.conf contents (and includes) to the search path. if test -f /etc/ld.so.conf; then lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \[$]2)); skip = 1; } { if (!skip) print \[$]0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '` sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra" fi # We used to test for /lib/ld.so.1 and disable shared libraries on # powerpc, because MkLinux only supported shared libraries with the # GNU dynamic linker. Since this was broken with cross compilers, # most powerpc-linux boxes support dynamic linking these days and # people can always --disable-shared, the test was removed, and we # assume the GNU/Linux dynamic linker is in use. 
dynamic_linker='GNU/Linux ld.so' ;; netbsdelf*-gnu) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes dynamic_linker='NetBSD ld.elf_so' ;; netbsd*) version_type=sunos need_lib_prefix=no need_version=no if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' dynamic_linker='NetBSD (a.out) ld.so' else library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' dynamic_linker='NetBSD ld.elf_so' fi shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; newsos6) version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes ;; *nto* | *qnx*) version_type=qnx need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes dynamic_linker='ldqnx.so' ;; openbsd* | bitrig*) version_type=sunos sys_lib_dlsearch_path_spec=/usr/lib need_lib_prefix=no if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then need_version=no else need_version=yes fi library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' shlibpath_var=LD_LIBRARY_PATH 
shlibpath_overrides_runpath=yes ;; os2*) libname_spec='$name' version_type=windows shrext_cmds=.dll need_version=no need_lib_prefix=no # OS/2 can only load a DLL with a base name of 8 characters or less. soname_spec='`test -n "$os2dllname" && libname="$os2dllname"; v=$($ECHO $release$versuffix | tr -d .-); n=$($ECHO $libname | cut -b -$((8 - ${#v})) | tr . _); $ECHO $n$v`$shared_ext' library_names_spec='${libname}_dll.$libext' dynamic_linker='OS/2 ld.exe' shlibpath_var=BEGINLIBPATH sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib" sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec postinstall_cmds='base_file=`basename \$file`~ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; $ECHO \$dlname'\''`~ dldir=$destdir/`dirname \$dlpath`~ test -d \$dldir || mkdir -p \$dldir~ $install_prog $dir/$dlname \$dldir/$dlname~ chmod a+x \$dldir/$dlname~ if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then eval '\''$striplib \$dldir/$dlname'\'' || exit \$?; fi' postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. 
$file; $ECHO \$dlname'\''`~ dlpath=$dir/\$dldll~ $RM \$dlpath' ;; osf3* | osf4* | osf5*) version_type=osf need_lib_prefix=no need_version=no soname_spec='$libname$release$shared_ext$major' library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' shlibpath_var=LD_LIBRARY_PATH sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib" sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec ;; rdos*) dynamic_linker=no ;; solaris*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes # ldd complains unless libraries are executable postinstall_cmds='chmod +x $lib' ;; sunos4*) version_type=sunos library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes if test yes = "$with_gnu_ld"; then need_lib_prefix=no fi need_version=yes ;; sysv4 | sysv4.3*) version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH case $host_vendor in sni) shlibpath_overrides_runpath=no need_lib_prefix=no runpath_var=LD_RUN_PATH ;; siemens) need_lib_prefix=no ;; motorola) need_lib_prefix=no need_version=no shlibpath_overrides_runpath=no sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib' ;; esac ;; sysv4*MP*) if test -d /usr/nec; then version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='$libname$shared_ext.$versuffix 
$libname$shared_ext.$major $libname$shared_ext' soname_spec='$libname$shared_ext.$major' shlibpath_var=LD_LIBRARY_PATH fi ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) version_type=sco need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes if test yes = "$with_gnu_ld"; then sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib' else sys_lib_search_path_spec='/usr/ccs/lib /usr/lib' case $host_os in sco3.2v5*) sys_lib_search_path_spec="$sys_lib_search_path_spec /lib" ;; esac fi sys_lib_dlsearch_path_spec='/usr/lib' ;; tpf*) # TPF is a cross-target only. Preferred cross-host = GNU/Linux. version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; uts4*) version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH ;; *) dynamic_linker=no ;; esac AC_MSG_RESULT([$dynamic_linker]) test no = "$dynamic_linker" && can_build_shared=no variables_saved_for_relink="PATH $shlibpath_var $runpath_var" if test yes = "$GCC"; then variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH" fi if test set = "${lt_cv_sys_lib_search_path_spec+set}"; then sys_lib_search_path_spec=$lt_cv_sys_lib_search_path_spec fi if test set = "${lt_cv_sys_lib_dlsearch_path_spec+set}"; then sys_lib_dlsearch_path_spec=$lt_cv_sys_lib_dlsearch_path_spec fi # 
remember unaugmented sys_lib_dlsearch_path content for libtool script decls... configure_time_dlsearch_path=$sys_lib_dlsearch_path_spec # ... but it needs LT_SYS_LIBRARY_PATH munging for other configure-time code func_munge_path_list sys_lib_dlsearch_path_spec "$LT_SYS_LIBRARY_PATH" # to be used as default LT_SYS_LIBRARY_PATH value in generated libtool configure_time_lt_sys_library_path=$LT_SYS_LIBRARY_PATH _LT_DECL([], [variables_saved_for_relink], [1], [Variables whose values should be saved in libtool wrapper scripts and restored at link time]) _LT_DECL([], [need_lib_prefix], [0], [Do we need the "lib" prefix for modules?]) _LT_DECL([], [need_version], [0], [Do we need a version for libraries?]) _LT_DECL([], [version_type], [0], [Library versioning type]) _LT_DECL([], [runpath_var], [0], [Shared library runtime path variable]) _LT_DECL([], [shlibpath_var], [0],[Shared library path variable]) _LT_DECL([], [shlibpath_overrides_runpath], [0], [Is shlibpath searched before the hard-coded library search path?]) _LT_DECL([], [libname_spec], [1], [Format of library name prefix]) _LT_DECL([], [library_names_spec], [1], [[List of archive names. First name is the real one, the rest are links. 
The last name is the one that the linker finds with -lNAME]]) _LT_DECL([], [soname_spec], [1], [[The coded name of the library, if different from the real name]]) _LT_DECL([], [install_override_mode], [1], [Permission mode override for installation of shared libraries]) _LT_DECL([], [postinstall_cmds], [2], [Command to use after installation of a shared archive]) _LT_DECL([], [postuninstall_cmds], [2], [Command to use after uninstallation of a shared archive]) _LT_DECL([], [finish_cmds], [2], [Commands used to finish a libtool library installation in a directory]) _LT_DECL([], [finish_eval], [1], [[As "finish_cmds", except a single script fragment to be evaled but not shown]]) _LT_DECL([], [hardcode_into_libs], [0], [Whether we should hardcode library paths into libraries]) _LT_DECL([], [sys_lib_search_path_spec], [2], [Compile-time system search path for libraries]) _LT_DECL([sys_lib_dlsearch_path_spec], [configure_time_dlsearch_path], [2], [Detected run-time system search path for libraries]) _LT_DECL([], [configure_time_lt_sys_library_path], [2], [Explicit LT_SYS_LIBRARY_PATH set during ./configure time]) ])# _LT_SYS_DYNAMIC_LINKER # _LT_PATH_TOOL_PREFIX(TOOL) # -------------------------- # find a file program that can recognize shared library AC_DEFUN([_LT_PATH_TOOL_PREFIX], [m4_require([_LT_DECL_EGREP])dnl AC_MSG_CHECKING([for $1]) AC_CACHE_VAL(lt_cv_path_MAGIC_CMD, [case $MAGIC_CMD in [[\\/*] | ?:[\\/]*]) lt_cv_path_MAGIC_CMD=$MAGIC_CMD # Let the user override the test with a path. ;; *) lt_save_MAGIC_CMD=$MAGIC_CMD lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR dnl $ac_dummy forces splitting on constant user-supplied paths. dnl POSIX.2 word splitting is done only on the output of word expansions, dnl not every word. This closes a longstanding sh security hole. ac_dummy="m4_if([$2], , $PATH, [$2])" for ac_dir in $ac_dummy; do IFS=$lt_save_ifs test -z "$ac_dir" && ac_dir=. 
if test -f "$ac_dir/$1"; then lt_cv_path_MAGIC_CMD=$ac_dir/"$1" if test -n "$file_magic_test_file"; then case $deplibs_check_method in "file_magic "*) file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"` MAGIC_CMD=$lt_cv_path_MAGIC_CMD if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null | $EGREP "$file_magic_regex" > /dev/null; then : else cat <<_LT_EOF 1>&2 *** Warning: the command libtool uses to detect shared libraries, *** $file_magic_cmd, produces output that libtool cannot recognize. *** The result is that libtool may fail to recognize shared libraries *** as such. This will affect the creation of libtool libraries that *** depend on shared libraries, but programs linked with such libtool *** libraries will work regardless of this problem. Nevertheless, you *** may want to report the problem to your system manager and/or to *** bug-libtool@gnu.org _LT_EOF fi ;; esac fi break fi done IFS=$lt_save_ifs MAGIC_CMD=$lt_save_MAGIC_CMD ;; esac]) MAGIC_CMD=$lt_cv_path_MAGIC_CMD if test -n "$MAGIC_CMD"; then AC_MSG_RESULT($MAGIC_CMD) else AC_MSG_RESULT(no) fi _LT_DECL([], [MAGIC_CMD], [0], [Used to examine libraries when file_magic_cmd begins with "file"])dnl ])# _LT_PATH_TOOL_PREFIX # Old name: AU_ALIAS([AC_PATH_TOOL_PREFIX], [_LT_PATH_TOOL_PREFIX]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_PATH_TOOL_PREFIX], []) # _LT_PATH_MAGIC # -------------- # find a file program that can recognize a shared library m4_defun([_LT_PATH_MAGIC], [_LT_PATH_TOOL_PREFIX(${ac_tool_prefix}file, /usr/bin$PATH_SEPARATOR$PATH) if test -z "$lt_cv_path_MAGIC_CMD"; then if test -n "$ac_tool_prefix"; then _LT_PATH_TOOL_PREFIX(file, /usr/bin$PATH_SEPARATOR$PATH) else MAGIC_CMD=: fi fi ])# _LT_PATH_MAGIC # LT_PATH_LD # ---------- # find the pathname to the GNU or non-GNU linker AC_DEFUN([LT_PATH_LD], [AC_REQUIRE([AC_PROG_CC])dnl AC_REQUIRE([AC_CANONICAL_HOST])dnl AC_REQUIRE([AC_CANONICAL_BUILD])dnl m4_require([_LT_DECL_SED])dnl 
m4_require([_LT_DECL_EGREP])dnl m4_require([_LT_PROG_ECHO_BACKSLASH])dnl AC_ARG_WITH([gnu-ld], [AS_HELP_STRING([--with-gnu-ld], [assume the C compiler uses GNU ld @<:@default=no@:>@])], [test no = "$withval" || with_gnu_ld=yes], [with_gnu_ld=no])dnl ac_prog=ld if test yes = "$GCC"; then # Check if gcc -print-prog-name=ld gives a path. AC_MSG_CHECKING([for ld used by $CC]) case $host in *-*-mingw*) # gcc leaves a trailing carriage return, which upsets mingw ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;; *) ac_prog=`($CC -print-prog-name=ld) 2>&5` ;; esac case $ac_prog in # Accept absolute paths. [[\\/]]* | ?:[[\\/]]*) re_direlt='/[[^/]][[^/]]*/\.\./' # Canonicalize the pathname of ld ac_prog=`$ECHO "$ac_prog"| $SED 's%\\\\%/%g'` while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"` done test -z "$LD" && LD=$ac_prog ;; "") # If it fails, then pretend we aren't using GCC. ac_prog=ld ;; *) # If it is relative, then search for the first ld in PATH. with_gnu_ld=unknown ;; esac elif test yes = "$with_gnu_ld"; then AC_MSG_CHECKING([for GNU ld]) else AC_MSG_CHECKING([for non-GNU ld]) fi AC_CACHE_VAL(lt_cv_path_LD, [if test -z "$LD"; then lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR for ac_dir in $PATH; do IFS=$lt_save_ifs test -z "$ac_dir" && ac_dir=. if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then lt_cv_path_LD=$ac_dir/$ac_prog # Check to see if the program is GNU ld. I'd rather use --version, # but apparently some variants of GNU ld only accept -v. # Break only if it was the GNU/non-GNU ld that we prefer. 
case `"$lt_cv_path_LD" -v 2>&1 &1 conftest.i cat conftest.i conftest.i >conftest2.i : ${lt_DD:=$DD} AC_PATH_PROGS_FEATURE_CHECK([lt_DD], [dd], [if "$ac_path_lt_DD" bs=32 count=1 conftest.out 2>/dev/null; then cmp -s conftest.i conftest.out \ && ac_cv_path_lt_DD="$ac_path_lt_DD" ac_path_lt_DD_found=: fi]) rm -f conftest.i conftest2.i conftest.out]) ])# _LT_PATH_DD # _LT_CMD_TRUNCATE # ---------------- # find command to truncate a binary pipe m4_defun([_LT_CMD_TRUNCATE], [m4_require([_LT_PATH_DD]) AC_CACHE_CHECK([how to truncate binary pipes], [lt_cv_truncate_bin], [printf 0123456789abcdef0123456789abcdef >conftest.i cat conftest.i conftest.i >conftest2.i lt_cv_truncate_bin= if "$ac_cv_path_lt_DD" bs=32 count=1 conftest.out 2>/dev/null; then cmp -s conftest.i conftest.out \ && lt_cv_truncate_bin="$ac_cv_path_lt_DD bs=4096 count=1" fi rm -f conftest.i conftest2.i conftest.out test -z "$lt_cv_truncate_bin" && lt_cv_truncate_bin="$SED -e 4q"]) _LT_DECL([lt_truncate_bin], [lt_cv_truncate_bin], [1], [Command to truncate a binary pipe]) ])# _LT_CMD_TRUNCATE # _LT_CHECK_MAGIC_METHOD # ---------------------- # how to check for library dependencies # -- PORTME fill in with the dynamic library characteristics m4_defun([_LT_CHECK_MAGIC_METHOD], [m4_require([_LT_DECL_EGREP]) m4_require([_LT_DECL_OBJDUMP]) AC_CACHE_CHECK([how to recognize dependent libraries], lt_cv_deplibs_check_method, [lt_cv_file_magic_cmd='$MAGIC_CMD' lt_cv_file_magic_test_file= lt_cv_deplibs_check_method='unknown' # Need to set the preceding variable on all platforms that support # interlibrary dependencies. # 'none' -- dependencies not supported. # 'unknown' -- same as none, but documents that we really don't know. # 'pass_all' -- all dependencies passed with no checks. # 'test_compile' -- check by making test program. # 'file_magic [[regex]]' -- check by looking for files in library path # that responds to the $file_magic_cmd with a given extended regex. 
# If you have 'file' or equivalent on your system and you're not sure # whether 'pass_all' will *always* work, you probably want this one. case $host_os in aix[[4-9]]*) lt_cv_deplibs_check_method=pass_all ;; beos*) lt_cv_deplibs_check_method=pass_all ;; bsdi[[45]]*) lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (shared object|dynamic lib)' lt_cv_file_magic_cmd='/usr/bin/file -L' lt_cv_file_magic_test_file=/shlib/libc.so ;; cygwin*) # func_win32_libid is a shell function defined in ltmain.sh lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' lt_cv_file_magic_cmd='func_win32_libid' ;; mingw* | pw32*) # Base MSYS/MinGW do not provide the 'file' command needed by # func_win32_libid shell function, so use a weaker test based on 'objdump', # unless we find 'file', for example because we are cross-compiling. if ( file / ) >/dev/null 2>&1; then lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' lt_cv_file_magic_cmd='func_win32_libid' else # Keep this pattern in sync with the one in func_win32_libid. lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' lt_cv_file_magic_cmd='$OBJDUMP -f' fi ;; cegcc*) # use the weaker test based on 'objdump'. See mingw*. lt_cv_deplibs_check_method='file_magic file format pe-arm-.*little(.*architecture: arm)?' lt_cv_file_magic_cmd='$OBJDUMP -f' ;; darwin* | rhapsody*) lt_cv_deplibs_check_method=pass_all ;; freebsd* | dragonfly*) if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then case $host_cpu in i*86 ) # Not sure whether the presence of OpenBSD here was a mistake. # Let's accept both of them until this is cleared up. 
lt_cv_deplibs_check_method='file_magic (FreeBSD|OpenBSD|DragonFly)/i[[3-9]]86 (compact )?demand paged shared library' lt_cv_file_magic_cmd=/usr/bin/file lt_cv_file_magic_test_file=`echo /usr/lib/libc.so.*` ;; esac else lt_cv_deplibs_check_method=pass_all fi ;; haiku*) lt_cv_deplibs_check_method=pass_all ;; hpux10.20* | hpux11*) lt_cv_file_magic_cmd=/usr/bin/file case $host_cpu in ia64*) lt_cv_deplibs_check_method='file_magic (s[[0-9]][[0-9]][[0-9]]|ELF-[[0-9]][[0-9]]) shared object file - IA64' lt_cv_file_magic_test_file=/usr/lib/hpux32/libc.so ;; hppa*64*) [lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF[ -][0-9][0-9])(-bit)?( [LM]SB)? shared object( file)?[, -]* PA-RISC [0-9]\.[0-9]'] lt_cv_file_magic_test_file=/usr/lib/pa20_64/libc.sl ;; *) lt_cv_deplibs_check_method='file_magic (s[[0-9]][[0-9]][[0-9]]|PA-RISC[[0-9]]\.[[0-9]]) shared library' lt_cv_file_magic_test_file=/usr/lib/libc.sl ;; esac ;; interix[[3-9]]*) # PIC code is broken on Interix 3.x, that's why |\.a not |_pic\.a here lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so|\.a)$' ;; irix5* | irix6* | nonstopux*) case $LD in *-32|*"-32 ") libmagic=32-bit;; *-n32|*"-n32 ") libmagic=N32;; *-64|*"-64 ") libmagic=64-bit;; *) libmagic=never-match;; esac lt_cv_deplibs_check_method=pass_all ;; # This must be glibc/ELF. 
linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) lt_cv_deplibs_check_method=pass_all ;; netbsd* | netbsdelf*-gnu) if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|_pic\.a)$' else lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so|_pic\.a)$' fi ;; newos6*) lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (executable|dynamic lib)' lt_cv_file_magic_cmd=/usr/bin/file lt_cv_file_magic_test_file=/usr/lib/libnls.so ;; *nto* | *qnx*) lt_cv_deplibs_check_method=pass_all ;; openbsd* | bitrig*) if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|\.so|_pic\.a)$' else lt_cv_deplibs_check_method='match_pattern /lib[[^/]]+(\.so\.[[0-9]]+\.[[0-9]]+|_pic\.a)$' fi ;; osf3* | osf4* | osf5*) lt_cv_deplibs_check_method=pass_all ;; rdos*) lt_cv_deplibs_check_method=pass_all ;; solaris*) lt_cv_deplibs_check_method=pass_all ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) lt_cv_deplibs_check_method=pass_all ;; sysv4 | sysv4.3*) case $host_vendor in motorola) lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[ML]]SB (shared object|dynamic lib) M[[0-9]][[0-9]]* Version [[0-9]]' lt_cv_file_magic_test_file=`echo /usr/lib/libc.so*` ;; ncr) lt_cv_deplibs_check_method=pass_all ;; sequent) lt_cv_file_magic_cmd='/bin/file' lt_cv_deplibs_check_method='file_magic ELF [[0-9]][[0-9]]*-bit [[LM]]SB (shared object|dynamic lib )' ;; sni) lt_cv_file_magic_cmd='/bin/file' lt_cv_deplibs_check_method="file_magic ELF [[0-9]][[0-9]]*-bit [[LM]]SB dynamic lib" lt_cv_file_magic_test_file=/lib/libc.so ;; siemens) lt_cv_deplibs_check_method=pass_all ;; pc) lt_cv_deplibs_check_method=pass_all ;; esac ;; tpf*) lt_cv_deplibs_check_method=pass_all ;; os2*) lt_cv_deplibs_check_method=pass_all ;; esac ]) file_magic_glob= want_nocaseglob=no if test "$build" = "$host"; then case 
$host_os in mingw* | pw32*) if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then want_nocaseglob=yes else file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[[\1]]\/[[\1]]\/g;/g"` fi ;; esac fi file_magic_cmd=$lt_cv_file_magic_cmd deplibs_check_method=$lt_cv_deplibs_check_method test -z "$deplibs_check_method" && deplibs_check_method=unknown _LT_DECL([], [deplibs_check_method], [1], [Method to check whether dependent libraries are shared objects]) _LT_DECL([], [file_magic_cmd], [1], [Command to use when deplibs_check_method = "file_magic"]) _LT_DECL([], [file_magic_glob], [1], [How to find potential files when deplibs_check_method = "file_magic"]) _LT_DECL([], [want_nocaseglob], [1], [Find potential files using nocaseglob when deplibs_check_method = "file_magic"]) ])# _LT_CHECK_MAGIC_METHOD # LT_PATH_NM # ---------- # find the pathname to a BSD- or MS-compatible name lister AC_DEFUN([LT_PATH_NM], [AC_REQUIRE([AC_PROG_CC])dnl AC_CACHE_CHECK([for BSD- or MS-compatible name lister (nm)], lt_cv_path_NM, [if test -n "$NM"; then # Let the user override the test. lt_cv_path_NM=$NM else lt_nm_to_check=${ac_tool_prefix}nm if test -n "$ac_tool_prefix" && test "$build" = "$host"; then lt_nm_to_check="$lt_nm_to_check nm" fi for lt_tmp_nm in $lt_nm_to_check; do lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR for ac_dir in $PATH /usr/ccs/bin/elf /usr/ccs/bin /usr/ucb /bin; do IFS=$lt_save_ifs test -z "$ac_dir" && ac_dir=. tmp_nm=$ac_dir/$lt_tmp_nm if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext"; then # Check to see if the nm accepts a BSD-compat flag. 
# Adding the 'sed 1q' prevents false positives on HP-UX, which says: # nm: unknown option "B" ignored # Tru64's nm complains that /dev/null is an invalid object file # MSYS converts /dev/null to NUL, MinGW nm treats NUL as empty case $build_os in mingw*) lt_bad_file=conftest.nm/nofile ;; *) lt_bad_file=/dev/null ;; esac case `"$tmp_nm" -B $lt_bad_file 2>&1 | sed '1q'` in *$lt_bad_file* | *'Invalid file or object type'*) lt_cv_path_NM="$tmp_nm -B" break 2 ;; *) case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in */dev/null*) lt_cv_path_NM="$tmp_nm -p" break 2 ;; *) lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but continue # so that we can try to find one that supports BSD flags ;; esac ;; esac fi done IFS=$lt_save_ifs done : ${lt_cv_path_NM=no} fi]) if test no != "$lt_cv_path_NM"; then NM=$lt_cv_path_NM else # Didn't find any BSD compatible name lister, look for dumpbin. if test -n "$DUMPBIN"; then : # Let the user override the test. else AC_CHECK_TOOLS(DUMPBIN, [dumpbin "link -dump"], :) case `$DUMPBIN -symbols -headers /dev/null 2>&1 | sed '1q'` in *COFF*) DUMPBIN="$DUMPBIN -symbols -headers" ;; *) DUMPBIN=: ;; esac fi AC_SUBST([DUMPBIN]) if test : != "$DUMPBIN"; then NM=$DUMPBIN fi fi test -z "$NM" && NM=nm AC_SUBST([NM]) _LT_DECL([], [NM], [1], [A BSD- or MS-compatible name lister])dnl AC_CACHE_CHECK([the name lister ($NM) interface], [lt_cv_nm_interface], [lt_cv_nm_interface="BSD nm" echo "int some_variable = 0;" > conftest.$ac_ext (eval echo "\"\$as_me:$LINENO: $ac_compile\"" >&AS_MESSAGE_LOG_FD) (eval "$ac_compile" 2>conftest.err) cat conftest.err >&AS_MESSAGE_LOG_FD (eval echo "\"\$as_me:$LINENO: $NM \\\"conftest.$ac_objext\\\"\"" >&AS_MESSAGE_LOG_FD) (eval "$NM \"conftest.$ac_objext\"" 2>conftest.err > conftest.out) cat conftest.err >&AS_MESSAGE_LOG_FD (eval echo "\"\$as_me:$LINENO: output\"" >&AS_MESSAGE_LOG_FD) cat conftest.out >&AS_MESSAGE_LOG_FD if $GREP 'External.*some_variable' conftest.out > /dev/null; then lt_cv_nm_interface="MS 
dumpbin" fi rm -f conftest*]) ])# LT_PATH_NM # Old names: AU_ALIAS([AM_PROG_NM], [LT_PATH_NM]) AU_ALIAS([AC_PROG_NM], [LT_PATH_NM]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AM_PROG_NM], []) dnl AC_DEFUN([AC_PROG_NM], []) # _LT_CHECK_SHAREDLIB_FROM_LINKLIB # -------------------------------- # how to determine the name of the shared library # associated with a specific link library. # -- PORTME fill in with the dynamic library characteristics m4_defun([_LT_CHECK_SHAREDLIB_FROM_LINKLIB], [m4_require([_LT_DECL_EGREP]) m4_require([_LT_DECL_OBJDUMP]) m4_require([_LT_DECL_DLLTOOL]) AC_CACHE_CHECK([how to associate runtime and link libraries], lt_cv_sharedlib_from_linklib_cmd, [lt_cv_sharedlib_from_linklib_cmd='unknown' case $host_os in cygwin* | mingw* | pw32* | cegcc*) # two different shell functions defined in ltmain.sh; # decide which one to use based on capabilities of $DLLTOOL case `$DLLTOOL --help 2>&1` in *--identify-strict*) lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib ;; *) lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback ;; esac ;; *) # fallback: assume linklib IS sharedlib lt_cv_sharedlib_from_linklib_cmd=$ECHO ;; esac ]) sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO _LT_DECL([], [sharedlib_from_linklib_cmd], [1], [Command to associate shared and link libraries]) ])# _LT_CHECK_SHAREDLIB_FROM_LINKLIB # _LT_PATH_MANIFEST_TOOL # ---------------------- # locate the manifest tool m4_defun([_LT_PATH_MANIFEST_TOOL], [AC_CHECK_TOOL(MANIFEST_TOOL, mt, :) test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt AC_CACHE_CHECK([if $MANIFEST_TOOL is a manifest tool], [lt_cv_path_mainfest_tool], [lt_cv_path_mainfest_tool=no echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&AS_MESSAGE_LOG_FD $MANIFEST_TOOL '-?' 
2>conftest.err > conftest.out cat conftest.err >&AS_MESSAGE_LOG_FD if $GREP 'Manifest Tool' conftest.out > /dev/null; then lt_cv_path_mainfest_tool=yes fi rm -f conftest*]) if test yes != "$lt_cv_path_mainfest_tool"; then MANIFEST_TOOL=: fi _LT_DECL([], [MANIFEST_TOOL], [1], [Manifest tool])dnl ])# _LT_PATH_MANIFEST_TOOL # _LT_DLL_DEF_P([FILE]) # --------------------- # True iff FILE is a Windows DLL '.def' file. # Keep in sync with func_dll_def_p in the libtool script AC_DEFUN([_LT_DLL_DEF_P], [dnl test DEF = "`$SED -n dnl -e '\''s/^[[ ]]*//'\'' dnl Strip leading whitespace -e '\''/^\(;.*\)*$/d'\'' dnl Delete empty lines and comments -e '\''s/^\(EXPORTS\|LIBRARY\)\([[ ]].*\)*$/DEF/p'\'' dnl -e q dnl Only consider the first "real" line $1`" dnl ])# _LT_DLL_DEF_P # LT_LIB_M # -------- # check for math library AC_DEFUN([LT_LIB_M], [AC_REQUIRE([AC_CANONICAL_HOST])dnl LIBM= case $host in *-*-beos* | *-*-cegcc* | *-*-cygwin* | *-*-haiku* | *-*-pw32* | *-*-darwin*) # These system don't have libm, or don't need it ;; *-ncr-sysv4.3*) AC_CHECK_LIB(mw, _mwvalidcheckl, LIBM=-lmw) AC_CHECK_LIB(m, cos, LIBM="$LIBM -lm") ;; *) AC_CHECK_LIB(m, cos, LIBM=-lm) ;; esac AC_SUBST([LIBM]) ])# LT_LIB_M # Old name: AU_ALIAS([AC_CHECK_LIBM], [LT_LIB_M]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_CHECK_LIBM], []) # _LT_COMPILER_NO_RTTI([TAGNAME]) # ------------------------------- m4_defun([_LT_COMPILER_NO_RTTI], [m4_require([_LT_TAG_COMPILER])dnl _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)= if test yes = "$GCC"; then case $cc_basename in nvcc*) _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -Xcompiler -fno-builtin' ;; *) _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -fno-builtin' ;; esac _LT_COMPILER_OPTION([if $compiler supports -fno-rtti -fno-exceptions], lt_cv_prog_compiler_rtti_exceptions, [-fno-rtti -fno-exceptions], [], [_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)="$_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1) -fno-rtti -fno-exceptions"]) fi 
_LT_TAGDECL([no_builtin_flag], [lt_prog_compiler_no_builtin_flag], [1], [Compiler flag to turn off builtin functions]) ])# _LT_COMPILER_NO_RTTI # _LT_CMD_GLOBAL_SYMBOLS # ---------------------- m4_defun([_LT_CMD_GLOBAL_SYMBOLS], [AC_REQUIRE([AC_CANONICAL_HOST])dnl AC_REQUIRE([AC_PROG_CC])dnl AC_REQUIRE([AC_PROG_AWK])dnl AC_REQUIRE([LT_PATH_NM])dnl AC_REQUIRE([LT_PATH_LD])dnl m4_require([_LT_DECL_SED])dnl m4_require([_LT_DECL_EGREP])dnl m4_require([_LT_TAG_COMPILER])dnl # Check for command to grab the raw symbol name followed by C symbol from nm. AC_MSG_CHECKING([command to parse $NM output from $compiler object]) AC_CACHE_VAL([lt_cv_sys_global_symbol_pipe], [ # These are sane defaults that work on at least a few old systems. # [They come from Ultrix. What could be older than Ultrix?!! ;)] # Character class describing NM global symbol codes. symcode='[[BCDEGRST]]' # Regexp to match symbols that can be accessed directly from C. sympat='\([[_A-Za-z]][[_A-Za-z0-9]]*\)' # Define system-specific variables. case $host_os in aix*) symcode='[[BCDT]]' ;; cygwin* | mingw* | pw32* | cegcc*) symcode='[[ABCDGISTW]]' ;; hpux*) if test ia64 = "$host_cpu"; then symcode='[[ABCDEGRST]]' fi ;; irix* | nonstopux*) symcode='[[BCDEGRST]]' ;; osf*) symcode='[[BCDEGQRST]]' ;; solaris*) symcode='[[BDRT]]' ;; sco3.2v5*) symcode='[[DT]]' ;; sysv4.2uw2*) symcode='[[DT]]' ;; sysv5* | sco5v6* | unixware* | OpenUNIX*) symcode='[[ABDT]]' ;; sysv4) symcode='[[DFNSTU]]' ;; esac # If we're using GNU nm, then use its standard symbol codes. case `$NM -V 2>&1` in *GNU* | *'with BFD'*) symcode='[[ABCDGIRSTW]]' ;; esac if test "$lt_cv_nm_interface" = "MS dumpbin"; then # Gets list of data symbols to import. lt_cv_sys_global_symbol_to_import="sed -n -e 's/^I .* \(.*\)$/\1/p'" # Adjust the below global symbol transforms to fixup imported variables. 
lt_cdecl_hook=" -e 's/^I .* \(.*\)$/extern __declspec(dllimport) char \1;/p'" lt_c_name_hook=" -e 's/^I .* \(.*\)$/ {\"\1\", (void *) 0},/p'" lt_c_name_lib_hook="\ -e 's/^I .* \(lib.*\)$/ {\"\1\", (void *) 0},/p'\ -e 's/^I .* \(.*\)$/ {\"lib\1\", (void *) 0},/p'" else # Disable hooks by default. lt_cv_sys_global_symbol_to_import= lt_cdecl_hook= lt_c_name_hook= lt_c_name_lib_hook= fi # Transform an extracted symbol line into a proper C declaration. # Some systems (esp. on ia64) link data and code symbols differently, # so use this general approach. lt_cv_sys_global_symbol_to_cdecl="sed -n"\ $lt_cdecl_hook\ " -e 's/^T .* \(.*\)$/extern int \1();/p'"\ " -e 's/^$symcode$symcode* .* \(.*\)$/extern char \1;/p'" # Transform an extracted symbol line into symbol name and symbol address lt_cv_sys_global_symbol_to_c_name_address="sed -n"\ $lt_c_name_hook\ " -e 's/^: \(.*\) .*$/ {\"\1\", (void *) 0},/p'"\ " -e 's/^$symcode$symcode* .* \(.*\)$/ {\"\1\", (void *) \&\1},/p'" # Transform an extracted symbol line into symbol name with lib prefix and # symbol address. lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n"\ $lt_c_name_lib_hook\ " -e 's/^: \(.*\) .*$/ {\"\1\", (void *) 0},/p'"\ " -e 's/^$symcode$symcode* .* \(lib.*\)$/ {\"\1\", (void *) \&\1},/p'"\ " -e 's/^$symcode$symcode* .* \(.*\)$/ {\"lib\1\", (void *) \&\1},/p'" # Handle CRLF in mingw tool chain opt_cr= case $build_os in mingw*) opt_cr=`$ECHO 'x\{0,1\}' | tr x '\015'` # option cr in regexp ;; esac # Try without a prefix underscore, then with it. for ac_symprfx in "" "_"; do # Transform symcode, sympat, and symprfx into a raw symbol and a C symbol. symxfrm="\\1 $ac_symprfx\\2 \\2" # Write the raw and C identifiers. if test "$lt_cv_nm_interface" = "MS dumpbin"; then # Fake it for dumpbin and say T for any non-static function, # D for any global variable and I for any imported variable. # Also find C++ and __fastcall symbols from MSVC++, # which start with @ or ?. 
lt_cv_sys_global_symbol_pipe="$AWK ['"\ " {last_section=section; section=\$ 3};"\ " /^COFF SYMBOL TABLE/{for(i in hide) delete hide[i]};"\ " /Section length .*#relocs.*(pick any)/{hide[last_section]=1};"\ " /^ *Symbol name *: /{split(\$ 0,sn,\":\"); si=substr(sn[2],2)};"\ " /^ *Type *: code/{print \"T\",si,substr(si,length(prfx))};"\ " /^ *Type *: data/{print \"I\",si,substr(si,length(prfx))};"\ " \$ 0!~/External *\|/{next};"\ " / 0+ UNDEF /{next}; / UNDEF \([^|]\)*()/{next};"\ " {if(hide[section]) next};"\ " {f=\"D\"}; \$ 0~/\(\).*\|/{f=\"T\"};"\ " {split(\$ 0,a,/\||\r/); split(a[2],s)};"\ " s[1]~/^[@?]/{print f,s[1],s[1]; next};"\ " s[1]~prfx {split(s[1],t,\"@\"); print f,t[1],substr(t[1],length(prfx))}"\ " ' prfx=^$ac_symprfx]" else lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[[ ]]\($symcode$symcode*\)[[ ]][[ ]]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" fi lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'" # Check to see that the pipe works correctly. pipe_works=no rm -f conftest* cat > conftest.$ac_ext <<_LT_EOF #ifdef __cplusplus extern "C" { #endif char nm_test_var; void nm_test_func(void); void nm_test_func(void){} #ifdef __cplusplus } #endif int main(){nm_test_var='a';nm_test_func();return(0);} _LT_EOF if AC_TRY_EVAL(ac_compile); then # Now try to grab the symbols. nlist=conftest.nm if AC_TRY_EVAL(NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist) && test -s "$nlist"; then # Try sorting and uniquifying the output. if sort "$nlist" | uniq > "$nlist"T; then mv -f "$nlist"T "$nlist" else rm -f "$nlist"T fi # Make sure that we snagged all the symbols we need. if $GREP ' nm_test_var$' "$nlist" >/dev/null; then if $GREP ' nm_test_func$' "$nlist" >/dev/null; then cat <<_LT_EOF > conftest.$ac_ext /* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. 
*/ #if defined _WIN32 || defined __CYGWIN__ || defined _WIN32_WCE /* DATA imports from DLLs on WIN32 can't be const, because runtime relocations are performed -- see ld's documentation on pseudo-relocs. */ # define LT@&t@_DLSYM_CONST #elif defined __osf__ /* This system does not cope well with relocations in const data. */ # define LT@&t@_DLSYM_CONST #else # define LT@&t@_DLSYM_CONST const #endif #ifdef __cplusplus extern "C" { #endif _LT_EOF # Now generate the symbol file. eval "$lt_cv_sys_global_symbol_to_cdecl"' < "$nlist" | $GREP -v main >> conftest.$ac_ext' cat <<_LT_EOF >> conftest.$ac_ext /* The mapping between symbol names and symbols. */ LT@&t@_DLSYM_CONST struct { const char *name; void *address; } lt__PROGRAM__LTX_preloaded_symbols[[]] = { { "@PROGRAM@", (void *) 0 }, _LT_EOF $SED "s/^$symcode$symcode* .* \(.*\)$/ {\"\1\", (void *) \&\1},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext cat <<\_LT_EOF >> conftest.$ac_ext {0, (void *) 0} }; /* This works around a problem in FreeBSD linker */ #ifdef FREEBSD_WORKAROUND static const void *lt_preloaded_setup() { return lt__PROGRAM__LTX_preloaded_symbols; } #endif #ifdef __cplusplus } #endif _LT_EOF # Now try linking the two files. mv conftest.$ac_objext conftstm.$ac_objext lt_globsym_save_LIBS=$LIBS lt_globsym_save_CFLAGS=$CFLAGS LIBS=conftstm.$ac_objext CFLAGS="$CFLAGS$_LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)" if AC_TRY_EVAL(ac_link) && test -s conftest$ac_exeext; then pipe_works=yes fi LIBS=$lt_globsym_save_LIBS CFLAGS=$lt_globsym_save_CFLAGS else echo "cannot find nm_test_func in $nlist" >&AS_MESSAGE_LOG_FD fi else echo "cannot find nm_test_var in $nlist" >&AS_MESSAGE_LOG_FD fi else echo "cannot run $lt_cv_sys_global_symbol_pipe" >&AS_MESSAGE_LOG_FD fi else echo "$progname: failed program was:" >&AS_MESSAGE_LOG_FD cat conftest.$ac_ext >&5 fi rm -rf conftest* conftst* # Do not use the global_symbol_pipe unless it works. 
if test yes = "$pipe_works"; then break else lt_cv_sys_global_symbol_pipe= fi done ]) if test -z "$lt_cv_sys_global_symbol_pipe"; then lt_cv_sys_global_symbol_to_cdecl= fi if test -z "$lt_cv_sys_global_symbol_pipe$lt_cv_sys_global_symbol_to_cdecl"; then AC_MSG_RESULT(failed) else AC_MSG_RESULT(ok) fi # Response file support. if test "$lt_cv_nm_interface" = "MS dumpbin"; then nm_file_list_spec='@' elif $NM --help 2>/dev/null | grep '[[@]]FILE' >/dev/null; then nm_file_list_spec='@' fi _LT_DECL([global_symbol_pipe], [lt_cv_sys_global_symbol_pipe], [1], [Take the output of nm and produce a listing of raw symbols and C names]) _LT_DECL([global_symbol_to_cdecl], [lt_cv_sys_global_symbol_to_cdecl], [1], [Transform the output of nm in a proper C declaration]) _LT_DECL([global_symbol_to_import], [lt_cv_sys_global_symbol_to_import], [1], [Transform the output of nm into a list of symbols to manually relocate]) _LT_DECL([global_symbol_to_c_name_address], [lt_cv_sys_global_symbol_to_c_name_address], [1], [Transform the output of nm in a C name address pair]) _LT_DECL([global_symbol_to_c_name_address_lib_prefix], [lt_cv_sys_global_symbol_to_c_name_address_lib_prefix], [1], [Transform the output of nm in a C name address pair when lib prefix is needed]) _LT_DECL([nm_interface], [lt_cv_nm_interface], [1], [The name lister interface]) _LT_DECL([], [nm_file_list_spec], [1], [Specify filename containing input files for $NM]) ]) # _LT_CMD_GLOBAL_SYMBOLS # _LT_COMPILER_PIC([TAGNAME]) # --------------------------- m4_defun([_LT_COMPILER_PIC], [m4_require([_LT_TAG_COMPILER])dnl _LT_TAGVAR(lt_prog_compiler_wl, $1)= _LT_TAGVAR(lt_prog_compiler_pic, $1)= _LT_TAGVAR(lt_prog_compiler_static, $1)= m4_if([$1], [CXX], [ # C++ specific cases for pic, static, wl, etc. if test yes = "$GXX"; then _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' case $host_os in aix*) # All AIX code is PIC. 
if test ia64 = "$host_cpu"; then # AIX 5 now supports IA64 processor _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' fi _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' ;; amigaos*) case $host_cpu in powerpc) # see comment about AmigaOS4 .so support _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' ;; m68k) # FIXME: we need at least 68020 code to build shared libraries, but # adding the '-m68020' flag to GCC prevents building anything better, # like '-m68040'. _LT_TAGVAR(lt_prog_compiler_pic, $1)='-m68020 -resident32 -malways-restore-a4' ;; esac ;; beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*) # PIC is the default for these OSes. ;; mingw* | cygwin* | os2* | pw32* | cegcc*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). # Although the cygwin gcc ignores -fPIC, still need this for old-style # (--disable-auto-import) libraries m4_if([$1], [GCJ], [], [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT']) case $host_os in os2*) _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-static' ;; esac ;; darwin* | rhapsody*) # PIC is the default on this platform # Common symbols not allowed in MH_DYLIB files _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fno-common' ;; *djgpp*) # DJGPP does not support shared libraries at all _LT_TAGVAR(lt_prog_compiler_pic, $1)= ;; haiku*) # PIC is the default for Haiku. # The "-static" flag exists, but is broken. _LT_TAGVAR(lt_prog_compiler_static, $1)= ;; interix[[3-9]]*) # Interix 3.x gcc -fpic/-fPIC options generate broken code. # Instead, we relocate shared libraries at runtime. ;; sysv4*MP*) if test -d /usr/nec; then _LT_TAGVAR(lt_prog_compiler_pic, $1)=-Kconform_pic fi ;; hpux*) # PIC is the default for 64-bit PA HP-UX, but not for 32-bit # PA HP-UX. On IA64 HP-UX, PIC is the default but the pic flag # sets the default TLS model and affects inlining. 
case $host_cpu in hppa*64*) ;; *) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' ;; esac ;; *qnx* | *nto*) # QNX uses GNU C++, but need to define -shared option too, otherwise # it will coredump. _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared' ;; *) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' ;; esac else case $host_os in aix[[4-9]]*) # All AIX code is PIC. if test ia64 = "$host_cpu"; then # AIX 5 now supports IA64 processor _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' else _LT_TAGVAR(lt_prog_compiler_static, $1)='-bnso -bI:/lib/syscalls.exp' fi ;; chorus*) case $cc_basename in cxch68*) # Green Hills C++ Compiler # _LT_TAGVAR(lt_prog_compiler_static, $1)="--no_auto_instantiation -u __main -u __premain -u _abort -r $COOL_DIR/lib/libOrb.a $MVME_DIR/lib/CC/libC.a $MVME_DIR/lib/classix/libcx.s.a" ;; esac ;; mingw* | cygwin* | os2* | pw32* | cegcc*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). m4_if([$1], [GCJ], [], [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT']) ;; dgux*) case $cc_basename in ec++*) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' ;; ghcx*) # Green Hills C++ Compiler _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic' ;; *) ;; esac ;; freebsd* | dragonfly*) # FreeBSD uses GNU C++ ;; hpux9* | hpux10* | hpux11*) case $cc_basename in CC*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-a ${wl}archive' if test ia64 != "$host_cpu"; then _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z' fi ;; aCC*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-a ${wl}archive' case $host_cpu in hppa*64*|ia64*) # +Z the default ;; *) _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z' ;; esac ;; *) ;; esac ;; interix*) # This is c89, which is MS Visual C++ (no shared libs) # Anyone wants to do a port? 
;; irix5* | irix6* | nonstopux*) case $cc_basename in CC*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' # CC pic flag -KPIC is the default. ;; *) ;; esac ;; linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) case $cc_basename in KCC*) # KAI C++ Compiler _LT_TAGVAR(lt_prog_compiler_wl, $1)='--backend -Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' ;; ecpc* ) # old Intel C++ for x86_64, which still supported -KPIC. _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' ;; icpc* ) # Intel C++, used to be incompatible with GCC. # ICC 10 doesn't accept -KPIC any more. _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' ;; pgCC* | pgcpp*) # Portland Group C++ compiler _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fpic' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; cxx*) # Compaq C++ # Make sure the PIC flag is empty. It appears that all Alpha # Linux and Compaq Tru64 Unix objects are PIC. _LT_TAGVAR(lt_prog_compiler_pic, $1)= _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' ;; xlc* | xlC* | bgxl[[cC]]* | mpixl[[cC]]*) # IBM XL 8.0, 9.0 on PPC and BlueGene _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-qpic' _LT_TAGVAR(lt_prog_compiler_static, $1)='-qstaticlink' ;; *) case `$CC -V 2>&1 | sed 5q` in *Sun\ C*) # Sun C++ 5.9 _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ' ;; esac ;; esac ;; lynxos*) ;; m88k*) ;; mvs*) case $cc_basename in cxx*) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-W c,exportall' ;; *) ;; esac ;; netbsd* | netbsdelf*-gnu) ;; *qnx* | *nto*) # QNX uses GNU C++, but need to define -shared option too, otherwise # it will coredump. 
_LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared' ;; osf3* | osf4* | osf5*) case $cc_basename in KCC*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='--backend -Wl,' ;; RCC*) # Rational C++ 2.4.1 _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic' ;; cxx*) # Digital/Compaq C++ _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' # Make sure the PIC flag is empty. It appears that all Alpha # Linux and Compaq Tru64 Unix objects are PIC. _LT_TAGVAR(lt_prog_compiler_pic, $1)= _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' ;; *) ;; esac ;; psos*) ;; solaris*) case $cc_basename in CC* | sunCC*) # Sun C++ 4.2, 5.x and Centerline C++ _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ' ;; gcx*) # Green Hills C++ Compiler _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC' ;; *) ;; esac ;; sunos4*) case $cc_basename in CC*) # Sun C++ 4.x _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; lcc*) # Lucid _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic' ;; *) ;; esac ;; sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*) case $cc_basename in CC*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; esac ;; tandem*) case $cc_basename in NCC*) # NonStop-UX NCC 3.20 _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' ;; *) ;; esac ;; vxworks*) ;; *) _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no ;; esac fi ], [ if test yes = "$GCC"; then _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' case $host_os in aix*) # All AIX code is PIC. 
if test ia64 = "$host_cpu"; then # AIX 5 now supports IA64 processor _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' fi _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' ;; amigaos*) case $host_cpu in powerpc) # see comment about AmigaOS4 .so support _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' ;; m68k) # FIXME: we need at least 68020 code to build shared libraries, but # adding the '-m68020' flag to GCC prevents building anything better, # like '-m68040'. _LT_TAGVAR(lt_prog_compiler_pic, $1)='-m68020 -resident32 -malways-restore-a4' ;; esac ;; beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*) # PIC is the default for these OSes. ;; mingw* | cygwin* | pw32* | os2* | cegcc*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). # Although the cygwin gcc ignores -fPIC, still need this for old-style # (--disable-auto-import) libraries m4_if([$1], [GCJ], [], [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT']) case $host_os in os2*) _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-static' ;; esac ;; darwin* | rhapsody*) # PIC is the default on this platform # Common symbols not allowed in MH_DYLIB files _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fno-common' ;; haiku*) # PIC is the default for Haiku. # The "-static" flag exists, but is broken. _LT_TAGVAR(lt_prog_compiler_static, $1)= ;; hpux*) # PIC is the default for 64-bit PA HP-UX, but not for 32-bit # PA HP-UX. On IA64 HP-UX, PIC is the default but the pic flag # sets the default TLS model and affects inlining. case $host_cpu in hppa*64*) # +Z the default ;; *) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' ;; esac ;; interix[[3-9]]*) # Interix 3.x gcc -fpic/-fPIC options generate broken code. # Instead, we relocate shared libraries at runtime. ;; msdosdjgpp*) # Just because we use GCC doesn't mean we suddenly get shared libraries # on systems that don't support them. 
_LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no enable_shared=no ;; *nto* | *qnx*) # QNX uses GNU C++, but need to define -shared option too, otherwise # it will coredump. _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared' ;; sysv4*MP*) if test -d /usr/nec; then _LT_TAGVAR(lt_prog_compiler_pic, $1)=-Kconform_pic fi ;; *) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' ;; esac case $cc_basename in nvcc*) # Cuda Compiler Driver 2.2 _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Xlinker ' if test -n "$_LT_TAGVAR(lt_prog_compiler_pic, $1)"; then _LT_TAGVAR(lt_prog_compiler_pic, $1)="-Xcompiler $_LT_TAGVAR(lt_prog_compiler_pic, $1)" fi ;; esac else # PORTME Check for flag to pass linker flags through the system compiler. case $host_os in aix*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' if test ia64 = "$host_cpu"; then # AIX 5 now supports IA64 processor _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' else _LT_TAGVAR(lt_prog_compiler_static, $1)='-bnso -bI:/lib/syscalls.exp' fi ;; darwin* | rhapsody*) # PIC is the default on this platform # Common symbols not allowed in MH_DYLIB files _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fno-common' case $cc_basename in nagfor*) # NAG Fortran compiler _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,-Wl,,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; esac ;; mingw* | cygwin* | pw32* | os2* | cegcc*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). m4_if([$1], [GCJ], [], [_LT_TAGVAR(lt_prog_compiler_pic, $1)='-DDLL_EXPORT']) case $host_os in os2*) _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-static' ;; esac ;; hpux9* | hpux10* | hpux11*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but # not for PA HP-UX. 
case $host_cpu in hppa*64*|ia64*) # +Z the default ;; *) _LT_TAGVAR(lt_prog_compiler_pic, $1)='+Z' ;; esac # Is there a better lt_prog_compiler_static that works with the bundled CC? _LT_TAGVAR(lt_prog_compiler_static, $1)='$wl-a ${wl}archive' ;; irix5* | irix6* | nonstopux*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' # PIC (with -KPIC) is the default. _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' ;; linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) case $cc_basename in # old Intel for x86_64, which still supported -KPIC. ecc*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' ;; # icc used to be incompatible with GCC. # ICC 10 doesn't accept -KPIC any more. icc* | ifort*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' ;; # Lahey Fortran 8.1. lf95*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='--shared' _LT_TAGVAR(lt_prog_compiler_static, $1)='--static' ;; nagfor*) # NAG Fortran compiler _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,-Wl,,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; tcc*) # Fabrice Bellard et al's Tiny C Compiler _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' ;; pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) # Portland Group compilers (*not* the Pentium gcc compiler, # which looks to be a dead project) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fpic' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; ccc*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' # All Alpha code is PIC. 
_LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' ;; xl* | bgxl* | bgf* | mpixl*) # IBM XL C 8.0/Fortran 10.1, 11.1 on PPC and BlueGene _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-qpic' _LT_TAGVAR(lt_prog_compiler_static, $1)='-qstaticlink' ;; *) case `$CC -V 2>&1 | sed 5q` in *Sun\ Ceres\ Fortran* | *Sun*Fortran*\ [[1-7]].* | *Sun*Fortran*\ 8.[[0-3]]*) # Sun Fortran 8.3 passes all unrecognized flags to the linker _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' _LT_TAGVAR(lt_prog_compiler_wl, $1)='' ;; *Sun\ F* | *Sun*Fortran*) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ' ;; *Sun\ C*) # Sun C 5.9 _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' ;; *Intel*\ [[CF]]*Compiler*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-static' ;; *Portland\ Group*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fpic' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; esac ;; esac ;; newsos6) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; *nto* | *qnx*) # QNX uses GNU C++, but need to define -shared option too, otherwise # it will coredump. _LT_TAGVAR(lt_prog_compiler_pic, $1)='-fPIC -shared' ;; osf3* | osf4* | osf5*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' # All OSF/1 code is PIC. 
_LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' ;; rdos*) _LT_TAGVAR(lt_prog_compiler_static, $1)='-non_shared' ;; solaris*) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' case $cc_basename in f77* | f90* | f95* | sunf77* | sunf90* | sunf95*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ';; *) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,';; esac ;; sunos4*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Qoption ld ' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-PIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; sysv4 | sysv4.2uw2* | sysv4.3*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; sysv4*MP*) if test -d /usr/nec; then _LT_TAGVAR(lt_prog_compiler_pic, $1)='-Kconform_pic' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' fi ;; sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_pic, $1)='-KPIC' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; unicos*) _LT_TAGVAR(lt_prog_compiler_wl, $1)='-Wl,' _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no ;; uts4*) _LT_TAGVAR(lt_prog_compiler_pic, $1)='-pic' _LT_TAGVAR(lt_prog_compiler_static, $1)='-Bstatic' ;; *) _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no ;; esac fi ]) case $host_os in # For platforms that do not support PIC, -DPIC is meaningless: *djgpp*) _LT_TAGVAR(lt_prog_compiler_pic, $1)= ;; *) _LT_TAGVAR(lt_prog_compiler_pic, $1)="$_LT_TAGVAR(lt_prog_compiler_pic, $1)@&t@m4_if([$1],[],[ -DPIC],[m4_if([$1],[CXX],[ -DPIC],[])])" ;; esac AC_CACHE_CHECK([for $compiler option to produce PIC], [_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)], [_LT_TAGVAR(lt_cv_prog_compiler_pic, $1)=$_LT_TAGVAR(lt_prog_compiler_pic, $1)]) _LT_TAGVAR(lt_prog_compiler_pic, $1)=$_LT_TAGVAR(lt_cv_prog_compiler_pic, $1) # # Check to make sure the PIC flag actually works. 
# if test -n "$_LT_TAGVAR(lt_prog_compiler_pic, $1)"; then _LT_COMPILER_OPTION([if $compiler PIC flag $_LT_TAGVAR(lt_prog_compiler_pic, $1) works], [_LT_TAGVAR(lt_cv_prog_compiler_pic_works, $1)], [$_LT_TAGVAR(lt_prog_compiler_pic, $1)@&t@m4_if([$1],[],[ -DPIC],[m4_if([$1],[CXX],[ -DPIC],[])])], [], [case $_LT_TAGVAR(lt_prog_compiler_pic, $1) in "" | " "*) ;; *) _LT_TAGVAR(lt_prog_compiler_pic, $1)=" $_LT_TAGVAR(lt_prog_compiler_pic, $1)" ;; esac], [_LT_TAGVAR(lt_prog_compiler_pic, $1)= _LT_TAGVAR(lt_prog_compiler_can_build_shared, $1)=no]) fi _LT_TAGDECL([pic_flag], [lt_prog_compiler_pic], [1], [Additional compiler flags for building library objects]) _LT_TAGDECL([wl], [lt_prog_compiler_wl], [1], [How to pass a linker flag through the compiler]) # # Check to make sure the static flag actually works. # wl=$_LT_TAGVAR(lt_prog_compiler_wl, $1) eval lt_tmp_static_flag=\"$_LT_TAGVAR(lt_prog_compiler_static, $1)\" _LT_LINKER_OPTION([if $compiler static flag $lt_tmp_static_flag works], _LT_TAGVAR(lt_cv_prog_compiler_static_works, $1), $lt_tmp_static_flag, [], [_LT_TAGVAR(lt_prog_compiler_static, $1)=]) _LT_TAGDECL([link_static_flag], [lt_prog_compiler_static], [1], [Compiler flag to prevent dynamic linking]) ])# _LT_COMPILER_PIC # _LT_LINKER_SHLIBS([TAGNAME]) # ---------------------------- # See if the linker supports building shared libraries. 
m4_defun([_LT_LINKER_SHLIBS], [AC_REQUIRE([LT_PATH_LD])dnl AC_REQUIRE([LT_PATH_NM])dnl m4_require([_LT_PATH_MANIFEST_TOOL])dnl m4_require([_LT_FILEUTILS_DEFAULTS])dnl m4_require([_LT_DECL_EGREP])dnl m4_require([_LT_DECL_SED])dnl m4_require([_LT_CMD_GLOBAL_SYMBOLS])dnl m4_require([_LT_TAG_COMPILER])dnl AC_MSG_CHECKING([whether the $compiler linker ($LD) supports shared libraries]) m4_if([$1], [CXX], [ _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'] case $host_os in aix[[4-9]]*) # If we're using GNU nm, then we don't want the "-C" option. # -C means demangle to GNU nm, but means don't demangle to AIX nm. # Without the "-l" option, or with the "-B" option, AIX nm treats # weak defined symbols like other global defined symbols, whereas # GNU nm marks them as "W". # While the 'weak' keyword is ignored in the Export File, we need # it in the Import File for the 'aix-soname' feature, so we have # to replace the "-B" option with "-P" for AIX nm. 
if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then _LT_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && ([substr](\$ 3,1,1) != ".")) { if (\$ 2 == "W") { print \$ 3 " weak" } else { print \$ 3 } } }'\'' | sort -u > $export_symbols' else _LT_TAGVAR(export_symbols_cmds, $1)='`func_echo_all $NM | $SED -e '\''s/B\([[^B]]*\)$/P\1/'\''` -PCpgl $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) && ([substr](\$ 1,1,1) != ".")) { if ((\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) { print \$ 1 " weak" } else { print \$ 1 } } }'\'' | sort -u > $export_symbols' fi ;; pw32*) _LT_TAGVAR(export_symbols_cmds, $1)=$ltdll_cmds ;; cygwin* | mingw* | cegcc*) case $cc_basename in cl*) _LT_TAGVAR(exclude_expsyms, $1)='_NULL_IMPORT_DESCRIPTOR|_IMPORT_DESCRIPTOR_.*' ;; *) _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols' _LT_TAGVAR(exclude_expsyms, $1)=['[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'] ;; esac ;; linux* | k*bsd*-gnu | gnu*) _LT_TAGVAR(link_all_deplibs, $1)=no ;; *) _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' ;; esac ], [ runpath_var= _LT_TAGVAR(allow_undefined_flag, $1)= _LT_TAGVAR(always_export_symbols, $1)=no _LT_TAGVAR(archive_cmds, $1)= _LT_TAGVAR(archive_expsym_cmds, $1)= _LT_TAGVAR(compiler_needs_object, $1)=no _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no _LT_TAGVAR(export_dynamic_flag_spec, $1)= _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq 
> $export_symbols' _LT_TAGVAR(hardcode_automatic, $1)=no _LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_direct_absolute, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)= _LT_TAGVAR(hardcode_libdir_separator, $1)= _LT_TAGVAR(hardcode_minus_L, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported _LT_TAGVAR(inherit_rpath, $1)=no _LT_TAGVAR(link_all_deplibs, $1)=unknown _LT_TAGVAR(module_cmds, $1)= _LT_TAGVAR(module_expsym_cmds, $1)= _LT_TAGVAR(old_archive_from_new_cmds, $1)= _LT_TAGVAR(old_archive_from_expsyms_cmds, $1)= _LT_TAGVAR(thread_safe_flag_spec, $1)= _LT_TAGVAR(whole_archive_flag_spec, $1)= # include_expsyms should be a list of space-separated symbols to be *always* # included in the symbol list _LT_TAGVAR(include_expsyms, $1)= # exclude_expsyms can be an extended regexp of symbols to exclude # it will be wrapped by ' (' and ')$', so one must not match beginning or # end of line. Example: 'a|bc|.*d.*' will exclude the symbols 'a' and 'bc', # as well as any symbol that contains 'd'. _LT_TAGVAR(exclude_expsyms, $1)=['_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*'] # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out # platforms (ab)use it in PIC code, but their linkers get confused if # the symbol is explicitly referenced. Since portable code cannot # rely on this symbol name, it's probably fine to never include it in # preloaded symbol tables. # Exclude shared library initialization/finalization symbols. dnl Note also adjust exclude_expsyms for C++ above. extract_expsyms_cmds= case $host_os in cygwin* | mingw* | pw32* | cegcc*) # FIXME: the MSVC++ port hasn't been tested in a loooong time # When not using gcc, we currently assume that we are using # Microsoft Visual C++. 
if test yes != "$GCC"; then with_gnu_ld=no fi ;; interix*) # we just hope/assume this is gcc and not c89 (= MSVC++) with_gnu_ld=yes ;; openbsd* | bitrig*) with_gnu_ld=no ;; linux* | k*bsd*-gnu | gnu*) _LT_TAGVAR(link_all_deplibs, $1)=no ;; esac _LT_TAGVAR(ld_shlibs, $1)=yes # On some targets, GNU ld is compatible enough with the native linker # that we're better off using the native interface for both. lt_use_gnu_ld_interface=no if test yes = "$with_gnu_ld"; then case $host_os in aix*) # The AIX port of GNU ld has always aspired to compatibility # with the native linker. However, as the warning in the GNU ld # block says, versions before 2.19.5* couldn't really create working # shared libraries, regardless of the interface used. case `$LD -v 2>&1` in *\ \(GNU\ Binutils\)\ 2.19.5*) ;; *\ \(GNU\ Binutils\)\ 2.[[2-9]]*) ;; *\ \(GNU\ Binutils\)\ [[3-9]]*) ;; *) lt_use_gnu_ld_interface=yes ;; esac ;; *) lt_use_gnu_ld_interface=yes ;; esac fi if test yes = "$lt_use_gnu_ld_interface"; then # If archive_cmds runs LD, not CC, wlarc should be empty wlarc='$wl' # Set some defaults for GNU ld with shared library support. These # are reset later if shared libraries are not supported. Putting them # here allows them to be overridden if necessary. runpath_var=LD_RUN_PATH _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic' # ancient GNU ld didn't support --whole-archive et. al. if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then _LT_TAGVAR(whole_archive_flag_spec, $1)=$wlarc'--whole-archive$convenience '$wlarc'--no-whole-archive' else _LT_TAGVAR(whole_archive_flag_spec, $1)= fi supports_anon_versioning=no case `$LD -v | $SED -e 's/([^)]\+)\s\+//' 2>&1` in *GNU\ gold*) supports_anon_versioning=yes ;; *\ [[01]].* | *\ 2.[[0-9]].* | *\ 2.10.*) ;; # catch versions < 2.11 *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ... 
*\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ... *\ 2.11.*) ;; # other 2.11 versions *) supports_anon_versioning=yes ;; esac # See if GNU ld supports shared libraries. case $host_os in aix[[3-9]]*) # On AIX/PPC, the GNU linker is very broken if test ia64 != "$host_cpu"; then _LT_TAGVAR(ld_shlibs, $1)=no cat <<_LT_EOF 1>&2 *** Warning: the GNU linker, at least up to release 2.19, is reported *** to be unable to reliably create shared libraries on AIX. *** Therefore, libtool is disabling shared libraries support. If you *** really care for shared libraries, you may want to install binutils *** 2.20 or above, or modify your PATH so that a non-GNU linker is found. *** You will then need to restart the configuration process. _LT_EOF fi ;; amigaos*) case $host_cpu in powerpc) # see comment about AmigaOS4 .so support _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='' ;; m68k) _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(hardcode_minus_L, $1)=yes ;; esac ;; beos*) if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then _LT_TAGVAR(allow_undefined_flag, $1)=unsupported # Joseph Beckenbach says some releases of gcc # support --undefined. This deserves some investigation. 
FIXME _LT_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; cygwin* | mingw* | pw32* | cegcc*) # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless, # as there is no search path for DLLs. _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-all-symbols' _LT_TAGVAR(allow_undefined_flag, $1)=unsupported _LT_TAGVAR(always_export_symbols, $1)=no _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1 DATA/;s/^.*[[ ]]__nm__\([[^ ]]*\)[[ ]][[^ ]]*/\1 DATA/;/^I[[ ]]/d;/^[[AITW]][[ ]]/s/.* //'\'' | sort | uniq > $export_symbols' _LT_TAGVAR(exclude_expsyms, $1)=['[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname'] if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' # If the export-symbols file already is a .def file, use it as # is; otherwise, prepend EXPORTS... 
_LT_TAGVAR(archive_expsym_cmds, $1)='if _LT_DLL_DEF_P([$export_symbols]); then cp $export_symbols $output_objdir/$soname.def; else echo EXPORTS > $output_objdir/$soname.def; cat $export_symbols >> $output_objdir/$soname.def; fi~ $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; haiku*) _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' _LT_TAGVAR(link_all_deplibs, $1)=yes ;; os2*) _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(hardcode_minus_L, $1)=yes _LT_TAGVAR(allow_undefined_flag, $1)=unsupported shrext_cmds=.dll _LT_TAGVAR(archive_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ $ECHO EXPORTS >> $output_objdir/$libname.def~ emxexp $libobjs | $SED /"_DLL_InitTerm"/d >> $output_objdir/$libname.def~ $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ emximp -o $lib $output_objdir/$libname.def' _LT_TAGVAR(archive_expsym_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ $ECHO EXPORTS >> $output_objdir/$libname.def~ prefix_cmds="$SED"~ if test EXPORTS = "`$SED 1q $export_symbols`"; then prefix_cmds="$prefix_cmds -e 1d"; fi~ prefix_cmds="$prefix_cmds -e \"s/^\(.*\)$/_\1/g\""~ cat $export_symbols | $prefix_cmds >> $output_objdir/$libname.def~ $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ emximp -o $lib $output_objdir/$libname.def' 
_LT_TAGVAR(old_archive_From_new_cmds, $1)='emximp -o $output_objdir/${libname}_dll.a $output_objdir/$libname.def' _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes ;; interix[[3-9]]*) _LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc. # Instead, shared libraries are loaded at an image base (0x10000000 by # default) and relocated if they conflict, which is a slow very memory # consuming and fragmenting process. To avoid this, we pick a random, # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link # time. Moving up from 0x10000000 also allows more sbrk(2) space. _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='sed "s|^|_|" $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--retain-symbols-file,$output_objdir/$soname.expsym $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' ;; gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu) tmp_diet=no if test linux-dietlibc = "$host_os"; then case $cc_basename in diet\ *) tmp_diet=yes;; # linux-dietlibc with static linking (!diet-dyn) esac fi if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \ && test no = "$tmp_diet" then tmp_addflag=' $pic_flag' tmp_sharedflag='-shared' case $cc_basename,$host_cpu in pgcc*) # Portland Group C compiler _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' tmp_addflag=' $pic_flag' ;; pgf77* | pgf90* | pgf95* | pgfortran*) # 
Portland Group f77 and f90 compilers _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' tmp_addflag=' $pic_flag -Mnomain' ;; ecc*,ia64* | icc*,ia64*) # Intel C compiler on ia64 tmp_addflag=' -i_dynamic' ;; efc*,ia64* | ifort*,ia64*) # Intel Fortran compiler on ia64 tmp_addflag=' -i_dynamic -nofor_main' ;; ifc* | ifort*) # Intel Fortran compiler tmp_addflag=' -nofor_main' ;; lf95*) # Lahey Fortran 8.1 _LT_TAGVAR(whole_archive_flag_spec, $1)= tmp_sharedflag='--shared' ;; nagfor*) # NAGFOR 5.3 tmp_sharedflag='-Wl,-shared' ;; xl[[cC]]* | bgxl[[cC]]* | mpixl[[cC]]*) # IBM XL C 8.0 on PPC (deal with xlf below) tmp_sharedflag='-qmkshrobj' tmp_addflag= ;; nvcc*) # Cuda Compiler Driver 2.2 _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' _LT_TAGVAR(compiler_needs_object, $1)=yes ;; esac case `$CC -V 2>&1 | sed 5q` in *Sun\ C*) # Sun C 5.9 _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' _LT_TAGVAR(compiler_needs_object, $1)=yes tmp_sharedflag='-G' ;; *Sun\ F*) # Sun Fortran 8.3 tmp_sharedflag='-G' ;; esac _LT_TAGVAR(archive_cmds, $1)='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' if test yes = "$supports_anon_versioning"; then _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ echo "local: *; };" >> $output_objdir/$libname.ver~ $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs 
$deplibs $compiler_flags $wl-soname $wl$soname $wl-version-script $wl$output_objdir/$libname.ver -o $lib' fi case $cc_basename in tcc*) _LT_TAGVAR(export_dynamic_flag_spec, $1)='-rdynamic' ;; xlf* | bgf* | bgxlf* | mpixlf*) # IBM XL Fortran 10.1 on PPC cannot create shared libs itself _LT_TAGVAR(whole_archive_flag_spec, $1)='--whole-archive$convenience --no-whole-archive' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' _LT_TAGVAR(archive_cmds, $1)='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib' if test yes = "$supports_anon_versioning"; then _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ echo "local: *; };" >> $output_objdir/$libname.ver~ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' fi ;; esac else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; netbsd* | netbsdelf*-gnu) if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' wlarc= else _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' fi ;; solaris*) if $LD -v 2>&1 | $GREP 'BFD 2\.8' > /dev/null; then _LT_TAGVAR(ld_shlibs, $1)=no cat <<_LT_EOF 1>&2 *** Warning: The releases 2.8.* of the GNU linker cannot reliably *** create shared libraries on Solaris systems. Therefore, libtool *** is disabling shared libraries support. We urge you to upgrade GNU *** binutils to release 2.9.1 or newer. Another option is to modify *** your PATH or compiler configuration so that the native linker is *** used, and then restart. 
_LT_EOF elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*) case `$LD -v 2>&1` in *\ [[01]].* | *\ 2.[[0-9]].* | *\ 2.1[[0-5]].*) _LT_TAGVAR(ld_shlibs, $1)=no cat <<_LT_EOF 1>&2 *** Warning: Releases of the GNU linker prior to 2.16.91.0.3 cannot *** reliably create shared libraries on SCO systems. Therefore, libtool *** is disabling shared libraries support. We urge you to upgrade GNU *** binutils to release 2.16.91.0.3 or newer. Another option is to modify *** your PATH or compiler configuration so that the native linker is *** used, and then restart. _LT_EOF ;; *) # For security reasons, it is highly recommended that you always # use absolute paths for naming shared libraries, and exclude the # DT_RUNPATH tag from executables and libraries. But doing so # requires that you compile everything twice, which is a pain. 
if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; esac ;; sunos4*) _LT_TAGVAR(archive_cmds, $1)='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags' wlarc= _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; *) if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; esac if test no = "$_LT_TAGVAR(ld_shlibs, $1)"; then runpath_var= _LT_TAGVAR(hardcode_libdir_flag_spec, $1)= _LT_TAGVAR(export_dynamic_flag_spec, $1)= _LT_TAGVAR(whole_archive_flag_spec, $1)= fi else # PORTME fill in a description of your system's linker (not GNU ld) case $host_os in aix3*) _LT_TAGVAR(allow_undefined_flag, $1)=unsupported _LT_TAGVAR(always_export_symbols, $1)=yes _LT_TAGVAR(archive_expsym_cmds, $1)='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib $output_objdir/$soname' # Note: this linker hardcodes the directories in LIBPATH if there # are no directories specified by -L. _LT_TAGVAR(hardcode_minus_L, $1)=yes if test yes = "$GCC" && test -z "$lt_prog_compiler_static"; then # Neither direct hardcoding nor static linking is supported with a # broken collect2. 
_LT_TAGVAR(hardcode_direct, $1)=unsupported fi ;; aix[[4-9]]*) if test ia64 = "$host_cpu"; then # On IA64, the linker does run time linking by default, so we don't # have to do anything special. aix_use_runtimelinking=no exp_sym_flag='-Bexport' no_entry_flag= else # If we're using GNU nm, then we don't want the "-C" option. # -C means demangle to GNU nm, but means don't demangle to AIX nm. # Without the "-l" option, or with the "-B" option, AIX nm treats # weak defined symbols like other global defined symbols, whereas # GNU nm marks them as "W". # While the 'weak' keyword is ignored in the Export File, we need # it in the Import File for the 'aix-soname' feature, so we have # to replace the "-B" option with "-P" for AIX nm. if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then _LT_TAGVAR(export_symbols_cmds, $1)='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && ([substr](\$ 3,1,1) != ".")) { if (\$ 2 == "W") { print \$ 3 " weak" } else { print \$ 3 } } }'\'' | sort -u > $export_symbols' else _LT_TAGVAR(export_symbols_cmds, $1)='`func_echo_all $NM | $SED -e '\''s/B\([[^B]]*\)$/P\1/'\''` -PCpgl $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) && ([substr](\$ 1,1,1) != ".")) { if ((\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) { print \$ 1 " weak" } else { print \$ 1 } } }'\'' | sort -u > $export_symbols' fi aix_use_runtimelinking=no # Test if we are trying to use run time linking or normal # AIX style linking. If -brtl is somewhere in LDFLAGS, we # have runtime linking enabled, and use it for executables. 
# For shared libraries, we enable/disable runtime linking # depending on the kind of the shared library created - # when "with_aix_soname,aix_use_runtimelinking" is: # "aix,no" lib.a(lib.so.V) shared, rtl:no, for executables # "aix,yes" lib.so shared, rtl:yes, for executables # lib.a static archive # "both,no" lib.so.V(shr.o) shared, rtl:yes # lib.a(lib.so.V) shared, rtl:no, for executables # "both,yes" lib.so.V(shr.o) shared, rtl:yes, for executables # lib.a(lib.so.V) shared, rtl:no # "svr4,*" lib.so.V(shr.o) shared, rtl:yes, for executables # lib.a static archive case $host_os in aix4.[[23]]|aix4.[[23]].*|aix[[5-9]]*) for ld_flag in $LDFLAGS; do if (test x-brtl = "x$ld_flag" || test x-Wl,-brtl = "x$ld_flag"); then aix_use_runtimelinking=yes break fi done if test svr4,no = "$with_aix_soname,$aix_use_runtimelinking"; then # With aix-soname=svr4, we create the lib.so.V shared archives only, # so we don't have lib.a shared libs to link our executables. # We have to force runtime linking in this case. aix_use_runtimelinking=yes LDFLAGS="$LDFLAGS -Wl,-brtl" fi ;; esac exp_sym_flag='-bexport' no_entry_flag='-bnoentry' fi # When large executables or shared objects are built, AIX ld can # have problems creating the table of contents. If linking a library # or program results in "error TOC overflow" add -mminimal-toc to # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS. _LT_TAGVAR(archive_cmds, $1)='' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_direct_absolute, $1)=yes _LT_TAGVAR(hardcode_libdir_separator, $1)=':' _LT_TAGVAR(link_all_deplibs, $1)=yes _LT_TAGVAR(file_list_spec, $1)='$wl-f,' case $with_aix_soname,$aix_use_runtimelinking in aix,*) ;; # traditional, no import file svr4,* | *,yes) # use import file # The Import File defines what to hardcode. 
_LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_direct_absolute, $1)=no ;; esac if test yes = "$GCC"; then case $host_os in aix4.[[012]]|aix4.[[012]].*) # We only want to do this on AIX 4.2 and lower, the check # below for broken collect2 doesn't work under 4.3+ collect2name=`$CC -print-prog-name=collect2` if test -f "$collect2name" && strings "$collect2name" | $GREP resolve_lib_name >/dev/null then # We have reworked collect2 : else # We have old collect2 _LT_TAGVAR(hardcode_direct, $1)=unsupported # It fails to find uninstalled libraries when the uninstalled # path is not listed in the libpath. Setting hardcode_minus_L # to unsupported forces relinking _LT_TAGVAR(hardcode_minus_L, $1)=yes _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)= fi ;; esac shared_flag='-shared' if test yes = "$aix_use_runtimelinking"; then shared_flag="$shared_flag "'$wl-G' fi # Need to ensure runtime linking is disabled for the traditional # shared library, or the linker may eventually find shared libraries # /with/ Import File - we do not want to mix them. shared_flag_aix='-shared' shared_flag_svr4='-shared $wl-G' else # not using gcc if test ia64 = "$host_cpu"; then # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release # chokes on -Wl,-G. The following line is correct: shared_flag='-G' else if test yes = "$aix_use_runtimelinking"; then shared_flag='$wl-G' else shared_flag='$wl-bM:SRE' fi shared_flag_aix='$wl-bM:SRE' shared_flag_svr4='$wl-G' fi fi _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-bexpall' # It seems that -bexpall does not export symbols beginning with # underscore (_), so it is better to generate a list of symbols to export. _LT_TAGVAR(always_export_symbols, $1)=yes if test aix,yes = "$with_aix_soname,$aix_use_runtimelinking"; then # Warning - without using the other runtime loading flags (-brtl), # -berok will link without error, but may produce a broken library. 
_LT_TAGVAR(allow_undefined_flag, $1)='-berok' # Determine the default libpath from the value encoded in an # empty executable. _LT_SYS_MODULE_PATH_AIX([$1]) _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-blibpath:$libdir:'"$aix_libpath" _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs $wl'$no_entry_flag' $compiler_flags `if test -n "$allow_undefined_flag"; then func_echo_all "$wl$allow_undefined_flag"; else :; fi` $wl'$exp_sym_flag:\$export_symbols' '$shared_flag else if test ia64 = "$host_cpu"; then _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-R $libdir:/usr/lib:/lib' _LT_TAGVAR(allow_undefined_flag, $1)="-z nodefs" _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\$wl$no_entry_flag"' $compiler_flags $wl$allow_undefined_flag '"\$wl$exp_sym_flag:\$export_symbols" else # Determine the default libpath from the value encoded in an # empty executable. _LT_SYS_MODULE_PATH_AIX([$1]) _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-blibpath:$libdir:'"$aix_libpath" # Warning - without using the other run time loading flags, # -berok will link without error, but may produce a broken library. _LT_TAGVAR(no_undefined_flag, $1)=' $wl-bernotok' _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-berok' if test yes = "$with_gnu_ld"; then # We only use this code for GNU lds that support --whole-archive. 
_LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive$convenience $wl--no-whole-archive' else # Exported symbols can be pulled into shared objects from archives _LT_TAGVAR(whole_archive_flag_spec, $1)='$convenience' fi _LT_TAGVAR(archive_cmds_need_lc, $1)=yes _LT_TAGVAR(archive_expsym_cmds, $1)='$RM -r $output_objdir/$realname.d~$MKDIR $output_objdir/$realname.d' # -brtl affects multiple linker settings, -berok does not and is overridden later compiler_flags_filtered='`func_echo_all "$compiler_flags " | $SED -e "s%-brtl\\([[, ]]\\)%-berok\\1%g"`' if test svr4 != "$with_aix_soname"; then # This is similar to how AIX traditionally builds its shared libraries. _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$CC '$shared_flag_aix' -o $output_objdir/$realname.d/$soname $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$realname.d/$soname' fi if test aix != "$with_aix_soname"; then _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$CC '$shared_flag_svr4' -o $output_objdir/$realname.d/$shared_archive_member_spec.o $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$STRIP -e $output_objdir/$realname.d/$shared_archive_member_spec.o~( func_echo_all "#! 
$soname($shared_archive_member_spec.o)"; if test shr_64 = "$shared_archive_member_spec"; then func_echo_all "# 64"; else func_echo_all "# 32"; fi; cat $export_symbols ) > $output_objdir/$realname.d/$shared_archive_member_spec.imp~$AR $AR_FLAGS $output_objdir/$soname $output_objdir/$realname.d/$shared_archive_member_spec.o $output_objdir/$realname.d/$shared_archive_member_spec.imp' else # used by -dlpreopen to get the symbols _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$MV $output_objdir/$realname.d/$soname $output_objdir' fi _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$RM -r $output_objdir/$realname.d' fi fi ;; amigaos*) case $host_cpu in powerpc) # see comment about AmigaOS4 .so support _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='' ;; m68k) _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(hardcode_minus_L, $1)=yes ;; esac ;; bsdi[[45]]*) _LT_TAGVAR(export_dynamic_flag_spec, $1)=-rdynamic ;; cygwin* | mingw* | pw32* | cegcc*) # When not using gcc, we currently assume that we are using # Microsoft Visual C++. # hardcode_libdir_flag_spec is actually meaningless, as there is # no search path for DLLs. case $cc_basename in cl*) # Native MSVC _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' ' _LT_TAGVAR(allow_undefined_flag, $1)=unsupported _LT_TAGVAR(always_export_symbols, $1)=yes _LT_TAGVAR(file_list_spec, $1)='@' # Tell ltmain to make .lib files, not .a files. 
libext=lib # Tell ltmain to make .dll files, not .so files. shrext_cmds=.dll # FIXME: Setting linknames here is a bad hack. _LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~linknames=' _LT_TAGVAR(archive_expsym_cmds, $1)='if _LT_DLL_DEF_P([$export_symbols]); then cp "$export_symbols" "$output_objdir/$soname.def"; echo "$tool_output_objdir$soname.def" > "$output_objdir/$soname.exp"; else $SED -e '\''s/^/-link -EXPORT:/'\'' < $export_symbols > $output_objdir/$soname.exp; fi~ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ linknames=' # The linker will not automatically build a static lib if we build a DLL. # _LT_TAGVAR(old_archive_from_new_cmds, $1)='true' _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes _LT_TAGVAR(exclude_expsyms, $1)='_NULL_IMPORT_DESCRIPTOR|_IMPORT_DESCRIPTOR_.*' _LT_TAGVAR(export_symbols_cmds, $1)='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[[BCDGRS]][[ ]]/s/.*[[ ]]\([[^ ]]*\)/\1,DATA/'\'' | $SED -e '\''/^[[AITW]][[ ]]/s/.*[[ ]]//'\'' | sort | uniq > $export_symbols' # Don't use ranlib _LT_TAGVAR(old_postinstall_cmds, $1)='chmod 644 $oldlib' _LT_TAGVAR(postlink_cmds, $1)='lt_outputfile="@OUTPUT@"~ lt_tool_outputfile="@TOOL_OUTPUT@"~ case $lt_outputfile in *.exe|*.EXE) ;; *) lt_outputfile=$lt_outputfile.exe lt_tool_outputfile=$lt_tool_outputfile.exe ;; esac~ if test : != "$MANIFEST_TOOL" && test -f "$lt_outputfile.manifest"; then $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; $RM "$lt_outputfile.manifest"; fi' ;; *) # Assume MSVC wrapper _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' ' _LT_TAGVAR(allow_undefined_flag, $1)=unsupported # Tell ltmain to make .lib files, not .a files. libext=lib # Tell ltmain to make .dll files, not .so files. 
shrext_cmds=.dll # FIXME: Setting linknames here is a bad hack. _LT_TAGVAR(archive_cmds, $1)='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' # The linker will automatically build a .lib file if we build a DLL. _LT_TAGVAR(old_archive_from_new_cmds, $1)='true' # FIXME: Should let the user specify the lib program. _LT_TAGVAR(old_archive_cmds, $1)='lib -OUT:$oldlib$oldobjs$old_deplibs' _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes ;; esac ;; darwin* | rhapsody*) _LT_DARWIN_LINKER_FEATURES($1) ;; dgux*) _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor # support. Future versions do this automatically, but an explicit c++rt0.o # does not break anything, and helps significantly (at the cost of a little # extra space). freebsd2.2*) _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; # Unfortunately, older versions of FreeBSD 2 do not have this feature. freebsd2.*) _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_minus_L, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; # FreeBSD 3 and greater uses gcc -shared to do shared libraries. 
freebsd* | dragonfly*) _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; hpux9*) if test yes = "$GCC"; then _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared $pic_flag $wl+b $wl$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib' else _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib' fi _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl+b $wl$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: _LT_TAGVAR(hardcode_direct, $1)=yes # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. _LT_TAGVAR(hardcode_minus_L, $1)=yes _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' ;; hpux10*) if test yes,no = "$GCC,$with_gnu_ld"; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags' else _LT_TAGVAR(archive_cmds, $1)='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' fi if test no = "$with_gnu_ld"; then _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl+b $wl$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_direct_absolute, $1)=yes _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. 
_LT_TAGVAR(hardcode_minus_L, $1)=yes fi ;; hpux11*) if test yes,no = "$GCC,$with_gnu_ld"; then case $host_cpu in hppa*64*) _LT_TAGVAR(archive_cmds, $1)='$CC -shared $wl+h $wl$soname -o $lib $libobjs $deplibs $compiler_flags' ;; ia64*) _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $wl+h $wl$soname $wl+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' ;; *) _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags' ;; esac else case $host_cpu in hppa*64*) _LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname -o $lib $libobjs $deplibs $compiler_flags' ;; ia64*) _LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname $wl+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' ;; *) m4_if($1, [], [ # Older versions of the 11.00 compiler do not understand -b yet # (HP92453-01 A.11.01.20 doesn't, HP92453-01 B.11.X.35175-35176.GP does) _LT_LINKER_OPTION([if $CC understands -b], _LT_TAGVAR(lt_cv_prog_compiler__b, $1), [-b], [_LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags'], [_LT_TAGVAR(archive_cmds, $1)='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags'])], [_LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags']) ;; esac fi if test no = "$with_gnu_ld"; then _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl+b $wl$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: case $host_cpu in hppa*64*|ia64*) _LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; *) _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_direct_absolute, $1)=yes _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. 
_LT_TAGVAR(hardcode_minus_L, $1)=yes ;; esac fi ;; irix5* | irix6* | nonstopux*) if test yes = "$GCC"; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' # Try to use the -exported_symbol ld option, if it does not # work, assume that -exports_file does not work either and # implicitly export all symbols. # This should be the same for all languages, so no per-tag cache variable. AC_CACHE_CHECK([whether the $host_os linker accepts -exported_symbol], [lt_cv_irix_exported_symbol], [save_LDFLAGS=$LDFLAGS LDFLAGS="$LDFLAGS -shared $wl-exported_symbol ${wl}foo $wl-update_registry $wl/dev/null" AC_LINK_IFELSE( [AC_LANG_SOURCE( [AC_LANG_CASE([C], [[int foo (void) { return 0; }]], [C++], [[int foo (void) { return 0; }]], [Fortran 77], [[ subroutine foo end]], [Fortran], [[ subroutine foo end]])])], [lt_cv_irix_exported_symbol=yes], [lt_cv_irix_exported_symbol=no]) LDFLAGS=$save_LDFLAGS]) if test yes = "$lt_cv_irix_exported_symbol"; then _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations $wl-exports_file $wl$export_symbols -o $lib' fi _LT_TAGVAR(link_all_deplibs, $1)=no else _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -exports_file $export_symbols -o $lib' fi _LT_TAGVAR(archive_cmds_need_lc, $1)='no' 
_LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: _LT_TAGVAR(inherit_rpath, $1)=yes _LT_TAGVAR(link_all_deplibs, $1)=yes ;; linux*) case $cc_basename in tcc*) # Fabrice Bellard et al's Tiny C Compiler _LT_TAGVAR(ld_shlibs, $1)=yes _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' ;; esac ;; netbsd* | netbsdelf*-gnu) if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' # a.out else _LT_TAGVAR(archive_cmds, $1)='$LD -shared -o $lib $libobjs $deplibs $linker_flags' # ELF fi _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; newsos6) _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; *nto* | *qnx*) ;; openbsd* | bitrig*) if test -f /usr/libexec/ld.so; then _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no _LT_TAGVAR(hardcode_direct_absolute, $1)=yes if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags $wl-retain-symbols-file,$export_symbols' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' else _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir' fi else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; os2*) _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(hardcode_minus_L, 
$1)=yes _LT_TAGVAR(allow_undefined_flag, $1)=unsupported shrext_cmds=.dll _LT_TAGVAR(archive_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ $ECHO EXPORTS >> $output_objdir/$libname.def~ emxexp $libobjs | $SED /"_DLL_InitTerm"/d >> $output_objdir/$libname.def~ $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ emximp -o $lib $output_objdir/$libname.def' _LT_TAGVAR(archive_expsym_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ $ECHO EXPORTS >> $output_objdir/$libname.def~ prefix_cmds="$SED"~ if test EXPORTS = "`$SED 1q $export_symbols`"; then prefix_cmds="$prefix_cmds -e 1d"; fi~ prefix_cmds="$prefix_cmds -e \"s/^\(.*\)$/_\1/g\""~ cat $export_symbols | $prefix_cmds >> $output_objdir/$libname.def~ $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ emximp -o $lib $output_objdir/$libname.def' _LT_TAGVAR(old_archive_From_new_cmds, $1)='emximp -o $output_objdir/${libname}_dll.a $output_objdir/$libname.def' _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes ;; osf3*) if test yes = "$GCC"; then _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-expect_unresolved $wl\*' _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' else _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*' _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags -soname $soname `test 
-n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' fi _LT_TAGVAR(archive_cmds_need_lc, $1)='no' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: ;; osf4* | osf5*) # as osf3* with the addition of -msym flag if test yes = "$GCC"; then _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-expect_unresolved $wl\*' _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag $pic_flag $libobjs $deplibs $compiler_flags $wl-msym $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' else _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*' _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~ $CC -shared$allow_undefined_flag $wl-input $wl$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib~$RM $lib.exp' # Both c and cxx compiler support -rpath directly _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir' fi _LT_TAGVAR(archive_cmds_need_lc, $1)='no' _LT_TAGVAR(hardcode_libdir_separator, $1)=: ;; solaris*) _LT_TAGVAR(no_undefined_flag, $1)=' -z defs' if test yes = "$GCC"; then wlarc='$wl' _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $wl-z ${wl}text $wl-h $wl$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" 
>> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -shared $pic_flag $wl-z ${wl}text $wl-M $wl$lib.exp $wl-h $wl$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' else case `$CC -V 2>&1` in *"Compilers 5.0"*) wlarc='' _LT_TAGVAR(archive_cmds, $1)='$LD -G$allow_undefined_flag -h $soname -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $LD -G$allow_undefined_flag -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp' ;; *) wlarc='$wl' _LT_TAGVAR(archive_cmds, $1)='$CC -G$allow_undefined_flag -h $soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -G$allow_undefined_flag -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' ;; esac fi _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' _LT_TAGVAR(hardcode_shlibpath_var, $1)=no case $host_os in solaris2.[[0-5]] | solaris2.[[0-5]].*) ;; *) # The compiler driver will combine and reorder linker options, # but understands '-z linker_flag'. GCC discards it without '$wl', # but is careful enough not to reorder. # Supported since Solaris 2.6 (maybe 2.5.1?) if test yes = "$GCC"; then _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl-z ${wl}allextract$convenience $wl-z ${wl}defaultextract' else _LT_TAGVAR(whole_archive_flag_spec, $1)='-z allextract$convenience -z defaultextract' fi ;; esac _LT_TAGVAR(link_all_deplibs, $1)=yes ;; sunos4*) if test sequent = "$host_vendor"; then # Use $CC to link under sequent, because it throws in some extra .o # files that make .init and .fini sections work. 
_LT_TAGVAR(archive_cmds, $1)='$CC -G $wl-h $soname -o $lib $libobjs $deplibs $compiler_flags' else _LT_TAGVAR(archive_cmds, $1)='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags' fi _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_minus_L, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; sysv4) case $host_vendor in sni) _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(hardcode_direct, $1)=yes # is this really true??? ;; siemens) ## LD is ld it makes a PLAMLIB ## CC just makes a GrossModule. _LT_TAGVAR(archive_cmds, $1)='$LD -G -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(reload_cmds, $1)='$CC -r -o $output$reload_objs' _LT_TAGVAR(hardcode_direct, $1)=no ;; motorola) _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(hardcode_direct, $1)=no #Motorola manual says yes, but my tests say they lie ;; esac runpath_var='LD_RUN_PATH' _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; sysv4.3*) _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(hardcode_shlibpath_var, $1)=no _LT_TAGVAR(export_dynamic_flag_spec, $1)='-Bexport' ;; sysv4*MP*) if test -d /usr/nec; then _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(hardcode_shlibpath_var, $1)=no runpath_var=LD_RUN_PATH hardcode_runpath_var=yes _LT_TAGVAR(ld_shlibs, $1)=yes fi ;; sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[[01]].[[10]]* | unixware7* | sco3.2v5.0.[[024]]*) _LT_TAGVAR(no_undefined_flag, $1)='$wl-z,text' _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=no runpath_var='LD_RUN_PATH' if test yes = "$GCC"; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o 
$lib $libobjs $deplibs $compiler_flags' else _LT_TAGVAR(archive_cmds, $1)='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' fi ;; sysv5* | sco3.2v5* | sco5v6*) # Note: We CANNOT use -z defs as we might desire, because we do not # link with -lc, and that would cause any symbols used from libc to # always be unresolved, which means just about no library would # ever link correctly. If we're not using GNU ld we use -z text # though, which does catch some bad symbols but isn't as heavy-handed # as -z defs. _LT_TAGVAR(no_undefined_flag, $1)='$wl-z,text' _LT_TAGVAR(allow_undefined_flag, $1)='$wl-z,nodefs' _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-R,$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=':' _LT_TAGVAR(link_all_deplibs, $1)=yes _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-Bexport' runpath_var='LD_RUN_PATH' if test yes = "$GCC"; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' else _LT_TAGVAR(archive_cmds, $1)='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' fi ;; uts4*) _LT_TAGVAR(archive_cmds, $1)='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; *) _LT_TAGVAR(ld_shlibs, $1)=no ;; esac if test sni = "$host_vendor"; then case $host in sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*) _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-Blargedynsym' ;; esac fi fi ]) AC_MSG_RESULT([$_LT_TAGVAR(ld_shlibs, $1)]) test 
no = "$_LT_TAGVAR(ld_shlibs, $1)" && can_build_shared=no _LT_TAGVAR(with_gnu_ld, $1)=$with_gnu_ld _LT_DECL([], [libext], [0], [Old archive suffix (normally "a")])dnl _LT_DECL([], [shrext_cmds], [1], [Shared library suffix (normally ".so")])dnl _LT_DECL([], [extract_expsyms_cmds], [2], [The commands to extract the exported symbol list from a shared archive]) # # Do we need to explicitly link libc? # case "x$_LT_TAGVAR(archive_cmds_need_lc, $1)" in x|xyes) # Assume -lc should be added _LT_TAGVAR(archive_cmds_need_lc, $1)=yes if test yes,yes = "$GCC,$enable_shared"; then case $_LT_TAGVAR(archive_cmds, $1) in *'~'*) # FIXME: we may have to deal with multi-command sequences. ;; '$CC '*) # Test whether the compiler implicitly links with -lc since on some # systems, -lgcc has to come before -lc. If gcc already passes -lc # to ld, don't add -lc before -lgcc. AC_CACHE_CHECK([whether -lc should be explicitly linked in], [lt_cv_]_LT_TAGVAR(archive_cmds_need_lc, $1), [$RM conftest* echo "$lt_simple_compile_test_code" > conftest.$ac_ext if AC_TRY_EVAL(ac_compile) 2>conftest.err; then soname=conftest lib=conftest libobjs=conftest.$ac_objext deplibs= wl=$_LT_TAGVAR(lt_prog_compiler_wl, $1) pic_flag=$_LT_TAGVAR(lt_prog_compiler_pic, $1) compiler_flags=-v linker_flags=-v verstring= output_objdir=. 
libname=conftest lt_save_allow_undefined_flag=$_LT_TAGVAR(allow_undefined_flag, $1) _LT_TAGVAR(allow_undefined_flag, $1)= if AC_TRY_EVAL(_LT_TAGVAR(archive_cmds, $1) 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1) then lt_cv_[]_LT_TAGVAR(archive_cmds_need_lc, $1)=no else lt_cv_[]_LT_TAGVAR(archive_cmds_need_lc, $1)=yes fi _LT_TAGVAR(allow_undefined_flag, $1)=$lt_save_allow_undefined_flag else cat conftest.err 1>&5 fi $RM conftest* ]) _LT_TAGVAR(archive_cmds_need_lc, $1)=$lt_cv_[]_LT_TAGVAR(archive_cmds_need_lc, $1) ;; esac fi ;; esac _LT_TAGDECL([build_libtool_need_lc], [archive_cmds_need_lc], [0], [Whether or not to add -lc for building shared libraries]) _LT_TAGDECL([allow_libtool_libs_with_static_runtimes], [enable_shared_with_static_runtimes], [0], [Whether or not to disallow shared libs when runtime libs are static]) _LT_TAGDECL([], [export_dynamic_flag_spec], [1], [Compiler flag to allow reflexive dlopens]) _LT_TAGDECL([], [whole_archive_flag_spec], [1], [Compiler flag to generate shared objects directly from archives]) _LT_TAGDECL([], [compiler_needs_object], [1], [Whether the compiler copes with passing no objects directly]) _LT_TAGDECL([], [old_archive_from_new_cmds], [2], [Create an old-style archive from a shared archive]) _LT_TAGDECL([], [old_archive_from_expsyms_cmds], [2], [Create a temporary old-style archive to link instead of a shared archive]) _LT_TAGDECL([], [archive_cmds], [2], [Commands used to build a shared archive]) _LT_TAGDECL([], [archive_expsym_cmds], [2]) _LT_TAGDECL([], [module_cmds], [2], [Commands used to build a loadable module if different from building a shared archive.]) _LT_TAGDECL([], [module_expsym_cmds], [2]) _LT_TAGDECL([], [with_gnu_ld], [1], [Whether we are building with GNU ld or not]) _LT_TAGDECL([], [allow_undefined_flag], [1], [Flag that allows shared libraries with undefined symbols to be built]) _LT_TAGDECL([], [no_undefined_flag], [1], [Flag that enforces no undefined symbols]) _LT_TAGDECL([], 
[hardcode_libdir_flag_spec], [1], [Flag to hardcode $libdir into a binary during linking. This must work even if $libdir does not exist]) _LT_TAGDECL([], [hardcode_libdir_separator], [1], [Whether we need a single "-rpath" flag with a separated argument]) _LT_TAGDECL([], [hardcode_direct], [0], [Set to "yes" if using DIR/libNAME$shared_ext during linking hardcodes DIR into the resulting binary]) _LT_TAGDECL([], [hardcode_direct_absolute], [0], [Set to "yes" if using DIR/libNAME$shared_ext during linking hardcodes DIR into the resulting binary and the resulting library dependency is "absolute", i.e impossible to change by setting $shlibpath_var if the library is relocated]) _LT_TAGDECL([], [hardcode_minus_L], [0], [Set to "yes" if using the -LDIR flag during linking hardcodes DIR into the resulting binary]) _LT_TAGDECL([], [hardcode_shlibpath_var], [0], [Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR into the resulting binary]) _LT_TAGDECL([], [hardcode_automatic], [0], [Set to "yes" if building a shared library automatically hardcodes DIR into the library and all subsequent libraries and executables linked against it]) _LT_TAGDECL([], [inherit_rpath], [0], [Set to yes if linker adds runtime paths of dependent libraries to runtime path list]) _LT_TAGDECL([], [link_all_deplibs], [0], [Whether libtool must link a program against all its dependency libraries]) _LT_TAGDECL([], [always_export_symbols], [0], [Set to "yes" if exported symbols are required]) _LT_TAGDECL([], [export_symbols_cmds], [2], [The commands to list exported symbols]) _LT_TAGDECL([], [exclude_expsyms], [1], [Symbols that should not be listed in the preloaded symbols]) _LT_TAGDECL([], [include_expsyms], [1], [Symbols that must always be exported]) _LT_TAGDECL([], [prelink_cmds], [2], [Commands necessary for linking programs (against libraries) with templates]) _LT_TAGDECL([], [postlink_cmds], [2], [Commands necessary for finishing linking programs]) _LT_TAGDECL([], 
[file_list_spec], [1],
    [Specify filename containing input files])
dnl FIXME: Not yet implemented
dnl _LT_TAGDECL([], [thread_safe_flag_spec], [1],
dnl    [Compiler flag to generate thread safe objects])
])# _LT_LINKER_SHLIBS


# _LT_LANG_C_CONFIG([TAG])
# ------------------------
# Ensure that the configuration variables for a C compiler are suitably
# defined.  These variables are subsequently used by _LT_CONFIG to write
# the compiler configuration to 'libtool'.
m4_defun([_LT_LANG_C_CONFIG],
[m4_require([_LT_DECL_EGREP])dnl
# Preserve the user's compiler; it is restored (CC=$lt_save_CC) just
# before this macro returns, after all C-tag probing is done.
lt_save_CC=$CC
AC_LANG_PUSH(C)

# Source file extension for C test sources.
ac_ext=c

# Object file extension for compiled C test sources.
objext=o
_LT_TAGVAR(objext, $1)=$objext

# Code to be used in simple compile tests
lt_simple_compile_test_code="int some_variable = 0;"

# Code to be used in simple link tests
lt_simple_link_test_code='int main(){return(0);}'

_LT_TAG_COMPILER
# Save the default compiler, since it gets overwritten when the other
# tags are being tested, and _LT_TAGVAR(compiler, []) is a NOP.
compiler_DEFAULT=$CC

# save warnings/boilerplate of simple test code
_LT_COMPILER_BOILERPLATE
_LT_LINKER_BOILERPLATE

# Only run the full compiler/linker characteristic probes when a working
# compiler was detected ($compiler is presumably set by _LT_TAG_COMPILER
# above -- NOTE(review): confirm against the macro's definition).
if test -n "$compiler"; then
  _LT_COMPILER_NO_RTTI($1)
  _LT_COMPILER_PIC($1)
  _LT_COMPILER_C_O($1)
  _LT_COMPILER_FILE_LOCKS($1)
  _LT_LINKER_SHLIBS($1)
  _LT_SYS_DYNAMIC_LINKER($1)
  _LT_LINKER_HARDCODE_LIBPATH($1)
  LT_SYS_DLOPEN_SELF
  _LT_CMD_STRIPLIB

  # Report what library types will actually be built
  AC_MSG_CHECKING([if libtool supports shared libraries])
  AC_MSG_RESULT([$can_build_shared])

  AC_MSG_CHECKING([whether to build shared libraries])
  test no = "$can_build_shared" && enable_shared=no

  # On AIX, shared libraries and static libraries use the same namespace, and
  # are all built from PIC.
  case $host_os in
  aix3*)
    test yes = "$enable_shared" && enable_static=no
    if test -n "$RANLIB"; then
      archive_cmds="$archive_cmds~\$RANLIB \$lib"
      postinstall_cmds='$RANLIB $lib'
    fi
    ;;

  aix[[4-9]]*)
    if test ia64 != "$host_cpu"; then
      # Whether a static lib.a can coexist with the shared library depends
      # on the chosen AIX soname style and runtime-linking mode.
      case $enable_shared,$with_aix_soname,$aix_use_runtimelinking in
      yes,aix,yes) ;;                   # shared object as lib.so file only
      yes,svr4,*) ;;                    # shared object as lib.so archive member only
      yes,*) enable_static=no ;;        # shared object in lib.a archive as well
      esac
    fi
    ;;
  esac
  AC_MSG_RESULT([$enable_shared])

  AC_MSG_CHECKING([whether to build static libraries])
  # Make sure either enable_shared or enable_static is yes.
  test yes = "$enable_shared" || enable_static=yes
  AC_MSG_RESULT([$enable_static])

  _LT_CONFIG($1)
fi
AC_LANG_POP
CC=$lt_save_CC
])# _LT_LANG_C_CONFIG


# _LT_LANG_CXX_CONFIG([TAG])
# --------------------------
# Ensure that the configuration variables for a C++ compiler are suitably
# defined.  These variables are subsequently used by _LT_CONFIG to write
# the compiler configuration to 'libtool'.
m4_defun([_LT_LANG_CXX_CONFIG], [m4_require([_LT_FILEUTILS_DEFAULTS])dnl m4_require([_LT_DECL_EGREP])dnl m4_require([_LT_PATH_MANIFEST_TOOL])dnl if test -n "$CXX" && ( test no != "$CXX" && ( (test g++ = "$CXX" && `g++ -v >/dev/null 2>&1` ) || (test g++ != "$CXX"))); then AC_PROG_CXXCPP else _lt_caught_CXX_error=yes fi AC_LANG_PUSH(C++) _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(allow_undefined_flag, $1)= _LT_TAGVAR(always_export_symbols, $1)=no _LT_TAGVAR(archive_expsym_cmds, $1)= _LT_TAGVAR(compiler_needs_object, $1)=no _LT_TAGVAR(export_dynamic_flag_spec, $1)= _LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_direct_absolute, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)= _LT_TAGVAR(hardcode_libdir_separator, $1)= _LT_TAGVAR(hardcode_minus_L, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=unsupported _LT_TAGVAR(hardcode_automatic, $1)=no _LT_TAGVAR(inherit_rpath, $1)=no _LT_TAGVAR(module_cmds, $1)= _LT_TAGVAR(module_expsym_cmds, $1)= _LT_TAGVAR(link_all_deplibs, $1)=unknown _LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds _LT_TAGVAR(reload_flag, $1)=$reload_flag _LT_TAGVAR(reload_cmds, $1)=$reload_cmds _LT_TAGVAR(no_undefined_flag, $1)= _LT_TAGVAR(whole_archive_flag_spec, $1)= _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no # Source file extension for C++ test sources. ac_ext=cpp # Object file extension for compiled C++ test sources. objext=o _LT_TAGVAR(objext, $1)=$objext # No sense in running all these tests if we already determined that # the CXX compiler isn't working. Some variables (like enable_shared) # are currently assumed to apply to all compilers on this platform, # and will be corrupted by setting them based on a non-working compiler. 
if test yes != "$_lt_caught_CXX_error"; then # Code to be used in simple compile tests lt_simple_compile_test_code="int some_variable = 0;" # Code to be used in simple link tests lt_simple_link_test_code='int main(int, char *[[]]) { return(0); }' # ltmain only uses $CC for tagged configurations so make sure $CC is set. _LT_TAG_COMPILER # save warnings/boilerplate of simple test code _LT_COMPILER_BOILERPLATE _LT_LINKER_BOILERPLATE # Allow CC to be a program name with arguments. lt_save_CC=$CC lt_save_CFLAGS=$CFLAGS lt_save_LD=$LD lt_save_GCC=$GCC GCC=$GXX lt_save_with_gnu_ld=$with_gnu_ld lt_save_path_LD=$lt_cv_path_LD if test -n "${lt_cv_prog_gnu_ldcxx+set}"; then lt_cv_prog_gnu_ld=$lt_cv_prog_gnu_ldcxx else $as_unset lt_cv_prog_gnu_ld fi if test -n "${lt_cv_path_LDCXX+set}"; then lt_cv_path_LD=$lt_cv_path_LDCXX else $as_unset lt_cv_path_LD fi test -z "${LDCXX+set}" || LD=$LDCXX CC=${CXX-"c++"} CFLAGS=$CXXFLAGS compiler=$CC _LT_TAGVAR(compiler, $1)=$CC _LT_CC_BASENAME([$compiler]) if test -n "$compiler"; then # We don't want -fno-exception when compiling C++ code, so set the # no_builtin_flag separately if test yes = "$GXX"; then _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)=' -fno-builtin' else _LT_TAGVAR(lt_prog_compiler_no_builtin_flag, $1)= fi if test yes = "$GXX"; then # Set up default GNU C++ configuration LT_PATH_LD # Check if GNU C++ uses GNU ld as the underlying linker, since the # archiving commands below assume that GNU ld is being used. 
if test yes = "$with_gnu_ld"; then _LT_TAGVAR(archive_cmds, $1)='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic' # If archive_cmds runs LD, not CC, wlarc should be empty # XXX I think wlarc can be eliminated in ltcf-cxx, but I need to # investigate it a little bit more. (MM) wlarc='$wl' # ancient GNU ld didn't support --whole-archive et. al. if eval "`$CC -print-prog-name=ld` --help 2>&1" | $GREP 'no-whole-archive' > /dev/null; then _LT_TAGVAR(whole_archive_flag_spec, $1)=$wlarc'--whole-archive$convenience '$wlarc'--no-whole-archive' else _LT_TAGVAR(whole_archive_flag_spec, $1)= fi else with_gnu_ld=no wlarc= # A generic and very simple default shared library creation # command for GNU C++ for the case where it uses the native # linker, instead of GNU ld. If possible, this setting should # overridden to take advantage of the native linker features on # the platform it is being used on. _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib' fi # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. 
output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"' else GXX=no with_gnu_ld=no wlarc= fi # PORTME: fill in a description of your system's C++ link characteristics AC_MSG_CHECKING([whether the $compiler linker ($LD) supports shared libraries]) _LT_TAGVAR(ld_shlibs, $1)=yes case $host_os in aix3*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; aix[[4-9]]*) if test ia64 = "$host_cpu"; then # On IA64, the linker does run time linking by default, so we don't # have to do anything special. aix_use_runtimelinking=no exp_sym_flag='-Bexport' no_entry_flag= else aix_use_runtimelinking=no # Test if we are trying to use run time linking or normal # AIX style linking. If -brtl is somewhere in LDFLAGS, we # have runtime linking enabled, and use it for executables. # For shared libraries, we enable/disable runtime linking # depending on the kind of the shared library created - # when "with_aix_soname,aix_use_runtimelinking" is: # "aix,no" lib.a(lib.so.V) shared, rtl:no, for executables # "aix,yes" lib.so shared, rtl:yes, for executables # lib.a static archive # "both,no" lib.so.V(shr.o) shared, rtl:yes # lib.a(lib.so.V) shared, rtl:no, for executables # "both,yes" lib.so.V(shr.o) shared, rtl:yes, for executables # lib.a(lib.so.V) shared, rtl:no # "svr4,*" lib.so.V(shr.o) shared, rtl:yes, for executables # lib.a static archive case $host_os in aix4.[[23]]|aix4.[[23]].*|aix[[5-9]]*) for ld_flag in $LDFLAGS; do case $ld_flag in *-brtl*) aix_use_runtimelinking=yes break ;; esac done if test svr4,no = "$with_aix_soname,$aix_use_runtimelinking"; then # With aix-soname=svr4, we create the lib.so.V shared archives only, # so we don't have lib.a shared libs to link our executables. # We have to force runtime linking in this case. 
aix_use_runtimelinking=yes LDFLAGS="$LDFLAGS -Wl,-brtl" fi ;; esac exp_sym_flag='-bexport' no_entry_flag='-bnoentry' fi # When large executables or shared objects are built, AIX ld can # have problems creating the table of contents. If linking a library # or program results in "error TOC overflow" add -mminimal-toc to # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS. _LT_TAGVAR(archive_cmds, $1)='' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_direct_absolute, $1)=yes _LT_TAGVAR(hardcode_libdir_separator, $1)=':' _LT_TAGVAR(link_all_deplibs, $1)=yes _LT_TAGVAR(file_list_spec, $1)='$wl-f,' case $with_aix_soname,$aix_use_runtimelinking in aix,*) ;; # no import file svr4,* | *,yes) # use import file # The Import File defines what to hardcode. _LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_direct_absolute, $1)=no ;; esac if test yes = "$GXX"; then case $host_os in aix4.[[012]]|aix4.[[012]].*) # We only want to do this on AIX 4.2 and lower, the check # below for broken collect2 doesn't work under 4.3+ collect2name=`$CC -print-prog-name=collect2` if test -f "$collect2name" && strings "$collect2name" | $GREP resolve_lib_name >/dev/null then # We have reworked collect2 : else # We have old collect2 _LT_TAGVAR(hardcode_direct, $1)=unsupported # It fails to find uninstalled libraries when the uninstalled # path is not listed in the libpath. Setting hardcode_minus_L # to unsupported forces relinking _LT_TAGVAR(hardcode_minus_L, $1)=yes _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)= fi esac shared_flag='-shared' if test yes = "$aix_use_runtimelinking"; then shared_flag=$shared_flag' $wl-G' fi # Need to ensure runtime linking is disabled for the traditional # shared library, or the linker may eventually find shared libraries # /with/ Import File - we do not want to mix them. 
shared_flag_aix='-shared' shared_flag_svr4='-shared $wl-G' else # not using gcc if test ia64 = "$host_cpu"; then # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release # chokes on -Wl,-G. The following line is correct: shared_flag='-G' else if test yes = "$aix_use_runtimelinking"; then shared_flag='$wl-G' else shared_flag='$wl-bM:SRE' fi shared_flag_aix='$wl-bM:SRE' shared_flag_svr4='$wl-G' fi fi _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-bexpall' # It seems that -bexpall does not export symbols beginning with # underscore (_), so it is better to generate a list of symbols to # export. _LT_TAGVAR(always_export_symbols, $1)=yes if test aix,yes = "$with_aix_soname,$aix_use_runtimelinking"; then # Warning - without using the other runtime loading flags (-brtl), # -berok will link without error, but may produce a broken library. # The "-G" linker flag allows undefined symbols. _LT_TAGVAR(no_undefined_flag, $1)='-bernotok' # Determine the default libpath from the value encoded in an empty # executable. _LT_SYS_MODULE_PATH_AIX([$1]) _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-blibpath:$libdir:'"$aix_libpath" _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $deplibs $wl'$no_entry_flag' $compiler_flags `if test -n "$allow_undefined_flag"; then func_echo_all "$wl$allow_undefined_flag"; else :; fi` $wl'$exp_sym_flag:\$export_symbols' '$shared_flag else if test ia64 = "$host_cpu"; then _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-R $libdir:/usr/lib:/lib' _LT_TAGVAR(allow_undefined_flag, $1)="-z nodefs" _LT_TAGVAR(archive_expsym_cmds, $1)="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\$wl$no_entry_flag"' $compiler_flags $wl$allow_undefined_flag '"\$wl$exp_sym_flag:\$export_symbols" else # Determine the default libpath from the value encoded in an # empty executable. 
_LT_SYS_MODULE_PATH_AIX([$1]) _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-blibpath:$libdir:'"$aix_libpath" # Warning - without using the other run time loading flags, # -berok will link without error, but may produce a broken library. _LT_TAGVAR(no_undefined_flag, $1)=' $wl-bernotok' _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-berok' if test yes = "$with_gnu_ld"; then # We only use this code for GNU lds that support --whole-archive. _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive$convenience $wl--no-whole-archive' else # Exported symbols can be pulled into shared objects from archives _LT_TAGVAR(whole_archive_flag_spec, $1)='$convenience' fi _LT_TAGVAR(archive_cmds_need_lc, $1)=yes _LT_TAGVAR(archive_expsym_cmds, $1)='$RM -r $output_objdir/$realname.d~$MKDIR $output_objdir/$realname.d' # -brtl affects multiple linker settings, -berok does not and is overridden later compiler_flags_filtered='`func_echo_all "$compiler_flags " | $SED -e "s%-brtl\\([[, ]]\\)%-berok\\1%g"`' if test svr4 != "$with_aix_soname"; then # This is similar to how AIX traditionally builds its shared # libraries. Need -bnortl late, we may have -brtl in LDFLAGS. _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$CC '$shared_flag_aix' -o $output_objdir/$realname.d/$soname $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$realname.d/$soname' fi if test aix != "$with_aix_soname"; then _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$CC '$shared_flag_svr4' -o $output_objdir/$realname.d/$shared_archive_member_spec.o $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$STRIP -e $output_objdir/$realname.d/$shared_archive_member_spec.o~( func_echo_all "#! 
$soname($shared_archive_member_spec.o)"; if test shr_64 = "$shared_archive_member_spec"; then func_echo_all "# 64"; else func_echo_all "# 32"; fi; cat $export_symbols ) > $output_objdir/$realname.d/$shared_archive_member_spec.imp~$AR $AR_FLAGS $output_objdir/$soname $output_objdir/$realname.d/$shared_archive_member_spec.o $output_objdir/$realname.d/$shared_archive_member_spec.imp' else # used by -dlpreopen to get the symbols _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$MV $output_objdir/$realname.d/$soname $output_objdir' fi _LT_TAGVAR(archive_expsym_cmds, $1)="$_LT_TAGVAR(archive_expsym_cmds, $1)"'~$RM -r $output_objdir/$realname.d' fi fi ;; beos*) if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then _LT_TAGVAR(allow_undefined_flag, $1)=unsupported # Joseph Beckenbach says some releases of gcc # support --undefined. This deserves some investigation. FIXME _LT_TAGVAR(archive_cmds, $1)='$CC -nostart $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; chorus*) case $cc_basename in *) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; esac ;; cygwin* | mingw* | pw32* | cegcc*) case $GXX,$cc_basename in ,cl* | no,cl*) # Native MSVC # hardcode_libdir_flag_spec is actually meaningless, as there is # no search path for DLLs. _LT_TAGVAR(hardcode_libdir_flag_spec, $1)=' ' _LT_TAGVAR(allow_undefined_flag, $1)=unsupported _LT_TAGVAR(always_export_symbols, $1)=yes _LT_TAGVAR(file_list_spec, $1)='@' # Tell ltmain to make .lib files, not .a files. libext=lib # Tell ltmain to make .dll files, not .so files. shrext_cmds=.dll # FIXME: Setting linknames here is a bad hack. 
_LT_TAGVAR(archive_cmds, $1)='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~linknames=' _LT_TAGVAR(archive_expsym_cmds, $1)='if _LT_DLL_DEF_P([$export_symbols]); then cp "$export_symbols" "$output_objdir/$soname.def"; echo "$tool_output_objdir$soname.def" > "$output_objdir/$soname.exp"; else $SED -e '\''s/^/-link -EXPORT:/'\'' < $export_symbols > $output_objdir/$soname.exp; fi~ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ linknames=' # The linker will not automatically build a static lib if we build a DLL. # _LT_TAGVAR(old_archive_from_new_cmds, $1)='true' _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes # Don't use ranlib _LT_TAGVAR(old_postinstall_cmds, $1)='chmod 644 $oldlib' _LT_TAGVAR(postlink_cmds, $1)='lt_outputfile="@OUTPUT@"~ lt_tool_outputfile="@TOOL_OUTPUT@"~ case $lt_outputfile in *.exe|*.EXE) ;; *) lt_outputfile=$lt_outputfile.exe lt_tool_outputfile=$lt_tool_outputfile.exe ;; esac~ func_to_tool_file "$lt_outputfile"~ if test : != "$MANIFEST_TOOL" && test -f "$lt_outputfile.manifest"; then $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; $RM "$lt_outputfile.manifest"; fi' ;; *) # g++ # _LT_TAGVAR(hardcode_libdir_flag_spec, $1) is actually meaningless, # as there is no search path for DLLs. 
_LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-all-symbols' _LT_TAGVAR(allow_undefined_flag, $1)=unsupported _LT_TAGVAR(always_export_symbols, $1)=no _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' # If the export-symbols file already is a .def file, use it as # is; otherwise, prepend EXPORTS... _LT_TAGVAR(archive_expsym_cmds, $1)='if _LT_DLL_DEF_P([$export_symbols]); then cp $export_symbols $output_objdir/$soname.def; else echo EXPORTS > $output_objdir/$soname.def; cat $export_symbols >> $output_objdir/$soname.def; fi~ $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; esac ;; darwin* | rhapsody*) _LT_DARWIN_LINKER_FEATURES($1) ;; os2*) _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-L$libdir' _LT_TAGVAR(hardcode_minus_L, $1)=yes _LT_TAGVAR(allow_undefined_flag, $1)=unsupported shrext_cmds=.dll _LT_TAGVAR(archive_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ $ECHO EXPORTS >> $output_objdir/$libname.def~ emxexp $libobjs | $SED /"_DLL_InitTerm"/d >> $output_objdir/$libname.def~ $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ emximp -o $lib $output_objdir/$libname.def' _LT_TAGVAR(archive_expsym_cmds, $1)='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ $ECHO 
"DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ $ECHO EXPORTS >> $output_objdir/$libname.def~ prefix_cmds="$SED"~ if test EXPORTS = "`$SED 1q $export_symbols`"; then prefix_cmds="$prefix_cmds -e 1d"; fi~ prefix_cmds="$prefix_cmds -e \"s/^\(.*\)$/_\1/g\""~ cat $export_symbols | $prefix_cmds >> $output_objdir/$libname.def~ $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ emximp -o $lib $output_objdir/$libname.def' _LT_TAGVAR(old_archive_From_new_cmds, $1)='emximp -o $output_objdir/${libname}_dll.a $output_objdir/$libname.def' _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=yes ;; dgux*) case $cc_basename in ec++*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; ghcx*) # Green Hills C++ Compiler # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; *) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; esac ;; freebsd2.*) # C++ shared libraries reported to be fairly broken before # switch to ELF _LT_TAGVAR(ld_shlibs, $1)=no ;; freebsd-elf*) _LT_TAGVAR(archive_cmds_need_lc, $1)=no ;; freebsd* | dragonfly*) # FreeBSD 3 and later use GNU C++ and GNU ld with standard ELF # conventions _LT_TAGVAR(ld_shlibs, $1)=yes ;; haiku*) _LT_TAGVAR(archive_cmds, $1)='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' _LT_TAGVAR(link_all_deplibs, $1)=yes ;; hpux9*) _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl+b $wl$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_minus_L, $1)=yes # Not in the search PATH, # but as the default # location of the library. 
case $cc_basename in CC*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; aCC*) _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -b $wl+b $wl$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $EGREP "\-L"`; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' ;; *) if test yes = "$GXX"; then _LT_TAGVAR(archive_cmds, $1)='$RM $output_objdir/$soname~$CC -shared -nostdlib $pic_flag $wl+b $wl$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib' else # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no fi ;; esac ;; hpux10*|hpux11*) if test no = "$with_gnu_ld"; then _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl+b $wl$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: case $host_cpu in hppa*64*|ia64*) ;; *) _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' ;; esac fi case $host_cpu in hppa*64*|ia64*) _LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=no ;; *) _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_direct_absolute, $1)=yes _LT_TAGVAR(hardcode_minus_L, $1)=yes # Not in the search PATH, # but as the default # location of the library. 
;; esac case $cc_basename in CC*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; aCC*) case $host_cpu in hppa*64*) _LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; ia64*) _LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname $wl+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; *) _LT_TAGVAR(archive_cmds, $1)='$CC -b $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; esac # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $GREP "\-L"`; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' ;; *) if test yes = "$GXX"; then if test no = "$with_gnu_ld"; then case $host_cpu in hppa*64*) _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib -fPIC $wl+h $wl$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; ia64*) _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag $wl+h $wl$soname $wl+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; *) _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $pic_flag $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; esac fi else # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no fi ;; esac ;; interix[[3-9]]*) _LT_TAGVAR(hardcode_direct, $1)=no 
_LT_TAGVAR(hardcode_shlibpath_var, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc. # Instead, shared libraries are loaded at an image base (0x10000000 by # default) and relocated if they conflict, which is a slow very memory # consuming and fragmenting process. To avoid this, we pick a random, # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link # time. Moving up from 0x10000000 also allows more sbrk(2) space. _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='sed "s|^|_|" $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--retain-symbols-file,$output_objdir/$soname.expsym $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' ;; irix5* | irix6*) case $cc_basename in CC*) # SGI C++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared -all -multigot $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' # Archives containing C++ object files must be created using # "CC -ar", where "CC" is the IRIX C++ compiler. This is # necessary to make sure instantiated templates are included # in the archive. 
_LT_TAGVAR(old_archive_cmds, $1)='$CC -ar -WR,-u -o $oldlib $oldobjs' ;; *) if test yes = "$GXX"; then if test no = "$with_gnu_ld"; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' else _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` -o $lib' fi fi _LT_TAGVAR(link_all_deplibs, $1)=yes ;; esac _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: _LT_TAGVAR(inherit_rpath, $1)=yes ;; linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) case $cc_basename in KCC*) # Kuck and Associates, Inc. (KAI) C++ Compiler # KCC will only create a shared library if the output file # ends with ".so" (or ".sl" for HP-UX), so rename the library # to its proper name (with version) after linking. _LT_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\$tempext\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\$tempext\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib $wl-retain-symbols-file,$export_symbols; mv \$templib $lib' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. 
# # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. output_verbose_link_cmd='templist=`$CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1 | $GREP "ld"`; rm -f libconftest$shared_ext; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic' # Archives containing C++ object files must be created using # "CC -Bstatic", where "CC" is the KAI C++ compiler. _LT_TAGVAR(old_archive_cmds, $1)='$CC -Bstatic -o $oldlib $oldobjs' ;; icpc* | ecpc* ) # Intel C++ with_gnu_ld=yes # version 8.0 and above of icpc choke on multiply defined symbols # if we add $predep_objects and $postdep_objects, however 7.1 and # earlier do not add the objects themselves. 
case `$CC -V 2>&1` in *"Version 7."*) _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' ;; *) # Version 8.0 or newer tmp_idyn= case $host_cpu in ia64*) tmp_idyn=' -i_dynamic';; esac _LT_TAGVAR(archive_cmds, $1)='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' ;; esac _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic' _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive$convenience $wl--no-whole-archive' ;; pgCC* | pgcpp*) # Portland Group C++ compiler case `$CC -V` in *pgCC\ [[1-5]].* | *pgcpp\ [[1-5]].*) _LT_TAGVAR(prelink_cmds, $1)='tpldir=Template.dir~ rm -rf $tpldir~ $CC --prelink_objects --instantiation_dir $tpldir $objs $libobjs $compile_deplibs~ compile_command="$compile_command `find $tpldir -name \*.o | sort | $NL2SP`"' _LT_TAGVAR(old_archive_cmds, $1)='tpldir=Template.dir~ rm -rf $tpldir~ $CC --prelink_objects --instantiation_dir $tpldir $oldobjs$old_deplibs~ $AR $AR_FLAGS $oldlib$oldobjs$old_deplibs `find $tpldir -name \*.o | sort | $NL2SP`~ $RANLIB $oldlib' _LT_TAGVAR(archive_cmds, $1)='tpldir=Template.dir~ rm -rf $tpldir~ $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='tpldir=Template.dir~ 
rm -rf $tpldir~ $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' ;; *) # Version 6 and above use weak symbols _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' ;; esac _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl--rpath $wl$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic' _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' ;; cxx*) # Compaq C++ _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib $wl-retain-symbols-file $wl$export_symbols' runpath_var=LD_RUN_PATH _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. 
output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld"`; templist=`func_echo_all "$templist" | $SED "s/\(^.*ld.*\)\( .*ld .*$\)/\1/"`; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "X$list" | $Xsed' ;; xl* | mpixl* | bgxl*) # IBM XL 8.0 on PPC, with GNU ld _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl--export-dynamic' _LT_TAGVAR(archive_cmds, $1)='$CC -qmkshrobj $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' if test yes = "$supports_anon_versioning"; then _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $output_objdir/$libname.ver~ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ echo "local: *; };" >> $output_objdir/$libname.ver~ $CC -qmkshrobj $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-version-script $wl$output_objdir/$libname.ver -o $lib' fi ;; *) case `$CC -V 2>&1 | sed 5q` in *Sun\ C*) # Sun C++ 5.9 _LT_TAGVAR(no_undefined_flag, $1)=' -zdefs' _LT_TAGVAR(archive_cmds, $1)='$CC -G$allow_undefined_flag -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G$allow_undefined_flag -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-retain-symbols-file $wl$export_symbols' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' _LT_TAGVAR(compiler_needs_object, $1)=yes # Not sure whether something based on # $CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1 # would be better. 
output_verbose_link_cmd='func_echo_all' # Archives containing C++ object files must be created using # "CC -xar", where "CC" is the Sun C++ compiler. This is # necessary to make sure instantiated templates are included # in the archive. _LT_TAGVAR(old_archive_cmds, $1)='$CC -xar -o $oldlib $oldobjs' ;; esac ;; esac ;; lynxos*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; m88k*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; mvs*) case $cc_basename in cxx*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; *) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; esac ;; netbsd*) if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then _LT_TAGVAR(archive_cmds, $1)='$LD -Bshareable -o $lib $predep_objects $libobjs $deplibs $postdep_objects $linker_flags' wlarc= _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no fi # Workaround some broken pre-1.5 toolchains output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP conftest.$objext | $SED -e "s:-lgcc -lc -lgcc::"' ;; *nto* | *qnx*) _LT_TAGVAR(ld_shlibs, $1)=yes ;; openbsd* | bitrig*) if test -f /usr/libexec/ld.so; then _LT_TAGVAR(hardcode_direct, $1)=yes _LT_TAGVAR(hardcode_shlibpath_var, $1)=no _LT_TAGVAR(hardcode_direct_absolute, $1)=yes _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir' if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`"; then _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-retain-symbols-file,$export_symbols -o $lib' _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-E' _LT_TAGVAR(whole_archive_flag_spec, $1)=$wlarc'--whole-archive$convenience '$wlarc'--no-whole-archive' fi 
output_verbose_link_cmd=func_echo_all else _LT_TAGVAR(ld_shlibs, $1)=no fi ;; osf3* | osf4* | osf5*) case $cc_basename in KCC*) # Kuck and Associates, Inc. (KAI) C++ Compiler # KCC will only create a shared library if the output file # ends with ".so" (or ".sl" for HP-UX), so rename the library # to its proper name (with version) after linking. _LT_TAGVAR(archive_cmds, $1)='tempext=`echo $shared_ext | $SED -e '\''s/\([[^()0-9A-Za-z{}]]\)/\\\\\1/g'\''`; templib=`echo "$lib" | $SED -e "s/\$tempext\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath,$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: # Archives containing C++ object files must be created using # the KAI C++ compiler. case $host in osf3*) _LT_TAGVAR(old_archive_cmds, $1)='$CC -Bstatic -o $oldlib $oldobjs' ;; *) _LT_TAGVAR(old_archive_cmds, $1)='$CC -o $oldlib $oldobjs' ;; esac ;; RCC*) # Rational C++ 2.4.1 # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; cxx*) case $host in osf3*) _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-expect_unresolved $wl\*' _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $soname `test -n "$verstring" && func_echo_all "$wl-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' ;; *) _LT_TAGVAR(allow_undefined_flag, $1)=' -expect_unresolved \*' _LT_TAGVAR(archive_cmds, $1)='$CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done~ 
echo "-hidden">> $lib.exp~ $CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname $wl-input $wl$lib.exp `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib~ $RM $lib.exp' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-rpath $libdir' ;; esac _LT_TAGVAR(hardcode_libdir_separator, $1)=: # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld" | $GREP -v "ld:"`; templist=`func_echo_all "$templist" | $SED "s/\(^.*ld.*\)\( .*ld.*$\)/\1/"`; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' ;; *) if test yes,no = "$GXX,$with_gnu_ld"; then _LT_TAGVAR(allow_undefined_flag, $1)=' $wl-expect_unresolved $wl\*' case $host in osf3*) _LT_TAGVAR(archive_cmds, $1)='$CC -shared -nostdlib $allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' ;; *) _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-msym $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' ;; esac _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-rpath $wl$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=: # Commands to make 
compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"' else # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no fi ;; esac ;; psos*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; sunos4*) case $cc_basename in CC*) # Sun C++ 4.x # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; lcc*) # Lucid # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; *) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; esac ;; solaris*) case $cc_basename in CC* | sunCC*) # Sun C++ 4.2, 5.x and Centerline C++ _LT_TAGVAR(archive_cmds_need_lc,$1)=yes _LT_TAGVAR(no_undefined_flag, $1)=' -zdefs' _LT_TAGVAR(archive_cmds, $1)='$CC -G$allow_undefined_flag -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -G$allow_undefined_flag $wl-M $wl$lib.exp -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='-R$libdir' _LT_TAGVAR(hardcode_shlibpath_var, $1)=no case $host_os in solaris2.[[0-5]] | solaris2.[[0-5]].*) ;; *) # The compiler driver will combine and reorder linker options, # but understands '-z linker_flag'. # Supported since Solaris 2.6 (maybe 2.5.1?) _LT_TAGVAR(whole_archive_flag_spec, $1)='-z allextract$convenience -z defaultextract' ;; esac _LT_TAGVAR(link_all_deplibs, $1)=yes output_verbose_link_cmd='func_echo_all' # Archives containing C++ object files must be created using # "CC -xar", where "CC" is the Sun C++ compiler. 
This is # necessary to make sure instantiated templates are included # in the archive. _LT_TAGVAR(old_archive_cmds, $1)='$CC -xar -o $oldlib $oldobjs' ;; gcx*) # Green Hills C++ Compiler _LT_TAGVAR(archive_cmds, $1)='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-h $wl$soname -o $lib' # The C++ compiler must be used to create the archive. _LT_TAGVAR(old_archive_cmds, $1)='$CC $LDFLAGS -archive -o $oldlib $oldobjs' ;; *) # GNU C++ compiler with Solaris linker if test yes,no = "$GXX,$with_gnu_ld"; then _LT_TAGVAR(no_undefined_flag, $1)=' $wl-z ${wl}defs' if $CC --version | $GREP -v '^2\.7' > /dev/null; then _LT_TAGVAR(archive_cmds, $1)='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-h $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -shared $pic_flag -nostdlib $wl-M $wl$lib.exp $wl-h $wl$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"' else # g++ 2.7 appears to require '-G' NOT '-shared' on this # platform. 
_LT_TAGVAR(archive_cmds, $1)='$CC -G -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-h $wl$soname -o $lib' _LT_TAGVAR(archive_expsym_cmds, $1)='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -G -nostdlib $wl-M $wl$lib.exp $wl-h $wl$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. output_verbose_link_cmd='$CC -G $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"' fi _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-R $wl$libdir' case $host_os in solaris2.[[0-5]] | solaris2.[[0-5]].*) ;; *) _LT_TAGVAR(whole_archive_flag_spec, $1)='$wl-z ${wl}allextract$convenience $wl-z ${wl}defaultextract' ;; esac fi ;; esac ;; sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[[01]].[[10]]* | unixware7* | sco3.2v5.0.[[024]]*) _LT_TAGVAR(no_undefined_flag, $1)='$wl-z,text' _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=no runpath_var='LD_RUN_PATH' case $cc_basename in CC*) _LT_TAGVAR(archive_cmds, $1)='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' ;; *) _LT_TAGVAR(archive_cmds, $1)='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' ;; esac ;; sysv5* | sco3.2v5* | sco5v6*) # Note: We CANNOT use -z defs as we might desire, because we do not # link with -lc, and that would cause any symbols used from libc to # always be unresolved, which means just about no library would # ever link correctly. 
If we're not using GNU ld we use -z text # though, which does catch some bad symbols but isn't as heavy-handed # as -z defs. _LT_TAGVAR(no_undefined_flag, $1)='$wl-z,text' _LT_TAGVAR(allow_undefined_flag, $1)='$wl-z,nodefs' _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(hardcode_shlibpath_var, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)='$wl-R,$libdir' _LT_TAGVAR(hardcode_libdir_separator, $1)=':' _LT_TAGVAR(link_all_deplibs, $1)=yes _LT_TAGVAR(export_dynamic_flag_spec, $1)='$wl-Bexport' runpath_var='LD_RUN_PATH' case $cc_basename in CC*) _LT_TAGVAR(archive_cmds, $1)='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(old_archive_cmds, $1)='$CC -Tprelink_objects $oldobjs~ '"$_LT_TAGVAR(old_archive_cmds, $1)" _LT_TAGVAR(reload_cmds, $1)='$CC -Tprelink_objects $reload_objs~ '"$_LT_TAGVAR(reload_cmds, $1)" ;; *) _LT_TAGVAR(archive_cmds, $1)='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' _LT_TAGVAR(archive_expsym_cmds, $1)='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' ;; esac ;; tandem*) case $cc_basename in NCC*) # NonStop-UX NCC 3.20 # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; *) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; esac ;; vxworks*) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; *) # FIXME: insert proper C++ library support _LT_TAGVAR(ld_shlibs, $1)=no ;; esac AC_MSG_RESULT([$_LT_TAGVAR(ld_shlibs, $1)]) test no = "$_LT_TAGVAR(ld_shlibs, $1)" && can_build_shared=no _LT_TAGVAR(GCC, $1)=$GXX _LT_TAGVAR(LD, $1)=$LD ## CAVEAT EMPTOR: ## There is no encapsulation within the following macros, do not change ## the running order or otherwise move them around unless you know exactly ## what you are doing... 
_LT_SYS_HIDDEN_LIBDEPS($1) _LT_COMPILER_PIC($1) _LT_COMPILER_C_O($1) _LT_COMPILER_FILE_LOCKS($1) _LT_LINKER_SHLIBS($1) _LT_SYS_DYNAMIC_LINKER($1) _LT_LINKER_HARDCODE_LIBPATH($1) _LT_CONFIG($1) fi # test -n "$compiler" CC=$lt_save_CC CFLAGS=$lt_save_CFLAGS LDCXX=$LD LD=$lt_save_LD GCC=$lt_save_GCC with_gnu_ld=$lt_save_with_gnu_ld lt_cv_path_LDCXX=$lt_cv_path_LD lt_cv_path_LD=$lt_save_path_LD lt_cv_prog_gnu_ldcxx=$lt_cv_prog_gnu_ld lt_cv_prog_gnu_ld=$lt_save_with_gnu_ld fi # test yes != "$_lt_caught_CXX_error" AC_LANG_POP ])# _LT_LANG_CXX_CONFIG # _LT_FUNC_STRIPNAME_CNF # ---------------------- # func_stripname_cnf prefix suffix name # strip PREFIX and SUFFIX off of NAME. # PREFIX and SUFFIX must not contain globbing or regex special # characters, hashes, percent signs, but SUFFIX may contain a leading # dot (in which case that matches only a dot). # # This function is identical to the (non-XSI) version of func_stripname, # except this one can be used by m4 code that may be executed by configure, # rather than the libtool script. m4_defun([_LT_FUNC_STRIPNAME_CNF],[dnl AC_REQUIRE([_LT_DECL_SED]) AC_REQUIRE([_LT_PROG_ECHO_BACKSLASH]) func_stripname_cnf () { case @S|@2 in .*) func_stripname_result=`$ECHO "@S|@3" | $SED "s%^@S|@1%%; s%\\\\@S|@2\$%%"`;; *) func_stripname_result=`$ECHO "@S|@3" | $SED "s%^@S|@1%%; s%@S|@2\$%%"`;; esac } # func_stripname_cnf ])# _LT_FUNC_STRIPNAME_CNF # _LT_SYS_HIDDEN_LIBDEPS([TAGNAME]) # --------------------------------- # Figure out "hidden" library dependencies from verbose # compiler output when linking a shared library. # Parse the compiler output and extract the necessary # objects, libraries and library flags. 
m4_defun([_LT_SYS_HIDDEN_LIBDEPS],
[m4_require([_LT_FILEUTILS_DEFAULTS])dnl
AC_REQUIRE([_LT_FUNC_STRIPNAME_CNF])dnl
# Dependencies to place before and after the object being linked:
_LT_TAGVAR(predep_objects, $1)=
_LT_TAGVAR(postdep_objects, $1)=
_LT_TAGVAR(predeps, $1)=
_LT_TAGVAR(postdeps, $1)=
_LT_TAGVAR(compiler_lib_search_path, $1)=

dnl we can't use the lt_simple_compile_test_code here,
dnl because it contains code intended for an executable,
dnl not a library.  It's possible we should let each
dnl tag define a new lt_????_link_test_code variable,
dnl but it's only used here...
m4_if([$1], [], [cat > conftest.$ac_ext <<_LT_EOF
int a;
void foo (void) { a = 0; }
_LT_EOF
], [$1], [CXX], [cat > conftest.$ac_ext <<_LT_EOF
class Foo
{
public:
  Foo (void) { a = 0; }
private:
  int a;
};
_LT_EOF
], [$1], [F77], [cat > conftest.$ac_ext <<_LT_EOF
      subroutine foo
      implicit none
      integer*4 a
      a=0
      return
      end
_LT_EOF
], [$1], [FC], [cat > conftest.$ac_ext <<_LT_EOF
      subroutine foo
      implicit none
      integer a
      a=0
      return
      end
_LT_EOF
], [$1], [GCJ], [cat > conftest.$ac_ext <<_LT_EOF
public class foo {
  private int a;
  public void bar (void) {
    a = 0;
  }
};
_LT_EOF
], [$1], [GO], [cat > conftest.$ac_ext <<_LT_EOF
package foo
func foo() {
}
_LT_EOF
])

# LTO confuses the verbose-link-output parse below: disable it for the probe.
_lt_libdeps_save_CFLAGS=$CFLAGS
case "$CC $CFLAGS " in #(
*\ -flto*\ *) CFLAGS="$CFLAGS -fno-lto" ;;
*\ -fwhopr*\ *) CFLAGS="$CFLAGS -fno-whopr" ;;
*\ -fuse-linker-plugin*\ *) CFLAGS="$CFLAGS -fno-use-linker-plugin" ;;
esac

dnl Parse the compiler output and extract the necessary
dnl objects, libraries and library flags.
if AC_TRY_EVAL(ac_compile); then
  # Parse the compiler output and extract the necessary
  # objects, libraries and library flags.

  # Sentinel used to keep track of whether or not we are before
  # the conftest object file.
  pre_test_object_deps_done=no

  for p in `eval "$output_verbose_link_cmd"`; do
    case $prev$p in

    -L* | -R* | -l*)
       # Some compilers place space between "-{L,R}" and the path.
       # Remove the space.
       if test x-L = "$p" ||
          test x-R = "$p"; then
	 prev=$p
	 continue
       fi

       # Expand the sysroot to ease extracting the directories later.
       if test -z "$prev"; then
         case $p in
         -L*) func_stripname_cnf '-L' '' "$p"; prev=-L; p=$func_stripname_result ;;
         -R*) func_stripname_cnf '-R' '' "$p"; prev=-R; p=$func_stripname_result ;;
         -l*) func_stripname_cnf '-l' '' "$p"; prev=-l; p=$func_stripname_result ;;
         esac
       fi
       case $p in
       =*) func_stripname_cnf '=' '' "$p"; p=$lt_sysroot$func_stripname_result ;;
       esac
       if test no = "$pre_test_object_deps_done"; then
	 case $prev in
	 -L | -R)
	   # Internal compiler library paths should come after those
	   # provided the user.  The postdeps already come after the
	   # user supplied libs so there is no need to process them.
	   if test -z "$_LT_TAGVAR(compiler_lib_search_path, $1)"; then
	     _LT_TAGVAR(compiler_lib_search_path, $1)=$prev$p
	   else
	     _LT_TAGVAR(compiler_lib_search_path, $1)="${_LT_TAGVAR(compiler_lib_search_path, $1)} $prev$p"
	   fi
	   ;;
	 # The "-l" case would never come before the object being
	 # linked, so don't bother handling this case.
	 esac
       else
	 if test -z "$_LT_TAGVAR(postdeps, $1)"; then
	   _LT_TAGVAR(postdeps, $1)=$prev$p
	 else
	   _LT_TAGVAR(postdeps, $1)="${_LT_TAGVAR(postdeps, $1)} $prev$p"
	 fi
       fi
       prev=
       ;;

    *.lto.$objext) ;; # Ignore GCC LTO objects
    *.$objext)
       # This assumes that the test object file only shows up
       # once in the compiler output.
       if test "$p" = "conftest.$objext"; then
	 pre_test_object_deps_done=yes
	 continue
       fi

       if test no = "$pre_test_object_deps_done"; then
	 if test -z "$_LT_TAGVAR(predep_objects, $1)"; then
	   _LT_TAGVAR(predep_objects, $1)=$p
	 else
	   _LT_TAGVAR(predep_objects, $1)="$_LT_TAGVAR(predep_objects, $1) $p"
	 fi
       else
	 if test -z "$_LT_TAGVAR(postdep_objects, $1)"; then
	   _LT_TAGVAR(postdep_objects, $1)=$p
	 else
	   _LT_TAGVAR(postdep_objects, $1)="$_LT_TAGVAR(postdep_objects, $1) $p"
	 fi
       fi
       ;;

    *) ;; # Ignore the rest.

    esac
  done

  # Clean up.
  rm -f a.out a.exe
else
  echo "libtool.m4: error: problem compiling $1 test program"
fi

# Remove the probe object itself.  Bug fix: this used to say
# 'confest.$objext' (a typo, also fixed upstream in libtool 2.4.7),
# which silently left conftest.$objext behind in the build directory.
$RM -f conftest.$objext
CFLAGS=$_lt_libdeps_save_CFLAGS

# PORTME: override above test on systems where it is broken
m4_if([$1], [CXX],
[case $host_os in
interix[[3-9]]*)
  # Interix 3.5 installs completely hosed .la files for C++, so rather than
  # hack all around it, let's just trust "g++" to DTRT.
  _LT_TAGVAR(predep_objects,$1)=
  _LT_TAGVAR(postdep_objects,$1)=
  _LT_TAGVAR(postdeps,$1)=
  ;;
esac
])

case " $_LT_TAGVAR(postdeps, $1) " in
*" -lc "*) _LT_TAGVAR(archive_cmds_need_lc, $1)=no ;;
esac
 _LT_TAGVAR(compiler_lib_search_dirs, $1)=
if test -n "${_LT_TAGVAR(compiler_lib_search_path, $1)}"; then
 _LT_TAGVAR(compiler_lib_search_dirs, $1)=`echo " ${_LT_TAGVAR(compiler_lib_search_path, $1)}" | $SED -e 's! -L! !g' -e 's!^ !!'`
fi
_LT_TAGDECL([], [compiler_lib_search_dirs], [1],
    [The directories searched by this compiler when creating a shared library])
_LT_TAGDECL([], [predep_objects], [1],
    [Dependencies to place before and after the objects being linked to create a shared library])
_LT_TAGDECL([], [predeps], [1])
_LT_TAGDECL([], [postdep_objects], [1])
_LT_TAGDECL([], [postdeps], [1])
_LT_TAGDECL([], [compiler_lib_search_path], [1],
    [The library search path used internally by the compiler when linking a shared library])
])# _LT_SYS_HIDDEN_LIBDEPS


# _LT_LANG_F77_CONFIG([TAG])
# --------------------------
# Ensure that the configuration variables for a Fortran 77 compiler are
# suitably defined.  These variables are subsequently used by _LT_CONFIG
# to write the compiler configuration to 'libtool'.
m4_defun([_LT_LANG_F77_CONFIG], [AC_LANG_PUSH(Fortran 77) if test -z "$F77" || test no = "$F77"; then _lt_disable_F77=yes fi _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(allow_undefined_flag, $1)= _LT_TAGVAR(always_export_symbols, $1)=no _LT_TAGVAR(archive_expsym_cmds, $1)= _LT_TAGVAR(export_dynamic_flag_spec, $1)= _LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_direct_absolute, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)= _LT_TAGVAR(hardcode_libdir_separator, $1)= _LT_TAGVAR(hardcode_minus_L, $1)=no _LT_TAGVAR(hardcode_automatic, $1)=no _LT_TAGVAR(inherit_rpath, $1)=no _LT_TAGVAR(module_cmds, $1)= _LT_TAGVAR(module_expsym_cmds, $1)= _LT_TAGVAR(link_all_deplibs, $1)=unknown _LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds _LT_TAGVAR(reload_flag, $1)=$reload_flag _LT_TAGVAR(reload_cmds, $1)=$reload_cmds _LT_TAGVAR(no_undefined_flag, $1)= _LT_TAGVAR(whole_archive_flag_spec, $1)= _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no # Source file extension for f77 test sources. ac_ext=f # Object file extension for compiled f77 test sources. objext=o _LT_TAGVAR(objext, $1)=$objext # No sense in running all these tests if we already determined that # the F77 compiler isn't working. Some variables (like enable_shared) # are currently assumed to apply to all compilers on this platform, # and will be corrupted by setting them based on a non-working compiler. if test yes != "$_lt_disable_F77"; then # Code to be used in simple compile tests lt_simple_compile_test_code="\ subroutine t return end " # Code to be used in simple link tests lt_simple_link_test_code="\ program t end " # ltmain only uses $CC for tagged configurations so make sure $CC is set. _LT_TAG_COMPILER # save warnings/boilerplate of simple test code _LT_COMPILER_BOILERPLATE _LT_LINKER_BOILERPLATE # Allow CC to be a program name with arguments. 
lt_save_CC=$CC lt_save_GCC=$GCC lt_save_CFLAGS=$CFLAGS CC=${F77-"f77"} CFLAGS=$FFLAGS compiler=$CC _LT_TAGVAR(compiler, $1)=$CC _LT_CC_BASENAME([$compiler]) GCC=$G77 if test -n "$compiler"; then AC_MSG_CHECKING([if libtool supports shared libraries]) AC_MSG_RESULT([$can_build_shared]) AC_MSG_CHECKING([whether to build shared libraries]) test no = "$can_build_shared" && enable_shared=no # On AIX, shared libraries and static libraries use the same namespace, and # are all built from PIC. case $host_os in aix3*) test yes = "$enable_shared" && enable_static=no if test -n "$RANLIB"; then archive_cmds="$archive_cmds~\$RANLIB \$lib" postinstall_cmds='$RANLIB $lib' fi ;; aix[[4-9]]*) if test ia64 != "$host_cpu"; then case $enable_shared,$with_aix_soname,$aix_use_runtimelinking in yes,aix,yes) ;; # shared object as lib.so file only yes,svr4,*) ;; # shared object as lib.so archive member only yes,*) enable_static=no ;; # shared object in lib.a archive as well esac fi ;; esac AC_MSG_RESULT([$enable_shared]) AC_MSG_CHECKING([whether to build static libraries]) # Make sure either enable_shared or enable_static is yes. test yes = "$enable_shared" || enable_static=yes AC_MSG_RESULT([$enable_static]) _LT_TAGVAR(GCC, $1)=$G77 _LT_TAGVAR(LD, $1)=$LD ## CAVEAT EMPTOR: ## There is no encapsulation within the following macros, do not change ## the running order or otherwise move them around unless you know exactly ## what you are doing... _LT_COMPILER_PIC($1) _LT_COMPILER_C_O($1) _LT_COMPILER_FILE_LOCKS($1) _LT_LINKER_SHLIBS($1) _LT_SYS_DYNAMIC_LINKER($1) _LT_LINKER_HARDCODE_LIBPATH($1) _LT_CONFIG($1) fi # test -n "$compiler" GCC=$lt_save_GCC CC=$lt_save_CC CFLAGS=$lt_save_CFLAGS fi # test yes != "$_lt_disable_F77" AC_LANG_POP ])# _LT_LANG_F77_CONFIG # _LT_LANG_FC_CONFIG([TAG]) # ------------------------- # Ensure that the configuration variables for a Fortran compiler are # suitably defined. 
These variables are subsequently used by _LT_CONFIG # to write the compiler configuration to 'libtool'. m4_defun([_LT_LANG_FC_CONFIG], [AC_LANG_PUSH(Fortran) if test -z "$FC" || test no = "$FC"; then _lt_disable_FC=yes fi _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(allow_undefined_flag, $1)= _LT_TAGVAR(always_export_symbols, $1)=no _LT_TAGVAR(archive_expsym_cmds, $1)= _LT_TAGVAR(export_dynamic_flag_spec, $1)= _LT_TAGVAR(hardcode_direct, $1)=no _LT_TAGVAR(hardcode_direct_absolute, $1)=no _LT_TAGVAR(hardcode_libdir_flag_spec, $1)= _LT_TAGVAR(hardcode_libdir_separator, $1)= _LT_TAGVAR(hardcode_minus_L, $1)=no _LT_TAGVAR(hardcode_automatic, $1)=no _LT_TAGVAR(inherit_rpath, $1)=no _LT_TAGVAR(module_cmds, $1)= _LT_TAGVAR(module_expsym_cmds, $1)= _LT_TAGVAR(link_all_deplibs, $1)=unknown _LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds _LT_TAGVAR(reload_flag, $1)=$reload_flag _LT_TAGVAR(reload_cmds, $1)=$reload_cmds _LT_TAGVAR(no_undefined_flag, $1)= _LT_TAGVAR(whole_archive_flag_spec, $1)= _LT_TAGVAR(enable_shared_with_static_runtimes, $1)=no # Source file extension for fc test sources. ac_ext=${ac_fc_srcext-f} # Object file extension for compiled fc test sources. objext=o _LT_TAGVAR(objext, $1)=$objext # No sense in running all these tests if we already determined that # the FC compiler isn't working. Some variables (like enable_shared) # are currently assumed to apply to all compilers on this platform, # and will be corrupted by setting them based on a non-working compiler. if test yes != "$_lt_disable_FC"; then # Code to be used in simple compile tests lt_simple_compile_test_code="\ subroutine t return end " # Code to be used in simple link tests lt_simple_link_test_code="\ program t end " # ltmain only uses $CC for tagged configurations so make sure $CC is set. _LT_TAG_COMPILER # save warnings/boilerplate of simple test code _LT_COMPILER_BOILERPLATE _LT_LINKER_BOILERPLATE # Allow CC to be a program name with arguments. 
lt_save_CC=$CC lt_save_GCC=$GCC lt_save_CFLAGS=$CFLAGS CC=${FC-"f95"} CFLAGS=$FCFLAGS compiler=$CC GCC=$ac_cv_fc_compiler_gnu _LT_TAGVAR(compiler, $1)=$CC _LT_CC_BASENAME([$compiler]) if test -n "$compiler"; then AC_MSG_CHECKING([if libtool supports shared libraries]) AC_MSG_RESULT([$can_build_shared]) AC_MSG_CHECKING([whether to build shared libraries]) test no = "$can_build_shared" && enable_shared=no # On AIX, shared libraries and static libraries use the same namespace, and # are all built from PIC. case $host_os in aix3*) test yes = "$enable_shared" && enable_static=no if test -n "$RANLIB"; then archive_cmds="$archive_cmds~\$RANLIB \$lib" postinstall_cmds='$RANLIB $lib' fi ;; aix[[4-9]]*) if test ia64 != "$host_cpu"; then case $enable_shared,$with_aix_soname,$aix_use_runtimelinking in yes,aix,yes) ;; # shared object as lib.so file only yes,svr4,*) ;; # shared object as lib.so archive member only yes,*) enable_static=no ;; # shared object in lib.a archive as well esac fi ;; esac AC_MSG_RESULT([$enable_shared]) AC_MSG_CHECKING([whether to build static libraries]) # Make sure either enable_shared or enable_static is yes. test yes = "$enable_shared" || enable_static=yes AC_MSG_RESULT([$enable_static]) _LT_TAGVAR(GCC, $1)=$ac_cv_fc_compiler_gnu _LT_TAGVAR(LD, $1)=$LD ## CAVEAT EMPTOR: ## There is no encapsulation within the following macros, do not change ## the running order or otherwise move them around unless you know exactly ## what you are doing... _LT_SYS_HIDDEN_LIBDEPS($1) _LT_COMPILER_PIC($1) _LT_COMPILER_C_O($1) _LT_COMPILER_FILE_LOCKS($1) _LT_LINKER_SHLIBS($1) _LT_SYS_DYNAMIC_LINKER($1) _LT_LINKER_HARDCODE_LIBPATH($1) _LT_CONFIG($1) fi # test -n "$compiler" GCC=$lt_save_GCC CC=$lt_save_CC CFLAGS=$lt_save_CFLAGS fi # test yes != "$_lt_disable_FC" AC_LANG_POP ])# _LT_LANG_FC_CONFIG # _LT_LANG_GCJ_CONFIG([TAG]) # -------------------------- # Ensure that the configuration variables for the GNU Java Compiler compiler # are suitably defined. 
These variables are subsequently used by _LT_CONFIG # to write the compiler configuration to 'libtool'. m4_defun([_LT_LANG_GCJ_CONFIG], [AC_REQUIRE([LT_PROG_GCJ])dnl AC_LANG_SAVE # Source file extension for Java test sources. ac_ext=java # Object file extension for compiled Java test sources. objext=o _LT_TAGVAR(objext, $1)=$objext # Code to be used in simple compile tests lt_simple_compile_test_code="class foo {}" # Code to be used in simple link tests lt_simple_link_test_code='public class conftest { public static void main(String[[]] argv) {}; }' # ltmain only uses $CC for tagged configurations so make sure $CC is set. _LT_TAG_COMPILER # save warnings/boilerplate of simple test code _LT_COMPILER_BOILERPLATE _LT_LINKER_BOILERPLATE # Allow CC to be a program name with arguments. lt_save_CC=$CC lt_save_CFLAGS=$CFLAGS lt_save_GCC=$GCC GCC=yes CC=${GCJ-"gcj"} CFLAGS=$GCJFLAGS compiler=$CC _LT_TAGVAR(compiler, $1)=$CC _LT_TAGVAR(LD, $1)=$LD _LT_CC_BASENAME([$compiler]) # GCJ did not exist at the time GCC didn't implicitly link libc in. _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds _LT_TAGVAR(reload_flag, $1)=$reload_flag _LT_TAGVAR(reload_cmds, $1)=$reload_cmds if test -n "$compiler"; then _LT_COMPILER_NO_RTTI($1) _LT_COMPILER_PIC($1) _LT_COMPILER_C_O($1) _LT_COMPILER_FILE_LOCKS($1) _LT_LINKER_SHLIBS($1) _LT_LINKER_HARDCODE_LIBPATH($1) _LT_CONFIG($1) fi AC_LANG_RESTORE GCC=$lt_save_GCC CC=$lt_save_CC CFLAGS=$lt_save_CFLAGS ])# _LT_LANG_GCJ_CONFIG # _LT_LANG_GO_CONFIG([TAG]) # -------------------------- # Ensure that the configuration variables for the GNU Go compiler # are suitably defined. These variables are subsequently used by _LT_CONFIG # to write the compiler configuration to 'libtool'. m4_defun([_LT_LANG_GO_CONFIG], [AC_REQUIRE([LT_PROG_GO])dnl AC_LANG_SAVE # Source file extension for Go test sources. ac_ext=go # Object file extension for compiled Go test sources. 
objext=o _LT_TAGVAR(objext, $1)=$objext # Code to be used in simple compile tests lt_simple_compile_test_code="package main; func main() { }" # Code to be used in simple link tests lt_simple_link_test_code='package main; func main() { }' # ltmain only uses $CC for tagged configurations so make sure $CC is set. _LT_TAG_COMPILER # save warnings/boilerplate of simple test code _LT_COMPILER_BOILERPLATE _LT_LINKER_BOILERPLATE # Allow CC to be a program name with arguments. lt_save_CC=$CC lt_save_CFLAGS=$CFLAGS lt_save_GCC=$GCC GCC=yes CC=${GOC-"gccgo"} CFLAGS=$GOFLAGS compiler=$CC _LT_TAGVAR(compiler, $1)=$CC _LT_TAGVAR(LD, $1)=$LD _LT_CC_BASENAME([$compiler]) # Go did not exist at the time GCC didn't implicitly link libc in. _LT_TAGVAR(archive_cmds_need_lc, $1)=no _LT_TAGVAR(old_archive_cmds, $1)=$old_archive_cmds _LT_TAGVAR(reload_flag, $1)=$reload_flag _LT_TAGVAR(reload_cmds, $1)=$reload_cmds if test -n "$compiler"; then _LT_COMPILER_NO_RTTI($1) _LT_COMPILER_PIC($1) _LT_COMPILER_C_O($1) _LT_COMPILER_FILE_LOCKS($1) _LT_LINKER_SHLIBS($1) _LT_LINKER_HARDCODE_LIBPATH($1) _LT_CONFIG($1) fi AC_LANG_RESTORE GCC=$lt_save_GCC CC=$lt_save_CC CFLAGS=$lt_save_CFLAGS ])# _LT_LANG_GO_CONFIG # _LT_LANG_RC_CONFIG([TAG]) # ------------------------- # Ensure that the configuration variables for the Windows resource compiler # are suitably defined. These variables are subsequently used by _LT_CONFIG # to write the compiler configuration to 'libtool'. m4_defun([_LT_LANG_RC_CONFIG], [AC_REQUIRE([LT_PROG_RC])dnl AC_LANG_SAVE # Source file extension for RC test sources. ac_ext=rc # Object file extension for compiled RC test sources. objext=o _LT_TAGVAR(objext, $1)=$objext # Code to be used in simple compile tests lt_simple_compile_test_code='sample MENU { MENUITEM "&Soup", 100, CHECKED }' # Code to be used in simple link tests lt_simple_link_test_code=$lt_simple_compile_test_code # ltmain only uses $CC for tagged configurations so make sure $CC is set. 
_LT_TAG_COMPILER # save warnings/boilerplate of simple test code _LT_COMPILER_BOILERPLATE _LT_LINKER_BOILERPLATE # Allow CC to be a program name with arguments. lt_save_CC=$CC lt_save_CFLAGS=$CFLAGS lt_save_GCC=$GCC GCC= CC=${RC-"windres"} CFLAGS= compiler=$CC _LT_TAGVAR(compiler, $1)=$CC _LT_CC_BASENAME([$compiler]) _LT_TAGVAR(lt_cv_prog_compiler_c_o, $1)=yes if test -n "$compiler"; then : _LT_CONFIG($1) fi GCC=$lt_save_GCC AC_LANG_RESTORE CC=$lt_save_CC CFLAGS=$lt_save_CFLAGS ])# _LT_LANG_RC_CONFIG # LT_PROG_GCJ # ----------- AC_DEFUN([LT_PROG_GCJ], [m4_ifdef([AC_PROG_GCJ], [AC_PROG_GCJ], [m4_ifdef([A][M_PROG_GCJ], [A][M_PROG_GCJ], [AC_CHECK_TOOL(GCJ, gcj,) test set = "${GCJFLAGS+set}" || GCJFLAGS="-g -O2" AC_SUBST(GCJFLAGS)])])[]dnl ]) # Old name: AU_ALIAS([LT_AC_PROG_GCJ], [LT_PROG_GCJ]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([LT_AC_PROG_GCJ], []) # LT_PROG_GO # ---------- AC_DEFUN([LT_PROG_GO], [AC_CHECK_TOOL(GOC, gccgo,) ]) # LT_PROG_RC # ---------- AC_DEFUN([LT_PROG_RC], [AC_CHECK_TOOL(RC, windres,) ]) # Old name: AU_ALIAS([LT_AC_PROG_RC], [LT_PROG_RC]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([LT_AC_PROG_RC], []) # _LT_DECL_EGREP # -------------- # If we don't have a new enough Autoconf to choose the best grep # available, choose the one first in the user's PATH. m4_defun([_LT_DECL_EGREP], [AC_REQUIRE([AC_PROG_EGREP])dnl AC_REQUIRE([AC_PROG_FGREP])dnl test -z "$GREP" && GREP=grep _LT_DECL([], [GREP], [1], [A grep program that handles long lines]) _LT_DECL([], [EGREP], [1], [An ERE matcher]) _LT_DECL([], [FGREP], [1], [A literal string matcher]) dnl Non-bleeding-edge autoconf doesn't subst GREP, so do it here too AC_SUBST([GREP]) ]) # _LT_DECL_OBJDUMP # -------------- # If we don't have a new enough Autoconf to choose the best objdump # available, choose the one first in the user's PATH. 
m4_defun([_LT_DECL_OBJDUMP], [AC_CHECK_TOOL(OBJDUMP, objdump, false) test -z "$OBJDUMP" && OBJDUMP=objdump _LT_DECL([], [OBJDUMP], [1], [An object symbol dumper]) AC_SUBST([OBJDUMP]) ]) # _LT_DECL_DLLTOOL # ---------------- # Ensure DLLTOOL variable is set. m4_defun([_LT_DECL_DLLTOOL], [AC_CHECK_TOOL(DLLTOOL, dlltool, false) test -z "$DLLTOOL" && DLLTOOL=dlltool _LT_DECL([], [DLLTOOL], [1], [DLL creation program]) AC_SUBST([DLLTOOL]) ]) # _LT_DECL_SED # ------------ # Check for a fully-functional sed program, that truncates # as few characters as possible. Prefer GNU sed if found. m4_defun([_LT_DECL_SED], [AC_PROG_SED test -z "$SED" && SED=sed Xsed="$SED -e 1s/^X//" _LT_DECL([], [SED], [1], [A sed program that does not truncate output]) _LT_DECL([], [Xsed], ["\$SED -e 1s/^X//"], [Sed that helps us avoid accidentally triggering echo(1) options like -n]) ])# _LT_DECL_SED m4_ifndef([AC_PROG_SED], [ # NOTE: This macro has been submitted for inclusion into # # GNU Autoconf as AC_PROG_SED. When it is available in # # a released version of Autoconf we should remove this # # macro and use it instead. # m4_defun([AC_PROG_SED], [AC_MSG_CHECKING([for a sed that does not truncate output]) AC_CACHE_VAL(lt_cv_path_SED, [# Loop through the user's path and test for sed and gsed. # Then use that list of sed's as ones to test for truncation. as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for lt_ac_prog in sed gsed; do for ac_exec_ext in '' $ac_executable_extensions; do if $as_executable_p "$as_dir/$lt_ac_prog$ac_exec_ext"; then lt_ac_sed_list="$lt_ac_sed_list $as_dir/$lt_ac_prog$ac_exec_ext" fi done done done IFS=$as_save_IFS lt_ac_max=0 lt_ac_count=0 # Add /usr/xpg4/bin/sed as it is typically found on Solaris # along with /bin/sed that truncates output. for lt_ac_sed in $lt_ac_sed_list /usr/xpg4/bin/sed; do test ! 
-f "$lt_ac_sed" && continue cat /dev/null > conftest.in lt_ac_count=0 echo $ECHO_N "0123456789$ECHO_C" >conftest.in # Check for GNU sed and select it if it is found. if "$lt_ac_sed" --version 2>&1 < /dev/null | grep 'GNU' > /dev/null; then lt_cv_path_SED=$lt_ac_sed break fi while true; do cat conftest.in conftest.in >conftest.tmp mv conftest.tmp conftest.in cp conftest.in conftest.nl echo >>conftest.nl $lt_ac_sed -e 's/a$//' < conftest.nl >conftest.out || break cmp -s conftest.out conftest.nl || break # 10000 chars as input seems more than enough test 10 -lt "$lt_ac_count" && break lt_ac_count=`expr $lt_ac_count + 1` if test "$lt_ac_count" -gt "$lt_ac_max"; then lt_ac_max=$lt_ac_count lt_cv_path_SED=$lt_ac_sed fi done done ]) SED=$lt_cv_path_SED AC_SUBST([SED]) AC_MSG_RESULT([$SED]) ])#AC_PROG_SED ])#m4_ifndef # Old name: AU_ALIAS([LT_AC_PROG_SED], [AC_PROG_SED]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([LT_AC_PROG_SED], []) # _LT_CHECK_SHELL_FEATURES # ------------------------ # Find out whether the shell is Bourne or XSI compatible, # or has some other useful features. m4_defun([_LT_CHECK_SHELL_FEATURES], [if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then lt_unset=unset else lt_unset=false fi _LT_DECL([], [lt_unset], [0], [whether the shell understands "unset"])dnl # test EBCDIC or ASCII case `echo X|tr X '\101'` in A) # ASCII based system # \n is not interpreted correctly by Solaris 8 /usr/ucb/tr lt_SP2NL='tr \040 \012' lt_NL2SP='tr \015\012 \040\040' ;; *) # EBCDIC based system lt_SP2NL='tr \100 \n' lt_NL2SP='tr \r\n \100\100' ;; esac _LT_DECL([SP2NL], [lt_SP2NL], [1], [turn spaces into newlines])dnl _LT_DECL([NL2SP], [lt_NL2SP], [1], [turn newlines into spaces])dnl ])# _LT_CHECK_SHELL_FEATURES # _LT_PATH_CONVERSION_FUNCTIONS # ----------------------------- # Determine what file name conversion functions should be used by # func_to_host_file (and, implicitly, by func_to_host_path). 
These are needed # for certain cross-compile configurations and native mingw. m4_defun([_LT_PATH_CONVERSION_FUNCTIONS], [AC_REQUIRE([AC_CANONICAL_HOST])dnl AC_REQUIRE([AC_CANONICAL_BUILD])dnl AC_MSG_CHECKING([how to convert $build file names to $host format]) AC_CACHE_VAL(lt_cv_to_host_file_cmd, [case $host in *-*-mingw* ) case $build in *-*-mingw* ) # actually msys lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32 ;; *-*-cygwin* ) lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32 ;; * ) # otherwise, assume *nix lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32 ;; esac ;; *-*-cygwin* ) case $build in *-*-mingw* ) # actually msys lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin ;; *-*-cygwin* ) lt_cv_to_host_file_cmd=func_convert_file_noop ;; * ) # otherwise, assume *nix lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin ;; esac ;; * ) # unhandled hosts (and "normal" native builds) lt_cv_to_host_file_cmd=func_convert_file_noop ;; esac ]) to_host_file_cmd=$lt_cv_to_host_file_cmd AC_MSG_RESULT([$lt_cv_to_host_file_cmd]) _LT_DECL([to_host_file_cmd], [lt_cv_to_host_file_cmd], [0], [convert $build file names to $host format])dnl AC_MSG_CHECKING([how to convert $build file names to toolchain format]) AC_CACHE_VAL(lt_cv_to_tool_file_cmd, [#assume ordinary cross tools, or native build. lt_cv_to_tool_file_cmd=func_convert_file_noop case $host in *-*-mingw* ) case $build in *-*-mingw* ) # actually msys lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32 ;; esac ;; esac ]) to_tool_file_cmd=$lt_cv_to_tool_file_cmd AC_MSG_RESULT([$lt_cv_to_tool_file_cmd]) _LT_DECL([to_tool_file_cmd], [lt_cv_to_tool_file_cmd], [0], [convert $build files to toolchain format])dnl ])# _LT_PATH_CONVERSION_FUNCTIONS # Helper functions for option handling. -*- Autoconf -*- # # Copyright (C) 2004-2005, 2007-2009, 2011-2015 Free Software # Foundation, Inc. # Written by Gary V. 
Vaughan, 2004 # # This file is free software; the Free Software Foundation gives # unlimited permission to copy and/or distribute it, with or without # modifications, as long as this notice is preserved. # serial 8 ltoptions.m4 # This is to help aclocal find these macros, as it can't see m4_define. AC_DEFUN([LTOPTIONS_VERSION], [m4_if([1])]) # _LT_MANGLE_OPTION(MACRO-NAME, OPTION-NAME) # ------------------------------------------ m4_define([_LT_MANGLE_OPTION], [[_LT_OPTION_]m4_bpatsubst($1__$2, [[^a-zA-Z0-9_]], [_])]) # _LT_SET_OPTION(MACRO-NAME, OPTION-NAME) # --------------------------------------- # Set option OPTION-NAME for macro MACRO-NAME, and if there is a # matching handler defined, dispatch to it. Other OPTION-NAMEs are # saved as a flag. m4_define([_LT_SET_OPTION], [m4_define(_LT_MANGLE_OPTION([$1], [$2]))dnl m4_ifdef(_LT_MANGLE_DEFUN([$1], [$2]), _LT_MANGLE_DEFUN([$1], [$2]), [m4_warning([Unknown $1 option '$2'])])[]dnl ]) # _LT_IF_OPTION(MACRO-NAME, OPTION-NAME, IF-SET, [IF-NOT-SET]) # ------------------------------------------------------------ # Execute IF-SET if OPTION is set, IF-NOT-SET otherwise. m4_define([_LT_IF_OPTION], [m4_ifdef(_LT_MANGLE_OPTION([$1], [$2]), [$3], [$4])]) # _LT_UNLESS_OPTIONS(MACRO-NAME, OPTION-LIST, IF-NOT-SET) # ------------------------------------------------------- # Execute IF-NOT-SET unless all options in OPTION-LIST for MACRO-NAME # are set. m4_define([_LT_UNLESS_OPTIONS], [m4_foreach([_LT_Option], m4_split(m4_normalize([$2])), [m4_ifdef(_LT_MANGLE_OPTION([$1], _LT_Option), [m4_define([$0_found])])])[]dnl m4_ifdef([$0_found], [m4_undefine([$0_found])], [$3 ])[]dnl ]) # _LT_SET_OPTIONS(MACRO-NAME, OPTION-LIST) # ---------------------------------------- # OPTION-LIST is a space-separated list of Libtool options associated # with MACRO-NAME. If any OPTION has a matching handler declared with # LT_OPTION_DEFINE, dispatch to that macro; otherwise complain about # the unknown option and exit. 
m4_defun([_LT_SET_OPTIONS], [# Set options m4_foreach([_LT_Option], m4_split(m4_normalize([$2])), [_LT_SET_OPTION([$1], _LT_Option)]) m4_if([$1],[LT_INIT],[ dnl dnl Simply set some default values (i.e off) if boolean options were not dnl specified: _LT_UNLESS_OPTIONS([LT_INIT], [dlopen], [enable_dlopen=no ]) _LT_UNLESS_OPTIONS([LT_INIT], [win32-dll], [enable_win32_dll=no ]) dnl dnl If no reference was made to various pairs of opposing options, then dnl we run the default mode handler for the pair. For example, if neither dnl 'shared' nor 'disable-shared' was passed, we enable building of shared dnl archives by default: _LT_UNLESS_OPTIONS([LT_INIT], [shared disable-shared], [_LT_ENABLE_SHARED]) _LT_UNLESS_OPTIONS([LT_INIT], [static disable-static], [_LT_ENABLE_STATIC]) _LT_UNLESS_OPTIONS([LT_INIT], [pic-only no-pic], [_LT_WITH_PIC]) _LT_UNLESS_OPTIONS([LT_INIT], [fast-install disable-fast-install], [_LT_ENABLE_FAST_INSTALL]) _LT_UNLESS_OPTIONS([LT_INIT], [aix-soname=aix aix-soname=both aix-soname=svr4], [_LT_WITH_AIX_SONAME([aix])]) ]) ])# _LT_SET_OPTIONS # _LT_MANGLE_DEFUN(MACRO-NAME, OPTION-NAME) # ----------------------------------------- m4_define([_LT_MANGLE_DEFUN], [[_LT_OPTION_DEFUN_]m4_bpatsubst(m4_toupper([$1__$2]), [[^A-Z0-9_]], [_])]) # LT_OPTION_DEFINE(MACRO-NAME, OPTION-NAME, CODE) # ----------------------------------------------- m4_define([LT_OPTION_DEFINE], [m4_define(_LT_MANGLE_DEFUN([$1], [$2]), [$3])[]dnl ])# LT_OPTION_DEFINE # dlopen # ------ LT_OPTION_DEFINE([LT_INIT], [dlopen], [enable_dlopen=yes ]) AU_DEFUN([AC_LIBTOOL_DLOPEN], [_LT_SET_OPTION([LT_INIT], [dlopen]) AC_DIAGNOSE([obsolete], [$0: Remove this warning and the call to _LT_SET_OPTION when you put the 'dlopen' option into LT_INIT's first parameter.]) ]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_LIBTOOL_DLOPEN], []) # win32-dll # --------- # Declare package support for building win32 dll's. 
LT_OPTION_DEFINE([LT_INIT], [win32-dll], [enable_win32_dll=yes case $host in *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-cegcc*) AC_CHECK_TOOL(AS, as, false) AC_CHECK_TOOL(DLLTOOL, dlltool, false) AC_CHECK_TOOL(OBJDUMP, objdump, false) ;; esac test -z "$AS" && AS=as _LT_DECL([], [AS], [1], [Assembler program])dnl test -z "$DLLTOOL" && DLLTOOL=dlltool _LT_DECL([], [DLLTOOL], [1], [DLL creation program])dnl test -z "$OBJDUMP" && OBJDUMP=objdump _LT_DECL([], [OBJDUMP], [1], [Object dumper program])dnl ])# win32-dll AU_DEFUN([AC_LIBTOOL_WIN32_DLL], [AC_REQUIRE([AC_CANONICAL_HOST])dnl _LT_SET_OPTION([LT_INIT], [win32-dll]) AC_DIAGNOSE([obsolete], [$0: Remove this warning and the call to _LT_SET_OPTION when you put the 'win32-dll' option into LT_INIT's first parameter.]) ]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_LIBTOOL_WIN32_DLL], []) # _LT_ENABLE_SHARED([DEFAULT]) # ---------------------------- # implement the --enable-shared flag, and supports the 'shared' and # 'disable-shared' LT_INIT options. # DEFAULT is either 'yes' or 'no'. If omitted, it defaults to 'yes'. m4_define([_LT_ENABLE_SHARED], [m4_define([_LT_ENABLE_SHARED_DEFAULT], [m4_if($1, no, no, yes)])dnl AC_ARG_ENABLE([shared], [AS_HELP_STRING([--enable-shared@<:@=PKGS@:>@], [build shared libraries @<:@default=]_LT_ENABLE_SHARED_DEFAULT[@:>@])], [p=${PACKAGE-default} case $enableval in yes) enable_shared=yes ;; no) enable_shared=no ;; *) enable_shared=no # Look at the argument we got. We use all the common list separators. 
lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR, for pkg in $enableval; do IFS=$lt_save_ifs if test "X$pkg" = "X$p"; then enable_shared=yes fi done IFS=$lt_save_ifs ;; esac], [enable_shared=]_LT_ENABLE_SHARED_DEFAULT) _LT_DECL([build_libtool_libs], [enable_shared], [0], [Whether or not to build shared libraries]) ])# _LT_ENABLE_SHARED LT_OPTION_DEFINE([LT_INIT], [shared], [_LT_ENABLE_SHARED([yes])]) LT_OPTION_DEFINE([LT_INIT], [disable-shared], [_LT_ENABLE_SHARED([no])]) # Old names: AC_DEFUN([AC_ENABLE_SHARED], [_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[shared]) ]) AC_DEFUN([AC_DISABLE_SHARED], [_LT_SET_OPTION([LT_INIT], [disable-shared]) ]) AU_DEFUN([AM_ENABLE_SHARED], [AC_ENABLE_SHARED($@)]) AU_DEFUN([AM_DISABLE_SHARED], [AC_DISABLE_SHARED($@)]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AM_ENABLE_SHARED], []) dnl AC_DEFUN([AM_DISABLE_SHARED], []) # _LT_ENABLE_STATIC([DEFAULT]) # ---------------------------- # implement the --enable-static flag, and support the 'static' and # 'disable-static' LT_INIT options. # DEFAULT is either 'yes' or 'no'. If omitted, it defaults to 'yes'. m4_define([_LT_ENABLE_STATIC], [m4_define([_LT_ENABLE_STATIC_DEFAULT], [m4_if($1, no, no, yes)])dnl AC_ARG_ENABLE([static], [AS_HELP_STRING([--enable-static@<:@=PKGS@:>@], [build static libraries @<:@default=]_LT_ENABLE_STATIC_DEFAULT[@:>@])], [p=${PACKAGE-default} case $enableval in yes) enable_static=yes ;; no) enable_static=no ;; *) enable_static=no # Look at the argument we got. We use all the common list separators. 
lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR, for pkg in $enableval; do IFS=$lt_save_ifs if test "X$pkg" = "X$p"; then enable_static=yes fi done IFS=$lt_save_ifs ;; esac], [enable_static=]_LT_ENABLE_STATIC_DEFAULT) _LT_DECL([build_old_libs], [enable_static], [0], [Whether or not to build static libraries]) ])# _LT_ENABLE_STATIC LT_OPTION_DEFINE([LT_INIT], [static], [_LT_ENABLE_STATIC([yes])]) LT_OPTION_DEFINE([LT_INIT], [disable-static], [_LT_ENABLE_STATIC([no])]) # Old names: AC_DEFUN([AC_ENABLE_STATIC], [_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[static]) ]) AC_DEFUN([AC_DISABLE_STATIC], [_LT_SET_OPTION([LT_INIT], [disable-static]) ]) AU_DEFUN([AM_ENABLE_STATIC], [AC_ENABLE_STATIC($@)]) AU_DEFUN([AM_DISABLE_STATIC], [AC_DISABLE_STATIC($@)]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AM_ENABLE_STATIC], []) dnl AC_DEFUN([AM_DISABLE_STATIC], []) # _LT_ENABLE_FAST_INSTALL([DEFAULT]) # ---------------------------------- # implement the --enable-fast-install flag, and support the 'fast-install' # and 'disable-fast-install' LT_INIT options. # DEFAULT is either 'yes' or 'no'. If omitted, it defaults to 'yes'. m4_define([_LT_ENABLE_FAST_INSTALL], [m4_define([_LT_ENABLE_FAST_INSTALL_DEFAULT], [m4_if($1, no, no, yes)])dnl AC_ARG_ENABLE([fast-install], [AS_HELP_STRING([--enable-fast-install@<:@=PKGS@:>@], [optimize for fast installation @<:@default=]_LT_ENABLE_FAST_INSTALL_DEFAULT[@:>@])], [p=${PACKAGE-default} case $enableval in yes) enable_fast_install=yes ;; no) enable_fast_install=no ;; *) enable_fast_install=no # Look at the argument we got. We use all the common list separators. 
lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR, for pkg in $enableval; do IFS=$lt_save_ifs if test "X$pkg" = "X$p"; then enable_fast_install=yes fi done IFS=$lt_save_ifs ;; esac], [enable_fast_install=]_LT_ENABLE_FAST_INSTALL_DEFAULT) _LT_DECL([fast_install], [enable_fast_install], [0], [Whether or not to optimize for fast installation])dnl ])# _LT_ENABLE_FAST_INSTALL LT_OPTION_DEFINE([LT_INIT], [fast-install], [_LT_ENABLE_FAST_INSTALL([yes])]) LT_OPTION_DEFINE([LT_INIT], [disable-fast-install], [_LT_ENABLE_FAST_INSTALL([no])]) # Old names: AU_DEFUN([AC_ENABLE_FAST_INSTALL], [_LT_SET_OPTION([LT_INIT], m4_if([$1], [no], [disable-])[fast-install]) AC_DIAGNOSE([obsolete], [$0: Remove this warning and the call to _LT_SET_OPTION when you put the 'fast-install' option into LT_INIT's first parameter.]) ]) AU_DEFUN([AC_DISABLE_FAST_INSTALL], [_LT_SET_OPTION([LT_INIT], [disable-fast-install]) AC_DIAGNOSE([obsolete], [$0: Remove this warning and the call to _LT_SET_OPTION when you put the 'disable-fast-install' option into LT_INIT's first parameter.]) ]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_ENABLE_FAST_INSTALL], []) dnl AC_DEFUN([AM_DISABLE_FAST_INSTALL], []) # _LT_WITH_AIX_SONAME([DEFAULT]) # ---------------------------------- # implement the --with-aix-soname flag, and support the `aix-soname=aix' # and `aix-soname=both' and `aix-soname=svr4' LT_INIT options. DEFAULT # is either `aix', `both' or `svr4'. If omitted, it defaults to `aix'. 
m4_define([_LT_WITH_AIX_SONAME], [m4_define([_LT_WITH_AIX_SONAME_DEFAULT], [m4_if($1, svr4, svr4, m4_if($1, both, both, aix))])dnl shared_archive_member_spec= case $host,$enable_shared in power*-*-aix[[5-9]]*,yes) AC_MSG_CHECKING([which variant of shared library versioning to provide]) AC_ARG_WITH([aix-soname], [AS_HELP_STRING([--with-aix-soname=aix|svr4|both], [shared library versioning (aka "SONAME") variant to provide on AIX, @<:@default=]_LT_WITH_AIX_SONAME_DEFAULT[@:>@.])], [case $withval in aix|svr4|both) ;; *) AC_MSG_ERROR([Unknown argument to --with-aix-soname]) ;; esac lt_cv_with_aix_soname=$with_aix_soname], [AC_CACHE_VAL([lt_cv_with_aix_soname], [lt_cv_with_aix_soname=]_LT_WITH_AIX_SONAME_DEFAULT) with_aix_soname=$lt_cv_with_aix_soname]) AC_MSG_RESULT([$with_aix_soname]) if test aix != "$with_aix_soname"; then # For the AIX way of multilib, we name the shared archive member # based on the bitwidth used, traditionally 'shr.o' or 'shr_64.o', # and 'shr.imp' or 'shr_64.imp', respectively, for the Import File. # Even when GNU compilers ignore OBJECT_MODE but need '-maix64' flag, # the AIX toolchain works better with OBJECT_MODE set (default 32). if test 64 = "${OBJECT_MODE-32}"; then shared_archive_member_spec=shr_64 else shared_archive_member_spec=shr fi fi ;; *) with_aix_soname=aix ;; esac _LT_DECL([], [shared_archive_member_spec], [0], [Shared archive member basename, for filename based shared library versioning on AIX])dnl ])# _LT_WITH_AIX_SONAME LT_OPTION_DEFINE([LT_INIT], [aix-soname=aix], [_LT_WITH_AIX_SONAME([aix])]) LT_OPTION_DEFINE([LT_INIT], [aix-soname=both], [_LT_WITH_AIX_SONAME([both])]) LT_OPTION_DEFINE([LT_INIT], [aix-soname=svr4], [_LT_WITH_AIX_SONAME([svr4])]) # _LT_WITH_PIC([MODE]) # -------------------- # implement the --with-pic flag, and support the 'pic-only' and 'no-pic' # LT_INIT options. # MODE is either 'yes' or 'no'. If omitted, it defaults to 'both'. 
m4_define([_LT_WITH_PIC], [AC_ARG_WITH([pic], [AS_HELP_STRING([--with-pic@<:@=PKGS@:>@], [try to use only PIC/non-PIC objects @<:@default=use both@:>@])], [lt_p=${PACKAGE-default} case $withval in yes|no) pic_mode=$withval ;; *) pic_mode=default # Look at the argument we got. We use all the common list separators. lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR, for lt_pkg in $withval; do IFS=$lt_save_ifs if test "X$lt_pkg" = "X$lt_p"; then pic_mode=yes fi done IFS=$lt_save_ifs ;; esac], [pic_mode=m4_default([$1], [default])]) _LT_DECL([], [pic_mode], [0], [What type of objects to build])dnl ])# _LT_WITH_PIC LT_OPTION_DEFINE([LT_INIT], [pic-only], [_LT_WITH_PIC([yes])]) LT_OPTION_DEFINE([LT_INIT], [no-pic], [_LT_WITH_PIC([no])]) # Old name: AU_DEFUN([AC_LIBTOOL_PICMODE], [_LT_SET_OPTION([LT_INIT], [pic-only]) AC_DIAGNOSE([obsolete], [$0: Remove this warning and the call to _LT_SET_OPTION when you put the 'pic-only' option into LT_INIT's first parameter.]) ]) dnl aclocal-1.4 backwards compatibility: dnl AC_DEFUN([AC_LIBTOOL_PICMODE], []) m4_define([_LTDL_MODE], []) LT_OPTION_DEFINE([LTDL_INIT], [nonrecursive], [m4_define([_LTDL_MODE], [nonrecursive])]) LT_OPTION_DEFINE([LTDL_INIT], [recursive], [m4_define([_LTDL_MODE], [recursive])]) LT_OPTION_DEFINE([LTDL_INIT], [subproject], [m4_define([_LTDL_MODE], [subproject])]) m4_define([_LTDL_TYPE], []) LT_OPTION_DEFINE([LTDL_INIT], [installable], [m4_define([_LTDL_TYPE], [installable])]) LT_OPTION_DEFINE([LTDL_INIT], [convenience], [m4_define([_LTDL_TYPE], [convenience])]) # ltsugar.m4 -- libtool m4 base layer. -*-Autoconf-*- # # Copyright (C) 2004-2005, 2007-2008, 2011-2015 Free Software # Foundation, Inc. # Written by Gary V. Vaughan, 2004 # # This file is free software; the Free Software Foundation gives # unlimited permission to copy and/or distribute it, with or without # modifications, as long as this notice is preserved. # serial 6 ltsugar.m4 # This is to help aclocal find these macros, as it can't see m4_define. 
AC_DEFUN([LTSUGAR_VERSION], [m4_if([0.1])]) # lt_join(SEP, ARG1, [ARG2...]) # ----------------------------- # Produce ARG1SEPARG2...SEPARGn, omitting [] arguments and their # associated separator. # Needed until we can rely on m4_join from Autoconf 2.62, since all earlier # versions in m4sugar had bugs. m4_define([lt_join], [m4_if([$#], [1], [], [$#], [2], [[$2]], [m4_if([$2], [], [], [[$2]_])$0([$1], m4_shift(m4_shift($@)))])]) m4_define([_lt_join], [m4_if([$#$2], [2], [], [m4_if([$2], [], [], [[$1$2]])$0([$1], m4_shift(m4_shift($@)))])]) # lt_car(LIST) # lt_cdr(LIST) # ------------ # Manipulate m4 lists. # These macros are necessary as long as will still need to support # Autoconf-2.59, which quotes differently. m4_define([lt_car], [[$1]]) m4_define([lt_cdr], [m4_if([$#], 0, [m4_fatal([$0: cannot be called without arguments])], [$#], 1, [], [m4_dquote(m4_shift($@))])]) m4_define([lt_unquote], $1) # lt_append(MACRO-NAME, STRING, [SEPARATOR]) # ------------------------------------------ # Redefine MACRO-NAME to hold its former content plus 'SEPARATOR''STRING'. # Note that neither SEPARATOR nor STRING are expanded; they are appended # to MACRO-NAME as is (leaving the expansion for when MACRO-NAME is invoked). # No SEPARATOR is output if MACRO-NAME was previously undefined (different # than defined and empty). # # This macro is needed until we can rely on Autoconf 2.62, since earlier # versions of m4sugar mistakenly expanded SEPARATOR but not STRING. m4_define([lt_append], [m4_define([$1], m4_ifdef([$1], [m4_defn([$1])[$3]])[$2])]) # lt_combine(SEP, PREFIX-LIST, INFIX, SUFFIX1, [SUFFIX2...]) # ---------------------------------------------------------- # Produce a SEP delimited list of all paired combinations of elements of # PREFIX-LIST with SUFFIX1 through SUFFIXn. Each element of the list # has the form PREFIXmINFIXSUFFIXn. # Needed until we can rely on m4_combine added in Autoconf 2.62. 
m4_define([lt_combine], [m4_if(m4_eval([$# > 3]), [1], [m4_pushdef([_Lt_sep], [m4_define([_Lt_sep], m4_defn([lt_car]))])]]dnl [[m4_foreach([_Lt_prefix], [$2], [m4_foreach([_Lt_suffix], ]m4_dquote(m4_dquote(m4_shift(m4_shift(m4_shift($@)))))[, [_Lt_sep([$1])[]m4_defn([_Lt_prefix])[$3]m4_defn([_Lt_suffix])])])])]) # lt_if_append_uniq(MACRO-NAME, VARNAME, [SEPARATOR], [UNIQ], [NOT-UNIQ]) # ----------------------------------------------------------------------- # Iff MACRO-NAME does not yet contain VARNAME, then append it (delimited # by SEPARATOR if supplied) and expand UNIQ, else NOT-UNIQ. m4_define([lt_if_append_uniq], [m4_ifdef([$1], [m4_if(m4_index([$3]m4_defn([$1])[$3], [$3$2$3]), [-1], [lt_append([$1], [$2], [$3])$4], [$5])], [lt_append([$1], [$2], [$3])$4])]) # lt_dict_add(DICT, KEY, VALUE) # ----------------------------- m4_define([lt_dict_add], [m4_define([$1($2)], [$3])]) # lt_dict_add_subkey(DICT, KEY, SUBKEY, VALUE) # -------------------------------------------- m4_define([lt_dict_add_subkey], [m4_define([$1($2:$3)], [$4])]) # lt_dict_fetch(DICT, KEY, [SUBKEY]) # ---------------------------------- m4_define([lt_dict_fetch], [m4_ifval([$3], m4_ifdef([$1($2:$3)], [m4_defn([$1($2:$3)])]), m4_ifdef([$1($2)], [m4_defn([$1($2)])]))]) # lt_if_dict_fetch(DICT, KEY, [SUBKEY], VALUE, IF-TRUE, [IF-FALSE]) # ----------------------------------------------------------------- m4_define([lt_if_dict_fetch], [m4_if(lt_dict_fetch([$1], [$2], [$3]), [$4], [$5], [$6])]) # lt_dict_filter(DICT, [SUBKEY], VALUE, [SEPARATOR], KEY, [...]) # -------------------------------------------------------------- m4_define([lt_dict_filter], [m4_if([$5], [], [], [lt_join(m4_quote(m4_default([$4], [[, ]])), lt_unquote(m4_split(m4_normalize(m4_foreach(_Lt_key, lt_car([m4_shiftn(4, $@)]), [lt_if_dict_fetch([$1], _Lt_key, [$2], [$3], [_Lt_key ])])))))])[]dnl ]) # ltversion.m4 -- version numbers -*- Autoconf -*- # # Copyright (C) 2004, 2011-2015 Free Software Foundation, Inc. 
# Written by Scott James Remnant, 2004 # # This file is free software; the Free Software Foundation gives # unlimited permission to copy and/or distribute it, with or without # modifications, as long as this notice is preserved. # @configure_input@ # serial 4179 ltversion.m4 # This file is part of GNU Libtool m4_define([LT_PACKAGE_VERSION], [2.4.6]) m4_define([LT_PACKAGE_REVISION], [2.4.6]) AC_DEFUN([LTVERSION_VERSION], [macro_version='2.4.6' macro_revision='2.4.6' _LT_DECL(, macro_version, 0, [Which release of libtool.m4 was used?]) _LT_DECL(, macro_revision, 0) ]) # lt~obsolete.m4 -- aclocal satisfying obsolete definitions. -*-Autoconf-*- # # Copyright (C) 2004-2005, 2007, 2009, 2011-2015 Free Software # Foundation, Inc. # Written by Scott James Remnant, 2004. # # This file is free software; the Free Software Foundation gives # unlimited permission to copy and/or distribute it, with or without # modifications, as long as this notice is preserved. # serial 5 lt~obsolete.m4 # These exist entirely to fool aclocal when bootstrapping libtool. # # In the past libtool.m4 has provided macros via AC_DEFUN (or AU_DEFUN), # which have later been changed to m4_define as they aren't part of the # exported API, or moved to Autoconf or Automake where they belong. # # The trouble is, aclocal is a bit thick. It'll see the old AC_DEFUN # in /usr/share/aclocal/libtool.m4 and remember it, then when it sees us # using a macro with the same name in our local m4/libtool.m4 it'll # pull the old libtool.m4 in (it doesn't see our shiny new m4_define # and doesn't know about Autoconf macros at all.) # # So we provide this file, which has a silly filename so it's always # included after everything else. This provides aclocal with the # AC_DEFUNs it wants, but when m4 processes it, it doesn't do anything # because those macros already exist, or will be overwritten later. # We use AC_DEFUN over AU_DEFUN for compatibility with aclocal-1.6. 
# # Anytime we withdraw an AC_DEFUN or AU_DEFUN, remember to add it here. # Yes, that means every name once taken will need to remain here until # we give up compatibility with versions before 1.7, at which point # we need to keep only those names which we still refer to. # This is to help aclocal find these macros, as it can't see m4_define. AC_DEFUN([LTOBSOLETE_VERSION], [m4_if([1])]) m4_ifndef([AC_LIBTOOL_LINKER_OPTION], [AC_DEFUN([AC_LIBTOOL_LINKER_OPTION])]) m4_ifndef([AC_PROG_EGREP], [AC_DEFUN([AC_PROG_EGREP])]) m4_ifndef([_LT_AC_PROG_ECHO_BACKSLASH], [AC_DEFUN([_LT_AC_PROG_ECHO_BACKSLASH])]) m4_ifndef([_LT_AC_SHELL_INIT], [AC_DEFUN([_LT_AC_SHELL_INIT])]) m4_ifndef([_LT_AC_SYS_LIBPATH_AIX], [AC_DEFUN([_LT_AC_SYS_LIBPATH_AIX])]) m4_ifndef([_LT_PROG_LTMAIN], [AC_DEFUN([_LT_PROG_LTMAIN])]) m4_ifndef([_LT_AC_TAGVAR], [AC_DEFUN([_LT_AC_TAGVAR])]) m4_ifndef([AC_LTDL_ENABLE_INSTALL], [AC_DEFUN([AC_LTDL_ENABLE_INSTALL])]) m4_ifndef([AC_LTDL_PREOPEN], [AC_DEFUN([AC_LTDL_PREOPEN])]) m4_ifndef([_LT_AC_SYS_COMPILER], [AC_DEFUN([_LT_AC_SYS_COMPILER])]) m4_ifndef([_LT_AC_LOCK], [AC_DEFUN([_LT_AC_LOCK])]) m4_ifndef([AC_LIBTOOL_SYS_OLD_ARCHIVE], [AC_DEFUN([AC_LIBTOOL_SYS_OLD_ARCHIVE])]) m4_ifndef([_LT_AC_TRY_DLOPEN_SELF], [AC_DEFUN([_LT_AC_TRY_DLOPEN_SELF])]) m4_ifndef([AC_LIBTOOL_PROG_CC_C_O], [AC_DEFUN([AC_LIBTOOL_PROG_CC_C_O])]) m4_ifndef([AC_LIBTOOL_SYS_HARD_LINK_LOCKS], [AC_DEFUN([AC_LIBTOOL_SYS_HARD_LINK_LOCKS])]) m4_ifndef([AC_LIBTOOL_OBJDIR], [AC_DEFUN([AC_LIBTOOL_OBJDIR])]) m4_ifndef([AC_LTDL_OBJDIR], [AC_DEFUN([AC_LTDL_OBJDIR])]) m4_ifndef([AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH], [AC_DEFUN([AC_LIBTOOL_PROG_LD_HARDCODE_LIBPATH])]) m4_ifndef([AC_LIBTOOL_SYS_LIB_STRIP], [AC_DEFUN([AC_LIBTOOL_SYS_LIB_STRIP])]) m4_ifndef([AC_PATH_MAGIC], [AC_DEFUN([AC_PATH_MAGIC])]) m4_ifndef([AC_PROG_LD_GNU], [AC_DEFUN([AC_PROG_LD_GNU])]) m4_ifndef([AC_PROG_LD_RELOAD_FLAG], [AC_DEFUN([AC_PROG_LD_RELOAD_FLAG])]) m4_ifndef([AC_DEPLIBS_CHECK_METHOD], [AC_DEFUN([AC_DEPLIBS_CHECK_METHOD])]) 
m4_ifndef([AC_LIBTOOL_PROG_COMPILER_NO_RTTI], [AC_DEFUN([AC_LIBTOOL_PROG_COMPILER_NO_RTTI])]) m4_ifndef([AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE], [AC_DEFUN([AC_LIBTOOL_SYS_GLOBAL_SYMBOL_PIPE])]) m4_ifndef([AC_LIBTOOL_PROG_COMPILER_PIC], [AC_DEFUN([AC_LIBTOOL_PROG_COMPILER_PIC])]) m4_ifndef([AC_LIBTOOL_PROG_LD_SHLIBS], [AC_DEFUN([AC_LIBTOOL_PROG_LD_SHLIBS])]) m4_ifndef([AC_LIBTOOL_POSTDEP_PREDEP], [AC_DEFUN([AC_LIBTOOL_POSTDEP_PREDEP])]) m4_ifndef([LT_AC_PROG_EGREP], [AC_DEFUN([LT_AC_PROG_EGREP])]) m4_ifndef([LT_AC_PROG_SED], [AC_DEFUN([LT_AC_PROG_SED])]) m4_ifndef([_LT_CC_BASENAME], [AC_DEFUN([_LT_CC_BASENAME])]) m4_ifndef([_LT_COMPILER_BOILERPLATE], [AC_DEFUN([_LT_COMPILER_BOILERPLATE])]) m4_ifndef([_LT_LINKER_BOILERPLATE], [AC_DEFUN([_LT_LINKER_BOILERPLATE])]) m4_ifndef([_AC_PROG_LIBTOOL], [AC_DEFUN([_AC_PROG_LIBTOOL])]) m4_ifndef([AC_LIBTOOL_SETUP], [AC_DEFUN([AC_LIBTOOL_SETUP])]) m4_ifndef([_LT_AC_CHECK_DLFCN], [AC_DEFUN([_LT_AC_CHECK_DLFCN])]) m4_ifndef([AC_LIBTOOL_SYS_DYNAMIC_LINKER], [AC_DEFUN([AC_LIBTOOL_SYS_DYNAMIC_LINKER])]) m4_ifndef([_LT_AC_TAGCONFIG], [AC_DEFUN([_LT_AC_TAGCONFIG])]) m4_ifndef([AC_DISABLE_FAST_INSTALL], [AC_DEFUN([AC_DISABLE_FAST_INSTALL])]) m4_ifndef([_LT_AC_LANG_CXX], [AC_DEFUN([_LT_AC_LANG_CXX])]) m4_ifndef([_LT_AC_LANG_F77], [AC_DEFUN([_LT_AC_LANG_F77])]) m4_ifndef([_LT_AC_LANG_GCJ], [AC_DEFUN([_LT_AC_LANG_GCJ])]) m4_ifndef([AC_LIBTOOL_LANG_C_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_C_CONFIG])]) m4_ifndef([_LT_AC_LANG_C_CONFIG], [AC_DEFUN([_LT_AC_LANG_C_CONFIG])]) m4_ifndef([AC_LIBTOOL_LANG_CXX_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_CXX_CONFIG])]) m4_ifndef([_LT_AC_LANG_CXX_CONFIG], [AC_DEFUN([_LT_AC_LANG_CXX_CONFIG])]) m4_ifndef([AC_LIBTOOL_LANG_F77_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_F77_CONFIG])]) m4_ifndef([_LT_AC_LANG_F77_CONFIG], [AC_DEFUN([_LT_AC_LANG_F77_CONFIG])]) m4_ifndef([AC_LIBTOOL_LANG_GCJ_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_GCJ_CONFIG])]) m4_ifndef([_LT_AC_LANG_GCJ_CONFIG], [AC_DEFUN([_LT_AC_LANG_GCJ_CONFIG])]) 
m4_ifndef([AC_LIBTOOL_LANG_RC_CONFIG], [AC_DEFUN([AC_LIBTOOL_LANG_RC_CONFIG])]) m4_ifndef([_LT_AC_LANG_RC_CONFIG], [AC_DEFUN([_LT_AC_LANG_RC_CONFIG])]) m4_ifndef([AC_LIBTOOL_CONFIG], [AC_DEFUN([AC_LIBTOOL_CONFIG])]) m4_ifndef([_LT_AC_FILE_LTDLL_C], [AC_DEFUN([_LT_AC_FILE_LTDLL_C])]) m4_ifndef([_LT_REQUIRED_DARWIN_CHECKS], [AC_DEFUN([_LT_REQUIRED_DARWIN_CHECKS])]) m4_ifndef([_LT_AC_PROG_CXXCPP], [AC_DEFUN([_LT_AC_PROG_CXXCPP])]) m4_ifndef([_LT_PREPARE_SED_QUOTE_VARS], [AC_DEFUN([_LT_PREPARE_SED_QUOTE_VARS])]) m4_ifndef([_LT_PROG_ECHO_BACKSLASH], [AC_DEFUN([_LT_PROG_ECHO_BACKSLASH])]) m4_ifndef([_LT_PROG_F77], [AC_DEFUN([_LT_PROG_F77])]) m4_ifndef([_LT_PROG_FC], [AC_DEFUN([_LT_PROG_FC])]) m4_ifndef([_LT_PROG_CXX], [AC_DEFUN([_LT_PROG_CXX])]) dnl pkg.m4 - Macros to locate and utilise pkg-config. -*- Autoconf -*- dnl serial 11 (pkg-config-0.29) dnl dnl Copyright © 2004 Scott James Remnant . dnl Copyright © 2012-2015 Dan Nicholson dnl dnl This program is free software; you can redistribute it and/or modify dnl it under the terms of the GNU General Public License as published by dnl the Free Software Foundation; either version 2 of the License, or dnl (at your option) any later version. dnl dnl This program is distributed in the hope that it will be useful, but dnl WITHOUT ANY WARRANTY; without even the implied warranty of dnl MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU dnl General Public License for more details. dnl dnl You should have received a copy of the GNU General Public License dnl along with this program; if not, write to the Free Software dnl Foundation, Inc., 59 Temple Place - Suite 330, Boston, MA dnl 02111-1307, USA. dnl dnl As a special exception to the GNU General Public License, if you dnl distribute this file as part of a program that contains a dnl configuration script generated by Autoconf, you may include it under dnl the same distribution terms that you use for the rest of that dnl program. 
dnl PKG_PREREQ(MIN-VERSION) dnl ----------------------- dnl Since: 0.29 dnl dnl Verify that the version of the pkg-config macros are at least dnl MIN-VERSION. Unlike PKG_PROG_PKG_CONFIG, which checks the user's dnl installed version of pkg-config, this checks the developer's version dnl of pkg.m4 when generating configure. dnl dnl To ensure that this macro is defined, also add: dnl m4_ifndef([PKG_PREREQ], dnl [m4_fatal([must install pkg-config 0.29 or later before running autoconf/autogen])]) dnl dnl See the "Since" comment for each macro you use to see what version dnl of the macros you require. m4_defun([PKG_PREREQ], [m4_define([PKG_MACROS_VERSION], [0.29]) m4_if(m4_version_compare(PKG_MACROS_VERSION, [$1]), -1, [m4_fatal([pkg.m4 version $1 or higher is required but ]PKG_MACROS_VERSION[ found])]) ])dnl PKG_PREREQ dnl PKG_PROG_PKG_CONFIG([MIN-VERSION]) dnl ---------------------------------- dnl Since: 0.16 dnl dnl Search for the pkg-config tool and set the PKG_CONFIG variable to dnl first found in the path. Checks that the version of pkg-config found dnl is at least MIN-VERSION. If MIN-VERSION is not specified, 0.9.0 is dnl used since that's the first version where most current features of dnl pkg-config existed. 
AC_DEFUN([PKG_PROG_PKG_CONFIG], [m4_pattern_forbid([^_?PKG_[A-Z_]+$]) m4_pattern_allow([^PKG_CONFIG(_(PATH|LIBDIR|SYSROOT_DIR|ALLOW_SYSTEM_(CFLAGS|LIBS)))?$]) m4_pattern_allow([^PKG_CONFIG_(DISABLE_UNINSTALLED|TOP_BUILD_DIR|DEBUG_SPEW)$]) AC_ARG_VAR([PKG_CONFIG], [path to pkg-config utility]) AC_ARG_VAR([PKG_CONFIG_PATH], [directories to add to pkg-config's search path]) AC_ARG_VAR([PKG_CONFIG_LIBDIR], [path overriding pkg-config's built-in search path]) if test "x$ac_cv_env_PKG_CONFIG_set" != "xset"; then AC_PATH_TOOL([PKG_CONFIG], [pkg-config]) fi if test -n "$PKG_CONFIG"; then _pkg_min_version=m4_default([$1], [0.9.0]) AC_MSG_CHECKING([pkg-config is at least version $_pkg_min_version]) if $PKG_CONFIG --atleast-pkgconfig-version $_pkg_min_version; then AC_MSG_RESULT([yes]) else AC_MSG_RESULT([no]) PKG_CONFIG="" fi fi[]dnl ])dnl PKG_PROG_PKG_CONFIG dnl PKG_CHECK_EXISTS(MODULES, [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND]) dnl ------------------------------------------------------------------- dnl Since: 0.18 dnl dnl Check to see whether a particular set of modules exists. Similar to dnl PKG_CHECK_MODULES(), but does not set variables or print errors. dnl dnl Please remember that m4 expands AC_REQUIRE([PKG_PROG_PKG_CONFIG]) dnl only at the first occurence in configure.ac, so if the first place dnl it's called might be skipped (such as if it is within an "if", you dnl have to call PKG_CHECK_EXISTS manually AC_DEFUN([PKG_CHECK_EXISTS], [AC_REQUIRE([PKG_PROG_PKG_CONFIG])dnl if test -n "$PKG_CONFIG" && \ AC_RUN_LOG([$PKG_CONFIG --exists --print-errors "$1"]); then m4_default([$2], [:]) m4_ifvaln([$3], [else $3])dnl fi]) dnl _PKG_CONFIG([VARIABLE], [COMMAND], [MODULES]) dnl --------------------------------------------- dnl Internal wrapper calling pkg-config via PKG_CONFIG and setting dnl pkg_failed based on the result. 
m4_define([_PKG_CONFIG], [if test -n "$$1"; then pkg_cv_[]$1="$$1" elif test -n "$PKG_CONFIG"; then PKG_CHECK_EXISTS([$3], [pkg_cv_[]$1=`$PKG_CONFIG --[]$2 "$3" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes ], [pkg_failed=yes]) else pkg_failed=untried fi[]dnl ])dnl _PKG_CONFIG dnl _PKG_SHORT_ERRORS_SUPPORTED dnl --------------------------- dnl Internal check to see if pkg-config supports short errors. AC_DEFUN([_PKG_SHORT_ERRORS_SUPPORTED], [AC_REQUIRE([PKG_PROG_PKG_CONFIG]) if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi[]dnl ])dnl _PKG_SHORT_ERRORS_SUPPORTED dnl PKG_CHECK_MODULES(VARIABLE-PREFIX, MODULES, [ACTION-IF-FOUND], dnl [ACTION-IF-NOT-FOUND]) dnl -------------------------------------------------------------- dnl Since: 0.4.0 dnl dnl Note that if there is a possibility the first call to dnl PKG_CHECK_MODULES might not happen, you should be sure to include an dnl explicit call to PKG_PROG_PKG_CONFIG in your configure.ac AC_DEFUN([PKG_CHECK_MODULES], [AC_REQUIRE([PKG_PROG_PKG_CONFIG])dnl AC_ARG_VAR([$1][_CFLAGS], [C compiler flags for $1, overriding pkg-config])dnl AC_ARG_VAR([$1][_LIBS], [linker flags for $1, overriding pkg-config])dnl pkg_failed=no AC_MSG_CHECKING([for $1]) _PKG_CONFIG([$1][_CFLAGS], [cflags], [$2]) _PKG_CONFIG([$1][_LIBS], [libs], [$2]) m4_define([_PKG_TEXT], [Alternatively, you may set the environment variables $1[]_CFLAGS and $1[]_LIBS to avoid the need to call pkg-config. 
See the pkg-config man page for more details.]) if test $pkg_failed = yes; then AC_MSG_RESULT([no]) _PKG_SHORT_ERRORS_SUPPORTED if test $_pkg_short_errors_supported = yes; then $1[]_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "$2" 2>&1` else $1[]_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "$2" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$$1[]_PKG_ERRORS" >&AS_MESSAGE_LOG_FD m4_default([$4], [AC_MSG_ERROR( [Package requirements ($2) were not met: $$1_PKG_ERRORS Consider adjusting the PKG_CONFIG_PATH environment variable if you installed software in a non-standard prefix. _PKG_TEXT])[]dnl ]) elif test $pkg_failed = untried; then AC_MSG_RESULT([no]) m4_default([$4], [AC_MSG_FAILURE( [The pkg-config script could not be found or is too old. Make sure it is in your PATH or set the PKG_CONFIG environment variable to the full path to pkg-config. _PKG_TEXT To get pkg-config, see .])[]dnl ]) else $1[]_CFLAGS=$pkg_cv_[]$1[]_CFLAGS $1[]_LIBS=$pkg_cv_[]$1[]_LIBS AC_MSG_RESULT([yes]) $3 fi[]dnl ])dnl PKG_CHECK_MODULES dnl PKG_CHECK_MODULES_STATIC(VARIABLE-PREFIX, MODULES, [ACTION-IF-FOUND], dnl [ACTION-IF-NOT-FOUND]) dnl --------------------------------------------------------------------- dnl Since: 0.29 dnl dnl Checks for existence of MODULES and gathers its build flags with dnl static libraries enabled. Sets VARIABLE-PREFIX_CFLAGS from --cflags dnl and VARIABLE-PREFIX_LIBS from --libs. dnl dnl Note that if there is a possibility the first call to dnl PKG_CHECK_MODULES_STATIC might not happen, you should be sure to dnl include an explicit call to PKG_PROG_PKG_CONFIG in your dnl configure.ac. 
AC_DEFUN([PKG_CHECK_MODULES_STATIC], [AC_REQUIRE([PKG_PROG_PKG_CONFIG])dnl _save_PKG_CONFIG=$PKG_CONFIG PKG_CONFIG="$PKG_CONFIG --static" PKG_CHECK_MODULES($@) PKG_CONFIG=$_save_PKG_CONFIG[]dnl ])dnl PKG_CHECK_MODULES_STATIC dnl PKG_INSTALLDIR([DIRECTORY]) dnl ------------------------- dnl Since: 0.27 dnl dnl Substitutes the variable pkgconfigdir as the location where a module dnl should install pkg-config .pc files. By default the directory is dnl $libdir/pkgconfig, but the default can be changed by passing dnl DIRECTORY. The user can override through the --with-pkgconfigdir dnl parameter. AC_DEFUN([PKG_INSTALLDIR], [m4_pushdef([pkg_default], [m4_default([$1], ['${libdir}/pkgconfig'])]) m4_pushdef([pkg_description], [pkg-config installation directory @<:@]pkg_default[@:>@]) AC_ARG_WITH([pkgconfigdir], [AS_HELP_STRING([--with-pkgconfigdir], pkg_description)],, [with_pkgconfigdir=]pkg_default) AC_SUBST([pkgconfigdir], [$with_pkgconfigdir]) m4_popdef([pkg_default]) m4_popdef([pkg_description]) ])dnl PKG_INSTALLDIR dnl PKG_NOARCH_INSTALLDIR([DIRECTORY]) dnl -------------------------------- dnl Since: 0.27 dnl dnl Substitutes the variable noarch_pkgconfigdir as the location where a dnl module should install arch-independent pkg-config .pc files. By dnl default the directory is $datadir/pkgconfig, but the default can be dnl changed by passing DIRECTORY. The user can override through the dnl --with-noarch-pkgconfigdir parameter. 
AC_DEFUN([PKG_NOARCH_INSTALLDIR], [m4_pushdef([pkg_default], [m4_default([$1], ['${datadir}/pkgconfig'])]) m4_pushdef([pkg_description], [pkg-config arch-independent installation directory @<:@]pkg_default[@:>@]) AC_ARG_WITH([noarch-pkgconfigdir], [AS_HELP_STRING([--with-noarch-pkgconfigdir], pkg_description)],, [with_noarch_pkgconfigdir=]pkg_default) AC_SUBST([noarch_pkgconfigdir], [$with_noarch_pkgconfigdir]) m4_popdef([pkg_default]) m4_popdef([pkg_description]) ])dnl PKG_NOARCH_INSTALLDIR dnl PKG_CHECK_VAR(VARIABLE, MODULE, CONFIG-VARIABLE, dnl [ACTION-IF-FOUND], [ACTION-IF-NOT-FOUND]) dnl ------------------------------------------- dnl Since: 0.28 dnl dnl Retrieves the value of the pkg-config variable for the given module. AC_DEFUN([PKG_CHECK_VAR], [AC_REQUIRE([PKG_PROG_PKG_CONFIG])dnl AC_ARG_VAR([$1], [value of $3 for $2, overriding pkg-config])dnl _PKG_CONFIG([$1], [variable="][$3]["], [$2]) AS_VAR_COPY([$1], [pkg_cv_][$1]) AS_VAR_IF([$1], [""], [$5], [$4])dnl ])dnl PKG_CHECK_VAR # Copyright (C) 2002-2014 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_AUTOMAKE_VERSION(VERSION) # ---------------------------- # Automake X.Y traces this macro to ensure aclocal.m4 has been # generated from the m4 files accompanying Automake X.Y. # (This private macro should not be called outside this file.) AC_DEFUN([AM_AUTOMAKE_VERSION], [am__api_version='1.15' dnl Some users find AM_AUTOMAKE_VERSION and mistake it for a way to dnl require some minimum version. Point them to the right macro. m4_if([$1], [1.15], [], [AC_FATAL([Do not call $0, use AM_INIT_AUTOMAKE([$1]).])])dnl ]) # _AM_AUTOCONF_VERSION(VERSION) # ----------------------------- # aclocal traces this macro to find the Autoconf version. # This is a private macro too. 
Using m4_define simplifies # the logic in aclocal, which can simply ignore this definition. m4_define([_AM_AUTOCONF_VERSION], []) # AM_SET_CURRENT_AUTOMAKE_VERSION # ------------------------------- # Call AM_AUTOMAKE_VERSION and AM_AUTOMAKE_VERSION so they can be traced. # This function is AC_REQUIREd by AM_INIT_AUTOMAKE. AC_DEFUN([AM_SET_CURRENT_AUTOMAKE_VERSION], [AM_AUTOMAKE_VERSION([1.15])dnl m4_ifndef([AC_AUTOCONF_VERSION], [m4_copy([m4_PACKAGE_VERSION], [AC_AUTOCONF_VERSION])])dnl _AM_AUTOCONF_VERSION(m4_defn([AC_AUTOCONF_VERSION]))]) # AM_AUX_DIR_EXPAND -*- Autoconf -*- # Copyright (C) 2001-2014 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # For projects using AC_CONFIG_AUX_DIR([foo]), Autoconf sets # $ac_aux_dir to '$srcdir/foo'. In other projects, it is set to # '$srcdir', '$srcdir/..', or '$srcdir/../..'. # # Of course, Automake must honor this variable whenever it calls a # tool from the auxiliary directory. The problem is that $srcdir (and # therefore $ac_aux_dir as well) can be either absolute or relative, # depending on how configure is run. This is pretty annoying, since # it makes $ac_aux_dir quite unusable in subdirectories: in the top # source directory, any form will work fine, but in subdirectories a # relative path needs to be adjusted first. # # $ac_aux_dir/missing # fails when called from a subdirectory if $ac_aux_dir is relative # $top_srcdir/$ac_aux_dir/missing # fails if $ac_aux_dir is absolute, # fails when called from a subdirectory in a VPATH build with # a relative $ac_aux_dir # # The reason of the latter failure is that $top_srcdir and $ac_aux_dir # are both prefixed by $srcdir. In an in-source build this is usually # harmless because $srcdir is '.', but things will broke when you # start a VPATH build or use an absolute $srcdir. 
# # So we could use something similar to $top_srcdir/$ac_aux_dir/missing, # iff we strip the leading $srcdir from $ac_aux_dir. That would be: # am_aux_dir='\$(top_srcdir)/'`expr "$ac_aux_dir" : "$srcdir//*\(.*\)"` # and then we would define $MISSING as # MISSING="\${SHELL} $am_aux_dir/missing" # This will work as long as MISSING is not called from configure, because # unfortunately $(top_srcdir) has no meaning in configure. # However there are other variables, like CC, which are often used in # configure, and could therefore not use this "fixed" $ac_aux_dir. # # Another solution, used here, is to always expand $ac_aux_dir to an # absolute PATH. The drawback is that using absolute paths prevent a # configured tree to be moved without reconfiguration. AC_DEFUN([AM_AUX_DIR_EXPAND], [AC_REQUIRE([AC_CONFIG_AUX_DIR_DEFAULT])dnl # Expand $ac_aux_dir to an absolute path. am_aux_dir=`cd "$ac_aux_dir" && pwd` ]) # AM_CONDITIONAL -*- Autoconf -*- # Copyright (C) 1997-2014 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_CONDITIONAL(NAME, SHELL-CONDITION) # ------------------------------------- # Define a conditional. AC_DEFUN([AM_CONDITIONAL], [AC_PREREQ([2.52])dnl m4_if([$1], [TRUE], [AC_FATAL([$0: invalid condition: $1])], [$1], [FALSE], [AC_FATAL([$0: invalid condition: $1])])dnl AC_SUBST([$1_TRUE])dnl AC_SUBST([$1_FALSE])dnl _AM_SUBST_NOTMAKE([$1_TRUE])dnl _AM_SUBST_NOTMAKE([$1_FALSE])dnl m4_define([_AM_COND_VALUE_$1], [$2])dnl if $2; then $1_TRUE= $1_FALSE='#' else $1_TRUE='#' $1_FALSE= fi AC_CONFIG_COMMANDS_PRE( [if test -z "${$1_TRUE}" && test -z "${$1_FALSE}"; then AC_MSG_ERROR([[conditional "$1" was never defined. Usually this means the macro was only invoked conditionally.]]) fi])]) # Copyright (C) 1999-2014 Free Software Foundation, Inc. 
# # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # There are a few dirty hacks below to avoid letting 'AC_PROG_CC' be # written in clear, in which case automake, when reading aclocal.m4, # will think it sees a *use*, and therefore will trigger all it's # C support machinery. Also note that it means that autoscan, seeing # CC etc. in the Makefile, will ask for an AC_PROG_CC use... # _AM_DEPENDENCIES(NAME) # ---------------------- # See how the compiler implements dependency checking. # NAME is "CC", "CXX", "OBJC", "OBJCXX", "UPC", or "GJC". # We try a few techniques and use that to set a single cache variable. # # We don't AC_REQUIRE the corresponding AC_PROG_CC since the latter was # modified to invoke _AM_DEPENDENCIES(CC); we would have a circular # dependency, and given that the user is not expected to run this macro, # just rely on AC_PROG_CC. AC_DEFUN([_AM_DEPENDENCIES], [AC_REQUIRE([AM_SET_DEPDIR])dnl AC_REQUIRE([AM_OUTPUT_DEPENDENCY_COMMANDS])dnl AC_REQUIRE([AM_MAKE_INCLUDE])dnl AC_REQUIRE([AM_DEP_TRACK])dnl m4_if([$1], [CC], [depcc="$CC" am_compiler_list=], [$1], [CXX], [depcc="$CXX" am_compiler_list=], [$1], [OBJC], [depcc="$OBJC" am_compiler_list='gcc3 gcc'], [$1], [OBJCXX], [depcc="$OBJCXX" am_compiler_list='gcc3 gcc'], [$1], [UPC], [depcc="$UPC" am_compiler_list=], [$1], [GCJ], [depcc="$GCJ" am_compiler_list='gcc3 gcc'], [depcc="$$1" am_compiler_list=]) AC_CACHE_CHECK([dependency style of $depcc], [am_cv_$1_dependencies_compiler_type], [if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then # We make a subdir and do the tests there. Otherwise we can end up # making bogus files that we don't know about and never remove. For # instance it was reported that on HP-UX the gcc test will end up # making a dummy file named 'D' -- because '-MD' means "put the output # in D". 
rm -rf conftest.dir mkdir conftest.dir # Copy depcomp to subdir because otherwise we won't find it if we're # using a relative directory. cp "$am_depcomp" conftest.dir cd conftest.dir # We will build objects and dependencies in a subdirectory because # it helps to detect inapplicable dependency modes. For instance # both Tru64's cc and ICC support -MD to output dependencies as a # side effect of compilation, but ICC will put the dependencies in # the current directory while Tru64 will put them in the object # directory. mkdir sub am_cv_$1_dependencies_compiler_type=none if test "$am_compiler_list" = ""; then am_compiler_list=`sed -n ['s/^#*\([a-zA-Z0-9]*\))$/\1/p'] < ./depcomp` fi am__universal=false m4_case([$1], [CC], [case " $depcc " in #( *\ -arch\ *\ -arch\ *) am__universal=true ;; esac], [CXX], [case " $depcc " in #( *\ -arch\ *\ -arch\ *) am__universal=true ;; esac]) for depmode in $am_compiler_list; do # Setup a source with many dependencies, because some compilers # like to wrap large dependency lists on column 80 (with \), and # we should not choose a depcomp mode which is confused by this. # # We need to recreate these files for each test, as the compiler may # overwrite some of them when testing with obscure command lines. # This happens at least with the AIX C compiler. : > sub/conftest.c for i in 1 2 3 4 5 6; do echo '#include "conftst'$i'.h"' >> sub/conftest.c # Using ": > sub/conftst$i.h" creates only sub/conftst1.h with # Solaris 10 /bin/sh. echo '/* dummy */' > sub/conftst$i.h done echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf # We check with '-c' and '-o' for the sake of the "dashmstdout" # mode. It turns out that the SunPro C++ compiler does not properly # handle '-M -o', and we need to detect this. Also, some Intel # versions had trouble with output in subdirs. am__obj=sub/conftest.${OBJEXT-o} am__minus_obj="-o $am__obj" case $depmode in gcc) # This depmode causes a compiler race in universal mode. 
test "$am__universal" = false || continue ;; nosideeffect) # After this tag, mechanisms are not by side-effect, so they'll # only be used when explicitly requested. if test "x$enable_dependency_tracking" = xyes; then continue else break fi ;; msvc7 | msvc7msys | msvisualcpp | msvcmsys) # This compiler won't grok '-c -o', but also, the minuso test has # not run yet. These depmodes are late enough in the game, and # so weak that their functioning should not be impacted. am__obj=conftest.${OBJEXT-o} am__minus_obj= ;; none) break ;; esac if depmode=$depmode \ source=sub/conftest.c object=$am__obj \ depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \ $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \ >/dev/null 2>conftest.err && grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 && grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 && grep $am__obj sub/conftest.Po > /dev/null 2>&1 && ${MAKE-make} -s -f confmf > /dev/null 2>&1; then # icc doesn't choke on unknown options, it will just issue warnings # or remarks (even with -Werror). So we grep stderr for any message # that says an option was ignored or not supported. # When given -MP, icc 7.0 and 7.1 complain thusly: # icc: Command line warning: ignoring option '-M'; no argument required # The diagnosis changed in icc 8.0: # icc: Command line remark: option '-MP' not supported if (grep 'ignoring option' conftest.err || grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else am_cv_$1_dependencies_compiler_type=$depmode break fi fi done cd .. rm -rf conftest.dir else am_cv_$1_dependencies_compiler_type=none fi ]) AC_SUBST([$1DEPMODE], [depmode=$am_cv_$1_dependencies_compiler_type]) AM_CONDITIONAL([am__fastdep$1], [ test "x$enable_dependency_tracking" != xno \ && test "$am_cv_$1_dependencies_compiler_type" = gcc3]) ]) # AM_SET_DEPDIR # ------------- # Choose a directory name for dependency files. # This macro is AC_REQUIREd in _AM_DEPENDENCIES. 
AC_DEFUN([AM_SET_DEPDIR], [AC_REQUIRE([AM_SET_LEADING_DOT])dnl AC_SUBST([DEPDIR], ["${am__leading_dot}deps"])dnl ]) # AM_DEP_TRACK # ------------ AC_DEFUN([AM_DEP_TRACK], [AC_ARG_ENABLE([dependency-tracking], [dnl AS_HELP_STRING( [--enable-dependency-tracking], [do not reject slow dependency extractors]) AS_HELP_STRING( [--disable-dependency-tracking], [speeds up one-time build])]) if test "x$enable_dependency_tracking" != xno; then am_depcomp="$ac_aux_dir/depcomp" AMDEPBACKSLASH='\' am__nodep='_no' fi AM_CONDITIONAL([AMDEP], [test "x$enable_dependency_tracking" != xno]) AC_SUBST([AMDEPBACKSLASH])dnl _AM_SUBST_NOTMAKE([AMDEPBACKSLASH])dnl AC_SUBST([am__nodep])dnl _AM_SUBST_NOTMAKE([am__nodep])dnl ]) # Generate code to set up dependency tracking. -*- Autoconf -*- # Copyright (C) 1999-2014 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # _AM_OUTPUT_DEPENDENCY_COMMANDS # ------------------------------ AC_DEFUN([_AM_OUTPUT_DEPENDENCY_COMMANDS], [{ # Older Autoconf quotes --file arguments for eval, but not when files # are listed without --file. Let's play safe and only enable the eval # if we detect the quoting. case $CONFIG_FILES in *\'*) eval set x "$CONFIG_FILES" ;; *) set x $CONFIG_FILES ;; esac shift for mf do # Strip MF so we end up with the name of the file. mf=`echo "$mf" | sed -e 's/:.*$//'` # Check whether this is an Automake generated Makefile or not. # We used to match only the files named 'Makefile.in', but # some people rename them; so instead we look at the file content. # Grep'ing the first line is not enough: some people post-process # each Makefile.in and add a new line on top of each file to say so. # Grep'ing the whole file is not good either: AIX grep has a line # limit of 2048, but all sed's we know have understand at least 4000. 
if sed -n 's,^#.*generated by automake.*,X,p' "$mf" | grep X >/dev/null 2>&1; then dirpart=`AS_DIRNAME("$mf")` else continue fi # Extract the definition of DEPDIR, am__include, and am__quote # from the Makefile without running 'make'. DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"` test -z "$DEPDIR" && continue am__include=`sed -n 's/^am__include = //p' < "$mf"` test -z "$am__include" && continue am__quote=`sed -n 's/^am__quote = //p' < "$mf"` # Find all dependency output files, they are included files with # $(DEPDIR) in their names. We invoke sed twice because it is the # simplest approach to changing $(DEPDIR) to its actual value in the # expansion. for file in `sed -n " s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \ sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g'`; do # Make sure the directory exists. test -f "$dirpart/$file" && continue fdir=`AS_DIRNAME(["$file"])` AS_MKDIR_P([$dirpart/$fdir]) # echo "creating $dirpart/$file" echo '# dummy' > "$dirpart/$file" done done } ])# _AM_OUTPUT_DEPENDENCY_COMMANDS # AM_OUTPUT_DEPENDENCY_COMMANDS # ----------------------------- # This macro should only be invoked once -- use via AC_REQUIRE. # # This code is only required when automatic dependency tracking # is enabled. FIXME. This creates each '.P' file that we will # need in order to bootstrap the dependency handling code. AC_DEFUN([AM_OUTPUT_DEPENDENCY_COMMANDS], [AC_CONFIG_COMMANDS([depfiles], [test x"$AMDEP_TRUE" != x"" || _AM_OUTPUT_DEPENDENCY_COMMANDS], [AMDEP_TRUE="$AMDEP_TRUE" ac_aux_dir="$ac_aux_dir"]) ]) # Do all the work for Automake. -*- Autoconf -*- # Copyright (C) 1996-2014 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This macro actually does too much. Some checks are only needed if # your package does certain things. But this isn't really a big deal. 
dnl Redefine AC_PROG_CC to automatically invoke _AM_PROG_CC_C_O. m4_define([AC_PROG_CC], m4_defn([AC_PROG_CC]) [_AM_PROG_CC_C_O ]) # AM_INIT_AUTOMAKE(PACKAGE, VERSION, [NO-DEFINE]) # AM_INIT_AUTOMAKE([OPTIONS]) # ----------------------------------------------- # The call with PACKAGE and VERSION arguments is the old style # call (pre autoconf-2.50), which is being phased out. PACKAGE # and VERSION should now be passed to AC_INIT and removed from # the call to AM_INIT_AUTOMAKE. # We support both call styles for the transition. After # the next Automake release, Autoconf can make the AC_INIT # arguments mandatory, and then we can depend on a new Autoconf # release and drop the old call support. AC_DEFUN([AM_INIT_AUTOMAKE], [AC_PREREQ([2.65])dnl dnl Autoconf wants to disallow AM_ names. We explicitly allow dnl the ones we care about. m4_pattern_allow([^AM_[A-Z]+FLAGS$])dnl AC_REQUIRE([AM_SET_CURRENT_AUTOMAKE_VERSION])dnl AC_REQUIRE([AC_PROG_INSTALL])dnl if test "`cd $srcdir && pwd`" != "`pwd`"; then # Use -I$(srcdir) only when $(srcdir) != ., so that make's output # is not polluted with repeated "-I." AC_SUBST([am__isrc], [' -I$(srcdir)'])_AM_SUBST_NOTMAKE([am__isrc])dnl # test to see if srcdir already configured if test -f $srcdir/config.status; then AC_MSG_ERROR([source directory already configured; run "make distclean" there first]) fi fi # test whether we have cygpath if test -z "$CYGPATH_W"; then if (cygpath --version) >/dev/null 2>/dev/null; then CYGPATH_W='cygpath -w' else CYGPATH_W=echo fi fi AC_SUBST([CYGPATH_W]) # Define the identity of the package. dnl Distinguish between old-style and new-style calls. m4_ifval([$2], [AC_DIAGNOSE([obsolete], [$0: two- and three-arguments forms are deprecated.]) m4_ifval([$3], [_AM_SET_OPTION([no-define])])dnl AC_SUBST([PACKAGE], [$1])dnl AC_SUBST([VERSION], [$2])], [_AM_SET_OPTIONS([$1])dnl dnl Diagnose old-style AC_INIT with new-style AM_AUTOMAKE_INIT. 
m4_if( m4_ifdef([AC_PACKAGE_NAME], [ok]):m4_ifdef([AC_PACKAGE_VERSION], [ok]), [ok:ok],, [m4_fatal([AC_INIT should be called with package and version arguments])])dnl AC_SUBST([PACKAGE], ['AC_PACKAGE_TARNAME'])dnl AC_SUBST([VERSION], ['AC_PACKAGE_VERSION'])])dnl _AM_IF_OPTION([no-define],, [AC_DEFINE_UNQUOTED([PACKAGE], ["$PACKAGE"], [Name of package]) AC_DEFINE_UNQUOTED([VERSION], ["$VERSION"], [Version number of package])])dnl # Some tools Automake needs. AC_REQUIRE([AM_SANITY_CHECK])dnl AC_REQUIRE([AC_ARG_PROGRAM])dnl AM_MISSING_PROG([ACLOCAL], [aclocal-${am__api_version}]) AM_MISSING_PROG([AUTOCONF], [autoconf]) AM_MISSING_PROG([AUTOMAKE], [automake-${am__api_version}]) AM_MISSING_PROG([AUTOHEADER], [autoheader]) AM_MISSING_PROG([MAKEINFO], [makeinfo]) AC_REQUIRE([AM_PROG_INSTALL_SH])dnl AC_REQUIRE([AM_PROG_INSTALL_STRIP])dnl AC_REQUIRE([AC_PROG_MKDIR_P])dnl # For better backward compatibility. To be removed once Automake 1.9.x # dies out for good. For more background, see: # # AC_SUBST([mkdir_p], ['$(MKDIR_P)']) # We need awk for the "check" target (and possibly the TAP driver). The # system "awk" is bad on some platforms. 
AC_REQUIRE([AC_PROG_AWK])dnl AC_REQUIRE([AC_PROG_MAKE_SET])dnl AC_REQUIRE([AM_SET_LEADING_DOT])dnl _AM_IF_OPTION([tar-ustar], [_AM_PROG_TAR([ustar])], [_AM_IF_OPTION([tar-pax], [_AM_PROG_TAR([pax])], [_AM_PROG_TAR([v7])])]) _AM_IF_OPTION([no-dependencies],, [AC_PROVIDE_IFELSE([AC_PROG_CC], [_AM_DEPENDENCIES([CC])], [m4_define([AC_PROG_CC], m4_defn([AC_PROG_CC])[_AM_DEPENDENCIES([CC])])])dnl AC_PROVIDE_IFELSE([AC_PROG_CXX], [_AM_DEPENDENCIES([CXX])], [m4_define([AC_PROG_CXX], m4_defn([AC_PROG_CXX])[_AM_DEPENDENCIES([CXX])])])dnl AC_PROVIDE_IFELSE([AC_PROG_OBJC], [_AM_DEPENDENCIES([OBJC])], [m4_define([AC_PROG_OBJC], m4_defn([AC_PROG_OBJC])[_AM_DEPENDENCIES([OBJC])])])dnl AC_PROVIDE_IFELSE([AC_PROG_OBJCXX], [_AM_DEPENDENCIES([OBJCXX])], [m4_define([AC_PROG_OBJCXX], m4_defn([AC_PROG_OBJCXX])[_AM_DEPENDENCIES([OBJCXX])])])dnl ]) AC_REQUIRE([AM_SILENT_RULES])dnl dnl The testsuite driver may need to know about EXEEXT, so add the dnl 'am__EXEEXT' conditional if _AM_COMPILER_EXEEXT was seen. This dnl macro is hooked onto _AC_COMPILER_EXEEXT early, see below. AC_CONFIG_COMMANDS_PRE(dnl [m4_provide_if([_AM_COMPILER_EXEEXT], [AM_CONDITIONAL([am__EXEEXT], [test -n "$EXEEXT"])])])dnl # POSIX will say in a future version that running "rm -f" with no argument # is OK; and we want to be able to make that assumption in our Makefile # recipes. So use an aggressive probe to check that the usage we want is # actually supported "in the wild" to an acceptable degree. # See automake bug#10828. # To make any issue more visible, cause the running configure to be aborted # by default if the 'rm' program in use doesn't match our expectations; the # user can still override this though. if rm -f && rm -fr && rm -rf; then : OK; else cat >&2 <<'END' Oops! Your 'rm' program seems unable to run without file operands specified on the command line, even when the '-f' option is present. 
This is contrary to the behaviour of most rm programs out there, and not conforming with the upcoming POSIX standard: Please tell bug-automake@gnu.org about your system, including the value of your $PATH and any error possibly output before this message. This can help us improve future automake versions. END if test x"$ACCEPT_INFERIOR_RM_PROGRAM" = x"yes"; then echo 'Configuration will proceed anyway, since you have set the' >&2 echo 'ACCEPT_INFERIOR_RM_PROGRAM variable to "yes"' >&2 echo >&2 else cat >&2 <<'END' Aborting the configuration process, to ensure you take notice of the issue. You can download and install GNU coreutils to get an 'rm' implementation that behaves properly: . If you want to complete the configuration process using your problematic 'rm' anyway, export the environment variable ACCEPT_INFERIOR_RM_PROGRAM to "yes", and re-run configure. END AC_MSG_ERROR([Your 'rm' program is bad, sorry.]) fi fi dnl The trailing newline in this macro's definition is deliberate, for dnl backward compatibility and to allow trailing 'dnl'-style comments dnl after the AM_INIT_AUTOMAKE invocation. See automake bug#16841. ]) dnl Hook into '_AC_COMPILER_EXEEXT' early to learn its expansion. Do not dnl add the conditional right here, as _AC_COMPILER_EXEEXT may be further dnl mangled by Autoconf and run in a shell conditional statement. m4_define([_AC_COMPILER_EXEEXT], m4_defn([_AC_COMPILER_EXEEXT])[m4_provide([_AM_COMPILER_EXEEXT])]) # When config.status generates a header, we must update the stamp-h file. # This file resides in the same directory as the config header # that is generated. The stamp files are numbered to have different names. # Autoconf calls _AC_AM_CONFIG_HEADER_HOOK (when defined) in the # loop where config.status creates the headers, so we can generate # our stamp files there. AC_DEFUN([_AC_AM_CONFIG_HEADER_HOOK], [# Compute $1's index in $config_headers. 
_am_arg=$1 _am_stamp_count=1 for _am_header in $config_headers :; do case $_am_header in $_am_arg | $_am_arg:* ) break ;; * ) _am_stamp_count=`expr $_am_stamp_count + 1` ;; esac done echo "timestamp for $_am_arg" >`AS_DIRNAME(["$_am_arg"])`/stamp-h[]$_am_stamp_count]) # Copyright (C) 2001-2014 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_PROG_INSTALL_SH # ------------------ # Define $install_sh. AC_DEFUN([AM_PROG_INSTALL_SH], [AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl if test x"${install_sh+set}" != xset; then case $am_aux_dir in *\ * | *\ *) install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;; *) install_sh="\${SHELL} $am_aux_dir/install-sh" esac fi AC_SUBST([install_sh])]) # Copyright (C) 2003-2014 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # Check whether the underlying file-system supports filenames # with a leading dot. For instance MS-DOS doesn't. AC_DEFUN([AM_SET_LEADING_DOT], [rm -rf .tst 2>/dev/null mkdir .tst 2>/dev/null if test -d .tst; then am__leading_dot=. else am__leading_dot=_ fi rmdir .tst 2>/dev/null AC_SUBST([am__leading_dot])]) # Check to see how 'make' treats includes. -*- Autoconf -*- # Copyright (C) 2001-2014 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_MAKE_INCLUDE() # ----------------- # Check to see how make treats includes. 
AC_DEFUN([AM_MAKE_INCLUDE], [am_make=${MAKE-make} cat > confinc << 'END' am__doit: @echo this is the am__doit target .PHONY: am__doit END # If we don't find an include directive, just comment out the code. AC_MSG_CHECKING([for style of include used by $am_make]) am__include="#" am__quote= _am_result=none # First try GNU make style include. echo "include confinc" > confmf # Ignore all kinds of additional output from 'make'. case `$am_make -s -f confmf 2> /dev/null` in #( *the\ am__doit\ target*) am__include=include am__quote= _am_result=GNU ;; esac # Now try BSD make style include. if test "$am__include" = "#"; then echo '.include "confinc"' > confmf case `$am_make -s -f confmf 2> /dev/null` in #( *the\ am__doit\ target*) am__include=.include am__quote="\"" _am_result=BSD ;; esac fi AC_SUBST([am__include]) AC_SUBST([am__quote]) AC_MSG_RESULT([$_am_result]) rm -f confinc confmf ]) # Fake the existence of programs that GNU maintainers use. -*- Autoconf -*- # Copyright (C) 1997-2014 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_MISSING_PROG(NAME, PROGRAM) # ------------------------------ AC_DEFUN([AM_MISSING_PROG], [AC_REQUIRE([AM_MISSING_HAS_RUN]) $1=${$1-"${am_missing_run}$2"} AC_SUBST($1)]) # AM_MISSING_HAS_RUN # ------------------ # Define MISSING if not defined so far and test if it is modern enough. # If it is, set am_missing_run to use it, otherwise, to nothing. 
AC_DEFUN([AM_MISSING_HAS_RUN], [AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl AC_REQUIRE_AUX_FILE([missing])dnl if test x"${MISSING+set}" != xset; then case $am_aux_dir in *\ * | *\ *) MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;; *) MISSING="\${SHELL} $am_aux_dir/missing" ;; esac fi # Use eval to expand $SHELL if eval "$MISSING --is-lightweight"; then am_missing_run="$MISSING " else am_missing_run= AC_MSG_WARN(['missing' script is too old or missing]) fi ]) # Helper functions for option handling. -*- Autoconf -*- # Copyright (C) 2001-2014 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # _AM_MANGLE_OPTION(NAME) # ----------------------- AC_DEFUN([_AM_MANGLE_OPTION], [[_AM_OPTION_]m4_bpatsubst($1, [[^a-zA-Z0-9_]], [_])]) # _AM_SET_OPTION(NAME) # -------------------- # Set option NAME. Presently that only means defining a flag for this option. AC_DEFUN([_AM_SET_OPTION], [m4_define(_AM_MANGLE_OPTION([$1]), [1])]) # _AM_SET_OPTIONS(OPTIONS) # ------------------------ # OPTIONS is a space-separated list of Automake options. AC_DEFUN([_AM_SET_OPTIONS], [m4_foreach_w([_AM_Option], [$1], [_AM_SET_OPTION(_AM_Option)])]) # _AM_IF_OPTION(OPTION, IF-SET, [IF-NOT-SET]) # ------------------------------------------- # Execute IF-SET if OPTION is set, IF-NOT-SET otherwise. AC_DEFUN([_AM_IF_OPTION], [m4_ifset(_AM_MANGLE_OPTION([$1]), [$2], [$3])]) # Copyright (C) 1999-2014 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # _AM_PROG_CC_C_O # --------------- # Like AC_PROG_CC_C_O, but changed for automake. We rewrite AC_PROG_CC # to automatically call this. 
AC_DEFUN([_AM_PROG_CC_C_O], [AC_REQUIRE([AM_AUX_DIR_EXPAND])dnl AC_REQUIRE_AUX_FILE([compile])dnl AC_LANG_PUSH([C])dnl AC_CACHE_CHECK( [whether $CC understands -c and -o together], [am_cv_prog_cc_c_o], [AC_LANG_CONFTEST([AC_LANG_PROGRAM([])]) # Make sure it works both with $CC and with simple cc. # Following AC_PROG_CC_C_O, we do the test twice because some # compilers refuse to overwrite an existing .o file with -o, # though they will create one. am_cv_prog_cc_c_o=yes for am_i in 1 2; do if AM_RUN_LOG([$CC -c conftest.$ac_ext -o conftest2.$ac_objext]) \ && test -f conftest2.$ac_objext; then : OK else am_cv_prog_cc_c_o=no break fi done rm -f core conftest* unset am_i]) if test "$am_cv_prog_cc_c_o" != yes; then # Losing compiler, so override with the script. # FIXME: It is wrong to rewrite CC. # But if we don't then we get into trouble of one sort or another. # A longer-term fix would be to have automake use am__CC in this case, # and then we could set am__CC="\$(top_srcdir)/compile \$(CC)" CC="$am_aux_dir/compile $CC" fi AC_LANG_POP([C])]) # For backward compatibility. AC_DEFUN_ONCE([AM_PROG_CC_C_O], [AC_REQUIRE([AC_PROG_CC])]) # Copyright (C) 2001-2014 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_RUN_LOG(COMMAND) # ------------------- # Run COMMAND, save the exit status in ac_status, and log it. # (This has been adapted from Autoconf's _AC_RUN_LOG macro.) AC_DEFUN([AM_RUN_LOG], [{ echo "$as_me:$LINENO: $1" >&AS_MESSAGE_LOG_FD ($1) >&AS_MESSAGE_LOG_FD 2>&AS_MESSAGE_LOG_FD ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&AS_MESSAGE_LOG_FD (exit $ac_status); }]) # Check to make sure that the build environment is sane. -*- Autoconf -*- # Copyright (C) 1996-2014 Free Software Foundation, Inc. 
# # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_SANITY_CHECK # --------------- AC_DEFUN([AM_SANITY_CHECK], [AC_MSG_CHECKING([whether build environment is sane]) # Reject unsafe characters in $srcdir or the absolute working directory # name. Accept space and tab only in the latter. am_lf=' ' case `pwd` in *[[\\\"\#\$\&\'\`$am_lf]]*) AC_MSG_ERROR([unsafe absolute working directory name]);; esac case $srcdir in *[[\\\"\#\$\&\'\`$am_lf\ \ ]]*) AC_MSG_ERROR([unsafe srcdir value: '$srcdir']);; esac # Do 'set' in a subshell so we don't clobber the current shell's # arguments. Must try -L first in case configure is actually a # symlink; some systems play weird games with the mod time of symlinks # (eg FreeBSD returns the mod time of the symlink's containing # directory). if ( am_has_slept=no for am_try in 1 2; do echo "timestamp, slept: $am_has_slept" > conftest.file set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null` if test "$[*]" = "X"; then # -L didn't work. set X `ls -t "$srcdir/configure" conftest.file` fi if test "$[*]" != "X $srcdir/configure conftest.file" \ && test "$[*]" != "X conftest.file $srcdir/configure"; then # If neither matched, then we have a broken ls. This can happen # if, for instance, CONFIG_SHELL is bash and it inherits a # broken ls alias from the environment. This has actually # happened. Such a system could not be considered "sane". AC_MSG_ERROR([ls -t appears to fail. Make sure there is not a broken alias in your environment]) fi if test "$[2]" = conftest.file || test $am_try -eq 2; then break fi # Just in case. sleep 1 am_has_slept=yes done test "$[2]" = conftest.file ) then # Ok. : else AC_MSG_ERROR([newly created file is older than distributed files! 
Check your system clock]) fi AC_MSG_RESULT([yes]) # If we didn't sleep, we still need to ensure time stamps of config.status and # generated files are strictly newer. am_sleep_pid= if grep 'slept: no' conftest.file >/dev/null 2>&1; then ( sleep 1 ) & am_sleep_pid=$! fi AC_CONFIG_COMMANDS_PRE( [AC_MSG_CHECKING([that generated files are newer than configure]) if test -n "$am_sleep_pid"; then # Hide warnings about reused PIDs. wait $am_sleep_pid 2>/dev/null fi AC_MSG_RESULT([done])]) rm -f conftest.file ]) # Copyright (C) 2009-2014 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_SILENT_RULES([DEFAULT]) # -------------------------- # Enable less verbose build rules; with the default set to DEFAULT # ("yes" being less verbose, "no" or empty being verbose). AC_DEFUN([AM_SILENT_RULES], [AC_ARG_ENABLE([silent-rules], [dnl AS_HELP_STRING( [--enable-silent-rules], [less verbose build output (undo: "make V=1")]) AS_HELP_STRING( [--disable-silent-rules], [verbose build output (undo: "make V=0")])dnl ]) case $enable_silent_rules in @%:@ ((( yes) AM_DEFAULT_VERBOSITY=0;; no) AM_DEFAULT_VERBOSITY=1;; *) AM_DEFAULT_VERBOSITY=m4_if([$1], [yes], [0], [1]);; esac dnl dnl A few 'make' implementations (e.g., NonStop OS and NextStep) dnl do not support nested variable expansions. dnl See automake bug#9928 and bug#10237. am_make=${MAKE-make} AC_CACHE_CHECK([whether $am_make supports nested variables], [am_cv_make_support_nested_variables], [if AS_ECHO([['TRUE=$(BAR$(V)) BAR0=false BAR1=true V=1 am__doit: @$(TRUE) .PHONY: am__doit']]) | $am_make -f - >/dev/null 2>&1; then am_cv_make_support_nested_variables=yes else am_cv_make_support_nested_variables=no fi]) if test $am_cv_make_support_nested_variables = yes; then dnl Using '$V' instead of '$(V)' breaks IRIX make. 
AM_V='$(V)' AM_DEFAULT_V='$(AM_DEFAULT_VERBOSITY)' else AM_V=$AM_DEFAULT_VERBOSITY AM_DEFAULT_V=$AM_DEFAULT_VERBOSITY fi AC_SUBST([AM_V])dnl AM_SUBST_NOTMAKE([AM_V])dnl AC_SUBST([AM_DEFAULT_V])dnl AM_SUBST_NOTMAKE([AM_DEFAULT_V])dnl AC_SUBST([AM_DEFAULT_VERBOSITY])dnl AM_BACKSLASH='\' AC_SUBST([AM_BACKSLASH])dnl _AM_SUBST_NOTMAKE([AM_BACKSLASH])dnl ]) # Copyright (C) 2001-2014 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # AM_PROG_INSTALL_STRIP # --------------------- # One issue with vendor 'install' (even GNU) is that you can't # specify the program used to strip binaries. This is especially # annoying in cross-compiling environments, where the build's strip # is unlikely to handle the host's binaries. # Fortunately install-sh will honor a STRIPPROG variable, so we # always use install-sh in "make install-strip", and initialize # STRIPPROG with the value of the STRIP variable (set by the user). AC_DEFUN([AM_PROG_INSTALL_STRIP], [AC_REQUIRE([AM_PROG_INSTALL_SH])dnl # Installed binaries are usually stripped using 'strip' when the user # run "make install-strip". However 'strip' might not be the right # tool to use in cross-compilation environments, therefore Automake # will honor the 'STRIP' environment variable to overrule this program. dnl Don't test for $cross_compiling = yes, because it might be 'maybe'. if test "$cross_compiling" != no; then AC_CHECK_TOOL([STRIP], [strip], :) fi INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s" AC_SUBST([INSTALL_STRIP_PROGRAM])]) # Copyright (C) 2006-2014 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. 
# _AM_SUBST_NOTMAKE(VARIABLE) # --------------------------- # Prevent Automake from outputting VARIABLE = @VARIABLE@ in Makefile.in. # This macro is traced by Automake. AC_DEFUN([_AM_SUBST_NOTMAKE]) # AM_SUBST_NOTMAKE(VARIABLE) # -------------------------- # Public sister of _AM_SUBST_NOTMAKE. AC_DEFUN([AM_SUBST_NOTMAKE], [_AM_SUBST_NOTMAKE($@)]) # Check how to create a tarball. -*- Autoconf -*- # Copyright (C) 2004-2014 Free Software Foundation, Inc. # # This file is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # _AM_PROG_TAR(FORMAT) # -------------------- # Check how to create a tarball in format FORMAT. # FORMAT should be one of 'v7', 'ustar', or 'pax'. # # Substitute a variable $(am__tar) that is a command # writing to stdout a FORMAT-tarball containing the directory # $tardir. # tardir=directory && $(am__tar) > result.tar # # Substitute a variable $(am__untar) that extract such # a tarball read from stdin. # $(am__untar) < result.tar # AC_DEFUN([_AM_PROG_TAR], [# Always define AMTAR for backward compatibility. Yes, it's still used # in the wild :-( We should find a proper way to deprecate it ... AC_SUBST([AMTAR], ['$${TAR-tar}']) # We'll loop over all known methods to create a tar archive until one works. _am_tools='gnutar m4_if([$1], [ustar], [plaintar]) pax cpio none' m4_if([$1], [v7], [am__tar='$${TAR-tar} chof - "$$tardir"' am__untar='$${TAR-tar} xf -'], [m4_case([$1], [ustar], [# The POSIX 1988 'ustar' format is defined with fixed-size fields. # There is notably a 21 bits limit for the UID and the GID. In fact, # the 'pax' utility can hang on bigger UID/GID (see automake bug#8343 # and bug#13588). am_max_uid=2097151 # 2^21 - 1 am_max_gid=$am_max_uid # The $UID and $GID variables are not portable, so we need to resort # to the POSIX-mandated id(1) utility. 
Errors in the 'id' calls # below are definitely unexpected, so allow the users to see them # (that is, avoid stderr redirection). am_uid=`id -u || echo unknown` am_gid=`id -g || echo unknown` AC_MSG_CHECKING([whether UID '$am_uid' is supported by ustar format]) if test $am_uid -le $am_max_uid; then AC_MSG_RESULT([yes]) else AC_MSG_RESULT([no]) _am_tools=none fi AC_MSG_CHECKING([whether GID '$am_gid' is supported by ustar format]) if test $am_gid -le $am_max_gid; then AC_MSG_RESULT([yes]) else AC_MSG_RESULT([no]) _am_tools=none fi], [pax], [], [m4_fatal([Unknown tar format])]) AC_MSG_CHECKING([how to create a $1 tar archive]) # Go ahead even if we have the value already cached. We do so because we # need to set the values for the 'am__tar' and 'am__untar' variables. _am_tools=${am_cv_prog_tar_$1-$_am_tools} for _am_tool in $_am_tools; do case $_am_tool in gnutar) for _am_tar in tar gnutar gtar; do AM_RUN_LOG([$_am_tar --version]) && break done am__tar="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$$tardir"' am__tar_="$_am_tar --format=m4_if([$1], [pax], [posix], [$1]) -chf - "'"$tardir"' am__untar="$_am_tar -xf -" ;; plaintar) # Must skip GNU tar: if it does not support --format= it doesn't create # ustar tarball either. (tar --version) >/dev/null 2>&1 && continue am__tar='tar chf - "$$tardir"' am__tar_='tar chf - "$tardir"' am__untar='tar xf -' ;; pax) am__tar='pax -L -x $1 -w "$$tardir"' am__tar_='pax -L -x $1 -w "$tardir"' am__untar='pax -r' ;; cpio) am__tar='find "$$tardir" -print | cpio -o -H $1 -L' am__tar_='find "$tardir" -print | cpio -o -H $1 -L' am__untar='cpio -i -H $1 -d' ;; none) am__tar=false am__tar_=false am__untar=false ;; esac # If the value was cached, stop now. We just wanted to have am__tar # and am__untar set. test -n "${am_cv_prog_tar_$1}" && break # tar/untar a dummy directory, and stop if the command works. 
rm -rf conftest.dir mkdir conftest.dir echo GrepMe > conftest.dir/file AM_RUN_LOG([tardir=conftest.dir && eval $am__tar_ >conftest.tar]) rm -rf conftest.dir if test -s conftest.tar; then AM_RUN_LOG([$am__untar /dev/null 2>&1 && break fi done rm -rf conftest.dir AC_CACHE_VAL([am_cv_prog_tar_$1], [am_cv_prog_tar_$1=$_am_tool]) AC_MSG_RESULT([$am_cv_prog_tar_$1])]) AC_SUBST([am__tar]) AC_SUBST([am__untar]) ]) # _AM_PROG_TAR m4_include([m4/gettext.m4]) m4_include([m4/iconv.m4]) m4_include([m4/lib-ld.m4]) m4_include([m4/lib-link.m4]) m4_include([m4/lib-prefix.m4]) m4_include([m4/nls.m4]) m4_include([m4/po.m4]) m4_include([m4/progtest.m4]) dar-2.6.8/misc/0000755000175000017520000000000013617552352010244 500000000000000dar-2.6.8/misc/Makefile.am0000644000175000017520000000225113617552272012221 00000000000000####################################################################### # dar - disk archive - a backup/restoration program # Copyright (C) 2002-2020 Denis Corbin # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # to contact the author : http://dar.linux.free.fr/email.html ####################################################################### dist_noinst_DATA = make_cygwin_dist batch_cygwin batch_linux batch_linux_ea README batch_solaris date_past_N_days Args/Makefile Args/args.c noinst_PROGRAMS=todos todos_SOURCES=todos.c dar-2.6.8/misc/batch_linux_ea0000755000175000017520000000273213606627706013067 00000000000000#!/bin/tcsh -f ####################################################################### # dar - disk archive - a backup/restoration program # Copyright (C) 2002-2020 Denis Corbin # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # to contact the author : http://dar.linux.free.fr/email.html ####################################################################### if(! 
-f configure) then echo "run this script from the root directory of CVS working directory" echo "which is where the 'configure' script resides" exit 1 endif if ("$1" == "") then echo "usage: $0 " exit endif misc/make_rpm $1 misc/dar_ea.rpm.spec mkdir rpm_dar_ea mv *.rpm rpm_dar_ea # misc/make_rpm $1 misc/dar32_ea.rpm.spec # mkdir rpm_dar32_ea # mv *.rpm rpm_dar32_ea misc/make_rpm $1 misc/dar64_ea.rpm.spec mkdir rpm_dar64_ea mv *.rpm rpm_dar64_ea dar-2.6.8/misc/date_past_N_days0000755000175000017520000000060613617551261013353 00000000000000#!/bin/bash if [ -z "$1" ] ; then echo "usage $0: " echo " returns the date it was N days ago expressed as seconds since 1969" echo "" echo "example: dar -c backup -af -A \`$0 3\` " echo " \"backup\" will only contain files that have changed during the" echo " last 3 days" exit 1 fi echo $(( `date +%s` - $1 * 86400 )) dar-2.6.8/misc/batch_linux0000755000175000017520000000267713606627706012432 00000000000000#!/bin/tcsh -f ####################################################################### # dar - disk archive - a backup/restoration program # Copyright (C) 2002-2020 Denis Corbin # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # to contact the author : http://dar.linux.free.fr/email.html ####################################################################### if(! 
-f configure) then echo "run this script from the root directory of CVS working directory" echo "which is where the 'configure' script resides" exit 1 endif if ("$1" == "") then echo "usage: $0 " exit endif misc/make_rpm $1 misc/dar.rpm.spec mkdir rpm_dar mv *.rpm rpm_dar # misc/make_rpm $1 misc/dar32.rpm.spec # mkdir rpm_dar32 # mv *.rpm rpm_dar32 misc/make_rpm $1 misc/dar64.rpm.spec mkdir rpm_dar64 mv *.rpm rpm_dar64 dar-2.6.8/misc/README0000644000175000017520000000157213617551261011047 00000000000000What will you find in this directory? -------------------- | IMPORTANT! | as there is so many distro today (yep, the Slackware hegemony is over...) | I do not produce anymore binary packages. Ask your preferred distro | maintainer team to upgrade their packages or wait for they do so. -------------------- many script that help(ed) build the releases: - batch_cygwin - batch_linux - match_linux_ea - make_cygwin_dist a helper program to translate documentation to DOS/Windows text format - todos.c a helper program to list argument passed to a given command - Args/args.c This one was used on dar-support mailing-list to troubleshoot arguments passed to dar from a user defined script a helper script to get the date past N dates - date_past_N_days and can be used as dar -c -af -A `date_past_N_days 3` ... this will only save file that have changed in the last 3 days dar-2.6.8/misc/make_cygwin_dist0000755000175000017520000000371413606427316013436 00000000000000#!/bin/tcsh -f if(! 
-f configure) then echo "run this script from the root directory of CVS working directory" echo "which is where the 'conigure' script resides" exit 1 endif if( "$1" == "") then echo "usage : $0 " exit endif set version=$1 set winflavor=$2 set darflavor=$3 set dir_name="dar$darflavor-$version-$winflavor" set build_dir=`pwd`/$dir_name echo "install directory is $build_dir" if( -e $build_dir) then echo "cannot create $build_dir file exists" exit 1 endif mkdir $build_dir make clean distclean || echo "ignoring error, tree already clean" # setenv CXXFLAGS -O ./configure --prefix=$build_dir --disable-gpgme-linking $4 $5 $6 $7 $8 $9 ${10} ${11} make $MAKE_OPT make install-strip mv $build_dir/bin/* $build_dir/lib/*.so.*.*.* $build_dir cp /bin/cygwin1.dll /bin/cygz.dll /bin/cygbz2-1.dll /bin/cygiconv-2.dll /usr/bin/cygintl-8.dll /bin/cyggcc_s-seh-1.dll /bin/cygstdc++-6.dll /bin/cyggpg-error-0.dll /bin/cyglzo2-2.dll /usr/bin/cyggcrypt-20.dll /usr/bin/cyggpgme-11.dll /usr/bin/cyglzma-5.dll /bin/cyggcrypt-20.dll /bin/cygrsync-2.dll /bin/cygcurl-4.dll /bin/cygcrypto-1.0.0.dll /bin/cyggssapi_krb5-2.dll /bin/cygidn2-0.dll /bin/cyglber-2-4-2.dll /bin/cygldap-2-4-2.dll /bin/cygnghttp2-14.dll /bin/cygpsl-5.dll /bin/cygssh2-1.dll /bin/cygssl-1.0.0.dll /bin/cygunistring-2.dll /bin/cygk5crypto-3.dll /bin/cygkrb5-3.dll /bin/cygkrb5support-0.dll /bin/cygcom_err-2.dll /bin/cygsasl2-3.dll /bin/cygcom_err-2.dll /usr/bin/cygbrotlidec-1.dll /usr/bin/cygcrypto-1.1.dll /usr/bin/cygssh-4.dll /usr/bin/cygssl-1.1.dll /usr/bin/cygbrotlicommon-1.dll $build_dir cp -R doc $build_dir foreach fichier (README TODO INSTALL ChangeLog THANKS COPYING) cp $fichier "$build_dir/`basename $fichier`.txt" misc/todos "$build_dir/`basename $fichier`.txt" end rm -rf $build_dir/man $build_dir/bin $build_dir/lib $build_dir/include rm -rf $build_dir/share zip -9 -r "$dir_name".zip $dir_name rm -rf $build_dir dar-2.6.8/misc/todos.c0000644000175000017520000000457013606627706011472 
00000000000000/********************************************************************* // dar - disk archive - a backup/restoration program // Copyright (C) 2002-2020 Denis Corbin // // This program is free software; you can redistribute it and/or // modify it under the terms of the GNU General Public License // as published by the Free Software Foundation; either version 2 // of the License, or (at your option) any later version. // // This program is distributed in the hope that it will be useful, // but WITHOUT ANY WARRANTY; without even the implied warranty of // MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the // GNU General Public License for more details. // // You should have received a copy of the GNU General Public License // along with this program; if not, write to the Free Software // Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. // // to contact the author : http://dar.linux.free.fr/email.html *********************************************************************/ #include #include #include #include #include #include #define TMP ".###tmp_file" #define MAX 10240 char c='\n'; int main(int argc,char *argv[]) { char buffer[MAX]; char sortie[2*MAX]; char *file; int p; register int i,lu,d; if(argc < 2) { printf("usage : %s liste de fichiers\n",argv[0]); return 1; } close(0);close(1); for(p=1;p {win32|win64} [options to dar]" exit endif if("$2" != "win32" && "$2" != "win64") then echo "unknown windows flavor" exit endif setenv MAKE_OPT "$3 $4 $5 $6 $7 $8 $9 ${10}" # misc/make_cygwin_dist $1 "" # misc/make_cygwin_dist $1 $2 "32" --enable-mode=32 misc/make_cygwin_dist $1 $2 "64" --enable-mode=64 dar-2.6.8/misc/Args/0000755000175000017520000000000013617552352011140 500000000000000dar-2.6.8/misc/Args/Makefile0000644000175000017520000000005613617551261012517 00000000000000 all: args args: args.c clean: rm -f args dar-2.6.8/misc/Args/args.c0000644000175000017520000000045513617551261012162 00000000000000#include int main(int 
argc, char *argv[]) { int i; printf("Argument received from the parent process, one by line surrounded by double quotes:\n"); for(i = 0; i < argc; ++i) printf("argument %3d: \"%s\"\n", i, argv[i]); printf("End of argument list\n"); return 0; } dar-2.6.8/misc/Makefile.in0000644000175000017520000004667013617552304012243 00000000000000# Makefile.in generated by automake 1.15 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2014 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ ####################################################################### # dar - disk archive - a backup/restoration program # Copyright (C) 2002-2020 Denis Corbin # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. 
# # to contact the author : http://dar.linux.free.fr/email.html ####################################################################### VPATH = @srcdir@ am__is_gnu_make = { \ if test -z '$(MAKELEVEL)'; then \ false; \ elif test -n '$(MAKE_HOST)'; then \ true; \ elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \ true; \ else \ false; \ fi; \ } am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) pkgdatadir = $(datadir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ pkglibdir = $(libdir)/@PACKAGE@ pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL 
= : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ noinst_PROGRAMS = todos$(EXEEXT) subdir = misc ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/iconv.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/nls.m4 $(top_srcdir)/m4/po.m4 \ $(top_srcdir)/m4/progtest.m4 $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) DIST_COMMON = $(srcdir)/Makefile.am $(dist_noinst_DATA) \ $(am__DIST_COMMON) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = PROGRAMS = $(noinst_PROGRAMS) am_todos_OBJECTS = todos.$(OBJEXT) todos_OBJECTS = $(am_todos_OBJECTS) todos_LDADD = $(LDADD) AM_V_lt = $(am__v_lt_@AM_V@) am__v_lt_ = $(am__v_lt_@AM_DEFAULT_V@) am__v_lt_0 = --silent am__v_lt_1 = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = DEFAULT_INCLUDES = -I.@am__isrc@ -I$(top_builddir) depcomp = $(SHELL) $(top_srcdir)/depcomp am__depfiles_maybe = depfiles am__mv = mv -f COMPILE = $(CC) $(DEFS) $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) \ $(CPPFLAGS) $(AM_CFLAGS) $(CFLAGS) LTCOMPILE = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=compile $(CC) $(DEFS) \ $(DEFAULT_INCLUDES) $(INCLUDES) $(AM_CPPFLAGS) $(CPPFLAGS) \ $(AM_CFLAGS) $(CFLAGS) AM_V_CC = $(am__v_CC_@AM_V@) am__v_CC_ = $(am__v_CC_@AM_DEFAULT_V@) am__v_CC_0 = @echo " CC " $@; am__v_CC_1 = CCLD = $(CC) LINK = $(LIBTOOL) $(AM_V_lt) --tag=CC $(AM_LIBTOOLFLAGS) \ $(LIBTOOLFLAGS) --mode=link $(CCLD) $(AM_CFLAGS) $(CFLAGS) \ $(AM_LDFLAGS) $(LDFLAGS) 
-o $@ AM_V_CCLD = $(am__v_CCLD_@AM_V@) am__v_CCLD_ = $(am__v_CCLD_@AM_DEFAULT_V@) am__v_CCLD_0 = @echo " CCLD " $@; am__v_CCLD_1 = SOURCES = $(todos_SOURCES) DIST_SOURCES = $(todos_SOURCES) am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac DATA = $(dist_noinst_DATA) am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) # Read a list of newline-separated strings from the standard input, # and print each of them once, without duplicates. Input order is # *not* preserved. am__uniquify_input = $(AWK) '\ BEGIN { nonempty = 0; } \ { items[$$0] = 1; nonempty = 1; } \ END { if (nonempty) { for (i in items) print i; }; } \ ' # Make sure the list of sources is unique. This is necessary because, # e.g., the same source file might be shared among _SOURCES variables # for different programs/libraries. am__define_uniq_tagged_files = \ list='$(am__tagged_files)'; \ unique=`for i in $$list; do \ if test -f "$$i"; then echo $$i; else echo $(srcdir)/$$i; fi; \ done | $(am__uniquify_input)` ETAGS = etags CTAGS = ctags am__DIST_COMMON = $(srcdir)/Makefile.in $(top_srcdir)/depcomp README DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) ACLOCAL = @ACLOCAL@ AMTAR = @AMTAR@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CXXSTDFLAGS = @CXXSTDFLAGS@ CYGPATH_W = @CYGPATH_W@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DOXYGEN_PROG = @DOXYGEN_PROG@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPGME_CFLAGS = @GPGME_CFLAGS@ GPGME_CONFIG 
= @GPGME_CONFIG@ GPGME_LIBS = @GPGME_LIBS@ GREP = @GREP@ HAS_DOT = @HAS_DOT@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LD = @LD@ LDFLAGS = @LDFLAGS@ LIBCURL_CFLAGS = @LIBCURL_CFLAGS@ LIBCURL_LIBS = @LIBCURL_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBS = @LIBS@ LIBTHREADAR_CFLAGS = @LIBTHREADAR_CFLAGS@ LIBTHREADAR_LIBS = @LIBTHREADAR_LIBS@ LIBTOOL = @LIBTOOL@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ LT_SYS_LIBRARY_PATH = @LT_SYS_LIBRARY_PATH@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ NM = @NM@ NMEDIT = @NMEDIT@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYEXT = @PYEXT@ PYFLAGS = @PYFLAGS@ RANLIB = @RANLIB@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ STRIP = @STRIP@ UPX_PROG = @UPX_PROG@ USE_NLS = @USE_NLS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ bindir = 
@bindir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ datadir = @datadir@ datarootdir = @datarootdir@ docdir = @docdir@ dot = @dot@ doxygen = @doxygen@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ groff = @groff@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ prefix = @prefix@ program_transform_name = @program_transform_name@ psdir = @psdir@ runstatedir = @runstatedir@ sbindir = @sbindir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp = @tmp@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ upx = @upx@ dist_noinst_DATA = make_cygwin_dist batch_cygwin batch_linux batch_linux_ea README batch_solaris date_past_N_days Args/Makefile Args/args.c todos_SOURCES = todos.c all: all-am .SUFFIXES: .SUFFIXES: .c .lo .o .obj $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --gnu misc/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --gnu misc/Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): clean-noinstPROGRAMS: @list='$(noinst_PROGRAMS)'; test -n "$$list" || exit 0; \ echo " rm -f" $$list; \ rm -f $$list || exit $$?; \ test -n "$(EXEEXT)" || exit 0; \ list=`for p in $$list; do echo "$$p"; done | sed 's/$(EXEEXT)$$//'`; \ echo " rm -f" $$list; \ rm -f $$list todos$(EXEEXT): $(todos_OBJECTS) $(todos_DEPENDENCIES) $(EXTRA_todos_DEPENDENCIES) @rm -f todos$(EXEEXT) $(AM_V_CCLD)$(LINK) $(todos_OBJECTS) $(todos_LDADD) $(LIBS) mostlyclean-compile: -rm -f *.$(OBJEXT) distclean-compile: -rm -f *.tab.c @AMDEP_TRUE@@am__include@ @am__quote@./$(DEPDIR)/todos.Po@am__quote@ .c.o: @am__fastdepCC_TRUE@ $(AM_V_CC)depbase=`echo $@ | sed 's|[^/]*$$|$(DEPDIR)/&|;s|\.o$$||'`;\ @am__fastdepCC_TRUE@ $(COMPILE) -MT $@ -MD -MP -MF $$depbase.Tpo -c -o $@ $< &&\ @am__fastdepCC_TRUE@ $(am__mv) $$depbase.Tpo $$depbase.Po @AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(COMPILE) -c -o $@ $< .c.obj: @am__fastdepCC_TRUE@ $(AM_V_CC)depbase=`echo $@ | sed 's|[^/]*$$|$(DEPDIR)/&|;s|\.obj$$||'`;\ @am__fastdepCC_TRUE@ $(COMPILE) -MT $@ -MD -MP -MF $$depbase.Tpo -c -o $@ `$(CYGPATH_W) '$<'` &&\ @am__fastdepCC_TRUE@ $(am__mv) $$depbase.Tpo $$depbase.Po 
@AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='$<' object='$@' libtool=no @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(COMPILE) -c -o $@ `$(CYGPATH_W) '$<'` .c.lo: @am__fastdepCC_TRUE@ $(AM_V_CC)depbase=`echo $@ | sed 's|[^/]*$$|$(DEPDIR)/&|;s|\.lo$$||'`;\ @am__fastdepCC_TRUE@ $(LTCOMPILE) -MT $@ -MD -MP -MF $$depbase.Tpo -c -o $@ $< &&\ @am__fastdepCC_TRUE@ $(am__mv) $$depbase.Tpo $$depbase.Plo @AMDEP_TRUE@@am__fastdepCC_FALSE@ $(AM_V_CC)source='$<' object='$@' libtool=yes @AMDEPBACKSLASH@ @AMDEP_TRUE@@am__fastdepCC_FALSE@ DEPDIR=$(DEPDIR) $(CCDEPMODE) $(depcomp) @AMDEPBACKSLASH@ @am__fastdepCC_FALSE@ $(AM_V_CC@am__nodep@)$(LTCOMPILE) -c -o $@ $< mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs ID: $(am__tagged_files) $(am__define_uniq_tagged_files); mkid -fID $$unique tags: tags-am TAGS: tags tags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) set x; \ here=`pwd`; \ $(am__define_uniq_tagged_files); \ shift; \ if test -z "$(ETAGS_ARGS)$$*$$unique"; then :; else \ test -n "$$unique" || unique=$$empty_fix; \ if test $$# -gt 0; then \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ "$$@" $$unique; \ else \ $(ETAGS) $(ETAGSFLAGS) $(AM_ETAGSFLAGS) $(ETAGS_ARGS) \ $$unique; \ fi; \ fi ctags: ctags-am CTAGS: ctags ctags-am: $(TAGS_DEPENDENCIES) $(am__tagged_files) $(am__define_uniq_tagged_files); \ test -z "$(CTAGS_ARGS)$$unique" \ || $(CTAGS) $(CTAGSFLAGS) $(AM_CTAGSFLAGS) $(CTAGS_ARGS) \ $$unique GTAGS: here=`$(am__cd) $(top_builddir) && pwd` \ && $(am__cd) $(top_srcdir) \ && gtags -i $(GTAGS_ARGS) "$$here" cscopelist: cscopelist-am cscopelist-am: $(am__tagged_files) list='$(am__tagged_files)'; \ case "$(srcdir)" in \ [\\/]* | ?:[\\/]*) sdir="$(srcdir)" ;; \ *) sdir=$(subdir)/$(srcdir) ;; \ esac; \ for i in $$list; do \ if test -f "$$i"; then \ echo "$(subdir)/$$i"; \ else \ echo "$$sdir/$$i"; \ fi; \ done >> 
$(top_builddir)/cscope.files distclean-tags: -rm -f TAGS ID GTAGS GRTAGS GSYMS GPATH tags distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(PROGRAMS) $(DATA) installdirs: install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f 
$(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." clean: clean-am clean-am: clean-generic clean-libtool clean-noinstPROGRAMS \ mostlyclean-am distclean: distclean-am -rm -rf ./$(DEPDIR) -rm -f Makefile distclean-am: clean-am distclean-compile distclean-generic \ distclean-tags dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -rf ./$(DEPDIR) -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-compile mostlyclean-generic \ mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: .MAKE: install-am install-strip .PHONY: CTAGS GTAGS TAGS all all-am check check-am clean clean-generic \ clean-libtool clean-noinstPROGRAMS cscopelist-am ctags \ ctags-am distclean distclean-compile distclean-generic \ distclean-libtool distclean-tags distdir dvi dvi-am html \ html-am info info-am install install-am install-data \ install-data-am install-dvi install-dvi-am install-exec \ install-exec-am install-html install-html-am install-info \ install-info-am install-man install-pdf install-pdf-am \ install-ps install-ps-am install-strip installcheck \ installcheck-am installdirs maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-compile \ mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \ tags tags-am uninstall uninstall-am .PRECIOUS: Makefile # Tell versions [3.59,3.63) of GNU make to not export all variables. 
# Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: dar-2.6.8/misc/batch_solaris0000644000175000017520000000401013606627706012723 00000000000000#!/bin/tcsh -f ####################################################################### # dar - disk archive - a backup/restoration program # Copyright (C) 2002-2020 Denis Corbin # # This program is free software; you can redistribute it and/or # modify it under the terms of the GNU General Public License # as published by the Free Software Foundation; either version 2 # of the License, or (at your option) any later version. # # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program; if not, write to the Free Software # Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. # # to contact the author : http://dar.linux.free.fr/email.html ####################################################################### if(! 
-f configure) then echo "run this script from the root directory of CVS working directory" echo "which is where the 'configure' script resides" exit 1 endif if("$1" == "" || "$2" == "" || "$3" == "") then echo "usage: $0 " echo "example : $0 2.0.0 sun4 Solaris" exit endif set filen="$1-$2-$3" ./configure CXXFLAGS=-O && make && mkdir dar-$filen && make DESTDIR=`pwd`/dar-$filen install-strip && tar -cf dar-$filen.tar dar-$filen && gzip -9 dar-$filen.tar || exit 1 make clean && make distclean || exit 1 # ./configure CXXFLAGS=-O --enable-mode=32 && make && mkdir dar32-$filen && make DESTDIR=`pwd`/dar32-$filen install-strip && tar -cf dar32-$filen.tar dar32-$filen && gzip -9 dar32-$filen.tar || exit 1 # make clean && make distclean || exit 1 ./configure CXXFLAGS=-O --enable-mode=64 && make && mkdir dar64-$filen && make DESTDIR=`pwd`/dar64-$filen install-strip && tar -cf dar64-$filen.tar dar64-$filen && gzip -9 dar64-$filen.tar || exit 1 make clean && make distclean || exit 1 dar-2.6.8/ltmain.sh0000644000175000017520000117147412756047127011072 00000000000000#! /bin/sh ## DO NOT EDIT - This file generated from ./build-aux/ltmain.in ## by inline-source v2014-01-03.01 # libtool (GNU libtool) 2.4.6 # Provide generalized library-building support services. # Written by Gordon Matzigkeit , 1996 # Copyright (C) 1996-2015 Free Software Foundation, Inc. # This is free software; see the source for copying conditions. There is NO # warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # GNU Libtool is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of the License, or # (at your option) any later version. 
# # As a special exception to the GNU General Public License, # if you distribute this file as part of a program or library that # is built using GNU Libtool, you may include this file under the # same distribution terms that you use for the rest of that program. # # GNU Libtool is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see . PROGRAM=libtool PACKAGE=libtool VERSION="2.4.6 Debian-2.4.6-2" package_revision=2.4.6 ## ------ ## ## Usage. ## ## ------ ## # Run './libtool --help' for help with using this script from the # command line. ## ------------------------------- ## ## User overridable command paths. ## ## ------------------------------- ## # After configure completes, it has a better idea of some of the # shell tools we need than the defaults used by the functions shared # with bootstrap, so set those here where they can still be over- # ridden by the user, but otherwise take precedence. : ${AUTOCONF="autoconf"} : ${AUTOMAKE="automake"} ## -------------------------- ## ## Source external libraries. ## ## -------------------------- ## # Much of our low-level functionality needs to be sourced from external # libraries, which are installed to $pkgauxdir. # Set a version string for this script. scriptversion=2015-01-20.17; # UTC # General shell script boiler plate, and helper functions. # Written by Gary V. Vaughan, 2004 # Copyright (C) 2004-2015 Free Software Foundation, Inc. # This is free software; see the source for copying conditions. There is NO # warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. 
# This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 3 of the License, or # (at your option) any later version. # As a special exception to the GNU General Public License, if you distribute # this file as part of a program or library that is built using GNU Libtool, # you may include this file under the same distribution terms that you use # for the rest of that program. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNES FOR A PARTICULAR PURPOSE. See the GNU # General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . # Please report bugs or propose patches to gary@gnu.org. ## ------ ## ## Usage. ## ## ------ ## # Evaluate this file near the top of your script to gain access to # the functions and variables defined here: # # . `echo "$0" | ${SED-sed} 's|[^/]*$||'`/build-aux/funclib.sh # # If you need to override any of the default environment variable # settings, do that before evaluating this file. ## -------------------- ## ## Shell normalisation. ## ## -------------------- ## # Some shells need a little help to be as Bourne compatible as possible. # Before doing anything else, make sure all that help has been provided! DUALCASE=1; export DUALCASE # for MKS sh if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : emulate sh NULLCMD=: # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which # is contrary to our usage. Disable this feature. alias -g '${1+"$@"}'='"$@"' setopt NO_GLOB_SUBST else case `(set -o) 2>/dev/null` in *posix*) set -o posix ;; esac fi # NLS nuisances: We save the old values in case they are required later. 
_G_user_locale= _G_safe_locale= for _G_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES do eval "if test set = \"\${$_G_var+set}\"; then save_$_G_var=\$$_G_var $_G_var=C export $_G_var _G_user_locale=\"$_G_var=\\\$save_\$_G_var; \$_G_user_locale\" _G_safe_locale=\"$_G_var=C; \$_G_safe_locale\" fi" done # CDPATH. (unset CDPATH) >/dev/null 2>&1 && unset CDPATH # Make sure IFS has a sensible default sp=' ' nl=' ' IFS="$sp $nl" # There are apparently some retarded systems that use ';' as a PATH separator! if test "${PATH_SEPARATOR+set}" != set; then PATH_SEPARATOR=: (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || PATH_SEPARATOR=';' } fi ## ------------------------- ## ## Locate command utilities. ## ## ------------------------- ## # func_executable_p FILE # ---------------------- # Check that FILE is an executable regular file. func_executable_p () { test -f "$1" && test -x "$1" } # func_path_progs PROGS_LIST CHECK_FUNC [PATH] # -------------------------------------------- # Search for either a program that responds to --version with output # containing "GNU", or else returned by CHECK_FUNC otherwise, by # trying all the directories in PATH with each of the elements of # PROGS_LIST. # # CHECK_FUNC should accept the path to a candidate program, and # set $func_check_prog_result if it truncates its output less than # $_G_path_prog_max characters. func_path_progs () { _G_progs_list=$1 _G_check_func=$2 _G_PATH=${3-"$PATH"} _G_path_prog_max=0 _G_path_prog_found=false _G_save_IFS=$IFS; IFS=${PATH_SEPARATOR-:} for _G_dir in $_G_PATH; do IFS=$_G_save_IFS test -z "$_G_dir" && _G_dir=. 
for _G_prog_name in $_G_progs_list; do for _exeext in '' .EXE; do _G_path_prog=$_G_dir/$_G_prog_name$_exeext func_executable_p "$_G_path_prog" || continue case `"$_G_path_prog" --version 2>&1` in *GNU*) func_path_progs_result=$_G_path_prog _G_path_prog_found=: ;; *) $_G_check_func $_G_path_prog func_path_progs_result=$func_check_prog_result ;; esac $_G_path_prog_found && break 3 done done done IFS=$_G_save_IFS test -z "$func_path_progs_result" && { echo "no acceptable sed could be found in \$PATH" >&2 exit 1 } } # We want to be able to use the functions in this file before configure # has figured out where the best binaries are kept, which means we have # to search for them ourselves - except when the results are already set # where we skip the searches. # Unless the user overrides by setting SED, search the path for either GNU # sed, or the sed that truncates its output the least. test -z "$SED" && { _G_sed_script=s/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb/ for _G_i in 1 2 3 4 5 6 7; do _G_sed_script=$_G_sed_script$nl$_G_sed_script done echo "$_G_sed_script" 2>/dev/null | sed 99q >conftest.sed _G_sed_script= func_check_prog_sed () { _G_path_prog=$1 _G_count=0 printf 0123456789 >conftest.in while : do cat conftest.in conftest.in >conftest.tmp mv conftest.tmp conftest.in cp conftest.in conftest.nl echo '' >> conftest.nl "$_G_path_prog" -f conftest.sed conftest.out 2>/dev/null || break diff conftest.out conftest.nl >/dev/null 2>&1 || break _G_count=`expr $_G_count + 1` if test "$_G_count" -gt "$_G_path_prog_max"; then # Best one so far, save it but keep looking for a better one func_check_prog_result=$_G_path_prog _G_path_prog_max=$_G_count fi # 10*(2^10) chars as input seems more than enough test 10 -lt "$_G_count" && break done rm -f conftest.in conftest.tmp conftest.nl conftest.out } func_path_progs "sed gsed" func_check_prog_sed $PATH:/usr/xpg4/bin rm -f conftest.sed SED=$func_path_progs_result } # Unless the user overrides by setting 
GREP, search the path for either GNU # grep, or the grep that truncates its output the least. test -z "$GREP" && { func_check_prog_grep () { _G_path_prog=$1 _G_count=0 _G_path_prog_max=0 printf 0123456789 >conftest.in while : do cat conftest.in conftest.in >conftest.tmp mv conftest.tmp conftest.in cp conftest.in conftest.nl echo 'GREP' >> conftest.nl "$_G_path_prog" -e 'GREP$' -e '-(cannot match)-' conftest.out 2>/dev/null || break diff conftest.out conftest.nl >/dev/null 2>&1 || break _G_count=`expr $_G_count + 1` if test "$_G_count" -gt "$_G_path_prog_max"; then # Best one so far, save it but keep looking for a better one func_check_prog_result=$_G_path_prog _G_path_prog_max=$_G_count fi # 10*(2^10) chars as input seems more than enough test 10 -lt "$_G_count" && break done rm -f conftest.in conftest.tmp conftest.nl conftest.out } func_path_progs "grep ggrep" func_check_prog_grep $PATH:/usr/xpg4/bin GREP=$func_path_progs_result } ## ------------------------------- ## ## User overridable command paths. ## ## ------------------------------- ## # All uppercase variable names are used for environment variables. These # variables can be overridden by the user before calling a script that # uses them if a suitable command of that name is not already available # in the command search PATH. : ${CP="cp -f"} : ${ECHO="printf %s\n"} : ${EGREP="$GREP -E"} : ${FGREP="$GREP -F"} : ${LN_S="ln -s"} : ${MAKE="make"} : ${MKDIR="mkdir"} : ${MV="mv -f"} : ${RM="rm -f"} : ${SHELL="${CONFIG_SHELL-/bin/sh}"} ## -------------------- ## ## Useful sed snippets. ## ## -------------------- ## sed_dirname='s|/[^/]*$||' sed_basename='s|^.*/||' # Sed substitution that helps us do robust quoting. It backslashifies # metacharacters that are still active within double-quoted strings. sed_quote_subst='s|\([`"$\\]\)|\\\1|g' # Same as above, but do not quote variable references. 
sed_double_quote_subst='s/\(["`\\]\)/\\\1/g' # Sed substitution that turns a string into a regex matching for the # string literally. sed_make_literal_regex='s|[].[^$\\*\/]|\\&|g' # Sed substitution that converts a w32 file name or path # that contains forward slashes, into one that contains # (escaped) backslashes. A very naive implementation. sed_naive_backslashify='s|\\\\*|\\|g;s|/|\\|g;s|\\|\\\\|g' # Re-'\' parameter expansions in output of sed_double_quote_subst that # were '\'-ed in input to the same. If an odd number of '\' preceded a # '$' in input to sed_double_quote_subst, that '$' was protected from # expansion. Since each input '\' is now two '\'s, look for any number # of runs of four '\'s followed by two '\'s and then a '$'. '\' that '$'. _G_bs='\\' _G_bs2='\\\\' _G_bs4='\\\\\\\\' _G_dollar='\$' sed_double_backslash="\ s/$_G_bs4/&\\ /g s/^$_G_bs2$_G_dollar/$_G_bs&/ s/\\([^$_G_bs]\\)$_G_bs2$_G_dollar/\\1$_G_bs2$_G_bs$_G_dollar/g s/\n//g" ## ----------------- ## ## Global variables. ## ## ----------------- ## # Except for the global variables explicitly listed below, the following # functions in the '^func_' namespace, and the '^require_' namespace # variables initialised in the 'Resource management' section, sourcing # this file will not pollute your global namespace with anything # else. There's no portable way to scope variables in Bourne shell # though, so actually running these functions will sometimes place # results into a variable named after the function, and often use # temporary variables in the '^_G_' namespace. If you are careful to # avoid using those namespaces casually in your sourcing script, things # should continue to work as you expect. And, of course, you can freely # overwrite any of the functions or variables defined here before # calling anything to customize them. EXIT_SUCCESS=0 EXIT_FAILURE=1 EXIT_MISMATCH=63 # $? = 63 is used to indicate version mismatch to missing. EXIT_SKIP=77 # $? 
= 77 is used to indicate a skipped test to automake. # Allow overriding, eg assuming that you follow the convention of # putting '$debug_cmd' at the start of all your functions, you can get # bash to show function call trace with: # # debug_cmd='eval echo "${FUNCNAME[0]} $*" >&2' bash your-script-name debug_cmd=${debug_cmd-":"} exit_cmd=: # By convention, finish your script with: # # exit $exit_status # # so that you can set exit_status to non-zero if you want to indicate # something went wrong during execution without actually bailing out at # the point of failure. exit_status=$EXIT_SUCCESS # Work around backward compatibility issue on IRIX 6.5. On IRIX 6.4+, sh # is ksh but when the shell is invoked as "sh" and the current value of # the _XPG environment variable is not equal to 1 (one), the special # positional parameter $0, within a function call, is the name of the # function. progpath=$0 # The name of this program. progname=`$ECHO "$progpath" |$SED "$sed_basename"` # Make sure we have an absolute progpath for reexecution: case $progpath in [\\/]*|[A-Za-z]:\\*) ;; *[\\/]*) progdir=`$ECHO "$progpath" |$SED "$sed_dirname"` progdir=`cd "$progdir" && pwd` progpath=$progdir/$progname ;; *) _G_IFS=$IFS IFS=${PATH_SEPARATOR-:} for progdir in $PATH; do IFS=$_G_IFS test -x "$progdir/$progname" && break done IFS=$_G_IFS test -n "$progdir" || progdir=`pwd` progpath=$progdir/$progname ;; esac ## ----------------- ## ## Standard options. ## ## ----------------- ## # The following options affect the operation of the functions defined # below, and should be set appropriately depending on run-time para- # meters passed on the command line. opt_dry_run=false opt_quiet=false opt_verbose=false # Categories 'all' and 'none' are always available. Append any others # you will pass as the first argument to func_warning from your own # code. warning_categories= # By default, display warnings according to 'opt_warning_types'. 
Set # 'warning_func' to ':' to elide all warnings, or func_fatal_error to # treat the next displayed warning as a fatal error. warning_func=func_warn_and_continue # Set to 'all' to display all warnings, 'none' to suppress all # warnings, or a space delimited list of some subset of # 'warning_categories' to display only the listed warnings. opt_warning_types=all ## -------------------- ## ## Resource management. ## ## -------------------- ## # This section contains definitions for functions that each ensure a # particular resource (a file, or a non-empty configuration variable for # example) is available, and if appropriate to extract default values # from pertinent package files. Call them using their associated # 'require_*' variable to ensure that they are executed, at most, once. # # It's entirely deliberate that calling these functions can set # variables that don't obey the namespace limitations obeyed by the rest # of this file, in order that that they be as useful as possible to # callers. # require_term_colors # ------------------- # Allow display of bold text on terminals that support it. require_term_colors=func_require_term_colors func_require_term_colors () { $debug_cmd test -t 1 && { # COLORTERM and USE_ANSI_COLORS environment variables take # precedence, because most terminfo databases neglect to describe # whether color sequences are supported. test -n "${COLORTERM+set}" && : ${USE_ANSI_COLORS="1"} if test 1 = "$USE_ANSI_COLORS"; then # Standard ANSI escape sequences tc_reset='' tc_bold=''; tc_standout='' tc_red=''; tc_green='' tc_blue=''; tc_cyan='' else # Otherwise trust the terminfo database after all. 
test -n "`tput sgr0 2>/dev/null`" && { tc_reset=`tput sgr0` test -n "`tput bold 2>/dev/null`" && tc_bold=`tput bold` tc_standout=$tc_bold test -n "`tput smso 2>/dev/null`" && tc_standout=`tput smso` test -n "`tput setaf 1 2>/dev/null`" && tc_red=`tput setaf 1` test -n "`tput setaf 2 2>/dev/null`" && tc_green=`tput setaf 2` test -n "`tput setaf 4 2>/dev/null`" && tc_blue=`tput setaf 4` test -n "`tput setaf 5 2>/dev/null`" && tc_cyan=`tput setaf 5` } fi } require_term_colors=: } ## ----------------- ## ## Function library. ## ## ----------------- ## # This section contains a variety of useful functions to call in your # scripts. Take note of the portable wrappers for features provided by # some modern shells, which will fall back to slower equivalents on # less featureful shells. # func_append VAR VALUE # --------------------- # Append VALUE onto the existing contents of VAR. # We should try to minimise forks, especially on Windows where they are # unreasonably slow, so skip the feature probes when bash or zsh are # being used: if test set = "${BASH_VERSION+set}${ZSH_VERSION+set}"; then : ${_G_HAVE_ARITH_OP="yes"} : ${_G_HAVE_XSI_OPS="yes"} # The += operator was introduced in bash 3.1 case $BASH_VERSION in [12].* | 3.0 | 3.0*) ;; *) : ${_G_HAVE_PLUSEQ_OP="yes"} ;; esac fi # _G_HAVE_PLUSEQ_OP # Can be empty, in which case the shell is probed, "yes" if += is # useable or anything else if it does not work. test -z "$_G_HAVE_PLUSEQ_OP" \ && (eval 'x=a; x+=" b"; test "a b" = "$x"') 2>/dev/null \ && _G_HAVE_PLUSEQ_OP=yes if test yes = "$_G_HAVE_PLUSEQ_OP" then # This is an XSI compatible shell, allowing a faster implementation... eval 'func_append () { $debug_cmd eval "$1+=\$2" }' else # ...otherwise fall back to using expr, which is often a shell builtin. func_append () { $debug_cmd eval "$1=\$$1\$2" } fi # func_append_quoted VAR VALUE # ---------------------------- # Quote VALUE and append to the end of shell variable VAR, separated # by a space. 
if test yes = "$_G_HAVE_PLUSEQ_OP"; then eval 'func_append_quoted () { $debug_cmd func_quote_for_eval "$2" eval "$1+=\\ \$func_quote_for_eval_result" }' else func_append_quoted () { $debug_cmd func_quote_for_eval "$2" eval "$1=\$$1\\ \$func_quote_for_eval_result" } fi # func_append_uniq VAR VALUE # -------------------------- # Append unique VALUE onto the existing contents of VAR, assuming # entries are delimited by the first character of VALUE. For example: # # func_append_uniq options " --another-option option-argument" # # will only append to $options if " --another-option option-argument " # is not already present somewhere in $options already (note spaces at # each end implied by leading space in second argument). func_append_uniq () { $debug_cmd eval _G_current_value='`$ECHO $'$1'`' _G_delim=`expr "$2" : '\(.\)'` case $_G_delim$_G_current_value$_G_delim in *"$2$_G_delim"*) ;; *) func_append "$@" ;; esac } # func_arith TERM... # ------------------ # Set func_arith_result to the result of evaluating TERMs. test -z "$_G_HAVE_ARITH_OP" \ && (eval 'test 2 = $(( 1 + 1 ))') 2>/dev/null \ && _G_HAVE_ARITH_OP=yes if test yes = "$_G_HAVE_ARITH_OP"; then eval 'func_arith () { $debug_cmd func_arith_result=$(( $* )) }' else func_arith () { $debug_cmd func_arith_result=`expr "$@"` } fi # func_basename FILE # ------------------ # Set func_basename_result to FILE with everything up to and including # the last / stripped. if test yes = "$_G_HAVE_XSI_OPS"; then # If this shell supports suffix pattern removal, then use it to avoid # forking. Hide the definitions single quotes in case the shell chokes # on unsupported syntax... _b='func_basename_result=${1##*/}' _d='case $1 in */*) func_dirname_result=${1%/*}$2 ;; * ) func_dirname_result=$3 ;; esac' else # ...otherwise fall back to using sed. 
_b='func_basename_result=`$ECHO "$1" |$SED "$sed_basename"`' _d='func_dirname_result=`$ECHO "$1" |$SED "$sed_dirname"` if test "X$func_dirname_result" = "X$1"; then func_dirname_result=$3 else func_append func_dirname_result "$2" fi' fi eval 'func_basename () { $debug_cmd '"$_b"' }' # func_dirname FILE APPEND NONDIR_REPLACEMENT # ------------------------------------------- # Compute the dirname of FILE. If nonempty, add APPEND to the result, # otherwise set result to NONDIR_REPLACEMENT. eval 'func_dirname () { $debug_cmd '"$_d"' }' # func_dirname_and_basename FILE APPEND NONDIR_REPLACEMENT # -------------------------------------------------------- # Perform func_basename and func_dirname in a single function # call: # dirname: Compute the dirname of FILE. If nonempty, # add APPEND to the result, otherwise set result # to NONDIR_REPLACEMENT. # value returned in "$func_dirname_result" # basename: Compute filename of FILE. # value retuned in "$func_basename_result" # For efficiency, we do not delegate to the functions above but instead # duplicate the functionality here. eval 'func_dirname_and_basename () { $debug_cmd '"$_b"' '"$_d"' }' # func_echo ARG... # ---------------- # Echo program name prefixed message. func_echo () { $debug_cmd _G_message=$* func_echo_IFS=$IFS IFS=$nl for _G_line in $_G_message; do IFS=$func_echo_IFS $ECHO "$progname: $_G_line" done IFS=$func_echo_IFS } # func_echo_all ARG... # -------------------- # Invoke $ECHO with all args, space-separated. func_echo_all () { $ECHO "$*" } # func_echo_infix_1 INFIX ARG... # ------------------------------ # Echo program name, followed by INFIX on the first line, with any # additional lines not showing INFIX. 
func_echo_infix_1 ()
{
    $debug_cmd

    $require_term_colors

    _G_infix=$1; shift
    _G_indent=$_G_infix
    _G_prefix="$progname: $_G_infix: "
    _G_message=$*

    # Strip color escape sequences before counting printable length
    for _G_tc in "$tc_reset" "$tc_bold" "$tc_standout" "$tc_red" "$tc_green" "$tc_blue" "$tc_cyan"
    do
      test -n "$_G_tc" && {
        _G_esc_tc=`$ECHO "$_G_tc" | $SED "$sed_make_literal_regex"`
        _G_indent=`$ECHO "$_G_indent" | $SED "s|$_G_esc_tc||g"`
      }
    done
    # Continuation lines are indented by spaces to align under INFIX.
    _G_indent="$progname: "`echo "$_G_indent" | $SED 's|.| |g'`" " ## exclude from sc_prohibit_nested_quotes

    func_echo_infix_1_IFS=$IFS
    IFS=$nl
    for _G_line in $_G_message; do
      IFS=$func_echo_infix_1_IFS
      $ECHO "$_G_prefix$tc_bold$_G_line$tc_reset" >&2
      _G_prefix=$_G_indent
    done
    IFS=$func_echo_infix_1_IFS
}


# func_error ARG...
# -----------------
# Echo program name prefixed message to standard error.
func_error ()
{
    $debug_cmd

    $require_term_colors

    func_echo_infix_1 "  $tc_standout${tc_red}error$tc_reset" "$*" >&2
}


# func_fatal_error ARG...
# -----------------------
# Echo program name prefixed message to standard error, and exit.
func_fatal_error ()
{
    $debug_cmd

    func_error "$*"
    exit $EXIT_FAILURE
}


# func_grep EXPRESSION FILENAME
# -----------------------------
# Check whether EXPRESSION matches any line of FILENAME, without output.
func_grep ()
{
    $debug_cmd

    $GREP "$1" "$2" >/dev/null 2>&1
}


# func_len STRING
# ---------------
# Set func_len_result to the length of STRING.  STRING may not
# start with a hyphen.
test -z "$_G_HAVE_XSI_OPS" \
    && (eval 'x=a/b/c; test 5aa/bb/cc = "${#x}${x%%/*}${x%/*}${x#*/}${x##*/}"') 2>/dev/null \
    && _G_HAVE_XSI_OPS=yes

if test yes = "$_G_HAVE_XSI_OPS"; then
  eval 'func_len ()
  {
    $debug_cmd

    func_len_result=${#1}
  }'
else
  # expr fallback; on failure assume the worst-case command-line length.
  func_len ()
  {
    $debug_cmd

    func_len_result=`expr "$1" : ".*" 2>/dev/null || echo $max_cmd_len`
  }
fi


# func_mkdir_p DIRECTORY-PATH
# ---------------------------
# Make sure the entire path to DIRECTORY-PATH is available.
func_mkdir_p ()
{
    $debug_cmd

    _G_directory_path=$1
    _G_dir_list=

    if test -n "$_G_directory_path" && test : != "$opt_dry_run"; then

      # Protect directory names starting with '-'
      case $_G_directory_path in
        -*) _G_directory_path=./$_G_directory_path ;;
      esac

      # While some portion of DIR does not yet exist...
      while test ! -d "$_G_directory_path"; do
        # ...make a list in topmost first order.  Use a colon delimited
        # list in case some portion of path contains whitespace.
        _G_dir_list=$_G_directory_path:$_G_dir_list

        # If the last portion added has no slash in it, the list is done
        case $_G_directory_path in */*) ;; *) break ;; esac

        # ...otherwise throw away the child directory and loop
        _G_directory_path=`$ECHO "$_G_directory_path" | $SED -e "$sed_dirname"`
      done
      _G_dir_list=`$ECHO "$_G_dir_list" | $SED 's|:*$||'`

      func_mkdir_p_IFS=$IFS; IFS=:
      for _G_dir in $_G_dir_list; do
        IFS=$func_mkdir_p_IFS
        # mkdir can fail with a 'File exists' error if two processes
        # try to create one of the directories concurrently.  Don't
        # stop in that case!
        $MKDIR "$_G_dir" 2>/dev/null || :
      done
      IFS=$func_mkdir_p_IFS

      # Bail out if we (or some other process) failed to create a directory.
      test -d "$_G_directory_path" || \
        func_fatal_error "Failed to create '$1'"
    fi
}


# func_mktempdir [BASENAME]
# -------------------------
# Make a temporary directory that won't clash with other running
# libtool processes, and avoids race conditions if possible.  If
# given, BASENAME is the basename for that directory.
func_mktempdir ()
{
    $debug_cmd

    _G_template=${TMPDIR-/tmp}/${1-$progname}

    if test : = "$opt_dry_run"; then
      # Return a directory name, but don't create it in dry-run mode
      _G_tmpdir=$_G_template-$$
    else

      # If mktemp works, use that first and foremost
      _G_tmpdir=`mktemp -d "$_G_template-XXXXXXXX" 2>/dev/null`

      if test ! -d "$_G_tmpdir"; then
        # Failing that, at least try and use $RANDOM to avoid a race
        _G_tmpdir=$_G_template-${RANDOM-0}$$

        func_mktempdir_umask=`umask`
        umask 0077
        $MKDIR "$_G_tmpdir"
        umask $func_mktempdir_umask
      fi

      # If we're not in dry-run mode, bomb out on failure
      test -d "$_G_tmpdir" || \
        func_fatal_error "cannot create temporary directory '$_G_tmpdir'"
    fi

    $ECHO "$_G_tmpdir"
}


# func_normal_abspath PATH
# ------------------------
# Remove doubled-up and trailing slashes, "." path components,
# and cancel out any ".." path components in PATH after making
# it an absolute path.
func_normal_abspath ()
{
    $debug_cmd

    # These SED scripts presuppose an absolute path with a trailing slash.
    _G_pathcar='s|^/\([^/]*\).*$|\1|'
    _G_pathcdr='s|^/[^/]*||'
    _G_removedotparts=':dotsl
		s|/\./|/|g
		t dotsl
		s|/\.$|/|'
    _G_collapseslashes='s|/\{1,\}|/|g'
    _G_finalslash='s|/*$|/|'

    # Start from root dir and reassemble the path.
    func_normal_abspath_result=
    func_normal_abspath_tpath=$1
    func_normal_abspath_altnamespace=
    case $func_normal_abspath_tpath in
      "")
        # Empty path, that just means $cwd.
        func_stripname '' '/' "`pwd`"
        func_normal_abspath_result=$func_stripname_result
        return
        ;;
      # The next three entries are used to spot a run of precisely
      # two leading slashes without using negated character classes;
      # we take advantage of case's first-match behaviour.
      ///*)
        # Unusual form of absolute path, do nothing.
        ;;
      //*)
        # Not necessarily an ordinary path; POSIX reserves leading '//'
        # and for example Cygwin uses it to access remote file shares
        # over CIFS/SMB, so we conserve a leading double slash if found.
        func_normal_abspath_altnamespace=/
        ;;
      /*)
        # Absolute path, do nothing.
        ;;
      *)
        # Relative path, prepend $cwd.
        func_normal_abspath_tpath=`pwd`/$func_normal_abspath_tpath
        ;;
    esac

    # Cancel out all the simple stuff to save iterations.  We also want
    # the path to end with a slash for ease of parsing, so make sure
    # there is one (and only one) here.
    func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \
          -e "$_G_removedotparts" -e "$_G_collapseslashes" -e "$_G_finalslash"`
    while :; do
      # Processed it all yet?
      if test / = "$func_normal_abspath_tpath"; then
        # If we ascended to the root using ".." the result may be empty now.
        if test -z "$func_normal_abspath_result"; then
          func_normal_abspath_result=/
        fi
        break
      fi
      func_normal_abspath_tcomponent=`$ECHO "$func_normal_abspath_tpath" | $SED \
          -e "$_G_pathcar"`
      func_normal_abspath_tpath=`$ECHO "$func_normal_abspath_tpath" | $SED \
          -e "$_G_pathcdr"`
      # Figure out what to do with it
      case $func_normal_abspath_tcomponent in
        "")
          # Trailing empty path component, ignore it.
          ;;
        ..)
          # Parent dir; strip last assembled component from result.
          func_dirname "$func_normal_abspath_result"
          func_normal_abspath_result=$func_dirname_result
          ;;
        *)
          # Actual path component, append it.
          func_append func_normal_abspath_result "/$func_normal_abspath_tcomponent"
          ;;
      esac
    done

    # Restore leading double-slash if one was found on entry.
    func_normal_abspath_result=$func_normal_abspath_altnamespace$func_normal_abspath_result
}


# func_notquiet ARG...
# --------------------
# Echo program name prefixed message only when not in quiet mode.
func_notquiet ()
{
    $debug_cmd

    $opt_quiet || func_echo ${1+"$@"}

    # A bug in bash halts the script if the last line of a function
    # fails when set -e is in force, so we need another command to
    # work around that:
    :
}


# func_relative_path SRCDIR DSTDIR
# --------------------------------
# Set func_relative_path_result to the relative path from SRCDIR to DSTDIR.
func_relative_path ()
{
    $debug_cmd

    func_relative_path_result=
    func_normal_abspath "$1"
    func_relative_path_tlibdir=$func_normal_abspath_result
    func_normal_abspath "$2"
    func_relative_path_tbindir=$func_normal_abspath_result

    # Ascend the tree starting from libdir
    while :; do
      # check if we have found a prefix of bindir
      case $func_relative_path_tbindir in
        $func_relative_path_tlibdir)
          # found an exact match
          func_relative_path_tcancelled=
          break
          ;;
        $func_relative_path_tlibdir*)
          # found a matching prefix
          func_stripname "$func_relative_path_tlibdir" '' "$func_relative_path_tbindir"
          func_relative_path_tcancelled=$func_stripname_result
          if test -z "$func_relative_path_result"; then
            func_relative_path_result=.
          fi
          break
          ;;
        *)
          # No match yet: go up one level and prepend '..' to the result.
          func_dirname $func_relative_path_tlibdir
          func_relative_path_tlibdir=$func_dirname_result
          if test -z "$func_relative_path_tlibdir"; then
            # Have to descend all the way to the root!
            func_relative_path_result=../$func_relative_path_result
            func_relative_path_tcancelled=$func_relative_path_tbindir
            break
          fi
          func_relative_path_result=../$func_relative_path_result
          ;;
      esac
    done

    # Now calculate path; take care to avoid doubling-up slashes.
    func_stripname '' '/' "$func_relative_path_result"
    func_relative_path_result=$func_stripname_result
    func_stripname '/' '/' "$func_relative_path_tcancelled"
    if test -n "$func_stripname_result"; then
      func_append func_relative_path_result "/$func_stripname_result"
    fi

    # Normalisation.  If bindir is libdir, return '.' else relative path.
    if test -n "$func_relative_path_result"; then
      func_stripname './' '' "$func_relative_path_result"
      func_relative_path_result=$func_stripname_result
    fi

    test -n "$func_relative_path_result" || func_relative_path_result=.

    :
}


# func_quote_for_eval ARG...
# --------------------------
# Aesthetically quote ARGs to be evaled later.
# This function returns two values:
#   i) func_quote_for_eval_result
#      double-quoted, suitable for a subsequent eval
#  ii) func_quote_for_eval_unquoted_result
#      has all characters that are still active within double
#      quotes backslashified.
func_quote_for_eval ()
{
    $debug_cmd

    func_quote_for_eval_unquoted_result=
    func_quote_for_eval_result=
    while test 0 -lt $#; do
      case $1 in
        *[\\\`\"\$]*)
	  _G_unquoted_arg=`printf '%s\n' "$1" |$SED "$sed_quote_subst"` ;;
        *)
          _G_unquoted_arg=$1 ;;
      esac
      if test -n "$func_quote_for_eval_unquoted_result"; then
	func_append func_quote_for_eval_unquoted_result " $_G_unquoted_arg"
      else
        func_append func_quote_for_eval_unquoted_result "$_G_unquoted_arg"
      fi

      case $_G_unquoted_arg in
        # Double-quote args containing shell metacharacters to delay
        # word splitting, command substitution and variable expansion
        # for a subsequent eval.
        # Many Bourne shells cannot handle close brackets correctly
        # in scan sets, so we specify it separately.
        # NB: the scan set below contains a literal space and a literal tab.
        *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \	]*|*]*|"")
          _G_quoted_arg=\"$_G_unquoted_arg\"
          ;;
        *)
          _G_quoted_arg=$_G_unquoted_arg
	  ;;
      esac

      if test -n "$func_quote_for_eval_result"; then
	func_append func_quote_for_eval_result " $_G_quoted_arg"
      else
        func_append func_quote_for_eval_result "$_G_quoted_arg"
      fi
      shift
    done
}


# func_quote_for_expand ARG
# -------------------------
# Aesthetically quote ARG to be evaled later; same as above,
# but do not quote variable references.
func_quote_for_expand ()
{
    $debug_cmd

    case $1 in
      *[\\\`\"]*)
	_G_arg=`$ECHO "$1" | $SED \
	    -e "$sed_double_quote_subst" -e "$sed_double_backslash"` ;;
      *)
        _G_arg=$1 ;;
    esac

    case $_G_arg in
      # Double-quote args containing shell metacharacters to delay
      # word splitting and command substitution for a subsequent eval.
      # Many Bourne shells cannot handle close brackets correctly
      # in scan sets, so we specify it separately.
      # NB: the scan set below contains a literal space and a literal tab.
      *[\[\~\#\^\&\*\(\)\{\}\|\;\<\>\?\'\ \	]*|*]*|"")
        _G_arg=\"$_G_arg\"
        ;;
    esac

    func_quote_for_expand_result=$_G_arg
}


# func_stripname PREFIX SUFFIX NAME
# ---------------------------------
# strip PREFIX and SUFFIX from NAME, and store in func_stripname_result.
# PREFIX and SUFFIX must not contain globbing or regex special
# characters, hashes, percent signs, but SUFFIX may contain a leading
# dot (in which case that matches only a dot).
if test yes = "$_G_HAVE_XSI_OPS"; then
  eval 'func_stripname ()
  {
    $debug_cmd

    # pdksh 5.2.14 does not do ${X%$Y} correctly if both X and Y are
    # positional parameters, so assign one to ordinary variable first.
    func_stripname_result=$3
    func_stripname_result=${func_stripname_result#"$1"}
    func_stripname_result=${func_stripname_result%"$2"}
  }'
else
  func_stripname ()
  {
    $debug_cmd

    case $2 in
      .*) func_stripname_result=`$ECHO "$3" | $SED -e "s%^$1%%" -e "s%\\\\$2\$%%"`;;
      *)  func_stripname_result=`$ECHO "$3" | $SED -e "s%^$1%%" -e "s%$2\$%%"`;;
    esac
  }
fi


# func_show_eval CMD [FAIL_EXP]
# -----------------------------
# Unless opt_quiet is true, then output CMD.  Then, if opt_dryrun is
# not true, evaluate CMD.  If the evaluation of CMD fails, and FAIL_EXP
# is given, then evaluate it.
func_show_eval ()
{
    $debug_cmd

    _G_cmd=$1
    _G_fail_exp=${2-':'}

    func_quote_for_expand "$_G_cmd"
    eval "func_notquiet $func_quote_for_expand_result"

    $opt_dry_run || {
      eval "$_G_cmd"
      _G_status=$?
      if test 0 -ne "$_G_status"; then
	eval "(exit $_G_status); $_G_fail_exp"
      fi
    }
}


# func_show_eval_locale CMD [FAIL_EXP]
# ------------------------------------
# Unless opt_quiet is true, then output CMD.  Then, if opt_dryrun is
# not true, evaluate CMD.  If the evaluation of CMD fails, and FAIL_EXP
# is given, then evaluate it.  Use the saved locale for evaluation.
func_show_eval_locale ()
{
    $debug_cmd

    _G_cmd=$1
    _G_fail_exp=${2-':'}

    $opt_quiet || {
      func_quote_for_expand "$_G_cmd"
      eval "func_echo $func_quote_for_expand_result"
    }

    $opt_dry_run || {
      # Run CMD under the user's original locale, then restore ours.
      eval "$_G_user_locale
	    $_G_cmd"
      _G_status=$?
      eval "$_G_safe_locale"
      if test 0 -ne "$_G_status"; then
	eval "(exit $_G_status); $_G_fail_exp"
      fi
    }
}


# func_tr_sh
# ----------
# Turn $1 into a string suitable for a shell variable name.
# Result is stored in $func_tr_sh_result.  All characters
# not in the set a-zA-Z0-9_ are replaced with '_'.  Further,
# if $1 begins with a digit, a '_' is prepended as well.
func_tr_sh ()
{
    $debug_cmd

    case $1 in
    [0-9]* | *[!a-zA-Z0-9_]*)
      func_tr_sh_result=`$ECHO "$1" | $SED -e 's/^\([0-9]\)/_\1/' -e 's/[^a-zA-Z0-9_]/_/g'`
      ;;
    * )
      func_tr_sh_result=$1
      ;;
    esac
}


# func_verbose ARG...
# -------------------
# Echo program name prefixed message in verbose mode only.
func_verbose ()
{
    $debug_cmd

    $opt_verbose && func_echo "$*"

    # Guard against '$opt_verbose' returning non-zero under 'set -e'.
    :
}


# func_warn_and_continue ARG...
# -----------------------------
# Echo program name prefixed warning message to standard error.
func_warn_and_continue ()
{
    $debug_cmd

    $require_term_colors

    func_echo_infix_1 "${tc_red}warning$tc_reset" "$*" >&2
}


# func_warning CATEGORY ARG...
# ----------------------------
# Echo program name prefixed warning message to standard error.  Warning
# messages can be filtered according to CATEGORY, where this function
# elides messages where CATEGORY is not listed in the global variable
# 'opt_warning_types'.
func_warning ()
{
    $debug_cmd

    # CATEGORY must be in the warning_categories list!
    case " $warning_categories " in
      *" $1 "*) ;;
      *) func_internal_error "invalid warning category '$1'" ;;
    esac

    _G_category=$1
    shift

    case " $opt_warning_types " in
      *" $_G_category "*) $warning_func ${1+"$@"} ;;
    esac
}


# func_sort_ver VER1 VER2
# -----------------------
# 'sort -V' is not generally available.
# Note this deviates from the version comparison in automake # in that it treats 1.5 < 1.5.0, and treats 1.4.4a < 1.4-p3a # but this should suffice as we won't be specifying old # version formats or redundant trailing .0 in bootstrap.conf. # If we did want full compatibility then we should probably # use m4_version_compare from autoconf. func_sort_ver () { $debug_cmd printf '%s\n%s\n' "$1" "$2" \ | sort -t. -k 1,1n -k 2,2n -k 3,3n -k 4,4n -k 5,5n -k 6,6n -k 7,7n -k 8,8n -k 9,9n } # func_lt_ver PREV CURR # --------------------- # Return true if PREV and CURR are in the correct order according to # func_sort_ver, otherwise false. Use it like this: # # func_lt_ver "$prev_ver" "$proposed_ver" || func_fatal_error "..." func_lt_ver () { $debug_cmd test "x$1" = x`func_sort_ver "$1" "$2" | $SED 1q` } # Local variables: # mode: shell-script # sh-indentation: 2 # eval: (add-hook 'before-save-hook 'time-stamp) # time-stamp-pattern: "10/scriptversion=%:y-%02m-%02d.%02H; # UTC" # time-stamp-time-zone: "UTC" # End: #! /bin/sh # Set a version string for this script. scriptversion=2014-01-07.03; # UTC # A portable, pluggable option parser for Bourne shell. # Written by Gary V. Vaughan, 2010 # Copyright (C) 2010-2015 Free Software Foundation, Inc. # This is free software; see the source for copying conditions. There is NO # warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # This program is free software: you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation, either version 3 of the License, or # (at your option) any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. 
If not, see . # Please report bugs or propose patches to gary@gnu.org. ## ------ ## ## Usage. ## ## ------ ## # This file is a library for parsing options in your shell scripts along # with assorted other useful supporting features that you can make use # of too. # # For the simplest scripts you might need only: # # #!/bin/sh # . relative/path/to/funclib.sh # . relative/path/to/options-parser # scriptversion=1.0 # func_options ${1+"$@"} # eval set dummy "$func_options_result"; shift # ...rest of your script... # # In order for the '--version' option to work, you will need to have a # suitably formatted comment like the one at the top of this file # starting with '# Written by ' and ending with '# warranty; '. # # For '-h' and '--help' to work, you will also need a one line # description of your script's purpose in a comment directly above the # '# Written by ' line, like the one at the top of this file. # # The default options also support '--debug', which will turn on shell # execution tracing (see the comment above debug_cmd below for another # use), and '--verbose' and the func_verbose function to allow your script # to display verbose messages only when your user has specified # '--verbose'. # # After sourcing this file, you can plug processing for additional # options by amending the variables from the 'Configuration' section # below, and following the instructions in the 'Option parsing' # section further down. ## -------------- ## ## Configuration. ## ## -------------- ## # You should override these variables in your script after sourcing this # file so that they reflect the customisations you have added to the # option parser. # The usage line for option parsing errors and the start of '-h' and # '--help' output messages. You can embed shell variables for delayed # expansion at the time the message is displayed, but you will need to # quote other shell meta-characters carefully to prevent them being # expanded when the contents are evaled. 
usage='$progpath [OPTION]...' # Short help message in response to '-h' and '--help'. Add to this or # override it after sourcing this library to reflect the full set of # options your script accepts. usage_message="\ --debug enable verbose shell tracing -W, --warnings=CATEGORY report the warnings falling in CATEGORY [all] -v, --verbose verbosely report processing --version print version information and exit -h, --help print short or long help message and exit " # Additional text appended to 'usage_message' in response to '--help'. long_help_message=" Warning categories include: 'all' show all warnings 'none' turn off all the warnings 'error' warnings are treated as fatal errors" # Help message printed before fatal option parsing errors. fatal_help="Try '\$progname --help' for more information." ## ------------------------- ## ## Hook function management. ## ## ------------------------- ## # This section contains functions for adding, removing, and running hooks # to the main code. A hook is just a named list of of function, that can # be run in order later on. # func_hookable FUNC_NAME # ----------------------- # Declare that FUNC_NAME will run hooks added with # 'func_add_hook FUNC_NAME ...'. func_hookable () { $debug_cmd func_append hookable_fns " $1" } # func_add_hook FUNC_NAME HOOK_FUNC # --------------------------------- # Request that FUNC_NAME call HOOK_FUNC before it returns. FUNC_NAME must # first have been declared "hookable" by a call to 'func_hookable'. func_add_hook () { $debug_cmd case " $hookable_fns " in *" $1 "*) ;; *) func_fatal_error "'$1' does not accept hook functions." ;; esac eval func_append ${1}_hooks '" $2"' } # func_remove_hook FUNC_NAME HOOK_FUNC # ------------------------------------ # Remove HOOK_FUNC from the list of functions called by FUNC_NAME. func_remove_hook () { $debug_cmd eval ${1}_hooks='`$ECHO "\$'$1'_hooks" |$SED "s| '$2'||"`' } # func_run_hooks FUNC_NAME [ARG]... 
# ---------------------------------
# Run all hook functions registered to FUNC_NAME.
# It is assumed that the list of hook functions contains nothing more
# than a whitespace-delimited list of legal shell function names, and
# no effort is wasted trying to catch shell meta-characters or preserve
# whitespace.
func_run_hooks ()
{
    $debug_cmd

    # FUNC_NAME must have been declared hookable with func_hookable.
    # Fix: error message used to read "hook funcions.n" — corrected to
    # "hook functions." to match func_add_hook's wording.
    case " $hookable_fns " in
      *" $1 "*) ;;
      *) func_fatal_error "'$1' does not support hook functions." ;;
    esac

    eval _G_hook_fns=\$$1_hooks; shift

    for _G_hook in $_G_hook_fns; do
      eval $_G_hook '"$@"'

      # store returned options list back into positional
      # parameters for next 'cmd' execution.
      eval _G_hook_result=\$${_G_hook}_result
      eval set dummy "$_G_hook_result"; shift
    done

    func_quote_for_eval ${1+"$@"}
    func_run_hooks_result=$func_quote_for_eval_result
}



## --------------- ##
## Option parsing. ##
## --------------- ##

# In order to add your own option parsing hooks, you must accept the
# full positional parameter list in your hook function, remove any
# options that you action, and then pass back the remaining unprocessed
# options in '<hooked_function_name>_result', escaped suitably for
# 'eval'.  Like this:
#
#    my_options_prep ()
#    {
#        $debug_cmd
#
#        # Extend the existing usage message.
#        usage_message=$usage_message'
#      -s, --silent       don'\''t print informational messages
#    '
#
#        func_quote_for_eval ${1+"$@"}
#        my_options_prep_result=$func_quote_for_eval_result
#    }
#    func_add_hook func_options_prep my_options_prep
#
#
#    my_silent_option ()
#    {
#        $debug_cmd
#
#        # Note that for efficiency, we parse as many options as we can
#        # recognise in a loop before passing the remainder back to the
#        # caller on the first unrecognised argument we encounter.
# while test $# -gt 0; do # opt=$1; shift # case $opt in # --silent|-s) opt_silent=: ;; # # Separate non-argument short options: # -s*) func_split_short_opt "$_G_opt" # set dummy "$func_split_short_opt_name" \ # "-$func_split_short_opt_arg" ${1+"$@"} # shift # ;; # *) set dummy "$_G_opt" "$*"; shift; break ;; # esac # done # # func_quote_for_eval ${1+"$@"} # my_silent_option_result=$func_quote_for_eval_result # } # func_add_hook func_parse_options my_silent_option # # # my_option_validation () # { # $debug_cmd # # $opt_silent && $opt_verbose && func_fatal_help "\ # '--silent' and '--verbose' options are mutually exclusive." # # func_quote_for_eval ${1+"$@"} # my_option_validation_result=$func_quote_for_eval_result # } # func_add_hook func_validate_options my_option_validation # # You'll alse need to manually amend $usage_message to reflect the extra # options you parse. It's preferable to append if you can, so that # multiple option parsing hooks can be added safely. # func_options [ARG]... # --------------------- # All the functions called inside func_options are hookable. See the # individual implementations for details. func_hookable func_options func_options () { $debug_cmd func_options_prep ${1+"$@"} eval func_parse_options \ ${func_options_prep_result+"$func_options_prep_result"} eval func_validate_options \ ${func_parse_options_result+"$func_parse_options_result"} eval func_run_hooks func_options \ ${func_validate_options_result+"$func_validate_options_result"} # save modified positional parameters for caller func_options_result=$func_run_hooks_result } # func_options_prep [ARG]... # -------------------------- # All initialisations required before starting the option parse loop. # Note that when calling hook functions, we pass through the list of # positional parameters. If a hook function modifies that list, and # needs to propogate that back to rest of this script, then the complete # modified list must be put in 'func_run_hooks_result' before # returning. 
func_hookable func_options_prep
func_options_prep ()
{
    $debug_cmd

    # Option defaults:
    opt_verbose=false
    opt_warning_types=

    func_run_hooks func_options_prep ${1+"$@"}

    # save modified positional parameters for caller
    func_options_prep_result=$func_run_hooks_result
}


# func_parse_options [ARG]...
# ---------------------------
# The main option parsing loop.
func_hookable func_parse_options
func_parse_options ()
{
    $debug_cmd

    func_parse_options_result=

    # this just eases exit handling
    while test $# -gt 0; do
      # Defer to hook functions for initial option parsing, so they
      # get priority in the event of reusing an option name.
      func_run_hooks func_parse_options ${1+"$@"}

      # Adjust func_parse_options positional parameters to match
      eval set dummy "$func_run_hooks_result"; shift

      # Break out of the loop if we already parsed every option.
      test $# -gt 0 || break

      _G_opt=$1
      shift
      case $_G_opt in
        --debug|-x)   debug_cmd='set -x'
                      func_echo "enabling shell trace mode"
                      $debug_cmd
                      ;;

        --no-warnings|--no-warning|--no-warn)
                      set dummy --warnings none ${1+"$@"}
                      shift
		      ;;

        --warnings|--warning|-W)
                      test $# = 0 && func_missing_arg $_G_opt && break
                      case " $warning_categories $1" in
                        *" $1 "*)
                          # trailing space prevents matching last $1 above
                          func_append_uniq opt_warning_types " $1"
                          ;;
                        *all)
                          opt_warning_types=$warning_categories
                          ;;
                        *none)
                          opt_warning_types=none
                          warning_func=:
                          ;;
                        *error)
                          opt_warning_types=$warning_categories
                          warning_func=func_fatal_error
                          ;;
                        *)
                          func_fatal_error \
                             "unsupported warning category: '$1'"
                          ;;
                      esac
                      shift
                      ;;

        --verbose|-v) opt_verbose=: ;;
        --version)    func_version ;;
        -\?|-h)       func_usage ;;
        --help)       func_help ;;

	# Separate optargs to long options (plugins may need this):
	--*=*)        func_split_equals "$_G_opt"
	              set dummy "$func_split_equals_lhs" \
                          "$func_split_equals_rhs" ${1+"$@"}
                      shift
                      ;;

	# Separate optargs to short options:
        -W*)          func_split_short_opt "$_G_opt"
                      set dummy "$func_split_short_opt_name" \
                          "$func_split_short_opt_arg" ${1+"$@"}
                      shift
                      ;;

        # Separate non-argument short options:
        -\?*|-h*|-v*|-x*)
                      func_split_short_opt "$_G_opt"
                      set dummy "$func_split_short_opt_name" \
                          "-$func_split_short_opt_arg" ${1+"$@"}
                      shift
                      ;;

        --)           break ;;
        -*)           func_fatal_help "unrecognised option: '$_G_opt'" ;;
        *)            set dummy "$_G_opt" ${1+"$@"}; shift; break ;;
      esac
    done

    # save modified positional parameters for caller
    func_quote_for_eval ${1+"$@"}
    func_parse_options_result=$func_quote_for_eval_result
}


# func_validate_options [ARG]...
# ------------------------------
# Perform any sanity checks on option settings and/or unconsumed
# arguments.
func_hookable func_validate_options
func_validate_options ()
{
    $debug_cmd

    # Display all warnings if -W was not given.
    test -n "$opt_warning_types" || opt_warning_types=" $warning_categories"

    func_run_hooks func_validate_options ${1+"$@"}

    # Bail if the options were screwed!
    $exit_cmd $EXIT_FAILURE

    # save modified positional parameters for caller
    func_validate_options_result=$func_run_hooks_result
}



## ----------------- ##
## Helper functions. ##
## ----------------- ##

# This section contains the helper functions used by the rest of the
# hookable option parser framework in ascii-betical order.

# func_fatal_help ARG...
# ----------------------
# Echo program name prefixed message to standard error, followed by
# a help hint, and exit.
func_fatal_help ()
{
    $debug_cmd

    eval \$ECHO \""Usage: $usage"\"
    eval \$ECHO \""$fatal_help"\"
    func_error ${1+"$@"}
    exit $EXIT_FAILURE
}


# func_help
# ---------
# Echo long help message to standard output and exit.
func_help ()
{
    $debug_cmd

    func_usage_message
    $ECHO "$long_help_message"
    exit 0
}


# func_missing_arg ARGNAME
# ------------------------
# Echo program name prefixed message to standard error and set global
# exit_cmd.
func_missing_arg ()
{
    $debug_cmd

    func_error "Missing argument for '$1'."
    exit_cmd=exit
}


# func_split_equals STRING
# ------------------------
# Set func_split_equals_lhs and func_split_equals_rhs shell variables after
# splitting STRING at the '=' sign.
test -z "$_G_HAVE_XSI_OPS" \
    && (eval 'x=a/b/c; test 5aa/bb/cc = "${#x}${x%%/*}${x%/*}${x#*/}${x##*/}"') 2>/dev/null \
    && _G_HAVE_XSI_OPS=yes

if test yes = "$_G_HAVE_XSI_OPS"
then
  # This is an XSI compatible shell, allowing a faster implementation...
  eval 'func_split_equals ()
  {
      $debug_cmd

      func_split_equals_lhs=${1%%=*}
      func_split_equals_rhs=${1#*=}
      # No '=' present: rhs must be empty.
      test "x$func_split_equals_lhs" = "x$1" \
        && func_split_equals_rhs=
  }'
else
  # ...otherwise fall back to using expr, which is often a shell builtin.
  func_split_equals ()
  {
      $debug_cmd

      func_split_equals_lhs=`expr "x$1" : 'x\([^=]*\)'`
      func_split_equals_rhs=
      test "x$func_split_equals_lhs" = "x$1" \
        || func_split_equals_rhs=`expr "x$1" : 'x[^=]*=\(.*\)$'`
  }
fi #func_split_equals


# func_split_short_opt SHORTOPT
# -----------------------------
# Set func_split_short_opt_name and func_split_short_opt_arg shell
# variables after splitting SHORTOPT after the 2nd character.
if test yes = "$_G_HAVE_XSI_OPS"
then
  # This is an XSI compatible shell, allowing a faster implementation...
  eval 'func_split_short_opt ()
  {
      $debug_cmd

      func_split_short_opt_arg=${1#??}
      func_split_short_opt_name=${1%"$func_split_short_opt_arg"}
  }'
else
  # ...otherwise fall back to using expr, which is often a shell builtin.
  func_split_short_opt ()
  {
      $debug_cmd

      func_split_short_opt_name=`expr "x$1" : 'x-\(.\)'`
      func_split_short_opt_arg=`expr "x$1" : 'x-.\(.*\)$'`
  }
fi #func_split_short_opt


# func_usage
# ----------
# Echo short help message to standard output and exit.
func_usage ()
{
    $debug_cmd

    func_usage_message
    $ECHO "Run '$progname --help |${PAGER-more}' for full usage"
    exit 0
}


# func_usage_message
# ------------------
# Echo short help message to standard output.
func_usage_message ()
{
    $debug_cmd

    eval \$ECHO \""Usage: $usage"\"
    echo
    # Print the descriptive comment block from the top of $progpath,
    # up to (but not including) the '# Written by' line.
    $SED -n 's|^# ||
        /^Written by/{
          x;p;x
        }
	h
	/^Written by/q' < "$progpath"
    echo
    eval \$ECHO \""$usage_message"\"
}


# func_version
# ------------
# Echo version message to standard output and exit.
func_version ()
{
    $debug_cmd

    printf '%s\n' "$progname $scriptversion"
    # Extract and print the copyright/author block from $progpath.
    $SED -n '
        /(C)/!b go
        :more
        /\./!{
          N
          s|\n# | |
          b more
        }
        :go
        /^# Written by /,/# warranty; / {
          s|^# ||
          s|^# *$||
          s|\((C)\)[ 0-9,-]*[ ,-]\([1-9][0-9]* \)|\1 \2|
          p
        }
        /^# Written by / {
          s|^# ||
          p
        }
        /^warranty; /q' < "$progpath"

    exit $?
}


# Local variables:
# mode: shell-script
# sh-indentation: 2
# eval: (add-hook 'before-save-hook 'time-stamp)
# time-stamp-pattern: "10/scriptversion=%:y-%02m-%02d.%02H; # UTC"
# time-stamp-time-zone: "UTC"
# End:

# Set a version string.
scriptversion='(GNU libtool) 2.4.6'


# func_echo ARG...
# ----------------
# Libtool also displays the current mode in messages, so override
# funclib.sh func_echo with this custom definition.
func_echo ()
{
    $debug_cmd

    _G_message=$*

    func_echo_IFS=$IFS
    IFS=$nl
    for _G_line in $_G_message; do
      IFS=$func_echo_IFS
      $ECHO "$progname${opt_mode+: $opt_mode}: $_G_line"
    done
    IFS=$func_echo_IFS
}


# func_warning ARG...
# -------------------
# Libtool warnings are not categorized, so override funclib.sh
# func_warning with this simpler definition.
func_warning ()
{
    $debug_cmd

    $warning_func ${1+"$@"}
}


## ---------------- ##
## Options parsing. ##
## ---------------- ##

# Hook in the functions to make sure our own options are parsed during
# the option parsing loop.

usage='$progpath [OPTION]... [MODE-ARG]...'

# Short help message in response to '-h'.
usage_message="Options: --config show all configuration variables --debug enable verbose shell tracing -n, --dry-run display commands without modifying any files --features display basic configuration information and exit --mode=MODE use operation mode MODE --no-warnings equivalent to '-Wnone' --preserve-dup-deps don't remove duplicate dependency libraries --quiet, --silent don't print informational messages --tag=TAG use configuration variables from tag TAG -v, --verbose print more informational messages than default --version print version information -W, --warnings=CATEGORY report the warnings falling in CATEGORY [all] -h, --help, --help-all print short, long, or detailed help message " # Additional text appended to 'usage_message' in response to '--help'. func_help () { $debug_cmd func_usage_message $ECHO "$long_help_message MODE must be one of the following: clean remove files from the build directory compile compile a source file into a libtool object execute automatically set library path, then run a program finish complete the installation of libtool libraries install install libraries or executables link create a library or an executable uninstall remove libraries from an installed directory MODE-ARGS vary depending on the MODE. When passed as first option, '--mode=MODE' may be abbreviated as 'MODE' or a unique abbreviation of that. Try '$progname --help --mode=MODE' for a more detailed description of MODE. When reporting a bug, please describe a test case to reproduce it and include the following information: host-triplet: $host shell: $SHELL compiler: $LTCC compiler flags: $LTCFLAGS linker: $LD (gnu? $with_gnu_ld) version: $progname $scriptversion Debian-2.4.6-2 automake: `($AUTOMAKE --version) 2>/dev/null |$SED 1q` autoconf: `($AUTOCONF --version) 2>/dev/null |$SED 1q` Report bugs to . GNU libtool home page: . General help using GNU software: ." 
exit 0 } # func_lo2o OBJECT-NAME # --------------------- # Transform OBJECT-NAME from a '.lo' suffix to the platform specific # object suffix. lo2o=s/\\.lo\$/.$objext/ o2lo=s/\\.$objext\$/.lo/ if test yes = "$_G_HAVE_XSI_OPS"; then eval 'func_lo2o () { case $1 in *.lo) func_lo2o_result=${1%.lo}.$objext ;; * ) func_lo2o_result=$1 ;; esac }' # func_xform LIBOBJ-OR-SOURCE # --------------------------- # Transform LIBOBJ-OR-SOURCE from a '.o' or '.c' (or otherwise) # suffix to a '.lo' libtool-object suffix. eval 'func_xform () { func_xform_result=${1%.*}.lo }' else # ...otherwise fall back to using sed. func_lo2o () { func_lo2o_result=`$ECHO "$1" | $SED "$lo2o"` } func_xform () { func_xform_result=`$ECHO "$1" | $SED 's|\.[^.]*$|.lo|'` } fi # func_fatal_configuration ARG... # ------------------------------- # Echo program name prefixed message to standard error, followed by # a configuration failure hint, and exit. func_fatal_configuration () { func__fatal_error ${1+"$@"} \ "See the $PACKAGE documentation for more information." \ "Fatal configuration error." } # func_config # ----------- # Display the configuration for all the tags in this script. func_config () { re_begincf='^# ### BEGIN LIBTOOL' re_endcf='^# ### END LIBTOOL' # Default configuration. $SED "1,/$re_begincf CONFIG/d;/$re_endcf CONFIG/,\$d" < "$progpath" # Now print the configurations for the tags. for tagname in $taglist; do $SED -n "/$re_begincf TAG CONFIG: $tagname\$/,/$re_endcf TAG CONFIG: $tagname\$/p" < "$progpath" done exit $? } # func_features # ------------- # Display the features supported by this script. func_features () { echo "host: $host" if test yes = "$build_libtool_libs"; then echo "enable shared libraries" else echo "disable shared libraries" fi if test yes = "$build_old_libs"; then echo "enable static libraries" else echo "disable static libraries" fi exit $? 
} # func_enable_tag TAGNAME # ----------------------- # Verify that TAGNAME is valid, and either flag an error and exit, or # enable the TAGNAME tag. We also add TAGNAME to the global $taglist # variable here. func_enable_tag () { # Global variable: tagname=$1 re_begincf="^# ### BEGIN LIBTOOL TAG CONFIG: $tagname\$" re_endcf="^# ### END LIBTOOL TAG CONFIG: $tagname\$" sed_extractcf=/$re_begincf/,/$re_endcf/p # Validate tagname. case $tagname in *[!-_A-Za-z0-9,/]*) func_fatal_error "invalid tag name: $tagname" ;; esac # Don't test for the "default" C tag, as we know it's # there but not specially marked. case $tagname in CC) ;; *) if $GREP "$re_begincf" "$progpath" >/dev/null 2>&1; then taglist="$taglist $tagname" # Evaluate the configuration. Be careful to quote the path # and the sed script, to avoid splitting on whitespace, but # also don't use non-portable quotes within backquotes within # quotes we have to do it in 2 steps: extractedcf=`$SED -n -e "$sed_extractcf" < "$progpath"` eval "$extractedcf" else func_error "ignoring unknown tag $tagname" fi ;; esac } # func_check_version_match # ------------------------ # Ensure that we are using m4 macros, and libtool script from the same # release of libtool. func_check_version_match () { if test "$package_revision" != "$macro_revision"; then if test "$VERSION" != "$macro_version"; then if test -z "$macro_version"; then cat >&2 <<_LT_EOF $progname: Version mismatch error. This is $PACKAGE $VERSION, but the $progname: definition of this LT_INIT comes from an older release. $progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION $progname: and run autoconf again. _LT_EOF else cat >&2 <<_LT_EOF $progname: Version mismatch error. This is $PACKAGE $VERSION, but the $progname: definition of this LT_INIT comes from $PACKAGE $macro_version. $progname: You should recreate aclocal.m4 with macros from $PACKAGE $VERSION $progname: and run autoconf again. 
_LT_EOF fi else cat >&2 <<_LT_EOF $progname: Version mismatch error. This is $PACKAGE $VERSION, revision $package_revision, $progname: but the definition of this LT_INIT comes from revision $macro_revision. $progname: You should recreate aclocal.m4 with macros from revision $package_revision $progname: of $PACKAGE $VERSION and run autoconf again. _LT_EOF fi exit $EXIT_MISMATCH fi } # libtool_options_prep [ARG]... # ----------------------------- # Preparation for options parsed by libtool. libtool_options_prep () { $debug_mode # Option defaults: opt_config=false opt_dlopen= opt_dry_run=false opt_help=false opt_mode= opt_preserve_dup_deps=false opt_quiet=false nonopt= preserve_args= # Shorthand for --mode=foo, only valid as the first argument case $1 in clean|clea|cle|cl) shift; set dummy --mode clean ${1+"$@"}; shift ;; compile|compil|compi|comp|com|co|c) shift; set dummy --mode compile ${1+"$@"}; shift ;; execute|execut|execu|exec|exe|ex|e) shift; set dummy --mode execute ${1+"$@"}; shift ;; finish|finis|fini|fin|fi|f) shift; set dummy --mode finish ${1+"$@"}; shift ;; install|instal|insta|inst|ins|in|i) shift; set dummy --mode install ${1+"$@"}; shift ;; link|lin|li|l) shift; set dummy --mode link ${1+"$@"}; shift ;; uninstall|uninstal|uninsta|uninst|unins|unin|uni|un|u) shift; set dummy --mode uninstall ${1+"$@"}; shift ;; esac # Pass back the list of options. func_quote_for_eval ${1+"$@"} libtool_options_prep_result=$func_quote_for_eval_result } func_add_hook func_options_prep libtool_options_prep # libtool_parse_options [ARG]... # --------------------------------- # Provide handling for libtool specific options. libtool_parse_options () { $debug_cmd # Perform our own loop to consume as many options as possible in # each iteration. 
while test $# -gt 0; do _G_opt=$1 shift case $_G_opt in --dry-run|--dryrun|-n) opt_dry_run=: ;; --config) func_config ;; --dlopen|-dlopen) opt_dlopen="${opt_dlopen+$opt_dlopen }$1" shift ;; --preserve-dup-deps) opt_preserve_dup_deps=: ;; --features) func_features ;; --finish) set dummy --mode finish ${1+"$@"}; shift ;; --help) opt_help=: ;; --help-all) opt_help=': help-all' ;; --mode) test $# = 0 && func_missing_arg $_G_opt && break opt_mode=$1 case $1 in # Valid mode arguments: clean|compile|execute|finish|install|link|relink|uninstall) ;; # Catch anything else as an error *) func_error "invalid argument for $_G_opt" exit_cmd=exit break ;; esac shift ;; --no-silent|--no-quiet) opt_quiet=false func_append preserve_args " $_G_opt" ;; --no-warnings|--no-warning|--no-warn) opt_warning=false func_append preserve_args " $_G_opt" ;; --no-verbose) opt_verbose=false func_append preserve_args " $_G_opt" ;; --silent|--quiet) opt_quiet=: opt_verbose=false func_append preserve_args " $_G_opt" ;; --tag) test $# = 0 && func_missing_arg $_G_opt && break opt_tag=$1 func_append preserve_args " $_G_opt $1" func_enable_tag "$1" shift ;; --verbose|-v) opt_quiet=false opt_verbose=: func_append preserve_args " $_G_opt" ;; # An option not handled by this hook function: *) set dummy "$_G_opt" ${1+"$@"}; shift; break ;; esac done # save modified positional parameters for caller func_quote_for_eval ${1+"$@"} libtool_parse_options_result=$func_quote_for_eval_result } func_add_hook func_parse_options libtool_parse_options # libtool_validate_options [ARG]... # --------------------------------- # Perform any sanity checks on option settings and/or unconsumed # arguments. 
libtool_validate_options () { # save first non-option argument if test 0 -lt $#; then nonopt=$1 shift fi # preserve --debug test : = "$debug_cmd" || func_append preserve_args " --debug" case $host in # Solaris2 added to fix http://debbugs.gnu.org/cgi/bugreport.cgi?bug=16452 # see also: http://gcc.gnu.org/bugzilla/show_bug.cgi?id=59788 *cygwin* | *mingw* | *pw32* | *cegcc* | *solaris2* | *os2*) # don't eliminate duplications in $postdeps and $predeps opt_duplicate_compiler_generated_deps=: ;; *) opt_duplicate_compiler_generated_deps=$opt_preserve_dup_deps ;; esac $opt_help || { # Sanity checks first: func_check_version_match test yes != "$build_libtool_libs" \ && test yes != "$build_old_libs" \ && func_fatal_configuration "not configured to build any kind of library" # Darwin sucks eval std_shrext=\"$shrext_cmds\" # Only execute mode is allowed to have -dlopen flags. if test -n "$opt_dlopen" && test execute != "$opt_mode"; then func_error "unrecognized option '-dlopen'" $ECHO "$help" 1>&2 exit $EXIT_FAILURE fi # Change the help message to a mode-specific one. generic_help=$help help="Try '$progname --help --mode=$opt_mode' for more information." } # Pass back the unparsed argument list func_quote_for_eval ${1+"$@"} libtool_validate_options_result=$func_quote_for_eval_result } func_add_hook func_validate_options libtool_validate_options # Process options as early as possible so that --help and --version # can return quickly. func_options ${1+"$@"} eval set dummy "$func_options_result"; shift ## ----------- ## ## Main. ## ## ----------- ## magic='%%%MAGIC variable%%%' magic_exe='%%%MAGIC EXE variable%%%' # Global variables. extracted_archives= extracted_serial=0 # If this variable is set in any of the actions, the command in it # will be execed at the end. This prevents here-documents from being # left over by shells. exec_cmd= # A function that is used when there is no print builtin or printf. 
func_fallback_echo () { eval 'cat <<_LTECHO_EOF $1 _LTECHO_EOF' } # func_generated_by_libtool # True iff stdin has been generated by Libtool. This function is only # a basic sanity check; it will hardly flush out determined imposters. func_generated_by_libtool_p () { $GREP "^# Generated by .*$PACKAGE" > /dev/null 2>&1 } # func_lalib_p file # True iff FILE is a libtool '.la' library or '.lo' object file. # This function is only a basic sanity check; it will hardly flush out # determined imposters. func_lalib_p () { test -f "$1" && $SED -e 4q "$1" 2>/dev/null | func_generated_by_libtool_p } # func_lalib_unsafe_p file # True iff FILE is a libtool '.la' library or '.lo' object file. # This function implements the same check as func_lalib_p without # resorting to external programs. To this end, it redirects stdin and # closes it afterwards, without saving the original file descriptor. # As a safety measure, use it only where a negative result would be # fatal anyway. Works if 'file' does not exist. func_lalib_unsafe_p () { lalib_p=no if test -f "$1" && test -r "$1" && exec 5<&0 <"$1"; then for lalib_p_l in 1 2 3 4 do read lalib_p_line case $lalib_p_line in \#\ Generated\ by\ *$PACKAGE* ) lalib_p=yes; break;; esac done exec 0<&5 5<&- fi test yes = "$lalib_p" } # func_ltwrapper_script_p file # True iff FILE is a libtool wrapper script # This function is only a basic sanity check; it will hardly flush out # determined imposters. func_ltwrapper_script_p () { test -f "$1" && $lt_truncate_bin < "$1" 2>/dev/null | func_generated_by_libtool_p } # func_ltwrapper_executable_p file # True iff FILE is a libtool wrapper executable # This function is only a basic sanity check; it will hardly flush out # determined imposters. 
func_ltwrapper_executable_p () { func_ltwrapper_exec_suffix= case $1 in *.exe) ;; *) func_ltwrapper_exec_suffix=.exe ;; esac $GREP "$magic_exe" "$1$func_ltwrapper_exec_suffix" >/dev/null 2>&1 } # func_ltwrapper_scriptname file # Assumes file is an ltwrapper_executable # uses $file to determine the appropriate filename for a # temporary ltwrapper_script. func_ltwrapper_scriptname () { func_dirname_and_basename "$1" "" "." func_stripname '' '.exe' "$func_basename_result" func_ltwrapper_scriptname_result=$func_dirname_result/$objdir/${func_stripname_result}_ltshwrapper } # func_ltwrapper_p file # True iff FILE is a libtool wrapper script or wrapper executable # This function is only a basic sanity check; it will hardly flush out # determined imposters. func_ltwrapper_p () { func_ltwrapper_script_p "$1" || func_ltwrapper_executable_p "$1" } # func_execute_cmds commands fail_cmd # Execute tilde-delimited COMMANDS. # If FAIL_CMD is given, eval that upon failure. # FAIL_CMD may read-access the current command in variable CMD! func_execute_cmds () { $debug_cmd save_ifs=$IFS; IFS='~' for cmd in $1; do IFS=$sp$nl eval cmd=\"$cmd\" IFS=$save_ifs func_show_eval "$cmd" "${2-:}" done IFS=$save_ifs } # func_source file # Source FILE, adding directory component if necessary. # Note that it is not necessary on cygwin/mingw to append a dot to # FILE even if both FILE and FILE.exe exist: automatic-append-.exe # behavior happens only for exec(3), not for open(2)! Also, sourcing # 'FILE.' does not work on cygwin managed mounts. func_source () { $debug_cmd case $1 in */* | *\\*) . "$1" ;; *) . "./$1" ;; esac } # func_resolve_sysroot PATH # Replace a leading = in PATH with a sysroot. 
Store the result into # func_resolve_sysroot_result func_resolve_sysroot () { func_resolve_sysroot_result=$1 case $func_resolve_sysroot_result in =*) func_stripname '=' '' "$func_resolve_sysroot_result" func_resolve_sysroot_result=$lt_sysroot$func_stripname_result ;; esac } # func_replace_sysroot PATH # If PATH begins with the sysroot, replace it with = and # store the result into func_replace_sysroot_result. func_replace_sysroot () { case $lt_sysroot:$1 in ?*:"$lt_sysroot"*) func_stripname "$lt_sysroot" '' "$1" func_replace_sysroot_result='='$func_stripname_result ;; *) # Including no sysroot. func_replace_sysroot_result=$1 ;; esac } # func_infer_tag arg # Infer tagged configuration to use if any are available and # if one wasn't chosen via the "--tag" command line option. # Only attempt this if the compiler in the base compile # command doesn't match the default compiler. # arg is usually of the form 'gcc ...' func_infer_tag () { $debug_cmd if test -n "$available_tags" && test -z "$tagname"; then CC_quoted= for arg in $CC; do func_append_quoted CC_quoted "$arg" done CC_expanded=`func_echo_all $CC` CC_quoted_expanded=`func_echo_all $CC_quoted` case $@ in # Blanks in the command may have been stripped by the calling shell, # but not from the CC environment variable when configure was run. " $CC "* | "$CC "* | " $CC_expanded "* | "$CC_expanded "* | \ " $CC_quoted"* | "$CC_quoted "* | " $CC_quoted_expanded "* | "$CC_quoted_expanded "*) ;; # Blanks at the start of $base_compile will cause this to fail # if we don't check for them as well. *) for z in $available_tags; do if $GREP "^# ### BEGIN LIBTOOL TAG CONFIG: $z$" < "$progpath" > /dev/null; then # Evaluate the configuration. eval "`$SED -n -e '/^# ### BEGIN LIBTOOL TAG CONFIG: '$z'$/,/^# ### END LIBTOOL TAG CONFIG: '$z'$/p' < $progpath`" CC_quoted= for arg in $CC; do # Double-quote args containing other shell metacharacters. 
func_append_quoted CC_quoted "$arg" done CC_expanded=`func_echo_all $CC` CC_quoted_expanded=`func_echo_all $CC_quoted` case "$@ " in " $CC "* | "$CC "* | " $CC_expanded "* | "$CC_expanded "* | \ " $CC_quoted"* | "$CC_quoted "* | " $CC_quoted_expanded "* | "$CC_quoted_expanded "*) # The compiler in the base compile command matches # the one in the tagged configuration. # Assume this is the tagged configuration we want. tagname=$z break ;; esac fi done # If $tagname still isn't set, then no tagged configuration # was found and let the user know that the "--tag" command # line option must be used. if test -z "$tagname"; then func_echo "unable to infer tagged configuration" func_fatal_error "specify a tag with '--tag'" # else # func_verbose "using $tagname tagged configuration" fi ;; esac fi } # func_write_libtool_object output_name pic_name nonpic_name # Create a libtool object file (analogous to a ".la" file), # but don't create it if we're doing a dry run. func_write_libtool_object () { write_libobj=$1 if test yes = "$build_libtool_libs"; then write_lobj=\'$2\' else write_lobj=none fi if test yes = "$build_old_libs"; then write_oldobj=\'$3\' else write_oldobj=none fi $opt_dry_run || { cat >${write_libobj}T </dev/null` if test "$?" -eq 0 && test -n "$func_convert_core_file_wine_to_w32_tmp"; then func_convert_core_file_wine_to_w32_result=`$ECHO "$func_convert_core_file_wine_to_w32_tmp" | $SED -e "$sed_naive_backslashify"` else func_convert_core_file_wine_to_w32_result= fi fi } # end: func_convert_core_file_wine_to_w32 # func_convert_core_path_wine_to_w32 ARG # Helper function used by path conversion functions when $build is *nix, and # $host is mingw, cygwin, or some other w32 environment. Relies on a correctly # configured wine environment available, with the winepath program in $build's # $PATH. Assumes ARG has no leading or trailing path separator characters. # # ARG is path to be converted from $build format to win32. 
# Result is available in $func_convert_core_path_wine_to_w32_result. # Unconvertible file (directory) names in ARG are skipped; if no directory names # are convertible, then the result may be empty. func_convert_core_path_wine_to_w32 () { $debug_cmd # unfortunately, winepath doesn't convert paths, only file names func_convert_core_path_wine_to_w32_result= if test -n "$1"; then oldIFS=$IFS IFS=: for func_convert_core_path_wine_to_w32_f in $1; do IFS=$oldIFS func_convert_core_file_wine_to_w32 "$func_convert_core_path_wine_to_w32_f" if test -n "$func_convert_core_file_wine_to_w32_result"; then if test -z "$func_convert_core_path_wine_to_w32_result"; then func_convert_core_path_wine_to_w32_result=$func_convert_core_file_wine_to_w32_result else func_append func_convert_core_path_wine_to_w32_result ";$func_convert_core_file_wine_to_w32_result" fi fi done IFS=$oldIFS fi } # end: func_convert_core_path_wine_to_w32 # func_cygpath ARGS... # Wrapper around calling the cygpath program via LT_CYGPATH. This is used when # when (1) $build is *nix and Cygwin is hosted via a wine environment; or (2) # $build is MSYS and $host is Cygwin, or (3) $build is Cygwin. In case (1) or # (2), returns the Cygwin file name or path in func_cygpath_result (input # file name or path is assumed to be in w32 format, as previously converted # from $build's *nix or MSYS format). In case (3), returns the w32 file name # or path in func_cygpath_result (input file name or path is assumed to be in # Cygwin format). Returns an empty string on error. # # ARGS are passed to cygpath, with the last one being the file name or path to # be converted. # # Specify the absolute *nix (or w32) name to cygpath in the LT_CYGPATH # environment variable; do not put it in $PATH. func_cygpath () { $debug_cmd if test -n "$LT_CYGPATH" && test -f "$LT_CYGPATH"; then func_cygpath_result=`$LT_CYGPATH "$@" 2>/dev/null` if test "$?" 
-ne 0; then # on failure, ensure result is empty func_cygpath_result= fi else func_cygpath_result= func_error "LT_CYGPATH is empty or specifies non-existent file: '$LT_CYGPATH'" fi } #end: func_cygpath # func_convert_core_msys_to_w32 ARG # Convert file name or path ARG from MSYS format to w32 format. Return # result in func_convert_core_msys_to_w32_result. func_convert_core_msys_to_w32 () { $debug_cmd # awkward: cmd appends spaces to result func_convert_core_msys_to_w32_result=`( cmd //c echo "$1" ) 2>/dev/null | $SED -e 's/[ ]*$//' -e "$sed_naive_backslashify"` } #end: func_convert_core_msys_to_w32 # func_convert_file_check ARG1 ARG2 # Verify that ARG1 (a file name in $build format) was converted to $host # format in ARG2. Otherwise, emit an error message, but continue (resetting # func_to_host_file_result to ARG1). func_convert_file_check () { $debug_cmd if test -z "$2" && test -n "$1"; then func_error "Could not determine host file name corresponding to" func_error " '$1'" func_error "Continuing, but uninstalled executables may not work." # Fallback: func_to_host_file_result=$1 fi } # end func_convert_file_check # func_convert_path_check FROM_PATHSEP TO_PATHSEP FROM_PATH TO_PATH # Verify that FROM_PATH (a path in $build format) was converted to $host # format in TO_PATH. Otherwise, emit an error message, but continue, resetting # func_to_host_file_result to a simplistic fallback value (see below). func_convert_path_check () { $debug_cmd if test -z "$4" && test -n "$3"; then func_error "Could not determine the host path corresponding to" func_error " '$3'" func_error "Continuing, but uninstalled executables may not work." # Fallback. This is a deliberately simplistic "conversion" and # should not be "improved". See libtool.info. 
if test "x$1" != "x$2"; then lt_replace_pathsep_chars="s|$1|$2|g" func_to_host_path_result=`echo "$3" | $SED -e "$lt_replace_pathsep_chars"` else func_to_host_path_result=$3 fi fi } # end func_convert_path_check # func_convert_path_front_back_pathsep FRONTPAT BACKPAT REPL ORIG # Modifies func_to_host_path_result by prepending REPL if ORIG matches FRONTPAT # and appending REPL if ORIG matches BACKPAT. func_convert_path_front_back_pathsep () { $debug_cmd case $4 in $1 ) func_to_host_path_result=$3$func_to_host_path_result ;; esac case $4 in $2 ) func_append func_to_host_path_result "$3" ;; esac } # end func_convert_path_front_back_pathsep ################################################## # $build to $host FILE NAME CONVERSION FUNCTIONS # ################################################## # invoked via '$to_host_file_cmd ARG' # # In each case, ARG is the path to be converted from $build to $host format. # Result will be available in $func_to_host_file_result. # func_to_host_file ARG # Converts the file name ARG from $build format to $host format. Return result # in func_to_host_file_result. func_to_host_file () { $debug_cmd $to_host_file_cmd "$1" } # end func_to_host_file # func_to_tool_file ARG LAZY # converts the file name ARG from $build format to toolchain format. Return # result in func_to_tool_file_result. If the conversion in use is listed # in (the comma separated) LAZY, no conversion takes place. func_to_tool_file () { $debug_cmd case ,$2, in *,"$to_tool_file_cmd",*) func_to_tool_file_result=$1 ;; *) $to_tool_file_cmd "$1" func_to_tool_file_result=$func_to_host_file_result ;; esac } # end func_to_tool_file # func_convert_file_noop ARG # Copy ARG to func_to_host_file_result. func_convert_file_noop () { func_to_host_file_result=$1 } # end func_convert_file_noop # func_convert_file_msys_to_w32 ARG # Convert file name ARG from (mingw) MSYS to (mingw) w32 format; automatic # conversion to w32 is not available inside the cwrapper. 
Returns result in # func_to_host_file_result. func_convert_file_msys_to_w32 () { $debug_cmd func_to_host_file_result=$1 if test -n "$1"; then func_convert_core_msys_to_w32 "$1" func_to_host_file_result=$func_convert_core_msys_to_w32_result fi func_convert_file_check "$1" "$func_to_host_file_result" } # end func_convert_file_msys_to_w32 # func_convert_file_cygwin_to_w32 ARG # Convert file name ARG from Cygwin to w32 format. Returns result in # func_to_host_file_result. func_convert_file_cygwin_to_w32 () { $debug_cmd func_to_host_file_result=$1 if test -n "$1"; then # because $build is cygwin, we call "the" cygpath in $PATH; no need to use # LT_CYGPATH in this case. func_to_host_file_result=`cygpath -m "$1"` fi func_convert_file_check "$1" "$func_to_host_file_result" } # end func_convert_file_cygwin_to_w32 # func_convert_file_nix_to_w32 ARG # Convert file name ARG from *nix to w32 format. Requires a wine environment # and a working winepath. Returns result in func_to_host_file_result. func_convert_file_nix_to_w32 () { $debug_cmd func_to_host_file_result=$1 if test -n "$1"; then func_convert_core_file_wine_to_w32 "$1" func_to_host_file_result=$func_convert_core_file_wine_to_w32_result fi func_convert_file_check "$1" "$func_to_host_file_result" } # end func_convert_file_nix_to_w32 # func_convert_file_msys_to_cygwin ARG # Convert file name ARG from MSYS to Cygwin format. Requires LT_CYGPATH set. # Returns result in func_to_host_file_result. func_convert_file_msys_to_cygwin () { $debug_cmd func_to_host_file_result=$1 if test -n "$1"; then func_convert_core_msys_to_w32 "$1" func_cygpath -u "$func_convert_core_msys_to_w32_result" func_to_host_file_result=$func_cygpath_result fi func_convert_file_check "$1" "$func_to_host_file_result" } # end func_convert_file_msys_to_cygwin # func_convert_file_nix_to_cygwin ARG # Convert file name ARG from *nix to Cygwin format. Requires Cygwin installed # in a wine environment, working winepath, and LT_CYGPATH set. 
Returns result # in func_to_host_file_result. func_convert_file_nix_to_cygwin () { $debug_cmd func_to_host_file_result=$1 if test -n "$1"; then # convert from *nix to w32, then use cygpath to convert from w32 to cygwin. func_convert_core_file_wine_to_w32 "$1" func_cygpath -u "$func_convert_core_file_wine_to_w32_result" func_to_host_file_result=$func_cygpath_result fi func_convert_file_check "$1" "$func_to_host_file_result" } # end func_convert_file_nix_to_cygwin ############################################# # $build to $host PATH CONVERSION FUNCTIONS # ############################################# # invoked via '$to_host_path_cmd ARG' # # In each case, ARG is the path to be converted from $build to $host format. # The result will be available in $func_to_host_path_result. # # Path separators are also converted from $build format to $host format. If # ARG begins or ends with a path separator character, it is preserved (but # converted to $host format) on output. # # All path conversion functions are named using the following convention: # file name conversion function : func_convert_file_X_to_Y () # path conversion function : func_convert_path_X_to_Y () # where, for any given $build/$host combination the 'X_to_Y' value is the # same. If conversion functions are added for new $build/$host combinations, # the two new functions must follow this pattern, or func_init_to_host_path_cmd # will break. # func_init_to_host_path_cmd # Ensures that function "pointer" variable $to_host_path_cmd is set to the # appropriate value, based on the value of $to_host_file_cmd. to_host_path_cmd= func_init_to_host_path_cmd () { $debug_cmd if test -z "$to_host_path_cmd"; then func_stripname 'func_convert_file_' '' "$to_host_file_cmd" to_host_path_cmd=func_convert_path_$func_stripname_result fi } # func_to_host_path ARG # Converts the path ARG from $build format to $host format. Return result # in func_to_host_path_result. 
func_to_host_path () { $debug_cmd func_init_to_host_path_cmd $to_host_path_cmd "$1" } # end func_to_host_path # func_convert_path_noop ARG # Copy ARG to func_to_host_path_result. func_convert_path_noop () { func_to_host_path_result=$1 } # end func_convert_path_noop # func_convert_path_msys_to_w32 ARG # Convert path ARG from (mingw) MSYS to (mingw) w32 format; automatic # conversion to w32 is not available inside the cwrapper. Returns result in # func_to_host_path_result. func_convert_path_msys_to_w32 () { $debug_cmd func_to_host_path_result=$1 if test -n "$1"; then # Remove leading and trailing path separator characters from ARG. MSYS # behavior is inconsistent here; cygpath turns them into '.;' and ';.'; # and winepath ignores them completely. func_stripname : : "$1" func_to_host_path_tmp1=$func_stripname_result func_convert_core_msys_to_w32 "$func_to_host_path_tmp1" func_to_host_path_result=$func_convert_core_msys_to_w32_result func_convert_path_check : ";" \ "$func_to_host_path_tmp1" "$func_to_host_path_result" func_convert_path_front_back_pathsep ":*" "*:" ";" "$1" fi } # end func_convert_path_msys_to_w32 # func_convert_path_cygwin_to_w32 ARG # Convert path ARG from Cygwin to w32 format. Returns result in # func_to_host_file_result. func_convert_path_cygwin_to_w32 () { $debug_cmd func_to_host_path_result=$1 if test -n "$1"; then # See func_convert_path_msys_to_w32: func_stripname : : "$1" func_to_host_path_tmp1=$func_stripname_result func_to_host_path_result=`cygpath -m -p "$func_to_host_path_tmp1"` func_convert_path_check : ";" \ "$func_to_host_path_tmp1" "$func_to_host_path_result" func_convert_path_front_back_pathsep ":*" "*:" ";" "$1" fi } # end func_convert_path_cygwin_to_w32 # func_convert_path_nix_to_w32 ARG # Convert path ARG from *nix to w32 format. Requires a wine environment and # a working winepath. Returns result in func_to_host_file_result. 
func_convert_path_nix_to_w32 () { $debug_cmd func_to_host_path_result=$1 if test -n "$1"; then # See func_convert_path_msys_to_w32: func_stripname : : "$1" func_to_host_path_tmp1=$func_stripname_result func_convert_core_path_wine_to_w32 "$func_to_host_path_tmp1" func_to_host_path_result=$func_convert_core_path_wine_to_w32_result func_convert_path_check : ";" \ "$func_to_host_path_tmp1" "$func_to_host_path_result" func_convert_path_front_back_pathsep ":*" "*:" ";" "$1" fi } # end func_convert_path_nix_to_w32 # func_convert_path_msys_to_cygwin ARG # Convert path ARG from MSYS to Cygwin format. Requires LT_CYGPATH set. # Returns result in func_to_host_file_result. func_convert_path_msys_to_cygwin () { $debug_cmd func_to_host_path_result=$1 if test -n "$1"; then # See func_convert_path_msys_to_w32: func_stripname : : "$1" func_to_host_path_tmp1=$func_stripname_result func_convert_core_msys_to_w32 "$func_to_host_path_tmp1" func_cygpath -u -p "$func_convert_core_msys_to_w32_result" func_to_host_path_result=$func_cygpath_result func_convert_path_check : : \ "$func_to_host_path_tmp1" "$func_to_host_path_result" func_convert_path_front_back_pathsep ":*" "*:" : "$1" fi } # end func_convert_path_msys_to_cygwin # func_convert_path_nix_to_cygwin ARG # Convert path ARG from *nix to Cygwin format. Requires Cygwin installed in a # a wine environment, working winepath, and LT_CYGPATH set. Returns result in # func_to_host_file_result. func_convert_path_nix_to_cygwin () { $debug_cmd func_to_host_path_result=$1 if test -n "$1"; then # Remove leading and trailing path separator characters from # ARG. msys behavior is inconsistent here, cygpath turns them # into '.;' and ';.', and winepath ignores them completely. 
func_stripname : : "$1" func_to_host_path_tmp1=$func_stripname_result func_convert_core_path_wine_to_w32 "$func_to_host_path_tmp1" func_cygpath -u -p "$func_convert_core_path_wine_to_w32_result" func_to_host_path_result=$func_cygpath_result func_convert_path_check : : \ "$func_to_host_path_tmp1" "$func_to_host_path_result" func_convert_path_front_back_pathsep ":*" "*:" : "$1" fi } # end func_convert_path_nix_to_cygwin # func_dll_def_p FILE # True iff FILE is a Windows DLL '.def' file. # Keep in sync with _LT_DLL_DEF_P in libtool.m4 func_dll_def_p () { $debug_cmd func_dll_def_p_tmp=`$SED -n \ -e 's/^[ ]*//' \ -e '/^\(;.*\)*$/d' \ -e 's/^\(EXPORTS\|LIBRARY\)\([ ].*\)*$/DEF/p' \ -e q \ "$1"` test DEF = "$func_dll_def_p_tmp" } # func_mode_compile arg... func_mode_compile () { $debug_cmd # Get the compilation command and the source file. base_compile= srcfile=$nonopt # always keep a non-empty value in "srcfile" suppress_opt=yes suppress_output= arg_mode=normal libobj= later= pie_flag= for arg do case $arg_mode in arg ) # do not "continue". Instead, add this to base_compile lastarg=$arg arg_mode=normal ;; target ) libobj=$arg arg_mode=normal continue ;; normal ) # Accept any command-line options. case $arg in -o) test -n "$libobj" && \ func_fatal_error "you cannot specify '-o' more than once" arg_mode=target continue ;; -pie | -fpie | -fPIE) func_append pie_flag " $arg" continue ;; -shared | -static | -prefer-pic | -prefer-non-pic) func_append later " $arg" continue ;; -no-suppress) suppress_opt=no continue ;; -Xcompiler) arg_mode=arg # the next one goes into the "base_compile" arg list continue # The current "srcfile" will either be retained or ;; # replaced later. I would guess that would be a bug. 
-Wc,*) func_stripname '-Wc,' '' "$arg" args=$func_stripname_result lastarg= save_ifs=$IFS; IFS=, for arg in $args; do IFS=$save_ifs func_append_quoted lastarg "$arg" done IFS=$save_ifs func_stripname ' ' '' "$lastarg" lastarg=$func_stripname_result # Add the arguments to base_compile. func_append base_compile " $lastarg" continue ;; *) # Accept the current argument as the source file. # The previous "srcfile" becomes the current argument. # lastarg=$srcfile srcfile=$arg ;; esac # case $arg ;; esac # case $arg_mode # Aesthetically quote the previous argument. func_append_quoted base_compile "$lastarg" done # for arg case $arg_mode in arg) func_fatal_error "you must specify an argument for -Xcompile" ;; target) func_fatal_error "you must specify a target with '-o'" ;; *) # Get the name of the library object. test -z "$libobj" && { func_basename "$srcfile" libobj=$func_basename_result } ;; esac # Recognize several different file suffixes. # If the user specifies -o file.o, it is replaced with file.lo case $libobj in *.[cCFSifmso] | \ *.ada | *.adb | *.ads | *.asm | \ *.c++ | *.cc | *.ii | *.class | *.cpp | *.cxx | \ *.[fF][09]? | *.for | *.java | *.go | *.obj | *.sx | *.cu | *.cup) func_xform "$libobj" libobj=$func_xform_result ;; esac case $libobj in *.lo) func_lo2o "$libobj"; obj=$func_lo2o_result ;; *) func_fatal_error "cannot determine name of library object from '$libobj'" ;; esac func_infer_tag $base_compile for arg in $later; do case $arg in -shared) test yes = "$build_libtool_libs" \ || func_fatal_configuration "cannot build a shared library" build_old_libs=no continue ;; -static) build_libtool_libs=no build_old_libs=yes continue ;; -prefer-pic) pic_mode=yes continue ;; -prefer-non-pic) pic_mode=no continue ;; esac done func_quote_for_eval "$libobj" test "X$libobj" != "X$func_quote_for_eval_result" \ && $ECHO "X$libobj" | $GREP '[]~#^*{};<>?"'"'"' &()|`$[]' \ && func_warning "libobj name '$libobj' may not contain shell special characters." 
func_dirname_and_basename "$obj" "/" "" objname=$func_basename_result xdir=$func_dirname_result lobj=$xdir$objdir/$objname test -z "$base_compile" && \ func_fatal_help "you must specify a compilation command" # Delete any leftover library objects. if test yes = "$build_old_libs"; then removelist="$obj $lobj $libobj ${libobj}T" else removelist="$lobj $libobj ${libobj}T" fi # On Cygwin there's no "real" PIC flag so we must build both object types case $host_os in cygwin* | mingw* | pw32* | os2* | cegcc*) pic_mode=default ;; esac if test no = "$pic_mode" && test pass_all != "$deplibs_check_method"; then # non-PIC code in shared libraries is not supported pic_mode=default fi # Calculate the filename of the output object if compiler does # not support -o with -c if test no = "$compiler_c_o"; then output_obj=`$ECHO "$srcfile" | $SED 's%^.*/%%; s%\.[^.]*$%%'`.$objext lockfile=$output_obj.lock else output_obj= need_locks=no lockfile= fi # Lock this critical section if it is needed # We use this script file to make the link, it avoids creating a new file if test yes = "$need_locks"; then until $opt_dry_run || ln "$progpath" "$lockfile" 2>/dev/null; do func_echo "Waiting for $lockfile to be removed" sleep 2 done elif test warn = "$need_locks"; then if test -f "$lockfile"; then $ECHO "\ *** ERROR, $lockfile exists and contains: `cat $lockfile 2>/dev/null` This indicates that another process is trying to use the same temporary object file, and libtool could not work around it because your compiler does not support '-c' and '-o' together. If you repeat this compilation, it may succeed, by chance, but you had better avoid parallel builds (make -j) in this platform, or get a better compiler." 
$opt_dry_run || $RM $removelist exit $EXIT_FAILURE fi func_append removelist " $output_obj" $ECHO "$srcfile" > "$lockfile" fi $opt_dry_run || $RM $removelist func_append removelist " $lockfile" trap '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE' 1 2 15 func_to_tool_file "$srcfile" func_convert_file_msys_to_w32 srcfile=$func_to_tool_file_result func_quote_for_eval "$srcfile" qsrcfile=$func_quote_for_eval_result # Only build a PIC object if we are building libtool libraries. if test yes = "$build_libtool_libs"; then # Without this assignment, base_compile gets emptied. fbsd_hideous_sh_bug=$base_compile if test no != "$pic_mode"; then command="$base_compile $qsrcfile $pic_flag" else # Don't build PIC code command="$base_compile $qsrcfile" fi func_mkdir_p "$xdir$objdir" if test -z "$output_obj"; then # Place PIC objects in $objdir func_append command " -o $lobj" fi func_show_eval_locale "$command" \ 'test -n "$output_obj" && $RM $removelist; exit $EXIT_FAILURE' if test warn = "$need_locks" && test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then $ECHO "\ *** ERROR, $lockfile contains: `cat $lockfile 2>/dev/null` but it should contain: $srcfile This indicates that another process is trying to use the same temporary object file, and libtool could not work around it because your compiler does not support '-c' and '-o' together. If you repeat this compilation, it may succeed, by chance, but you had better avoid parallel builds (make -j) in this platform, or get a better compiler." $opt_dry_run || $RM $removelist exit $EXIT_FAILURE fi # Just move the object if needed, then go on to compile the next one if test -n "$output_obj" && test "X$output_obj" != "X$lobj"; then func_show_eval '$MV "$output_obj" "$lobj"' \ 'error=$?; $opt_dry_run || $RM $removelist; exit $error' fi # Allow error messages only from the first compilation. 
if test yes = "$suppress_opt"; then suppress_output=' >/dev/null 2>&1' fi fi # Only build a position-dependent object if we build old libraries. if test yes = "$build_old_libs"; then if test yes != "$pic_mode"; then # Don't build PIC code command="$base_compile $qsrcfile$pie_flag" else command="$base_compile $qsrcfile $pic_flag" fi if test yes = "$compiler_c_o"; then func_append command " -o $obj" fi # Suppress compiler output if we already did a PIC compilation. func_append command "$suppress_output" func_show_eval_locale "$command" \ '$opt_dry_run || $RM $removelist; exit $EXIT_FAILURE' if test warn = "$need_locks" && test "X`cat $lockfile 2>/dev/null`" != "X$srcfile"; then $ECHO "\ *** ERROR, $lockfile contains: `cat $lockfile 2>/dev/null` but it should contain: $srcfile This indicates that another process is trying to use the same temporary object file, and libtool could not work around it because your compiler does not support '-c' and '-o' together. If you repeat this compilation, it may succeed, by chance, but you had better avoid parallel builds (make -j) in this platform, or get a better compiler." $opt_dry_run || $RM $removelist exit $EXIT_FAILURE fi # Just move the object if needed if test -n "$output_obj" && test "X$output_obj" != "X$obj"; then func_show_eval '$MV "$output_obj" "$obj"' \ 'error=$?; $opt_dry_run || $RM $removelist; exit $error' fi fi $opt_dry_run || { func_write_libtool_object "$libobj" "$objdir/$objname" "$objname" # Unlock the critical section if it was locked if test no != "$need_locks"; then removelist=$lockfile $RM "$lockfile" fi } exit $EXIT_SUCCESS } $opt_help || { test compile = "$opt_mode" && func_mode_compile ${1+"$@"} } func_mode_help () { # We need to display help for each of the modes. case $opt_mode in "") # Generic help is extracted from the usage comments # at the start of this file. func_help ;; clean) $ECHO \ "Usage: $progname [OPTION]... --mode=clean RM [RM-OPTION]... FILE... Remove files from the build directory. 
RM is the name of the program to use to delete files associated with each FILE (typically '/bin/rm'). RM-OPTIONS are options (such as '-f') to be passed to RM. If FILE is a libtool library, object or program, all the files associated with it are deleted. Otherwise, only FILE itself is deleted using RM." ;; compile) $ECHO \ "Usage: $progname [OPTION]... --mode=compile COMPILE-COMMAND... SOURCEFILE Compile a source file into a libtool library object. This mode accepts the following additional options: -o OUTPUT-FILE set the output file name to OUTPUT-FILE -no-suppress do not suppress compiler output for multiple passes -prefer-pic try to build PIC objects only -prefer-non-pic try to build non-PIC objects only -shared do not build a '.o' file suitable for static linking -static only build a '.o' file suitable for static linking -Wc,FLAG pass FLAG directly to the compiler COMPILE-COMMAND is a command to be used in creating a 'standard' object file from the given SOURCEFILE. The output file name is determined by removing the directory component from SOURCEFILE, then substituting the C source code suffix '.c' with the library object suffix, '.lo'." ;; execute) $ECHO \ "Usage: $progname [OPTION]... --mode=execute COMMAND [ARGS]... Automatically set library path, then run a program. This mode accepts the following additional options: -dlopen FILE add the directory containing FILE to the library path This mode sets the library path environment variable according to '-dlopen' flags. If any of the ARGS are libtool executable wrappers, then they are translated into their corresponding uninstalled binary, and any of their required library directories are added to the library path. Then, COMMAND is executed, with ARGS as arguments." ;; finish) $ECHO \ "Usage: $progname [OPTION]... --mode=finish [LIBDIR]... Complete the installation of libtool libraries. Each LIBDIR is a directory that contains libtool libraries. 
The commands that this mode executes may require superuser privileges. Use the '--dry-run' option if you just want to see what would be executed." ;; install) $ECHO \ "Usage: $progname [OPTION]... --mode=install INSTALL-COMMAND... Install executables or libraries. INSTALL-COMMAND is the installation command. The first component should be either the 'install' or 'cp' program. The following components of INSTALL-COMMAND are treated specially: -inst-prefix-dir PREFIX-DIR Use PREFIX-DIR as a staging area for installation The rest of the components are interpreted as arguments to that command (only BSD-compatible install options are recognized)." ;; link) $ECHO \ "Usage: $progname [OPTION]... --mode=link LINK-COMMAND... Link object files or libraries together to form another library, or to create an executable program. LINK-COMMAND is a command using the C compiler that you would use to create a program from several object files. The following components of LINK-COMMAND are treated specially: -all-static do not do any dynamic linking at all -avoid-version do not add a version suffix if possible -bindir BINDIR specify path to binaries directory (for systems where libraries must be found in the PATH setting at runtime) -dlopen FILE '-dlpreopen' FILE if it cannot be dlopened at runtime -dlpreopen FILE link in FILE and add its symbols to lt_preloaded_symbols -export-dynamic allow symbols from OUTPUT-FILE to be resolved with dlsym(3) -export-symbols SYMFILE try to export only the symbols listed in SYMFILE -export-symbols-regex REGEX try to export only the symbols matching REGEX -LLIBDIR search LIBDIR for required installed libraries -lNAME OUTPUT-FILE requires the installed library libNAME -module build a library that can dlopened -no-fast-install disable the fast-install mode -no-install link a not-installable executable -no-undefined declare that a library does not refer to external symbols -o OUTPUT-FILE create OUTPUT-FILE from the specified objects -objectlist FILE use a 
list of object files found in FILE to specify objects -os2dllname NAME force a short DLL name on OS/2 (no effect on other OSes) -precious-files-regex REGEX don't remove output files matching REGEX -release RELEASE specify package release information -rpath LIBDIR the created library will eventually be installed in LIBDIR -R[ ]LIBDIR add LIBDIR to the runtime path of programs and libraries -shared only do dynamic linking of libtool libraries -shrext SUFFIX override the standard shared library file extension -static do not do any dynamic linking of uninstalled libtool libraries -static-libtool-libs do not do any dynamic linking of libtool libraries -version-info CURRENT[:REVISION[:AGE]] specify library version info [each variable defaults to 0] -weak LIBNAME declare that the target provides the LIBNAME interface -Wc,FLAG -Xcompiler FLAG pass linker-specific FLAG directly to the compiler -Wl,FLAG -Xlinker FLAG pass linker-specific FLAG directly to the linker -XCClinker FLAG pass link-specific FLAG to the compiler driver (CC) All other options (arguments beginning with '-') are ignored. Every other argument is treated as a filename. Files ending in '.la' are treated as uninstalled libtool libraries, other files are standard or library object files. If the OUTPUT-FILE ends in '.la', then a libtool library is created, only library objects ('.lo' files) may be specified, and '-rpath' is required, except when creating a convenience library. If OUTPUT-FILE ends in '.a' or '.lib', then a standard library is created using 'ar' and 'ranlib', or on Windows using 'lib'. If OUTPUT-FILE ends in '.lo' or '.$objext', then a reloadable object file is created, otherwise an executable program is created." ;; uninstall) $ECHO \ "Usage: $progname [OPTION]... --mode=uninstall RM [RM-OPTION]... FILE... Remove libraries from an installation directory. RM is the name of the program to use to delete files associated with each FILE (typically '/bin/rm'). 
RM-OPTIONS are options (such as '-f') to be passed to RM. If FILE is a libtool library, all the files associated with it are deleted. Otherwise, only FILE itself is deleted using RM." ;; *) func_fatal_help "invalid operation mode '$opt_mode'" ;; esac echo $ECHO "Try '$progname --help' for more information about other modes." } # Now that we've collected a possible --mode arg, show help if necessary if $opt_help; then if test : = "$opt_help"; then func_mode_help else { func_help noexit for opt_mode in compile link execute install finish uninstall clean; do func_mode_help done } | $SED -n '1p; 2,$s/^Usage:/ or: /p' { func_help noexit for opt_mode in compile link execute install finish uninstall clean; do echo func_mode_help done } | $SED '1d /^When reporting/,/^Report/{ H d } $x /information about other modes/d /more detailed .*MODE/d s/^Usage:.*--mode=\([^ ]*\) .*/Description of \1 mode:/' fi exit $? fi # func_mode_execute arg... func_mode_execute () { $debug_cmd # The first argument is the command name. cmd=$nonopt test -z "$cmd" && \ func_fatal_help "you must specify a COMMAND" # Handle -dlopen flags immediately. for file in $opt_dlopen; do test -f "$file" \ || func_fatal_help "'$file' is not a file" dir= case $file in *.la) func_resolve_sysroot "$file" file=$func_resolve_sysroot_result # Check to see that this really is a libtool archive. func_lalib_unsafe_p "$file" \ || func_fatal_help "'$lib' is not a valid libtool archive" # Read the libtool library. dlname= library_names= func_source "$file" # Skip this library if it cannot be dlopened. if test -z "$dlname"; then # Warn if it was a shared library. test -n "$library_names" && \ func_warning "'$file' was not linked with '-export-dynamic'" continue fi func_dirname "$file" "" "." dir=$func_dirname_result if test -f "$dir/$objdir/$dlname"; then func_append dir "/$objdir" else if test ! 
-f "$dir/$dlname"; then func_fatal_error "cannot find '$dlname' in '$dir' or '$dir/$objdir'" fi fi ;; *.lo) # Just add the directory containing the .lo file. func_dirname "$file" "" "." dir=$func_dirname_result ;; *) func_warning "'-dlopen' is ignored for non-libtool libraries and objects" continue ;; esac # Get the absolute pathname. absdir=`cd "$dir" && pwd` test -n "$absdir" && dir=$absdir # Now add the directory to shlibpath_var. if eval "test -z \"\$$shlibpath_var\""; then eval "$shlibpath_var=\"\$dir\"" else eval "$shlibpath_var=\"\$dir:\$$shlibpath_var\"" fi done # This variable tells wrapper scripts just to set shlibpath_var # rather than running their programs. libtool_execute_magic=$magic # Check if any of the arguments is a wrapper script. args= for file do case $file in -* | *.la | *.lo ) ;; *) # Do a test to see if this is really a libtool program. if func_ltwrapper_script_p "$file"; then func_source "$file" # Transform arg to wrapped name. file=$progdir/$program elif func_ltwrapper_executable_p "$file"; then func_ltwrapper_scriptname "$file" func_source "$func_ltwrapper_scriptname_result" # Transform arg to wrapped name. file=$progdir/$program fi ;; esac # Quote arguments (to preserve shell metacharacters). func_append_quoted args "$file" done if $opt_dry_run; then # Display what would be done. if test -n "$shlibpath_var"; then eval "\$ECHO \"\$shlibpath_var=\$$shlibpath_var\"" echo "export $shlibpath_var" fi $ECHO "$cmd$args" exit $EXIT_SUCCESS else if test -n "$shlibpath_var"; then # Export the shlibpath_var. eval "export $shlibpath_var" fi # Restore saved environment variables for lt_var in LANG LANGUAGE LC_ALL LC_CTYPE LC_COLLATE LC_MESSAGES do eval "if test \"\${save_$lt_var+set}\" = set; then $lt_var=\$save_$lt_var; export $lt_var else $lt_unset $lt_var fi" done # Now prepare to actually exec the command. exec_cmd=\$cmd$args fi } test execute = "$opt_mode" && func_mode_execute ${1+"$@"} # func_mode_finish arg... 
func_mode_finish () { $debug_cmd libs= libdirs= admincmds= for opt in "$nonopt" ${1+"$@"} do if test -d "$opt"; then func_append libdirs " $opt" elif test -f "$opt"; then if func_lalib_unsafe_p "$opt"; then func_append libs " $opt" else func_warning "'$opt' is not a valid libtool archive" fi else func_fatal_error "invalid argument '$opt'" fi done if test -n "$libs"; then if test -n "$lt_sysroot"; then sysroot_regex=`$ECHO "$lt_sysroot" | $SED "$sed_make_literal_regex"` sysroot_cmd="s/\([ ']\)$sysroot_regex/\1/g;" else sysroot_cmd= fi # Remove sysroot references if $opt_dry_run; then for lib in $libs; do echo "removing references to $lt_sysroot and '=' prefixes from $lib" done else tmpdir=`func_mktempdir` for lib in $libs; do $SED -e "$sysroot_cmd s/\([ ']-[LR]\)=/\1/g; s/\([ ']\)=/\1/g" $lib \ > $tmpdir/tmp-la mv -f $tmpdir/tmp-la $lib done ${RM}r "$tmpdir" fi fi if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then for libdir in $libdirs; do if test -n "$finish_cmds"; then # Do each command in the finish commands. func_execute_cmds "$finish_cmds" 'admincmds="$admincmds '"$cmd"'"' fi if test -n "$finish_eval"; then # Do the single finish_eval. eval cmds=\"$finish_eval\" $opt_dry_run || eval "$cmds" || func_append admincmds " $cmds" fi done fi # Exit here if they wanted silent mode. 
$opt_quiet && exit $EXIT_SUCCESS if test -n "$finish_cmds$finish_eval" && test -n "$libdirs"; then echo "----------------------------------------------------------------------" echo "Libraries have been installed in:" for libdir in $libdirs; do $ECHO " $libdir" done echo echo "If you ever happen to want to link against installed libraries" echo "in a given directory, LIBDIR, you must either use libtool, and" echo "specify the full pathname of the library, or use the '-LLIBDIR'" echo "flag during linking and do at least one of the following:" if test -n "$shlibpath_var"; then echo " - add LIBDIR to the '$shlibpath_var' environment variable" echo " during execution" fi if test -n "$runpath_var"; then echo " - add LIBDIR to the '$runpath_var' environment variable" echo " during linking" fi if test -n "$hardcode_libdir_flag_spec"; then libdir=LIBDIR eval flag=\"$hardcode_libdir_flag_spec\" $ECHO " - use the '$flag' linker flag" fi if test -n "$admincmds"; then $ECHO " - have your system administrator run these commands:$admincmds" fi if test -f /etc/ld.so.conf; then echo " - have your system administrator add LIBDIR to '/etc/ld.so.conf'" fi echo echo "See any operating system documentation about shared libraries for" case $host in solaris2.[6789]|solaris2.1[0-9]) echo "more information, such as the ld(1), crle(1) and ld.so(8) manual" echo "pages." ;; *) echo "more information, such as the ld(1) and ld.so(8) manual pages." ;; esac echo "----------------------------------------------------------------------" fi exit $EXIT_SUCCESS } test finish = "$opt_mode" && func_mode_finish ${1+"$@"} # func_mode_install arg... func_mode_install () { $debug_cmd # There may be an optional sh(1) argument at the beginning of # install_prog (especially on Windows NT). if test "$SHELL" = "$nonopt" || test /bin/sh = "$nonopt" || # Allow the use of GNU shtool's install command. case $nonopt in *shtool*) :;; *) false;; esac then # Aesthetically quote it. 
func_quote_for_eval "$nonopt" install_prog="$func_quote_for_eval_result " arg=$1 shift else install_prog= arg=$nonopt fi # The real first argument should be the name of the installation program. # Aesthetically quote it. func_quote_for_eval "$arg" func_append install_prog "$func_quote_for_eval_result" install_shared_prog=$install_prog case " $install_prog " in *[\\\ /]cp\ *) install_cp=: ;; *) install_cp=false ;; esac # We need to accept at least all the BSD install flags. dest= files= opts= prev= install_type= isdir=false stripme= no_mode=: for arg do arg2= if test -n "$dest"; then func_append files " $dest" dest=$arg continue fi case $arg in -d) isdir=: ;; -f) if $install_cp; then :; else prev=$arg fi ;; -g | -m | -o) prev=$arg ;; -s) stripme=" -s" continue ;; -*) ;; *) # If the previous option needed an argument, then skip it. if test -n "$prev"; then if test X-m = "X$prev" && test -n "$install_override_mode"; then arg2=$install_override_mode no_mode=false fi prev= else dest=$arg continue fi ;; esac # Aesthetically quote the argument. func_quote_for_eval "$arg" func_append install_prog " $func_quote_for_eval_result" if test -n "$arg2"; then func_quote_for_eval "$arg2" fi func_append install_shared_prog " $func_quote_for_eval_result" done test -z "$install_prog" && \ func_fatal_help "you must specify an install program" test -n "$prev" && \ func_fatal_help "the '$prev' option requires an argument" if test -n "$install_override_mode" && $no_mode; then if $install_cp; then :; else func_quote_for_eval "$install_override_mode" func_append install_shared_prog " -m $func_quote_for_eval_result" fi fi if test -z "$files"; then if test -z "$dest"; then func_fatal_help "no file or destination specified" else func_fatal_help "you must specify a destination" fi fi # Strip any trailing slash from the destination. func_stripname '' '/' "$dest" dest=$func_stripname_result # Check to see that the destination is a directory. 
test -d "$dest" && isdir=: if $isdir; then destdir=$dest destname= else func_dirname_and_basename "$dest" "" "." destdir=$func_dirname_result destname=$func_basename_result # Not a directory, so check to see that there is only one file specified. set dummy $files; shift test "$#" -gt 1 && \ func_fatal_help "'$dest' is not a directory" fi case $destdir in [\\/]* | [A-Za-z]:[\\/]*) ;; *) for file in $files; do case $file in *.lo) ;; *) func_fatal_help "'$destdir' must be an absolute directory name" ;; esac done ;; esac # This variable tells wrapper scripts just to set variables rather # than running their programs. libtool_install_magic=$magic staticlibs= future_libdirs= current_libdirs= for file in $files; do # Do each installation. case $file in *.$libext) # Do the static libraries later. func_append staticlibs " $file" ;; *.la) func_resolve_sysroot "$file" file=$func_resolve_sysroot_result # Check to see that this really is a libtool archive. func_lalib_unsafe_p "$file" \ || func_fatal_help "'$file' is not a valid libtool archive" library_names= old_library= relink_command= func_source "$file" # Add the libdir to current_libdirs if it is the destination. if test "X$destdir" = "X$libdir"; then case "$current_libdirs " in *" $libdir "*) ;; *) func_append current_libdirs " $libdir" ;; esac else # Note the libdir as a future libdir. case "$future_libdirs " in *" $libdir "*) ;; *) func_append future_libdirs " $libdir" ;; esac fi func_dirname "$file" "/" "" dir=$func_dirname_result func_append dir "$objdir" if test -n "$relink_command"; then # Determine the prefix the user has applied to our future dir. inst_prefix_dir=`$ECHO "$destdir" | $SED -e "s%$libdir\$%%"` # Don't allow the user to place us outside of our expected # location b/c this prevents finding dependent libraries that # are installed to the same prefix. 
# At present, this check doesn't affect windows .dll's that # are installed into $libdir/../bin (currently, that works fine) # but it's something to keep an eye on. test "$inst_prefix_dir" = "$destdir" && \ func_fatal_error "error: cannot install '$file' to a directory not ending in $libdir" if test -n "$inst_prefix_dir"; then # Stick the inst_prefix_dir data into the link command. relink_command=`$ECHO "$relink_command" | $SED "s%@inst_prefix_dir@%-inst-prefix-dir $inst_prefix_dir%"` else relink_command=`$ECHO "$relink_command" | $SED "s%@inst_prefix_dir@%%"` fi func_warning "relinking '$file'" func_show_eval "$relink_command" \ 'func_fatal_error "error: relink '\''$file'\'' with the above command before installing it"' fi # See the names of the shared library. set dummy $library_names; shift if test -n "$1"; then realname=$1 shift srcname=$realname test -n "$relink_command" && srcname=${realname}T # Install the shared library and build the symlinks. func_show_eval "$install_shared_prog $dir/$srcname $destdir/$realname" \ 'exit $?' tstripme=$stripme case $host_os in cygwin* | mingw* | pw32* | cegcc*) case $realname in *.dll.a) tstripme= ;; esac ;; os2*) case $realname in *_dll.a) tstripme= ;; esac ;; esac if test -n "$tstripme" && test -n "$striplib"; then func_show_eval "$striplib $destdir/$realname" 'exit $?' fi if test "$#" -gt 0; then # Delete the old symlinks, and create new ones. # Try 'ln -sf' first, because the 'ln' binary might depend on # the symlink we replace! Solaris /bin/ln does not understand -f, # so we also need to try rm && ln -s. for linkname do test "$linkname" != "$realname" \ && func_show_eval "(cd $destdir && { $LN_S -f $realname $linkname || { $RM $linkname && $LN_S $realname $linkname; }; })" done fi # Do each command in the postinstall commands. lib=$destdir/$realname func_execute_cmds "$postinstall_cmds" 'exit $?' fi # Install the pseudo-library for information purposes. 
func_basename "$file" name=$func_basename_result instname=$dir/${name}i func_show_eval "$install_prog $instname $destdir/$name" 'exit $?' # Maybe install the static library, too. test -n "$old_library" && func_append staticlibs " $dir/$old_library" ;; *.lo) # Install (i.e. copy) a libtool object. # Figure out destination file name, if it wasn't already specified. if test -n "$destname"; then destfile=$destdir/$destname else func_basename "$file" destfile=$func_basename_result destfile=$destdir/$destfile fi # Deduce the name of the destination old-style object file. case $destfile in *.lo) func_lo2o "$destfile" staticdest=$func_lo2o_result ;; *.$objext) staticdest=$destfile destfile= ;; *) func_fatal_help "cannot copy a libtool object to '$destfile'" ;; esac # Install the libtool object if requested. test -n "$destfile" && \ func_show_eval "$install_prog $file $destfile" 'exit $?' # Install the old object if enabled. if test yes = "$build_old_libs"; then # Deduce the name of the old-style object file. func_lo2o "$file" staticobj=$func_lo2o_result func_show_eval "$install_prog \$staticobj \$staticdest" 'exit $?' fi exit $EXIT_SUCCESS ;; *) # Figure out destination file name, if it wasn't already specified. if test -n "$destname"; then destfile=$destdir/$destname else func_basename "$file" destfile=$func_basename_result destfile=$destdir/$destfile fi # If the file is missing, and there is a .exe on the end, strip it # because it is most likely a libtool script we actually want to # install stripped_ext= case $file in *.exe) if test ! -f "$file"; then func_stripname '' '.exe' "$file" file=$func_stripname_result stripped_ext=.exe fi ;; esac # Do a test to see if this is really a libtool program. 
case $host in *cygwin* | *mingw*) if func_ltwrapper_executable_p "$file"; then func_ltwrapper_scriptname "$file" wrapper=$func_ltwrapper_scriptname_result else func_stripname '' '.exe' "$file" wrapper=$func_stripname_result fi ;; *) wrapper=$file ;; esac if func_ltwrapper_script_p "$wrapper"; then notinst_deplibs= relink_command= func_source "$wrapper" # Check the variables that should have been set. test -z "$generated_by_libtool_version" && \ func_fatal_error "invalid libtool wrapper script '$wrapper'" finalize=: for lib in $notinst_deplibs; do # Check to see that each library is installed. libdir= if test -f "$lib"; then func_source "$lib" fi libfile=$libdir/`$ECHO "$lib" | $SED 's%^.*/%%g'` if test -n "$libdir" && test ! -f "$libfile"; then func_warning "'$lib' has not been installed in '$libdir'" finalize=false fi done relink_command= func_source "$wrapper" outputname= if test no = "$fast_install" && test -n "$relink_command"; then $opt_dry_run || { if $finalize; then tmpdir=`func_mktempdir` func_basename "$file$stripped_ext" file=$func_basename_result outputname=$tmpdir/$file # Replace the output file specification. relink_command=`$ECHO "$relink_command" | $SED 's%@OUTPUT@%'"$outputname"'%g'` $opt_quiet || { func_quote_for_expand "$relink_command" eval "func_echo $func_quote_for_expand_result" } if eval "$relink_command"; then : else func_error "error: relink '$file' with the above command before installing it" $opt_dry_run || ${RM}r "$tmpdir" continue fi file=$outputname else func_warning "cannot relink '$file'" fi } else # Install the binary that we compiled earlier. 
file=`$ECHO "$file$stripped_ext" | $SED "s%\([^/]*\)$%$objdir/\1%"` fi fi # remove .exe since cygwin /usr/bin/install will append another # one anyway case $install_prog,$host in */usr/bin/install*,*cygwin*) case $file:$destfile in *.exe:*.exe) # this is ok ;; *.exe:*) destfile=$destfile.exe ;; *:*.exe) func_stripname '' '.exe' "$destfile" destfile=$func_stripname_result ;; esac ;; esac func_show_eval "$install_prog\$stripme \$file \$destfile" 'exit $?' $opt_dry_run || if test -n "$outputname"; then ${RM}r "$tmpdir" fi ;; esac done for file in $staticlibs; do func_basename "$file" name=$func_basename_result # Set up the ranlib parameters. oldlib=$destdir/$name func_to_tool_file "$oldlib" func_convert_file_msys_to_w32 tool_oldlib=$func_to_tool_file_result func_show_eval "$install_prog \$file \$oldlib" 'exit $?' if test -n "$stripme" && test -n "$old_striplib"; then func_show_eval "$old_striplib $tool_oldlib" 'exit $?' fi # Do each command in the postinstall commands. func_execute_cmds "$old_postinstall_cmds" 'exit $?' done test -n "$future_libdirs" && \ func_warning "remember to run '$progname --finish$future_libdirs'" if test -n "$current_libdirs"; then # Maybe just do a dry run. $opt_dry_run && current_libdirs=" -n$current_libdirs" exec_cmd='$SHELL "$progpath" $preserve_args --finish$current_libdirs' else exit $EXIT_SUCCESS fi } test install = "$opt_mode" && func_mode_install ${1+"$@"} # func_generate_dlsyms outputname originator pic_p # Extract symbols from dlprefiles and create ${outputname}S.o with # a dlpreopen symbol table. 
# func_generate_dlsyms outputname originator [pic_p]
# Emit ${outputname}S.c, a C symbol-resolution table that lets programs
# dlsym() symbols from dlpreopened files; requires $NM plus a working
# $global_symbol_pipe (func_error otherwise).  NOTE(review): vendored GNU
# libtool code whose newlines were collapsed by extraction -- all original
# lines below are preserved byte-for-byte, only comment lines were added.
func_generate_dlsyms () { $debug_cmd my_outputname=$1 my_originator=$2 my_pic_p=${3-false} my_prefix=`$ECHO "$my_originator" | $SED 's%[^a-zA-Z0-9]%_%g'` my_dlsyms= if test -n "$dlfiles$dlprefiles" || test no != "$dlself"; then if test -n "$NM" && test -n "$global_symbol_pipe"; then my_dlsyms=${my_outputname}S.c else func_error "not configured to extract global symbols from dlpreopened files" fi fi if test -n "$my_dlsyms"; then case $my_dlsyms in "") ;; *.c) # Discover the nlist of each of the dlfiles. nlist=$output_objdir/$my_outputname.nm func_show_eval "$RM $nlist ${nlist}S ${nlist}T" # Parse the name list into a source file. func_verbose "creating $output_objdir/$my_dlsyms" $opt_dry_run || $ECHO > "$output_objdir/$my_dlsyms" "\ /* $my_dlsyms - symbol resolution table for '$my_outputname' dlsym emulation. */ /* Generated by $PROGRAM (GNU $PACKAGE) $VERSION */ #ifdef __cplusplus extern \"C\" { #endif #if defined __GNUC__ && (((__GNUC__ == 4) && (__GNUC_MINOR__ >= 4)) || (__GNUC__ > 4)) #pragma GCC diagnostic ignored \"-Wstrict-prototypes\" #endif /* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */ #if defined _WIN32 || defined __CYGWIN__ || defined _WIN32_WCE /* DATA imports from DLLs on WIN32 can't be const, because runtime relocations are performed -- see ld's documentation on pseudo-relocs. */ # define LT_DLSYM_CONST #elif defined __osf__ /* This system does not cope well with relocations in const data. */ # define LT_DLSYM_CONST #else # define LT_DLSYM_CONST const #endif #define STREQ(s1, s2) (strcmp ((s1), (s2)) == 0) /* External symbol declarations for the compiler. */\ " if test yes = "$dlself"; then func_verbose "generating symbol list for '$output'" $opt_dry_run || echo ': @PROGRAM@ ' > "$nlist" # Add our own program objects to the symbol list.
# Harvest this program's own objects with $NM|$global_symbol_pipe into
# $nlist, then filter by $exclude_expsyms / $export_symbols_regex and
# prepare the export list (plus a .def file on cygwin/mingw/cegcc hosts).
progfiles=`$ECHO "$objs$old_deplibs" | $SP2NL | $SED "$lo2o" | $NL2SP` for progfile in $progfiles; do func_to_tool_file "$progfile" func_convert_file_msys_to_w32 func_verbose "extracting global C symbols from '$func_to_tool_file_result'" $opt_dry_run || eval "$NM $func_to_tool_file_result | $global_symbol_pipe >> '$nlist'" done if test -n "$exclude_expsyms"; then $opt_dry_run || { eval '$EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T' eval '$MV "$nlist"T "$nlist"' } fi if test -n "$export_symbols_regex"; then $opt_dry_run || { eval '$EGREP -e "$export_symbols_regex" "$nlist" > "$nlist"T' eval '$MV "$nlist"T "$nlist"' } fi # Prepare the list of exported symbols if test -z "$export_symbols"; then export_symbols=$output_objdir/$outputname.exp $opt_dry_run || { $RM $export_symbols eval "$SED -n -e '/^: @PROGRAM@ $/d' -e 's/^.* \(.*\)$/\1/p' "'< "$nlist" > "$export_symbols"' case $host in *cygwin* | *mingw* | *cegcc* ) eval "echo EXPORTS "'> "$output_objdir/$outputname.def"' eval 'cat "$export_symbols" >> "$output_objdir/$outputname.def"' ;; esac } else $opt_dry_run || { eval "$SED -e 's/\([].[*^$]\)/\\\\\1/g' -e 's/^/ /' -e 's/$/$/'"' < "$export_symbols" > "$output_objdir/$outputname.exp"' eval '$GREP -f "$output_objdir/$outputname.exp" < "$nlist" > "$nlist"T' eval '$MV "$nlist"T "$nlist"' case $host in *cygwin* | *mingw* | *cegcc* ) eval "echo EXPORTS "'> "$output_objdir/$outputname.def"' eval 'cat "$nlist" >> "$output_objdir/$outputname.def"' ;; esac } fi fi for dlprefile in $dlprefiles; do func_verbose "extracting global C symbols from '$dlprefile'" func_basename "$dlprefile" name=$func_basename_result case $host in *cygwin* | *mingw* | *cegcc* ) # if an import library, we need to obtain dlname if func_win32_import_lib_p "$dlprefile"; then func_tr_sh "$dlprefile" eval "curr_lafile=\$libfile_$func_tr_sh_result" dlprefile_dlbasename= if test -n "$curr_lafile" && func_lalib_p "$curr_lafile"; then # Use subshell, to avoid clobbering current variable values
# Per -dlpreopen file: on w32 hosts resolve the real DLL name behind an
# import library (from its .la file's $dlname, else via
# $sharedlib_from_linklib_cmd) before recording ": name" + symbols in
# $nlist; on other hosts just record the basename and its $NM symbols.
# Afterwards the list is de-duplicated/sorted and turned into C via
# $global_symbol_to_cdecl / $global_symbol_to_import.
dlprefile_dlname=`source "$curr_lafile" && echo "$dlname"` if test -n "$dlprefile_dlname"; then func_basename "$dlprefile_dlname" dlprefile_dlbasename=$func_basename_result else # no lafile. user explicitly requested -dlpreopen . $sharedlib_from_linklib_cmd "$dlprefile" dlprefile_dlbasename=$sharedlib_from_linklib_result fi fi $opt_dry_run || { if test -n "$dlprefile_dlbasename"; then eval '$ECHO ": $dlprefile_dlbasename" >> "$nlist"' else func_warning "Could not compute DLL name from $name" eval '$ECHO ": $name " >> "$nlist"' fi func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32 eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe | $SED -e '/I __imp/d' -e 's/I __nm_/D /;s/_nm__//' >> '$nlist'" } else # not an import lib $opt_dry_run || { eval '$ECHO ": $name " >> "$nlist"' func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32 eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe >> '$nlist'" } fi ;; *) $opt_dry_run || { eval '$ECHO ": $name " >> "$nlist"' func_to_tool_file "$dlprefile" func_convert_file_msys_to_w32 eval "$NM \"$func_to_tool_file_result\" 2>/dev/null | $global_symbol_pipe >> '$nlist'" } ;; esac done $opt_dry_run || { # Make sure we have at least an empty file. test -f "$nlist" || : > "$nlist" if test -n "$exclude_expsyms"; then $EGREP -v " ($exclude_expsyms)$" "$nlist" > "$nlist"T $MV "$nlist"T "$nlist" fi # Try sorting and uniquifying the output. if $GREP -v "^: " < "$nlist" | if sort -k 3 /dev/null 2>&1; then sort -k 3 else sort +2 fi | uniq > "$nlist"S; then : else $GREP -v "^: " < "$nlist" > "$nlist"S fi if test -f "$nlist"S; then eval "$global_symbol_to_cdecl"' < "$nlist"S >> "$output_objdir/$my_dlsyms"' else echo '/* NONE */' >> "$output_objdir/$my_dlsyms" fi func_show_eval '$RM "${nlist}I"' if test -n "$global_symbol_to_import"; then eval "$global_symbol_to_import"' < "$nlist"S > "$nlist"I' fi echo >> "$output_objdir/$my_dlsyms" "\ /* The mapping between symbol names and symbols.
*/ typedef struct { const char *name; void *address; } lt_dlsymlist; extern LT_DLSYM_CONST lt_dlsymlist lt_${my_prefix}_LTX_preloaded_symbols[];\ " if test -s "$nlist"I; then echo >> "$output_objdir/$my_dlsyms" "\ static void lt_syminit(void) { LT_DLSYM_CONST lt_dlsymlist *symbol = lt_${my_prefix}_LTX_preloaded_symbols; for (; symbol->name; ++symbol) {" $SED 's/.*/ if (STREQ (symbol->name, \"&\")) symbol->address = (void *) \&&;/' < "$nlist"I >> "$output_objdir/$my_dlsyms" echo >> "$output_objdir/$my_dlsyms" "\ } }" fi echo >> "$output_objdir/$my_dlsyms" "\ LT_DLSYM_CONST lt_dlsymlist lt_${my_prefix}_LTX_preloaded_symbols[] = { {\"$my_originator\", (void *) 0}," if test -s "$nlist"I; then echo >> "$output_objdir/$my_dlsyms" "\ {\"@INIT@\", (void *) <_syminit}," fi case $need_lib_prefix in no) eval "$global_symbol_to_c_name_address" < "$nlist" >> "$output_objdir/$my_dlsyms" ;; *) eval "$global_symbol_to_c_name_address_lib_prefix" < "$nlist" >> "$output_objdir/$my_dlsyms" ;; esac echo >> "$output_objdir/$my_dlsyms" "\ {0, (void *) 0} }; /* This works around a problem in FreeBSD linker */ #ifdef FREEBSD_WORKAROUND static const void *lt_preloaded_setup() { return lt_${my_prefix}_LTX_preloaded_symbols; } #endif #ifdef __cplusplus } #endif\ " } # !$opt_dry_run pic_flag_for_symtable= case "$compile_command " in *" -static "*) ;; *) case $host in # compiling the symbol table file with pic_flag works around # a FreeBSD bug that causes programs to crash when -lm is # linked before any other PIC object. But we must not use # pic_flag when linking with -static. The problem exists in # FreeBSD 2.2.6 and is fixed in FreeBSD 3.1.
# Pick pic_flag for compiling the symtab (FreeBSD/HP-UX quirks per the
# comment above), strip -pie/-fpie/-fPIE from $LTCFLAGS, compile
# ${my_outputname}S.c, then substitute @SYMFILE@ in compile/finalize
# commands (adding the .def file on cygwin/mingw/cegcc hosts).
*-*-freebsd2.*|*-*-freebsd3.0*|*-*-freebsdelf3.0*) pic_flag_for_symtable=" $pic_flag -DFREEBSD_WORKAROUND" ;; *-*-hpux*) pic_flag_for_symtable=" $pic_flag" ;; *) $my_pic_p && pic_flag_for_symtable=" $pic_flag" ;; esac ;; esac symtab_cflags= for arg in $LTCFLAGS; do case $arg in -pie | -fpie | -fPIE) ;; *) func_append symtab_cflags " $arg" ;; esac done # Now compile the dynamic symbol file. func_show_eval '(cd $output_objdir && $LTCC$symtab_cflags -c$no_builtin_flag$pic_flag_for_symtable "$my_dlsyms")' 'exit $?' # Clean up the generated files. func_show_eval '$RM "$output_objdir/$my_dlsyms" "$nlist" "${nlist}S" "${nlist}T" "${nlist}I"' # Transform the symbol file into the correct name. symfileobj=$output_objdir/${my_outputname}S.$objext case $host in *cygwin* | *mingw* | *cegcc* ) if test -f "$output_objdir/$my_outputname.def"; then compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$output_objdir/$my_outputname.def $symfileobj%"` finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$output_objdir/$my_outputname.def $symfileobj%"` else compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$symfileobj%"` finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$symfileobj%"` fi ;; *) compile_command=`$ECHO "$compile_command" | $SED "s%@SYMFILE@%$symfileobj%"` finalize_command=`$ECHO "$finalize_command" | $SED "s%@SYMFILE@%$symfileobj%"` ;; esac ;; *) func_fatal_error "unknown suffix for '$my_dlsyms'" ;; esac else # We keep going just in case the user didn't refer to # lt_preloaded_symbols. The linker will fail if global_symbol_pipe # really was required. # Nullify the symbol file. compile_command=`$ECHO "$compile_command" | $SED "s% @SYMFILE@%%"` finalize_command=`$ECHO "$finalize_command" | $SED "s% @SYMFILE@%%"` fi } # func_cygming_gnu_implib_p ARG # This predicate returns with zero status (TRUE) if # ARG is a GNU/binutils-style import library. Returns # with nonzero status (FALSE) otherwise.
# Windows import-library helpers (vendored GNU libtool; original newlines
# were collapsed by extraction -- code left byte-for-byte):
#   func_cygming_gnu_implib_p ARG - true when $NM|$global_symbol_pipe shows
#       the _head_*/*_iname symbols characteristic of a binutils import lib.
#   func_cygming_ms_implib_p ARG  - true when the $NM output contains
#       _NULL_IMPORT_DESCRIPTOR (MS-style import lib).
#   func_win32_libid ARG          - classify ARG via file(1), then
#       $OBJDUMP/$NM (or dumpbin), echoing "x86 archive import",
#       "x86 archive static", "x86 DLL" or "unknown".
func_cygming_gnu_implib_p () { $debug_cmd func_to_tool_file "$1" func_convert_file_msys_to_w32 func_cygming_gnu_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $EGREP ' (_head_[A-Za-z0-9_]+_[ad]l*|[A-Za-z0-9_]+_[ad]l*_iname)$'` test -n "$func_cygming_gnu_implib_tmp" } # func_cygming_ms_implib_p ARG # This predicate returns with zero status (TRUE) if # ARG is an MS-style import library. Returns # with nonzero status (FALSE) otherwise. func_cygming_ms_implib_p () { $debug_cmd func_to_tool_file "$1" func_convert_file_msys_to_w32 func_cygming_ms_implib_tmp=`$NM "$func_to_tool_file_result" | eval "$global_symbol_pipe" | $GREP '_NULL_IMPORT_DESCRIPTOR'` test -n "$func_cygming_ms_implib_tmp" } # func_win32_libid arg # return the library type of file 'arg' # # Need a lot of goo to handle *both* DLLs and import libs # Has to be a shell function in order to 'eat' the argument # that is supplied when $file_magic_command is called. # Despite the name, also deal with 64 bit binaries. func_win32_libid () { $debug_cmd win32_libid_type=unknown win32_fileres=`file -L $1 2>/dev/null` case $win32_fileres in *ar\ archive\ import\ library*) # definitely import win32_libid_type="x86 archive import" ;; *ar\ archive*) # could be an import, or static # Keep the egrep pattern in sync with the one in _LT_CHECK_MAGIC_METHOD.
# A bare "ar archive" is ambiguous: confirm a PE object with $OBJDUMP,
# then decide import vs. static by scanning for " I " symbols with $NM
# (or via the dumpbin-based predicates above when nm_interface is
# "MS dumpbin").  func_cygming_dll_for_implib then uses $DLLTOOL
# --identify to name the DLL behind an import library.
if eval $OBJDUMP -f $1 | $SED -e '10q' 2>/dev/null | $EGREP 'file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' >/dev/null; then case $nm_interface in "MS dumpbin") if func_cygming_ms_implib_p "$1" || func_cygming_gnu_implib_p "$1" then win32_nmres=import else win32_nmres= fi ;; *) func_to_tool_file "$1" func_convert_file_msys_to_w32 win32_nmres=`eval $NM -f posix -A \"$func_to_tool_file_result\" | $SED -n -e ' 1,100{ / I /{ s|.*|import| p q } }'` ;; esac case $win32_nmres in import*) win32_libid_type="x86 archive import";; *) win32_libid_type="x86 archive static";; esac fi ;; *DLL*) win32_libid_type="x86 DLL" ;; *executable*) # but shell scripts are "executable" too... case $win32_fileres in *MS\ Windows\ PE\ Intel*) win32_libid_type="x86 DLL" ;; esac ;; esac $ECHO "$win32_libid_type" } # func_cygming_dll_for_implib ARG # # Platform-specific function to extract the # name of the DLL associated with the specified # import library ARG. # Invoked by eval'ing the libtool variable # $sharedlib_from_linklib_cmd # Result is available in the variable # $sharedlib_from_linklib_result func_cygming_dll_for_implib () { $debug_cmd sharedlib_from_linklib_result=`$DLLTOOL --identify-strict --identify "$1"` } # func_cygming_dll_for_implib_fallback_core SECTION_NAME LIBNAMEs # # The is the core of a fallback implementation of a # platform-specific function to extract the name of the # DLL associated with the specified import library LIBNAME. # # SECTION_NAME is either .idata$6 or .idata$7, depending # on the platform and compiler that created the implib. # # Echos the name of the DLL associated with the # specified import library.
# Implementation: dump SECTION_NAME of every archive member with
# "$OBJDUMP -s", mark each member's section with ====MARK==== and strip the
# 43-column hexdump prefix (first $SED pass), join each member's section
# text into one line and drop trailing dots/whitespace (second $SED pass),
# then echo the first candidate whose first or second character is not '.'
# (objdump's rendering of unprintables).  NOTE(review): vendored libtool
# code with extraction-collapsed newlines -- the sed programs below depend
# on their original line structure and are preserved byte-for-byte.
func_cygming_dll_for_implib_fallback_core () { $debug_cmd match_literal=`$ECHO "$1" | $SED "$sed_make_literal_regex"` $OBJDUMP -s --section "$1" "$2" 2>/dev/null | $SED '/^Contents of section '"$match_literal"':/{ # Place marker at beginning of archive member dllname section s/.*/====MARK====/ p d } # These lines can sometimes be longer than 43 characters, but # are always uninteresting /:[ ]*file format pe[i]\{,1\}-/d /^In archive [^:]*:/d # Ensure marker is printed /^====MARK====/p # Remove all lines with less than 43 characters /^.\{43\}/!d # From remaining lines, remove first 43 characters s/^.\{43\}//' | $SED -n ' # Join marker and all lines until next marker into a single line /^====MARK====/ b para H $ b para b :para x s/\n//g # Remove the marker s/^====MARK====// # Remove trailing dots and whitespace s/[\. \t]*$// # Print /./p' | # we now have a list, one entry per line, of the stringified # contents of the appropriate section of all members of the # archive that possess that section. Heuristic: eliminate # all those that have a first or second character that is # a '.' (that is, objdump's representation of an unprintable # character.) This should work for all archives with less than # 0x302f exports -- but will fail for DLLs whose name actually # begins with a literal '.' or a single character followed by # a '.'. # # Of those that remain, print the first one. $SED -e '/^\./d;/^.\./d;q' } # func_cygming_dll_for_implib_fallback ARG # Platform-specific function to extract the # name of the DLL associated with the specified # import library ARG. # # This fallback implementation is for use when $DLLTOOL # does not support the --identify-strict option.
# Remainder of the implib machinery plus archive extraction helpers
# (vendored GNU libtool, preserved byte-for-byte):
#   func_cygming_dll_for_implib_fallback ARG - choose '.idata$7' for a GNU
#       implib or '.idata$6' for an MS implib and delegate to the _core
#       helper; result in $sharedlib_from_linklib_result (empty if unknown).
#   func_extract_an_archive dir oldlib - "$AR x" in dir, serialized by an
#       optional ln-based lockfile when $lock_old_archive_extraction=yes;
#       fatal error if "$AR t | sort | sort -uc" detects duplicate members.
#   func_extract_archives gentop oldlib... - begins here; the per-archive
#       extraction loop continues on the following original lines.
# Invoked by eval'ing the libtool variable # $sharedlib_from_linklib_cmd # Result is available in the variable # $sharedlib_from_linklib_result func_cygming_dll_for_implib_fallback () { $debug_cmd if func_cygming_gnu_implib_p "$1"; then # binutils import library sharedlib_from_linklib_result=`func_cygming_dll_for_implib_fallback_core '.idata$7' "$1"` elif func_cygming_ms_implib_p "$1"; then # ms-generated import library sharedlib_from_linklib_result=`func_cygming_dll_for_implib_fallback_core '.idata$6' "$1"` else # unknown sharedlib_from_linklib_result= fi } # func_extract_an_archive dir oldlib func_extract_an_archive () { $debug_cmd f_ex_an_ar_dir=$1; shift f_ex_an_ar_oldlib=$1 if test yes = "$lock_old_archive_extraction"; then lockfile=$f_ex_an_ar_oldlib.lock until $opt_dry_run || ln "$progpath" "$lockfile" 2>/dev/null; do func_echo "Waiting for $lockfile to be removed" sleep 2 done fi func_show_eval "(cd \$f_ex_an_ar_dir && $AR x \"\$f_ex_an_ar_oldlib\")" \ 'stat=$?; rm -f "$lockfile"; exit $stat' if test yes = "$lock_old_archive_extraction"; then $opt_dry_run || rm -f "$lockfile" fi if ($AR t "$f_ex_an_ar_oldlib" | sort | sort -uc >/dev/null 2>&1); then : else func_fatal_error "object name conflicts in archive: $f_ex_an_ar_dir/$f_ex_an_ar_oldlib" fi } # func_extract_archives gentop oldlib ... func_extract_archives () { $debug_cmd my_gentop=$1; shift my_oldlibs=${1+"$@"} my_oldobjs= my_xlib= my_xabs= my_xdir= for my_xlib in $my_oldlibs; do # Extract the objects.
# Body of func_extract_archives: make each archive's extraction dir unique
# via $extracted_serial when basenames collide, then extract -- on Darwin,
# fat archives are thinned per-architecture with $LIPO, extracted, and the
# resulting objects re-fattened with "$LIPO -create"; the collected object
# list ends up in $func_extract_archives_result.  This run of original
# lines also contains most of func_emit_wrapper, which prints the libtool
# wrapper script to stdout (zsh/Bourne compatibility prologue, --lt-debug /
# --lt-dump-script option parsing, symlink chasing, and fast-install
# relinking).  NOTE(review): vendored GNU libtool code whose newlines were
# collapsed by extraction; the emitted-script strings below are program
# output, so no comments could be added inside them -- everything is
# preserved byte-for-byte.
case $my_xlib in [\\/]* | [A-Za-z]:[\\/]*) my_xabs=$my_xlib ;; *) my_xabs=`pwd`"/$my_xlib" ;; esac func_basename "$my_xlib" my_xlib=$func_basename_result my_xlib_u=$my_xlib while :; do case " $extracted_archives " in *" $my_xlib_u "*) func_arith $extracted_serial + 1 extracted_serial=$func_arith_result my_xlib_u=lt$extracted_serial-$my_xlib ;; *) break ;; esac done extracted_archives="$extracted_archives $my_xlib_u" my_xdir=$my_gentop/$my_xlib_u func_mkdir_p "$my_xdir" case $host in *-darwin*) func_verbose "Extracting $my_xabs" # Do not bother doing anything if just a dry run $opt_dry_run || { darwin_orig_dir=`pwd` cd $my_xdir || exit $? darwin_archive=$my_xabs darwin_curdir=`pwd` func_basename "$darwin_archive" darwin_base_archive=$func_basename_result darwin_arches=`$LIPO -info "$darwin_archive" 2>/dev/null | $GREP Architectures 2>/dev/null || true` if test -n "$darwin_arches"; then darwin_arches=`$ECHO "$darwin_arches" | $SED -e 's/.*are://'` darwin_arch= func_verbose "$darwin_base_archive has multiple architectures $darwin_arches" for darwin_arch in $darwin_arches; do func_mkdir_p "unfat-$$/$darwin_base_archive-$darwin_arch" $LIPO -thin $darwin_arch -output "unfat-$$/$darwin_base_archive-$darwin_arch/$darwin_base_archive" "$darwin_archive" cd "unfat-$$/$darwin_base_archive-$darwin_arch" func_extract_an_archive "`pwd`" "$darwin_base_archive" cd "$darwin_curdir" $RM "unfat-$$/$darwin_base_archive-$darwin_arch/$darwin_base_archive" done # $darwin_arches ## Okay now we've a bunch of thin objects, gotta fatten them up :) darwin_filelist=`find unfat-$$ -type f -name \*.o -print -o -name \*.lo -print | $SED -e "$sed_basename" | sort -u` darwin_file= darwin_files= for darwin_file in $darwin_filelist; do darwin_files=`find unfat-$$ -name $darwin_file -print | sort | $NL2SP` $LIPO -create -output "$darwin_file" $darwin_files done # $darwin_filelist $RM -rf unfat-$$ cd "$darwin_orig_dir" else cd $darwin_orig_dir func_extract_an_archive "$my_xdir" "$my_xabs" fi #
$darwin_arches } # !$opt_dry_run ;; *) func_extract_an_archive "$my_xdir" "$my_xabs" ;; esac my_oldobjs="$my_oldobjs "`find $my_xdir -name \*.$objext -print -o -name \*.lo -print | sort | $NL2SP` done func_extract_archives_result=$my_oldobjs } # func_emit_wrapper [arg=no] # # Emit a libtool wrapper script on stdout. # Don't directly open a file because we may want to # incorporate the script contents within a cygwin/mingw # wrapper executable. Must ONLY be called from within # func_mode_link because it depends on a number of variables # set therein. # # ARG is the value that the WRAPPER_SCRIPT_BELONGS_IN_OBJDIR # variable will take. If 'yes', then the emitted script # will assume that the directory where it is stored is # the $objdir directory. This is a cygwin/mingw-specific # behavior. func_emit_wrapper () { func_emit_wrapper_arg1=${1-no} $ECHO "\ #! $SHELL # $output - temporary wrapper script for $objdir/$outputname # Generated by $PROGRAM (GNU $PACKAGE) $VERSION # # The $output program cannot be directly executed until all the libtool # libraries that it depends on are installed. # # This wrapper script should never be moved out of the build directory. # If it is, it will not operate correctly. # Sed substitution that helps us do robust quoting. It backslashifies # metacharacters that are still active within double-quoted strings. sed_quote_subst='$sed_quote_subst' # Be Bourne compatible if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then emulate sh NULLCMD=: # Zsh 3.x and 4.x performs word splitting on \${1+\"\$@\"}, which # is contrary to our usage. Disable this feature. alias -g '\${1+\"\$@\"}'='\"\$@\"' setopt NO_GLOB_SUBST else case \`(set -o) 2>/dev/null\` in *posix*) set -o posix;; esac fi BIN_SH=xpg4; export BIN_SH # for Tru64 DUALCASE=1; export DUALCASE # for MKS sh # The HP-UX ksh and POSIX shell print the target directory to stdout # if CDPATH is set.
(unset CDPATH) >/dev/null 2>&1 && unset CDPATH relink_command=\"$relink_command\" # This environment variable determines our operation mode. if test \"\$libtool_install_magic\" = \"$magic\"; then # install mode needs the following variables: generated_by_libtool_version='$macro_version' notinst_deplibs='$notinst_deplibs' else # When we are sourced in execute mode, \$file and \$ECHO are already set. if test \"\$libtool_execute_magic\" != \"$magic\"; then file=\"\$0\"" qECHO=`$ECHO "$ECHO" | $SED "$sed_quote_subst"` $ECHO "\ # A function that is used when there is no print builtin or printf. func_fallback_echo () { eval 'cat <<_LTECHO_EOF \$1 _LTECHO_EOF' } ECHO=\"$qECHO\" fi # Very basic option parsing. These options are (a) specific to # the libtool wrapper, (b) are identical between the wrapper # /script/ and the wrapper /executable/ that is used only on # windows platforms, and (c) all begin with the string "--lt-" # (application programs are unlikely to have options that match # this pattern). # # There are only two supported options: --lt-debug and # --lt-dump-script. There is, deliberately, no --lt-help. # # The first argument to this parsing function should be the # script's $0 value, followed by "$@". lt_option_debug= func_parse_lt_options () { lt_script_arg0=\$0 shift for lt_opt do case \"\$lt_opt\" in --lt-debug) lt_option_debug=1 ;; --lt-dump-script) lt_dump_D=\`\$ECHO \"X\$lt_script_arg0\" | $SED -e 's/^X//' -e 's%/[^/]*$%%'\` test \"X\$lt_dump_D\" = \"X\$lt_script_arg0\" && lt_dump_D=. lt_dump_F=\`\$ECHO \"X\$lt_script_arg0\" | $SED -e 's/^X//' -e 's%^.*/%%'\` cat \"\$lt_dump_D/\$lt_dump_F\" exit 0 ;; --lt-*) \$ECHO \"Unrecognized --lt- option: '\$lt_opt'\" 1>&2 exit 1 ;; esac done # Print the debug banner immediately: if test -n \"\$lt_option_debug\"; then echo \"$outputname:$output:\$LINENO: libtool wrapper (GNU $PACKAGE) $VERSION\" 1>&2 fi } # Used when --lt-debug.
Prints its arguments to stdout # (redirection is the responsibility of the caller) func_lt_dump_args () { lt_dump_args_N=1; for lt_arg do \$ECHO \"$outputname:$output:\$LINENO: newargv[\$lt_dump_args_N]: \$lt_arg\" lt_dump_args_N=\`expr \$lt_dump_args_N + 1\` done } # Core function for launching the target application func_exec_program_core () { " case $host in # Backslashes separate directories on plain windows *-*-mingw | *-*-os2* | *-cegcc*) $ECHO "\ if test -n \"\$lt_option_debug\"; then \$ECHO \"$outputname:$output:\$LINENO: newargv[0]: \$progdir\\\\\$program\" 1>&2 func_lt_dump_args \${1+\"\$@\"} 1>&2 fi exec \"\$progdir\\\\\$program\" \${1+\"\$@\"} " ;; *) $ECHO "\ if test -n \"\$lt_option_debug\"; then \$ECHO \"$outputname:$output:\$LINENO: newargv[0]: \$progdir/\$program\" 1>&2 func_lt_dump_args \${1+\"\$@\"} 1>&2 fi exec \"\$progdir/\$program\" \${1+\"\$@\"} " ;; esac $ECHO "\ \$ECHO \"\$0: cannot exec \$program \$*\" 1>&2 exit 1 } # A function to encapsulate launching the target application # Strips options in the --lt-* namespace from \$@ and # launches target application with the remaining arguments. func_exec_program () { case \" \$* \" in *\\ --lt-*) for lt_wr_arg do case \$lt_wr_arg in --lt-*) ;; *) set x \"\$@\" \"\$lt_wr_arg\"; shift;; esac shift done ;; esac func_exec_program_core \${1+\"\$@\"} } # Parse options func_parse_lt_options \"\$0\" \${1+\"\$@\"} # Find the directory that this script lives in. thisdir=\`\$ECHO \"\$file\" | $SED 's%/[^/]*$%%'\` test \"x\$thisdir\" = \"x\$file\" && thisdir=. # Follow symbolic links until we get to the real thisdir. file=\`ls -ld \"\$file\" | $SED -n 's/.*-> //p'\` while test -n \"\$file\"; do destdir=\`\$ECHO \"\$file\" | $SED 's%/[^/]*\$%%'\` # If there was a directory component, then change thisdir.
if test \"x\$destdir\" != \"x\$file\"; then case \"\$destdir\" in [\\\\/]* | [A-Za-z]:[\\\\/]*) thisdir=\"\$destdir\" ;; *) thisdir=\"\$thisdir/\$destdir\" ;; esac fi file=\`\$ECHO \"\$file\" | $SED 's%^.*/%%'\` file=\`ls -ld \"\$thisdir/\$file\" | $SED -n 's/.*-> //p'\` done # Usually 'no', except on cygwin/mingw when embedded into # the cwrapper. WRAPPER_SCRIPT_BELONGS_IN_OBJDIR=$func_emit_wrapper_arg1 if test \"\$WRAPPER_SCRIPT_BELONGS_IN_OBJDIR\" = \"yes\"; then # special case for '.' if test \"\$thisdir\" = \".\"; then thisdir=\`pwd\` fi # remove .libs from thisdir case \"\$thisdir\" in *[\\\\/]$objdir ) thisdir=\`\$ECHO \"\$thisdir\" | $SED 's%[\\\\/][^\\\\/]*$%%'\` ;; $objdir ) thisdir=. ;; esac fi # Try to get the absolute directory name. absdir=\`cd \"\$thisdir\" && pwd\` test -n \"\$absdir\" && thisdir=\"\$absdir\" " if test yes = "$fast_install"; then $ECHO "\ program=lt-'$outputname'$exeext progdir=\"\$thisdir/$objdir\" if test ! -f \"\$progdir/\$program\" || { file=\`ls -1dt \"\$progdir/\$program\" \"\$progdir/../\$program\" 2>/dev/null | $SED 1q\`; \\ test \"X\$file\" != \"X\$progdir/\$program\"; }; then file=\"\$\$-\$program\" if test ! -d \"\$progdir\"; then $MKDIR \"\$progdir\" else $RM \"\$progdir/\$file\" fi" $ECHO "\ # relink executable if necessary if test -n \"\$relink_command\"; then if relink_command_output=\`eval \$relink_command 2>&1\`; then : else \$ECHO \"\$relink_command_output\" >&2 $RM \"\$progdir/\$file\" exit 1 fi fi $MV \"\$progdir/\$file\" \"\$progdir/\$program\" 2>/dev/null || { $RM \"\$progdir/\$program\"; $MV \"\$progdir/\$file\" \"\$progdir/\$program\"; } $RM \"\$progdir/\$file\" fi" else $ECHO "\ program='$outputname' progdir=\"\$thisdir/$objdir\" " fi $ECHO "\ if test -f \"\$progdir/\$program\"; then" # fixup the dll searchpath if we need to. # # Fix the DLL searchpath if we need to. Do this before prepending # to shlibpath, because on Windows, both are PATH and uninstalled # libraries must come first.
# Tail of func_emit_wrapper: inject $dllsearchpath into the emitted
# script's PATH and prepend $temp_rpath to $shlibpath_var (when
# shlibpath_overrides_runpath=yes) before exec'ing the real program.
# Then func_emit_cwrapperexe_src emits, via here-docs, the complete C
# source of the cygwin/mingw wrapper executable (path search, symlink
# chasing, environment setup, argv quoting for CreateProcess via
# prepare_spawn, and lt_dump_script embedding the wrapper script produced
# by "func_emit_wrapper yes"); finally func_win32_import_lib_p tests for
# "*import*" in the $file_magic_cmd output.  NOTE(review): vendored GNU
# libtool code; the here-doc bodies are program output (generated C), so
# no comments could be inserted inside them -- all original lines are
# preserved byte-for-byte.
if test -n "$dllsearchpath"; then $ECHO "\ # Add the dll search path components to the executable PATH PATH=$dllsearchpath:\$PATH " fi # Export our shlibpath_var if we have one. if test yes = "$shlibpath_overrides_runpath" && test -n "$shlibpath_var" && test -n "$temp_rpath"; then $ECHO "\ # Add our own library path to $shlibpath_var $shlibpath_var=\"$temp_rpath\$$shlibpath_var\" # Some systems cannot cope with colon-terminated $shlibpath_var # The second colon is a workaround for a bug in BeOS R4 sed $shlibpath_var=\`\$ECHO \"\$$shlibpath_var\" | $SED 's/::*\$//'\` export $shlibpath_var " fi $ECHO "\ if test \"\$libtool_execute_magic\" != \"$magic\"; then # Run the actual program with our arguments. func_exec_program \${1+\"\$@\"} fi else # The program doesn't exist. \$ECHO \"\$0: error: '\$progdir/\$program' does not exist\" 1>&2 \$ECHO \"This script is just a wrapper for \$program.\" 1>&2 \$ECHO \"See the $PACKAGE documentation for more information.\" 1>&2 exit 1 fi fi\ " } # func_emit_cwrapperexe_src # emit the source code for a wrapper executable on stdout # Must ONLY be called from within func_mode_link because # it depends on a number of variable set therein. func_emit_cwrapperexe_src () { cat < #include #ifdef _MSC_VER # include # include # include #else # include # include # ifdef __CYGWIN__ # include # endif #endif #include #include #include #include #include #include #include #include #define STREQ(s1, s2) (strcmp ((s1), (s2)) == 0) /* declarations of non-ANSI functions */ #if defined __MINGW32__ # ifdef __STRICT_ANSI__ int _putenv (const char *); # endif #elif defined __CYGWIN__ # ifdef __STRICT_ANSI__ char *realpath (const char *, char *); int putenv (char *); int setenv (const char *, const char *, int); # endif /* #elif defined other_platform || defined ...
*/ #endif /* portability defines, excluding path handling macros */ #if defined _MSC_VER # define setmode _setmode # define stat _stat # define chmod _chmod # define getcwd _getcwd # define putenv _putenv # define S_IXUSR _S_IEXEC #elif defined __MINGW32__ # define setmode _setmode # define stat _stat # define chmod _chmod # define getcwd _getcwd # define putenv _putenv #elif defined __CYGWIN__ # define HAVE_SETENV # define FOPEN_WB "wb" /* #elif defined other platforms ... */ #endif #if defined PATH_MAX # define LT_PATHMAX PATH_MAX #elif defined MAXPATHLEN # define LT_PATHMAX MAXPATHLEN #else # define LT_PATHMAX 1024 #endif #ifndef S_IXOTH # define S_IXOTH 0 #endif #ifndef S_IXGRP # define S_IXGRP 0 #endif /* path handling portability macros */ #ifndef DIR_SEPARATOR # define DIR_SEPARATOR '/' # define PATH_SEPARATOR ':' #endif #if defined _WIN32 || defined __MSDOS__ || defined __DJGPP__ || \ defined __OS2__ # define HAVE_DOS_BASED_FILE_SYSTEM # define FOPEN_WB "wb" # ifndef DIR_SEPARATOR_2 # define DIR_SEPARATOR_2 '\\' # endif # ifndef PATH_SEPARATOR_2 # define PATH_SEPARATOR_2 ';' # endif #endif #ifndef DIR_SEPARATOR_2 # define IS_DIR_SEPARATOR(ch) ((ch) == DIR_SEPARATOR) #else /* DIR_SEPARATOR_2 */ # define IS_DIR_SEPARATOR(ch) \ (((ch) == DIR_SEPARATOR) || ((ch) == DIR_SEPARATOR_2)) #endif /* DIR_SEPARATOR_2 */ #ifndef PATH_SEPARATOR_2 # define IS_PATH_SEPARATOR(ch) ((ch) == PATH_SEPARATOR) #else /* PATH_SEPARATOR_2 */ # define IS_PATH_SEPARATOR(ch) ((ch) == PATH_SEPARATOR_2) #endif /* PATH_SEPARATOR_2 */ #ifndef FOPEN_WB # define FOPEN_WB "w" #endif #ifndef _O_BINARY # define _O_BINARY 0 #endif #define XMALLOC(type, num) ((type *) xmalloc ((num) * sizeof(type))) #define XFREE(stale) do { \ if (stale) { free (stale); stale = 0; } \ } while (0) #if defined LT_DEBUGWRAPPER static int lt_debug = 1; #else static int lt_debug = 0; #endif const char *program_name = "libtool-wrapper"; /* in case xstrdup fails */ void *xmalloc (size_t num); char *xstrdup (const char
*string); const char *base_name (const char *name); char *find_executable (const char *wrapper); char *chase_symlinks (const char *pathspec); int make_executable (const char *path); int check_executable (const char *path); char *strendzap (char *str, const char *pat); void lt_debugprintf (const char *file, int line, const char *fmt, ...); void lt_fatal (const char *file, int line, const char *message, ...); static const char *nonnull (const char *s); static const char *nonempty (const char *s); void lt_setenv (const char *name, const char *value); char *lt_extend_str (const char *orig_value, const char *add, int to_end); void lt_update_exe_path (const char *name, const char *value); void lt_update_lib_path (const char *name, const char *value); char **prepare_spawn (char **argv); void lt_dump_script (FILE *f); EOF cat <= 0) && (st.st_mode & (S_IXUSR | S_IXGRP | S_IXOTH))) return 1; else return 0; } int make_executable (const char *path) { int rval = 0; struct stat st; lt_debugprintf (__FILE__, __LINE__, "(make_executable): %s\n", nonempty (path)); if ((!path) || (!*path)) return 0; if (stat (path, &st) >= 0) { rval = chmod (path, st.st_mode | S_IXOTH | S_IXGRP | S_IXUSR); } return rval; } /* Searches for the full path of the wrapper. Returns newly allocated full path name if found, NULL otherwise Does not chase symlinks, even on platforms that support them. */ char * find_executable (const char *wrapper) { int has_slash = 0; const char *p; const char *p_next; /* static buffer for getcwd */ char tmp[LT_PATHMAX + 1]; size_t tmp_len; char *concat_name; lt_debugprintf (__FILE__, __LINE__, "(find_executable): %s\n", nonempty (wrapper)); if ((wrapper == NULL) || (*wrapper == '\0')) return NULL; /* Absolute path?
*/ #if defined HAVE_DOS_BASED_FILE_SYSTEM if (isalpha ((unsigned char) wrapper[0]) && wrapper[1] == ':') { concat_name = xstrdup (wrapper); if (check_executable (concat_name)) return concat_name; XFREE (concat_name); } else { #endif if (IS_DIR_SEPARATOR (wrapper[0])) { concat_name = xstrdup (wrapper); if (check_executable (concat_name)) return concat_name; XFREE (concat_name); } #if defined HAVE_DOS_BASED_FILE_SYSTEM } #endif for (p = wrapper; *p; p++) if (*p == '/') { has_slash = 1; break; } if (!has_slash) { /* no slashes; search PATH */ const char *path = getenv ("PATH"); if (path != NULL) { for (p = path; *p; p = p_next) { const char *q; size_t p_len; for (q = p; *q; q++) if (IS_PATH_SEPARATOR (*q)) break; p_len = (size_t) (q - p); p_next = (*q == '\0' ? q : q + 1); if (p_len == 0) { /* empty path: current directory */ if (getcwd (tmp, LT_PATHMAX) == NULL) lt_fatal (__FILE__, __LINE__, "getcwd failed: %s", nonnull (strerror (errno))); tmp_len = strlen (tmp); concat_name = XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1); memcpy (concat_name, tmp, tmp_len); concat_name[tmp_len] = '/'; strcpy (concat_name + tmp_len + 1, wrapper); } else { concat_name = XMALLOC (char, p_len + 1 + strlen (wrapper) + 1); memcpy (concat_name, p, p_len); concat_name[p_len] = '/'; strcpy (concat_name + p_len + 1, wrapper); } if (check_executable (concat_name)) return concat_name; XFREE (concat_name); } } /* not found in PATH; assume curdir */ } /* Relative path | not found in path: prepend cwd */ if (getcwd (tmp, LT_PATHMAX) == NULL) lt_fatal (__FILE__, __LINE__, "getcwd failed: %s", nonnull (strerror (errno))); tmp_len = strlen (tmp); concat_name = XMALLOC (char, tmp_len + 1 + strlen (wrapper) + 1); memcpy (concat_name, tmp, tmp_len); concat_name[tmp_len] = '/'; strcpy (concat_name + tmp_len + 1, wrapper); if (check_executable (concat_name)) return concat_name; XFREE (concat_name); return NULL; } char * chase_symlinks (const char *pathspec) { #ifndef S_ISLNK return xstrdup
(pathspec); #else char buf[LT_PATHMAX]; struct stat s; char *tmp_pathspec = xstrdup (pathspec); char *p; int has_symlinks = 0; while (strlen (tmp_pathspec) && !has_symlinks) { lt_debugprintf (__FILE__, __LINE__, "checking path component for symlinks: %s\n", tmp_pathspec); if (lstat (tmp_pathspec, &s) == 0) { if (S_ISLNK (s.st_mode) != 0) { has_symlinks = 1; break; } /* search backwards for last DIR_SEPARATOR */ p = tmp_pathspec + strlen (tmp_pathspec) - 1; while ((p > tmp_pathspec) && (!IS_DIR_SEPARATOR (*p))) p--; if ((p == tmp_pathspec) && (!IS_DIR_SEPARATOR (*p))) { /* no more DIR_SEPARATORS left */ break; } *p = '\0'; } else { lt_fatal (__FILE__, __LINE__, "error accessing file \"%s\": %s", tmp_pathspec, nonnull (strerror (errno))); } } XFREE (tmp_pathspec); if (!has_symlinks) { return xstrdup (pathspec); } tmp_pathspec = realpath (pathspec, buf); if (tmp_pathspec == 0) { lt_fatal (__FILE__, __LINE__, "could not follow symlinks for %s", pathspec); } return xstrdup (tmp_pathspec); #endif } char * strendzap (char *str, const char *pat) { size_t len, patlen; assert (str != NULL); assert (pat != NULL); len = strlen (str); patlen = strlen (pat); if (patlen <= len) { str += len - patlen; if (STREQ (str, pat)) *str = '\0'; } return str; } void lt_debugprintf (const char *file, int line, const char *fmt, ...) { va_list args; if (lt_debug) { (void) fprintf (stderr, "%s:%s:%d: ", program_name, file, line); va_start (args, fmt); (void) vfprintf (stderr, fmt, args); va_end (args); } } static void lt_error_core (int exit_status, const char *file, int line, const char *mode, const char *message, va_list ap) { fprintf (stderr, "%s:%s:%d: %s: ", program_name, file, line, mode); vfprintf (stderr, message, ap); fprintf (stderr, ".\n"); if (exit_status >= 0) exit (exit_status); } void lt_fatal (const char *file, int line, const char *message, ...)
{ va_list ap; va_start (ap, message); lt_error_core (EXIT_FAILURE, file, line, "FATAL", message, ap); va_end (ap); } static const char * nonnull (const char *s) { return s ? s : "(null)"; } static const char * nonempty (const char *s) { return (s && !*s) ? "(empty)" : nonnull (s); } void lt_setenv (const char *name, const char *value) { lt_debugprintf (__FILE__, __LINE__, "(lt_setenv) setting '%s' to '%s'\n", nonnull (name), nonnull (value)); { #ifdef HAVE_SETENV /* always make a copy, for consistency with !HAVE_SETENV */ char *str = xstrdup (value); setenv (name, str, 1); #else size_t len = strlen (name) + 1 + strlen (value) + 1; char *str = XMALLOC (char, len); sprintf (str, "%s=%s", name, value); if (putenv (str) != EXIT_SUCCESS) { XFREE (str); } #endif } } char * lt_extend_str (const char *orig_value, const char *add, int to_end) { char *new_value; if (orig_value && *orig_value) { size_t orig_value_len = strlen (orig_value); size_t add_len = strlen (add); new_value = XMALLOC (char, add_len + orig_value_len + 1); if (to_end) { strcpy (new_value, orig_value); strcpy (new_value + orig_value_len, add); } else { strcpy (new_value, add); strcpy (new_value + add_len, orig_value); } } else { new_value = xstrdup (add); } return new_value; } void lt_update_exe_path (const char *name, const char *value) { lt_debugprintf (__FILE__, __LINE__, "(lt_update_exe_path) modifying '%s' by prepending '%s'\n", nonnull (name), nonnull (value)); if (name && *name && value && *value) { char *new_value = lt_extend_str (getenv (name), value, 0); /* some systems can't cope with a ':'-terminated path #' */ size_t len = strlen (new_value); while ((len > 0) && IS_PATH_SEPARATOR (new_value[len-1])) { new_value[--len] = '\0'; } lt_setenv (name, new_value); XFREE (new_value); } } void lt_update_lib_path (const char *name, const char *value) { lt_debugprintf (__FILE__, __LINE__, "(lt_update_lib_path) modifying '%s' by prepending '%s'\n", nonnull (name), nonnull (value)); if (name && *name &&
value && *value) { char *new_value = lt_extend_str (getenv (name), value, 0); lt_setenv (name, new_value); XFREE (new_value); } } EOF case $host_os in mingw*) cat <<"EOF" /* Prepares an argument vector before calling spawn(). Note that spawn() does not by itself call the command interpreter (getenv ("COMSPEC") != NULL ? getenv ("COMSPEC") : ({ OSVERSIONINFO v; v.dwOSVersionInfoSize = sizeof(OSVERSIONINFO); GetVersionEx(&v); v.dwPlatformId == VER_PLATFORM_WIN32_NT; }) ? "cmd.exe" : "command.com"). Instead it simply concatenates the arguments, separated by ' ', and calls CreateProcess(). We must quote the arguments since Win32 CreateProcess() interprets characters like ' ', '\t', '\\', '"' (but not '<' and '>') in a special way: - Space and tab are interpreted as delimiters. They are not treated as delimiters if they are surrounded by double quotes: "...". - Unescaped double quotes are removed from the input. Their only effect is that within double quotes, space and tab are treated like normal characters. - Backslashes not followed by double quotes are not special. - But 2*n+1 backslashes followed by a double quote become n backslashes followed by a double quote (n >= 0): \" -> " \\\" -> \" \\\\\" -> \\" */ #define SHELL_SPECIAL_CHARS "\"\\ \001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037" #define SHELL_SPACE_CHARS " \001\002\003\004\005\006\007\010\011\012\013\014\015\016\017\020\021\022\023\024\025\026\027\030\031\032\033\034\035\036\037" char ** prepare_spawn (char **argv) { size_t argc; char **new_argv; size_t i; /* Count number of arguments. */ for (argc = 0; argv[argc] != NULL; argc++) ; /* Allocate new argument vector. */ new_argv = XMALLOC (char *, argc + 1); /* Put quoted arguments into the new argument vector.
*/ for (i = 0; i < argc; i++) { const char *string = argv[i]; if (string[0] == '\0') new_argv[i] = xstrdup ("\"\""); else if (strpbrk (string, SHELL_SPECIAL_CHARS) != NULL) { int quote_around = (strpbrk (string, SHELL_SPACE_CHARS) != NULL); size_t length; unsigned int backslashes; const char *s; char *quoted_string; char *p; length = 0; backslashes = 0; if (quote_around) length++; for (s = string; *s != '\0'; s++) { char c = *s; if (c == '"') length += backslashes + 1; length++; if (c == '\\') backslashes++; else backslashes = 0; } if (quote_around) length += backslashes + 1; quoted_string = XMALLOC (char, length + 1); p = quoted_string; backslashes = 0; if (quote_around) *p++ = '"'; for (s = string; *s != '\0'; s++) { char c = *s; if (c == '"') { unsigned int j; for (j = backslashes + 1; j > 0; j--) *p++ = '\\'; } *p++ = c; if (c == '\\') backslashes++; else backslashes = 0; } if (quote_around) { unsigned int j; for (j = backslashes; j > 0; j--) *p++ = '\\'; *p++ = '"'; } *p = '\0'; new_argv[i] = quoted_string; } else new_argv[i] = (char *) string; } new_argv[argc] = NULL; return new_argv; } EOF ;; esac cat <<"EOF" void lt_dump_script (FILE* f) { EOF func_emit_wrapper yes | $SED -n -e ' s/^\(.\{79\}\)\(..*\)/\1\ \2/ h s/\([\\"]\)/\\\1/g s/$/\\n/ s/\([^\n]*\).*/ fputs ("\1", f);/p g D' cat <<"EOF" } EOF } # end: func_emit_cwrapperexe_src # func_win32_import_lib_p ARG # True if ARG is an import lib, as indicated by $file_magic_cmd func_win32_import_lib_p () { $debug_cmd case `eval $file_magic_cmd \"\$1\" 2>/dev/null | $SED -e 10q` in *import*) : ;; *) false ;; esac } # func_suncc_cstd_abi # !!ONLY CALL THIS FOR SUN CC AFTER $compile_command IS FULLY EXPANDED!! # Several compiler flags select an ABI that is incompatible with the # Cstd library. Avoid specifying it if any are in CXXFLAGS.
func_suncc_cstd_abi () { $debug_cmd case " $compile_command " in *" -compat=g "*|*\ -std=c++[0-9][0-9]\ *|*" -library=stdcxx4 "*|*" -library=stlport4 "*) suncc_use_cstd_abi=no ;; *) suncc_use_cstd_abi=yes ;; esac } # func_mode_link arg... func_mode_link () { $debug_cmd case $host in *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*) # It is impossible to link a dll without this setting, and # we shouldn't force the makefile maintainer to figure out # what system we are compiling for in order to pass an extra # flag for every libtool invocation. # allow_undefined=no # FIXME: Unfortunately, there are problems with the above when trying # to make a dll that has undefined symbols, in which case not # even a static library is built. For now, we need to specify # -no-undefined on the libtool link line when we can be certain # that all symbols are satisfied, otherwise we get a static library. allow_undefined=yes ;; *) allow_undefined=yes ;; esac libtool_args=$nonopt base_compile="$nonopt $@" compile_command=$nonopt finalize_command=$nonopt compile_rpath= finalize_rpath= compile_shlibpath= finalize_shlibpath= convenience= old_convenience= deplibs= old_deplibs= compiler_flags= linker_flags= dllsearchpath= lib_search_path=`pwd` inst_prefix_dir= new_inherited_linker_flags= avoid_version=no bindir= dlfiles= dlprefiles= dlself=no export_dynamic=no export_symbols= export_symbols_regex= generated= libobjs= ltlibs= module=no no_install=no objs= os2dllname= non_pic_objects= precious_files_regex= prefer_static_libs=no preload=false prev= prevarg= release= rpath= xrpath= perm_rpath= temp_rpath= thread_safe=no vinfo= vinfo_number=no weak_libs= single_module=$wl-single_module func_infer_tag $base_compile # We need to know -static, to get the right output filenames. 
for arg do case $arg in -shared) test yes != "$build_libtool_libs" \ && func_fatal_configuration "cannot build a shared library" build_old_libs=no break ;; -all-static | -static | -static-libtool-libs) case $arg in -all-static) if test yes = "$build_libtool_libs" && test -z "$link_static_flag"; then func_warning "complete static linking is impossible in this configuration" fi if test -n "$link_static_flag"; then dlopen_self=$dlopen_self_static fi prefer_static_libs=yes ;; -static) if test -z "$pic_flag" && test -n "$link_static_flag"; then dlopen_self=$dlopen_self_static fi prefer_static_libs=built ;; -static-libtool-libs) if test -z "$pic_flag" && test -n "$link_static_flag"; then dlopen_self=$dlopen_self_static fi prefer_static_libs=yes ;; esac build_libtool_libs=no build_old_libs=yes break ;; esac done # See if our shared archives depend on static archives. test -n "$old_archive_from_new_cmds" && build_old_libs=yes # Go through the arguments, transforming them on the way. while test "$#" -gt 0; do arg=$1 shift func_quote_for_eval "$arg" qarg=$func_quote_for_eval_unquoted_result func_append libtool_args " $func_quote_for_eval_result" # If the previous option needs an argument, assign it. if test -n "$prev"; then case $prev in output) func_append compile_command " @OUTPUT@" func_append finalize_command " @OUTPUT@" ;; esac case $prev in bindir) bindir=$arg prev= continue ;; dlfiles|dlprefiles) $preload || { # Add the symbol object into the linking commands. func_append compile_command " @SYMFILE@" func_append finalize_command " @SYMFILE@" preload=: } case $arg in *.la | *.lo) ;; # We handle these cases below. 
force) if test no = "$dlself"; then dlself=needless export_dynamic=yes fi prev= continue ;; self) if test dlprefiles = "$prev"; then dlself=yes elif test dlfiles = "$prev" && test yes != "$dlopen_self"; then dlself=yes else dlself=needless export_dynamic=yes fi prev= continue ;; *) if test dlfiles = "$prev"; then func_append dlfiles " $arg" else func_append dlprefiles " $arg" fi prev= continue ;; esac ;; expsyms) export_symbols=$arg test -f "$arg" \ || func_fatal_error "symbol file '$arg' does not exist" prev= continue ;; expsyms_regex) export_symbols_regex=$arg prev= continue ;; framework) case $host in *-*-darwin*) case "$deplibs " in *" $qarg.ltframework "*) ;; *) func_append deplibs " $qarg.ltframework" # this is fixed later ;; esac ;; esac prev= continue ;; inst_prefix) inst_prefix_dir=$arg prev= continue ;; mllvm) # Clang does not use LLVM to link, so we can simply discard any # '-mllvm $arg' options when doing the link step. prev= continue ;; objectlist) if test -f "$arg"; then save_arg=$arg moreargs= for fil in `cat "$save_arg"` do # func_append moreargs " $fil" arg=$fil # A libtool-controlled object. # Check to see that this really is a libtool object. if func_lalib_unsafe_p "$arg"; then pic_object= non_pic_object= # Read the .lo file func_source "$arg" if test -z "$pic_object" || test -z "$non_pic_object" || test none = "$pic_object" && test none = "$non_pic_object"; then func_fatal_error "cannot find name of object for '$arg'" fi # Extract subdirectory from the argument. func_dirname "$arg" "/" "" xdir=$func_dirname_result if test none != "$pic_object"; then # Prepend the subdirectory the object is found in. pic_object=$xdir$pic_object if test dlfiles = "$prev"; then if test yes = "$build_libtool_libs" && test yes = "$dlopen_support"; then func_append dlfiles " $pic_object" prev= continue else # If libtool objects are unsupported, then we need to preload. prev=dlprefiles fi fi # CHECK ME: I think I busted this. 
-Ossama if test dlprefiles = "$prev"; then # Preload the old-style object. func_append dlprefiles " $pic_object" prev= fi # A PIC object. func_append libobjs " $pic_object" arg=$pic_object fi # Non-PIC object. if test none != "$non_pic_object"; then # Prepend the subdirectory the object is found in. non_pic_object=$xdir$non_pic_object # A standard non-PIC object func_append non_pic_objects " $non_pic_object" if test -z "$pic_object" || test none = "$pic_object"; then arg=$non_pic_object fi else # If the PIC object exists, use it instead. # $xdir was prepended to $pic_object above. non_pic_object=$pic_object func_append non_pic_objects " $non_pic_object" fi else # Only an error if not doing a dry-run. if $opt_dry_run; then # Extract subdirectory from the argument. func_dirname "$arg" "/" "" xdir=$func_dirname_result func_lo2o "$arg" pic_object=$xdir$objdir/$func_lo2o_result non_pic_object=$xdir$func_lo2o_result func_append libobjs " $pic_object" func_append non_pic_objects " $non_pic_object" else func_fatal_error "'$arg' is not a valid libtool object" fi fi done else func_fatal_error "link input file '$arg' does not exist" fi arg=$save_arg prev= continue ;; os2dllname) os2dllname=$arg prev= continue ;; precious_regex) precious_files_regex=$arg prev= continue ;; release) release=-$arg prev= continue ;; rpath | xrpath) # We need an absolute path. 
case $arg in [\\/]* | [A-Za-z]:[\\/]*) ;; *) func_fatal_error "only absolute run-paths are allowed" ;; esac if test rpath = "$prev"; then case "$rpath " in *" $arg "*) ;; *) func_append rpath " $arg" ;; esac else case "$xrpath " in *" $arg "*) ;; *) func_append xrpath " $arg" ;; esac fi prev= continue ;; shrext) shrext_cmds=$arg prev= continue ;; weak) func_append weak_libs " $arg" prev= continue ;; xcclinker) func_append linker_flags " $qarg" func_append compiler_flags " $qarg" prev= func_append compile_command " $qarg" func_append finalize_command " $qarg" continue ;; xcompiler) func_append compiler_flags " $qarg" prev= func_append compile_command " $qarg" func_append finalize_command " $qarg" continue ;; xlinker) func_append linker_flags " $qarg" func_append compiler_flags " $wl$qarg" prev= func_append compile_command " $wl$qarg" func_append finalize_command " $wl$qarg" continue ;; *) eval "$prev=\"\$arg\"" prev= continue ;; esac fi # test -n "$prev" prevarg=$arg case $arg in -all-static) if test -n "$link_static_flag"; then # See comment for -static flag below, for more details. func_append compile_command " $link_static_flag" func_append finalize_command " $link_static_flag" fi continue ;; -allow-undefined) # FIXME: remove this flag sometime in the future. 
func_fatal_error "'-allow-undefined' must not be used because it is the default" ;; -avoid-version) avoid_version=yes continue ;; -bindir) prev=bindir continue ;; -dlopen) prev=dlfiles continue ;; -dlpreopen) prev=dlprefiles continue ;; -export-dynamic) export_dynamic=yes continue ;; -export-symbols | -export-symbols-regex) if test -n "$export_symbols" || test -n "$export_symbols_regex"; then func_fatal_error "more than one -exported-symbols argument is not allowed" fi if test X-export-symbols = "X$arg"; then prev=expsyms else prev=expsyms_regex fi continue ;; -framework) prev=framework continue ;; -inst-prefix-dir) prev=inst_prefix continue ;; # The native IRIX linker understands -LANG:*, -LIST:* and -LNO:* # so, if we see these flags be careful not to treat them like -L -L[A-Z][A-Z]*:*) case $with_gcc/$host in no/*-*-irix* | /*-*-irix*) func_append compile_command " $arg" func_append finalize_command " $arg" ;; esac continue ;; -L*) func_stripname "-L" '' "$arg" if test -z "$func_stripname_result"; then if test "$#" -gt 0; then func_fatal_error "require no space between '-L' and '$1'" else func_fatal_error "need path for '-L' option" fi fi func_resolve_sysroot "$func_stripname_result" dir=$func_resolve_sysroot_result # We need an absolute path. 
case $dir in [\\/]* | [A-Za-z]:[\\/]*) ;; *) absdir=`cd "$dir" && pwd` test -z "$absdir" && \ func_fatal_error "cannot determine absolute directory name of '$dir'" dir=$absdir ;; esac case "$deplibs " in *" -L$dir "* | *" $arg "*) # Will only happen for absolute or sysroot arguments ;; *) # Preserve sysroot, but never include relative directories case $dir in [\\/]* | [A-Za-z]:[\\/]* | =*) func_append deplibs " $arg" ;; *) func_append deplibs " -L$dir" ;; esac func_append lib_search_path " $dir" ;; esac case $host in *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*) testbindir=`$ECHO "$dir" | $SED 's*/lib$*/bin*'` case :$dllsearchpath: in *":$dir:"*) ;; ::) dllsearchpath=$dir;; *) func_append dllsearchpath ":$dir";; esac case :$dllsearchpath: in *":$testbindir:"*) ;; ::) dllsearchpath=$testbindir;; *) func_append dllsearchpath ":$testbindir";; esac ;; esac continue ;; -l*) if test X-lc = "X$arg" || test X-lm = "X$arg"; then case $host in *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-beos* | *-cegcc* | *-*-haiku*) # These systems don't actually have a C or math library (as such) continue ;; *-*-os2*) # These systems don't actually have a C library (as such) test X-lc = "X$arg" && continue ;; *-*-openbsd* | *-*-freebsd* | *-*-dragonfly* | *-*-bitrig*) # Do not include libc due to us having libc/libc_r. test X-lc = "X$arg" && continue ;; *-*-rhapsody* | *-*-darwin1.[012]) # Rhapsody C and math libraries are in the System framework func_append deplibs " System.ltframework" continue ;; *-*-sco3.2v5* | *-*-sco5v6*) # Causes problems with __ctype test X-lc = "X$arg" && continue ;; *-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*) # Compiler inserts libc in the correct place for threads to work test X-lc = "X$arg" && continue ;; esac elif test X-lc_r = "X$arg"; then case $host in *-*-openbsd* | *-*-freebsd* | *-*-dragonfly* | *-*-bitrig*) # Do not include libc_r directly, use -pthread flag. 
continue ;; esac fi func_append deplibs " $arg" continue ;; -mllvm) prev=mllvm continue ;; -module) module=yes continue ;; # Tru64 UNIX uses -model [arg] to determine the layout of C++ # classes, name mangling, and exception handling. # Darwin uses the -arch flag to determine output architecture. -model|-arch|-isysroot|--sysroot) func_append compiler_flags " $arg" func_append compile_command " $arg" func_append finalize_command " $arg" prev=xcompiler continue ;; -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe \ |-threads|-fopenmp|-openmp|-mp|-xopenmp|-omp|-qsmp=*) func_append compiler_flags " $arg" func_append compile_command " $arg" func_append finalize_command " $arg" case "$new_inherited_linker_flags " in *" $arg "*) ;; * ) func_append new_inherited_linker_flags " $arg" ;; esac continue ;; -multi_module) single_module=$wl-multi_module continue ;; -no-fast-install) fast_install=no continue ;; -no-install) case $host in *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-darwin* | *-cegcc*) # The PATH hackery in wrapper scripts is required on Windows # and Darwin in order for the loader to find any dlls it needs. func_warning "'-no-install' is ignored for $host" func_warning "assuming '-no-fast-install' instead" fast_install=no ;; *) no_install=yes ;; esac continue ;; -no-undefined) allow_undefined=no continue ;; -objectlist) prev=objectlist continue ;; -os2dllname) prev=os2dllname continue ;; -o) prev=output ;; -precious-files-regex) prev=precious_regex continue ;; -release) prev=release continue ;; -rpath) prev=rpath continue ;; -R) prev=xrpath continue ;; -R*) func_stripname '-R' '' "$arg" dir=$func_stripname_result # We need an absolute path. 
case $dir in [\\/]* | [A-Za-z]:[\\/]*) ;; =*) func_stripname '=' '' "$dir" dir=$lt_sysroot$func_stripname_result ;; *) func_fatal_error "only absolute run-paths are allowed" ;; esac case "$xrpath " in *" $dir "*) ;; *) func_append xrpath " $dir" ;; esac continue ;; -shared) # The effects of -shared are defined in a previous loop. continue ;; -shrext) prev=shrext continue ;; -static | -static-libtool-libs) # The effects of -static are defined in a previous loop. # We used to do the same as -all-static on platforms that # didn't have a PIC flag, but the assumption that the effects # would be equivalent was wrong. It would break on at least # Digital Unix and AIX. continue ;; -thread-safe) thread_safe=yes continue ;; -version-info) prev=vinfo continue ;; -version-number) prev=vinfo vinfo_number=yes continue ;; -weak) prev=weak continue ;; -Wc,*) func_stripname '-Wc,' '' "$arg" args=$func_stripname_result arg= save_ifs=$IFS; IFS=, for flag in $args; do IFS=$save_ifs func_quote_for_eval "$flag" func_append arg " $func_quote_for_eval_result" func_append compiler_flags " $func_quote_for_eval_result" done IFS=$save_ifs func_stripname ' ' '' "$arg" arg=$func_stripname_result ;; -Wl,*) func_stripname '-Wl,' '' "$arg" args=$func_stripname_result arg= save_ifs=$IFS; IFS=, for flag in $args; do IFS=$save_ifs func_quote_for_eval "$flag" func_append arg " $wl$func_quote_for_eval_result" func_append compiler_flags " $wl$func_quote_for_eval_result" func_append linker_flags " $func_quote_for_eval_result" done IFS=$save_ifs func_stripname ' ' '' "$arg" arg=$func_stripname_result ;; -Xcompiler) prev=xcompiler continue ;; -Xlinker) prev=xlinker continue ;; -XCClinker) prev=xcclinker continue ;; # -msg_* for osf cc -msg_*) func_quote_for_eval "$arg" arg=$func_quote_for_eval_result ;; # Flags to be passed through unchanged, with rationale: # -64, -mips[0-9] enable 64-bit mode for the SGI compiler # -r[0-9][0-9]* specify processor for the SGI compiler # -xarch=*, -xtarget=* enable 64-bit 
mode for the Sun compiler # +DA*, +DD* enable 64-bit mode for the HP compiler # -q* compiler args for the IBM compiler # -m*, -t[45]*, -txscale* architecture-specific flags for GCC # -F/path path to uninstalled frameworks, gcc on darwin # -p, -pg, --coverage, -fprofile-* profiling flags for GCC # -fstack-protector* stack protector flags for GCC # @file GCC response files # -tp=* Portland pgcc target processor selection # --sysroot=* for sysroot support # -O*, -g*, -flto*, -fwhopr*, -fuse-linker-plugin GCC link-time optimization # -specs=* GCC specs files # -stdlib=* select c++ std lib with clang # -fsanitize=* Clang/GCC memory and address sanitizer -64|-mips[0-9]|-r[0-9][0-9]*|-xarch=*|-xtarget=*|+DA*|+DD*|-q*|-m*| \ -t[45]*|-txscale*|-p|-pg|--coverage|-fprofile-*|-F*|@*|-tp=*|--sysroot=*| \ -O*|-g*|-flto*|-fwhopr*|-fuse-linker-plugin|-fstack-protector*|-stdlib=*| \ -specs=*|-fsanitize=*) func_quote_for_eval "$arg" arg=$func_quote_for_eval_result func_append compile_command " $arg" func_append finalize_command " $arg" func_append compiler_flags " $arg" continue ;; -Z*) if test os2 = "`expr $host : '.*\(os2\)'`"; then # OS/2 uses -Zxxx to specify OS/2-specific options compiler_flags="$compiler_flags $arg" func_append compile_command " $arg" func_append finalize_command " $arg" case $arg in -Zlinker | -Zstack) prev=xcompiler ;; esac continue else # Otherwise treat like 'Some other compiler flag' below func_quote_for_eval "$arg" arg=$func_quote_for_eval_result fi ;; # Some other compiler flag. -* | +*) func_quote_for_eval "$arg" arg=$func_quote_for_eval_result ;; *.$objext) # A standard object. func_append objs " $arg" ;; *.lo) # A libtool-controlled object. # Check to see that this really is a libtool object. 
if func_lalib_unsafe_p "$arg"; then pic_object= non_pic_object= # Read the .lo file func_source "$arg" if test -z "$pic_object" || test -z "$non_pic_object" || test none = "$pic_object" && test none = "$non_pic_object"; then func_fatal_error "cannot find name of object for '$arg'" fi # Extract subdirectory from the argument. func_dirname "$arg" "/" "" xdir=$func_dirname_result test none = "$pic_object" || { # Prepend the subdirectory the object is found in. pic_object=$xdir$pic_object if test dlfiles = "$prev"; then if test yes = "$build_libtool_libs" && test yes = "$dlopen_support"; then func_append dlfiles " $pic_object" prev= continue else # If libtool objects are unsupported, then we need to preload. prev=dlprefiles fi fi # CHECK ME: I think I busted this. -Ossama if test dlprefiles = "$prev"; then # Preload the old-style object. func_append dlprefiles " $pic_object" prev= fi # A PIC object. func_append libobjs " $pic_object" arg=$pic_object } # Non-PIC object. if test none != "$non_pic_object"; then # Prepend the subdirectory the object is found in. non_pic_object=$xdir$non_pic_object # A standard non-PIC object func_append non_pic_objects " $non_pic_object" if test -z "$pic_object" || test none = "$pic_object"; then arg=$non_pic_object fi else # If the PIC object exists, use it instead. # $xdir was prepended to $pic_object above. non_pic_object=$pic_object func_append non_pic_objects " $non_pic_object" fi else # Only an error if not doing a dry-run. if $opt_dry_run; then # Extract subdirectory from the argument. func_dirname "$arg" "/" "" xdir=$func_dirname_result func_lo2o "$arg" pic_object=$xdir$objdir/$func_lo2o_result non_pic_object=$xdir$func_lo2o_result func_append libobjs " $pic_object" func_append non_pic_objects " $non_pic_object" else func_fatal_error "'$arg' is not a valid libtool object" fi fi ;; *.$libext) # An archive. func_append deplibs " $arg" func_append old_deplibs " $arg" continue ;; *.la) # A libtool-controlled library. 
func_resolve_sysroot "$arg" if test dlfiles = "$prev"; then # This library was specified with -dlopen. func_append dlfiles " $func_resolve_sysroot_result" prev= elif test dlprefiles = "$prev"; then # The library was specified with -dlpreopen. func_append dlprefiles " $func_resolve_sysroot_result" prev= else func_append deplibs " $func_resolve_sysroot_result" fi continue ;; # Some other compiler argument. *) # Unknown arguments in both finalize_command and compile_command need # to be aesthetically quoted because they are evaled later. func_quote_for_eval "$arg" arg=$func_quote_for_eval_result ;; esac # arg # Now actually substitute the argument into the commands. if test -n "$arg"; then func_append compile_command " $arg" func_append finalize_command " $arg" fi done # argument parsing loop test -n "$prev" && \ func_fatal_help "the '$prevarg' option requires an argument" if test yes = "$export_dynamic" && test -n "$export_dynamic_flag_spec"; then eval arg=\"$export_dynamic_flag_spec\" func_append compile_command " $arg" func_append finalize_command " $arg" fi oldlibs= # calculate the name of the file, without its directory func_basename "$output" outputname=$func_basename_result libobjs_save=$libobjs if test -n "$shlibpath_var"; then # get the directories listed in $shlibpath_var eval shlib_search_path=\`\$ECHO \"\$$shlibpath_var\" \| \$SED \'s/:/ /g\'\` else shlib_search_path= fi eval sys_lib_search_path=\"$sys_lib_search_path_spec\" eval sys_lib_dlsearch_path=\"$sys_lib_dlsearch_path_spec\" # Definition is injected by LT_CONFIG during libtool generation. func_munge_path_list sys_lib_dlsearch_path "$LT_SYS_LIBRARY_PATH" func_dirname "$output" "/" "" output_objdir=$func_dirname_result$objdir func_to_tool_file "$output_objdir/" tool_output_objdir=$func_to_tool_file_result # Create the object directory. 
func_mkdir_p "$output_objdir" # Determine the type of output case $output in "") func_fatal_help "you must specify an output file" ;; *.$libext) linkmode=oldlib ;; *.lo | *.$objext) linkmode=obj ;; *.la) linkmode=lib ;; *) linkmode=prog ;; # Anything else should be a program. esac specialdeplibs= libs= # Find all interdependent deplibs by searching for libraries # that are linked more than once (e.g. -la -lb -la) for deplib in $deplibs; do if $opt_preserve_dup_deps; then case "$libs " in *" $deplib "*) func_append specialdeplibs " $deplib" ;; esac fi func_append libs " $deplib" done if test lib = "$linkmode"; then libs="$predeps $libs $compiler_lib_search_path $postdeps" # Compute libraries that are listed more than once in $predeps # $postdeps and mark them as special (i.e., whose duplicates are # not to be eliminated). pre_post_deps= if $opt_duplicate_compiler_generated_deps; then for pre_post_dep in $predeps $postdeps; do case "$pre_post_deps " in *" $pre_post_dep "*) func_append specialdeplibs " $pre_post_deps" ;; esac func_append pre_post_deps " $pre_post_dep" done fi pre_post_deps= fi deplibs= newdependency_libs= newlib_search_path= need_relink=no # whether we're linking any uninstalled libtool libraries notinst_deplibs= # not-installed libtool libraries notinst_path= # paths that contain not-installed libtool libraries case $linkmode in lib) passes="conv dlpreopen link" for file in $dlfiles $dlprefiles; do case $file in *.la) ;; *) func_fatal_help "libraries can '-dlopen' only libtool libraries: $file" ;; esac done ;; prog) compile_deplibs= finalize_deplibs= alldeplibs=false newdlfiles= newdlprefiles= passes="conv scan dlopen dlpreopen link" ;; *) passes="conv" ;; esac for pass in $passes; do # The preopen pass in lib mode reverses $deplibs; put it back here # so that -L comes before libs that need it for instance... 
if test lib,link = "$linkmode,$pass"; then ## FIXME: Find the place where the list is rebuilt in the wrong ## order, and fix it there properly tmp_deplibs= for deplib in $deplibs; do tmp_deplibs="$deplib $tmp_deplibs" done deplibs=$tmp_deplibs fi if test lib,link = "$linkmode,$pass" || test prog,scan = "$linkmode,$pass"; then libs=$deplibs deplibs= fi if test prog = "$linkmode"; then case $pass in dlopen) libs=$dlfiles ;; dlpreopen) libs=$dlprefiles ;; link) libs="$deplibs %DEPLIBS%" test "X$link_all_deplibs" != Xno && libs="$libs $dependency_libs" ;; esac fi if test lib,dlpreopen = "$linkmode,$pass"; then # Collect and forward deplibs of preopened libtool libs for lib in $dlprefiles; do # Ignore non-libtool-libs dependency_libs= func_resolve_sysroot "$lib" case $lib in *.la) func_source "$func_resolve_sysroot_result" ;; esac # Collect preopened libtool deplibs, except any this library # has declared as weak libs for deplib in $dependency_libs; do func_basename "$deplib" deplib_base=$func_basename_result case " $weak_libs " in *" $deplib_base "*) ;; *) func_append deplibs " $deplib" ;; esac done done libs=$dlprefiles fi if test dlopen = "$pass"; then # Collect dlpreopened libraries save_deplibs=$deplibs deplibs= fi for deplib in $libs; do lib= found=false case $deplib in -mt|-mthreads|-kthread|-Kthread|-pthread|-pthreads|--thread-safe \ |-threads|-fopenmp|-openmp|-mp|-xopenmp|-omp|-qsmp=*) if test prog,link = "$linkmode,$pass"; then compile_deplibs="$deplib $compile_deplibs" finalize_deplibs="$deplib $finalize_deplibs" else func_append compiler_flags " $deplib" if test lib = "$linkmode"; then case "$new_inherited_linker_flags " in *" $deplib "*) ;; * ) func_append new_inherited_linker_flags " $deplib" ;; esac fi fi continue ;; -l*) if test lib != "$linkmode" && test prog != "$linkmode"; then func_warning "'-l' is ignored for archives/objects" continue fi func_stripname '-l' '' "$deplib" name=$func_stripname_result if test lib = "$linkmode"; then 
searchdirs="$newlib_search_path $lib_search_path $compiler_lib_search_dirs $sys_lib_search_path $shlib_search_path" else searchdirs="$newlib_search_path $lib_search_path $sys_lib_search_path $shlib_search_path" fi for searchdir in $searchdirs; do for search_ext in .la $std_shrext .so .a; do # Search the libtool library lib=$searchdir/lib$name$search_ext if test -f "$lib"; then if test .la = "$search_ext"; then found=: else found=false fi break 2 fi done done if $found; then # deplib is a libtool library # If $allow_libtool_libs_with_static_runtimes && $deplib is a stdlib, # We need to do some special things here, and not later. if test yes = "$allow_libtool_libs_with_static_runtimes"; then case " $predeps $postdeps " in *" $deplib "*) if func_lalib_p "$lib"; then library_names= old_library= func_source "$lib" for l in $old_library $library_names; do ll=$l done if test "X$ll" = "X$old_library"; then # only static version available found=false func_dirname "$lib" "" "." ladir=$func_dirname_result lib=$ladir/$old_library if test prog,link = "$linkmode,$pass"; then compile_deplibs="$deplib $compile_deplibs" finalize_deplibs="$deplib $finalize_deplibs" else deplibs="$deplib $deplibs" test lib = "$linkmode" && newdependency_libs="$deplib $newdependency_libs" fi continue fi fi ;; *) ;; esac fi else # deplib doesn't seem to be a libtool library if test prog,link = "$linkmode,$pass"; then compile_deplibs="$deplib $compile_deplibs" finalize_deplibs="$deplib $finalize_deplibs" else deplibs="$deplib $deplibs" test lib = "$linkmode" && newdependency_libs="$deplib $newdependency_libs" fi continue fi ;; # -l *.ltframework) if test prog,link = "$linkmode,$pass"; then compile_deplibs="$deplib $compile_deplibs" finalize_deplibs="$deplib $finalize_deplibs" else deplibs="$deplib $deplibs" if test lib = "$linkmode"; then case "$new_inherited_linker_flags " in *" $deplib "*) ;; * ) func_append new_inherited_linker_flags " $deplib" ;; esac fi fi continue ;; -L*) case $linkmode in lib) 
deplibs="$deplib $deplibs" test conv = "$pass" && continue newdependency_libs="$deplib $newdependency_libs" func_stripname '-L' '' "$deplib" func_resolve_sysroot "$func_stripname_result" func_append newlib_search_path " $func_resolve_sysroot_result" ;; prog) if test conv = "$pass"; then deplibs="$deplib $deplibs" continue fi if test scan = "$pass"; then deplibs="$deplib $deplibs" else compile_deplibs="$deplib $compile_deplibs" finalize_deplibs="$deplib $finalize_deplibs" fi func_stripname '-L' '' "$deplib" func_resolve_sysroot "$func_stripname_result" func_append newlib_search_path " $func_resolve_sysroot_result" ;; *) func_warning "'-L' is ignored for archives/objects" ;; esac # linkmode continue ;; # -L -R*) if test link = "$pass"; then func_stripname '-R' '' "$deplib" func_resolve_sysroot "$func_stripname_result" dir=$func_resolve_sysroot_result # Make sure the xrpath contains only unique directories. case "$xrpath " in *" $dir "*) ;; *) func_append xrpath " $dir" ;; esac fi deplibs="$deplib $deplibs" continue ;; *.la) func_resolve_sysroot "$deplib" lib=$func_resolve_sysroot_result ;; *.$libext) if test conv = "$pass"; then deplibs="$deplib $deplibs" continue fi case $linkmode in lib) # Linking convenience modules into shared libraries is allowed, # but linking other static libraries is non-portable. case " $dlpreconveniencelibs " in *" $deplib "*) ;; *) valid_a_lib=false case $deplibs_check_method in match_pattern*) set dummy $deplibs_check_method; shift match_pattern_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"` if eval "\$ECHO \"$deplib\"" 2>/dev/null | $SED 10q \ | $EGREP "$match_pattern_regex" > /dev/null; then valid_a_lib=: fi ;; pass_all) valid_a_lib=: ;; esac if $valid_a_lib; then echo $ECHO "*** Warning: Linking the shared library $output against the" $ECHO "*** static library $deplib is not portable!" deplibs="$deplib $deplibs" else echo $ECHO "*** Warning: Trying to link with static lib archive $deplib." 
echo "*** I have the capability to make that library automatically link in when" echo "*** you link to this library. But I can only do this if you have a" echo "*** shared version of the library, which you do not appear to have" echo "*** because the file extensions .$libext of this argument makes me believe" echo "*** that it is just a static archive that I should not use here." fi ;; esac continue ;; prog) if test link != "$pass"; then deplibs="$deplib $deplibs" else compile_deplibs="$deplib $compile_deplibs" finalize_deplibs="$deplib $finalize_deplibs" fi continue ;; esac # linkmode ;; # *.$libext *.lo | *.$objext) if test conv = "$pass"; then deplibs="$deplib $deplibs" elif test prog = "$linkmode"; then if test dlpreopen = "$pass" || test yes != "$dlopen_support" || test no = "$build_libtool_libs"; then # If there is no dlopen support or we're linking statically, # we need to preload. func_append newdlprefiles " $deplib" compile_deplibs="$deplib $compile_deplibs" finalize_deplibs="$deplib $finalize_deplibs" else func_append newdlfiles " $deplib" fi fi continue ;; %DEPLIBS%) alldeplibs=: continue ;; esac # case $deplib $found || test -f "$lib" \ || func_fatal_error "cannot find the library '$lib' or unhandled argument '$deplib'" # Check to see that this really is a libtool archive. func_lalib_unsafe_p "$lib" \ || func_fatal_error "'$lib' is not a valid libtool archive" func_dirname "$lib" "" "." 
ladir=$func_dirname_result dlname= dlopen= dlpreopen= libdir= library_names= old_library= inherited_linker_flags= # If the library was installed with an old release of libtool, # it will not redefine variables installed, or shouldnotlink installed=yes shouldnotlink=no avoidtemprpath= # Read the .la file func_source "$lib" # Convert "-framework foo" to "foo.ltframework" if test -n "$inherited_linker_flags"; then tmp_inherited_linker_flags=`$ECHO "$inherited_linker_flags" | $SED 's/-framework \([^ $]*\)/\1.ltframework/g'` for tmp_inherited_linker_flag in $tmp_inherited_linker_flags; do case " $new_inherited_linker_flags " in *" $tmp_inherited_linker_flag "*) ;; *) func_append new_inherited_linker_flags " $tmp_inherited_linker_flag";; esac done fi dependency_libs=`$ECHO " $dependency_libs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'` if test lib,link = "$linkmode,$pass" || test prog,scan = "$linkmode,$pass" || { test prog != "$linkmode" && test lib != "$linkmode"; }; then test -n "$dlopen" && func_append dlfiles " $dlopen" test -n "$dlpreopen" && func_append dlprefiles " $dlpreopen" fi if test conv = "$pass"; then # Only check for convenience libraries deplibs="$lib $deplibs" if test -z "$libdir"; then if test -z "$old_library"; then func_fatal_error "cannot find name of link library for '$lib'" fi # It is a libtool convenience library, so add in its objects. func_append convenience " $ladir/$objdir/$old_library" func_append old_convenience " $ladir/$objdir/$old_library" tmp_libs= for deplib in $dependency_libs; do deplibs="$deplib $deplibs" if $opt_preserve_dup_deps; then case "$tmp_libs " in *" $deplib "*) func_append specialdeplibs " $deplib" ;; esac fi func_append tmp_libs " $deplib" done elif test prog != "$linkmode" && test lib != "$linkmode"; then func_fatal_error "'$lib' is not a convenience library" fi continue fi # $pass = conv # Get the name of the library we link against. 
linklib= if test -n "$old_library" && { test yes = "$prefer_static_libs" || test built,no = "$prefer_static_libs,$installed"; }; then linklib=$old_library else for l in $old_library $library_names; do linklib=$l done fi if test -z "$linklib"; then func_fatal_error "cannot find name of link library for '$lib'" fi # This library was specified with -dlopen. if test dlopen = "$pass"; then test -z "$libdir" \ && func_fatal_error "cannot -dlopen a convenience library: '$lib'" if test -z "$dlname" || test yes != "$dlopen_support" || test no = "$build_libtool_libs" then # If there is no dlname, no dlopen support or we're linking # statically, we need to preload. We also need to preload any # dependent libraries so libltdl's deplib preloader doesn't # bomb out in the load deplibs phase. func_append dlprefiles " $lib $dependency_libs" else func_append newdlfiles " $lib" fi continue fi # $pass = dlopen # We need an absolute path. case $ladir in [\\/]* | [A-Za-z]:[\\/]*) abs_ladir=$ladir ;; *) abs_ladir=`cd "$ladir" && pwd` if test -z "$abs_ladir"; then func_warning "cannot determine absolute directory name of '$ladir'" func_warning "passing it literally to the linker, although it might fail" abs_ladir=$ladir fi ;; esac func_basename "$lib" laname=$func_basename_result # Find the relevant object directory and library name. if test yes = "$installed"; then if test ! -f "$lt_sysroot$libdir/$linklib" && test -f "$abs_ladir/$linklib"; then func_warning "library '$lib' was moved." dir=$ladir absdir=$abs_ladir libdir=$abs_ladir else dir=$lt_sysroot$libdir absdir=$lt_sysroot$libdir fi test yes = "$hardcode_automatic" && avoidtemprpath=yes else if test ! 
-f "$ladir/$objdir/$linklib" && test -f "$abs_ladir/$linklib"; then dir=$ladir absdir=$abs_ladir # Remove this search path later func_append notinst_path " $abs_ladir" else dir=$ladir/$objdir absdir=$abs_ladir/$objdir # Remove this search path later func_append notinst_path " $abs_ladir" fi fi # $installed = yes func_stripname 'lib' '.la' "$laname" name=$func_stripname_result # This library was specified with -dlpreopen. if test dlpreopen = "$pass"; then if test -z "$libdir" && test prog = "$linkmode"; then func_fatal_error "only libraries may -dlpreopen a convenience library: '$lib'" fi case $host in # special handling for platforms with PE-DLLs. *cygwin* | *mingw* | *cegcc* ) # Linker will automatically link against shared library if both # static and shared are present. Therefore, ensure we extract # symbols from the import library if a shared library is present # (otherwise, the dlopen module name will be incorrect). We do # this by putting the import library name into $newdlprefiles. # We recover the dlopen module name by 'saving' the la file # name in a special purpose variable, and (later) extracting the # dlname from the la file. if test -n "$dlname"; then func_tr_sh "$dir/$linklib" eval "libfile_$func_tr_sh_result=\$abs_ladir/\$laname" func_append newdlprefiles " $dir/$linklib" else func_append newdlprefiles " $dir/$old_library" # Keep a list of preopened convenience libraries to check # that they are being used correctly in the link pass. test -z "$libdir" && \ func_append dlpreconveniencelibs " $dir/$old_library" fi ;; * ) # Prefer using a static library (so that no silly _DYNAMIC symbols # are required to link). if test -n "$old_library"; then func_append newdlprefiles " $dir/$old_library" # Keep a list of preopened convenience libraries to check # that they are being used correctly in the link pass. test -z "$libdir" && \ func_append dlpreconveniencelibs " $dir/$old_library" # Otherwise, use the dlname, so that lt_dlopen finds it. 
elif test -n "$dlname"; then func_append newdlprefiles " $dir/$dlname" else func_append newdlprefiles " $dir/$linklib" fi ;; esac fi # $pass = dlpreopen if test -z "$libdir"; then # Link the convenience library if test lib = "$linkmode"; then deplibs="$dir/$old_library $deplibs" elif test prog,link = "$linkmode,$pass"; then compile_deplibs="$dir/$old_library $compile_deplibs" finalize_deplibs="$dir/$old_library $finalize_deplibs" else deplibs="$lib $deplibs" # used for prog,scan pass fi continue fi if test prog = "$linkmode" && test link != "$pass"; then func_append newlib_search_path " $ladir" deplibs="$lib $deplibs" linkalldeplibs=false if test no != "$link_all_deplibs" || test -z "$library_names" || test no = "$build_libtool_libs"; then linkalldeplibs=: fi tmp_libs= for deplib in $dependency_libs; do case $deplib in -L*) func_stripname '-L' '' "$deplib" func_resolve_sysroot "$func_stripname_result" func_append newlib_search_path " $func_resolve_sysroot_result" ;; esac # Need to link against all dependency_libs? if $linkalldeplibs; then deplibs="$deplib $deplibs" else # Need to hardcode shared library paths # or/and link against static libraries newdependency_libs="$deplib $newdependency_libs" fi if $opt_preserve_dup_deps; then case "$tmp_libs " in *" $deplib "*) func_append specialdeplibs " $deplib" ;; esac fi func_append tmp_libs " $deplib" done # for deplib continue fi # $linkmode = prog... if test prog,link = "$linkmode,$pass"; then if test -n "$library_names" && { { test no = "$prefer_static_libs" || test built,yes = "$prefer_static_libs,$installed"; } || test -z "$old_library"; }; then # We need to hardcode the library path if test -n "$shlibpath_var" && test -z "$avoidtemprpath"; then # Make sure the rpath contains only unique directories. case $temp_rpath: in *"$absdir:"*) ;; *) func_append temp_rpath "$absdir:" ;; esac fi # Hardcode the library path. # Skip directories that are in the system default run-time # search path. 
case " $sys_lib_dlsearch_path " in *" $absdir "*) ;; *) case "$compile_rpath " in *" $absdir "*) ;; *) func_append compile_rpath " $absdir" ;; esac ;; esac case " $sys_lib_dlsearch_path " in *" $libdir "*) ;; *) case "$finalize_rpath " in *" $libdir "*) ;; *) func_append finalize_rpath " $libdir" ;; esac ;; esac fi # $linkmode,$pass = prog,link... if $alldeplibs && { test pass_all = "$deplibs_check_method" || { test yes = "$build_libtool_libs" && test -n "$library_names"; }; }; then # We only need to search for static libraries continue fi fi link_static=no # Whether the deplib will be linked statically use_static_libs=$prefer_static_libs if test built = "$use_static_libs" && test yes = "$installed"; then use_static_libs=no fi if test -n "$library_names" && { test no = "$use_static_libs" || test -z "$old_library"; }; then case $host in *cygwin* | *mingw* | *cegcc* | *os2*) # No point in relinking DLLs because paths are not encoded func_append notinst_deplibs " $lib" need_relink=no ;; *) if test no = "$installed"; then func_append notinst_deplibs " $lib" need_relink=yes fi ;; esac # This is a shared library # Warn about portability, can't link against -module's on some # systems (darwin). Don't bleat about dlopened modules though! dlopenmodule= for dlpremoduletest in $dlprefiles; do if test "X$dlpremoduletest" = "X$lib"; then dlopenmodule=$dlpremoduletest break fi done if test -z "$dlopenmodule" && test yes = "$shouldnotlink" && test link = "$pass"; then echo if test prog = "$linkmode"; then $ECHO "*** Warning: Linking the executable $output against the loadable module" else $ECHO "*** Warning: Linking the shared library $output against the loadable module" fi $ECHO "*** $linklib is not portable!" fi if test lib = "$linkmode" && test yes = "$hardcode_into_libs"; then # Hardcode the library path. # Skip directories that are in the system default run-time # search path. 
case " $sys_lib_dlsearch_path " in *" $absdir "*) ;; *) case "$compile_rpath " in *" $absdir "*) ;; *) func_append compile_rpath " $absdir" ;; esac ;; esac case " $sys_lib_dlsearch_path " in *" $libdir "*) ;; *) case "$finalize_rpath " in *" $libdir "*) ;; *) func_append finalize_rpath " $libdir" ;; esac ;; esac fi if test -n "$old_archive_from_expsyms_cmds"; then # figure out the soname set dummy $library_names shift realname=$1 shift libname=`eval "\\$ECHO \"$libname_spec\""` # use dlname if we got it. it's perfectly good, no? if test -n "$dlname"; then soname=$dlname elif test -n "$soname_spec"; then # bleh windows case $host in *cygwin* | mingw* | *cegcc* | *os2*) func_arith $current - $age major=$func_arith_result versuffix=-$major ;; esac eval soname=\"$soname_spec\" else soname=$realname fi # Make a new name for the extract_expsyms_cmds to use soroot=$soname func_basename "$soroot" soname=$func_basename_result func_stripname 'lib' '.dll' "$soname" newlib=libimp-$func_stripname_result.a # If the library has no export list, then create one now if test -f "$output_objdir/$soname-def"; then : else func_verbose "extracting exported symbol list from '$soname'" func_execute_cmds "$extract_expsyms_cmds" 'exit $?' fi # Create $newlib if test -f "$output_objdir/$newlib"; then :; else func_verbose "generating import library for '$soname'" func_execute_cmds "$old_archive_from_expsyms_cmds" 'exit $?' 
fi # make sure the library variables are pointing to the new library dir=$output_objdir linklib=$newlib fi # test -n "$old_archive_from_expsyms_cmds" if test prog = "$linkmode" || test relink != "$opt_mode"; then add_shlibpath= add_dir= add= lib_linked=yes case $hardcode_action in immediate | unsupported) if test no = "$hardcode_direct"; then add=$dir/$linklib case $host in *-*-sco3.2v5.0.[024]*) add_dir=-L$dir ;; *-*-sysv4*uw2*) add_dir=-L$dir ;; *-*-sysv5OpenUNIX* | *-*-sysv5UnixWare7.[01].[10]* | \ *-*-unixware7*) add_dir=-L$dir ;; *-*-darwin* ) # if the lib is a (non-dlopened) module then we cannot # link against it, someone is ignoring the earlier warnings if /usr/bin/file -L $add 2> /dev/null | $GREP ": [^:]* bundle" >/dev/null; then if test "X$dlopenmodule" != "X$lib"; then $ECHO "*** Warning: lib $linklib is a module, not a shared library" if test -z "$old_library"; then echo echo "*** And there doesn't seem to be a static archive available" echo "*** The link will probably fail, sorry" else add=$dir/$old_library fi elif test -n "$old_library"; then add=$dir/$old_library fi fi esac elif test no = "$hardcode_minus_L"; then case $host in *-*-sunos*) add_shlibpath=$dir ;; esac add_dir=-L$dir add=-l$name elif test no = "$hardcode_shlibpath_var"; then add_shlibpath=$dir add=-l$name else lib_linked=no fi ;; relink) if test yes = "$hardcode_direct" && test no = "$hardcode_direct_absolute"; then add=$dir/$linklib elif test yes = "$hardcode_minus_L"; then add_dir=-L$absdir # Try looking first in the location we're being installed to. 
if test -n "$inst_prefix_dir"; then case $libdir in [\\/]*) func_append add_dir " -L$inst_prefix_dir$libdir" ;; esac fi add=-l$name elif test yes = "$hardcode_shlibpath_var"; then add_shlibpath=$dir add=-l$name else lib_linked=no fi ;; *) lib_linked=no ;; esac if test yes != "$lib_linked"; then func_fatal_configuration "unsupported hardcode properties" fi if test -n "$add_shlibpath"; then case :$compile_shlibpath: in *":$add_shlibpath:"*) ;; *) func_append compile_shlibpath "$add_shlibpath:" ;; esac fi if test prog = "$linkmode"; then test -n "$add_dir" && compile_deplibs="$add_dir $compile_deplibs" test -n "$add" && compile_deplibs="$add $compile_deplibs" else test -n "$add_dir" && deplibs="$add_dir $deplibs" test -n "$add" && deplibs="$add $deplibs" if test yes != "$hardcode_direct" && test yes != "$hardcode_minus_L" && test yes = "$hardcode_shlibpath_var"; then case :$finalize_shlibpath: in *":$libdir:"*) ;; *) func_append finalize_shlibpath "$libdir:" ;; esac fi fi fi if test prog = "$linkmode" || test relink = "$opt_mode"; then add_shlibpath= add_dir= add= # Finalize command for both is simple: just hardcode it. if test yes = "$hardcode_direct" && test no = "$hardcode_direct_absolute"; then add=$libdir/$linklib elif test yes = "$hardcode_minus_L"; then add_dir=-L$libdir add=-l$name elif test yes = "$hardcode_shlibpath_var"; then case :$finalize_shlibpath: in *":$libdir:"*) ;; *) func_append finalize_shlibpath "$libdir:" ;; esac add=-l$name elif test yes = "$hardcode_automatic"; then if test -n "$inst_prefix_dir" && test -f "$inst_prefix_dir$libdir/$linklib"; then add=$inst_prefix_dir$libdir/$linklib else add=$libdir/$linklib fi else # We cannot seem to hardcode it, guess we'll fake it. add_dir=-L$libdir # Try looking first in the location we're being installed to. 
if test -n "$inst_prefix_dir"; then case $libdir in [\\/]*) func_append add_dir " -L$inst_prefix_dir$libdir" ;; esac fi add=-l$name fi if test prog = "$linkmode"; then test -n "$add_dir" && finalize_deplibs="$add_dir $finalize_deplibs" test -n "$add" && finalize_deplibs="$add $finalize_deplibs" else test -n "$add_dir" && deplibs="$add_dir $deplibs" test -n "$add" && deplibs="$add $deplibs" fi fi elif test prog = "$linkmode"; then # Here we assume that one of hardcode_direct or hardcode_minus_L # is not unsupported. This is valid on all known static and # shared platforms. if test unsupported != "$hardcode_direct"; then test -n "$old_library" && linklib=$old_library compile_deplibs="$dir/$linklib $compile_deplibs" finalize_deplibs="$dir/$linklib $finalize_deplibs" else compile_deplibs="-l$name -L$dir $compile_deplibs" finalize_deplibs="-l$name -L$dir $finalize_deplibs" fi elif test yes = "$build_libtool_libs"; then # Not a shared library if test pass_all != "$deplibs_check_method"; then # We're trying link a shared library against a static one # but the system doesn't support it. # Just print a warning and add the library to dependency_libs so # that the program can be linked against the static library. echo $ECHO "*** Warning: This system cannot link to static lib archive $lib." echo "*** I have the capability to make that library automatically link in when" echo "*** you link to this library. But I can only do this if you have a" echo "*** shared version of the library, which you do not appear to have." if test yes = "$module"; then echo "*** But as you try to build a module library, libtool will still create " echo "*** a static module, that should work as long as the dlopening application" echo "*** is linked with the -dlopen flag to resolve symbols at runtime." 
if test -z "$global_symbol_pipe"; then echo echo "*** However, this would only work if libtool was able to extract symbol" echo "*** lists from a program, using 'nm' or equivalent, but libtool could" echo "*** not find such a program. So, this module is probably useless." echo "*** 'nm' from GNU binutils and a full rebuild may help." fi if test no = "$build_old_libs"; then build_libtool_libs=module build_old_libs=yes else build_libtool_libs=no fi fi else deplibs="$dir/$old_library $deplibs" link_static=yes fi fi # link shared/static library? if test lib = "$linkmode"; then if test -n "$dependency_libs" && { test yes != "$hardcode_into_libs" || test yes = "$build_old_libs" || test yes = "$link_static"; }; then # Extract -R from dependency_libs temp_deplibs= for libdir in $dependency_libs; do case $libdir in -R*) func_stripname '-R' '' "$libdir" temp_xrpath=$func_stripname_result case " $xrpath " in *" $temp_xrpath "*) ;; *) func_append xrpath " $temp_xrpath";; esac;; *) func_append temp_deplibs " $libdir";; esac done dependency_libs=$temp_deplibs fi func_append newlib_search_path " $absdir" # Link against this library test no = "$link_static" && newdependency_libs="$abs_ladir/$laname $newdependency_libs" # ... and its dependency_libs tmp_libs= for deplib in $dependency_libs; do newdependency_libs="$deplib $newdependency_libs" case $deplib in -L*) func_stripname '-L' '' "$deplib" func_resolve_sysroot "$func_stripname_result";; *) func_resolve_sysroot "$deplib" ;; esac if $opt_preserve_dup_deps; then case "$tmp_libs " in *" $func_resolve_sysroot_result "*) func_append specialdeplibs " $func_resolve_sysroot_result" ;; esac fi func_append tmp_libs " $func_resolve_sysroot_result" done if test no != "$link_all_deplibs"; then # Add the search paths of all dependency libraries for deplib in $dependency_libs; do path= case $deplib in -L*) path=$deplib ;; *.la) func_resolve_sysroot "$deplib" deplib=$func_resolve_sysroot_result func_dirname "$deplib" "" "." 
dir=$func_dirname_result # We need an absolute path. case $dir in [\\/]* | [A-Za-z]:[\\/]*) absdir=$dir ;; *) absdir=`cd "$dir" && pwd` if test -z "$absdir"; then func_warning "cannot determine absolute directory name of '$dir'" absdir=$dir fi ;; esac if $GREP "^installed=no" $deplib > /dev/null; then case $host in *-*-darwin*) depdepl= eval deplibrary_names=`$SED -n -e 's/^library_names=\(.*\)$/\1/p' $deplib` if test -n "$deplibrary_names"; then for tmp in $deplibrary_names; do depdepl=$tmp done if test -f "$absdir/$objdir/$depdepl"; then depdepl=$absdir/$objdir/$depdepl darwin_install_name=`$OTOOL -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'` if test -z "$darwin_install_name"; then darwin_install_name=`$OTOOL64 -L $depdepl | awk '{if (NR == 2) {print $1;exit}}'` fi func_append compiler_flags " $wl-dylib_file $wl$darwin_install_name:$depdepl" func_append linker_flags " -dylib_file $darwin_install_name:$depdepl" path= fi fi ;; *) path=-L$absdir/$objdir ;; esac else eval libdir=`$SED -n -e 's/^libdir=\(.*\)$/\1/p' $deplib` test -z "$libdir" && \ func_fatal_error "'$deplib' is not a valid libtool archive" test "$absdir" != "$libdir" && \ func_warning "'$deplib' seems to be moved" path=-L$absdir fi ;; esac case " $deplibs " in *" $path "*) ;; *) deplibs="$path $deplibs" ;; esac done fi # link_all_deplibs != no fi # linkmode = lib done # for deplib in $libs if test link = "$pass"; then if test prog = "$linkmode"; then compile_deplibs="$new_inherited_linker_flags $compile_deplibs" finalize_deplibs="$new_inherited_linker_flags $finalize_deplibs" else compiler_flags="$compiler_flags "`$ECHO " $new_inherited_linker_flags" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'` fi fi dependency_libs=$newdependency_libs if test dlpreopen = "$pass"; then # Link the dlpreopened libraries before other libraries for deplib in $save_deplibs; do deplibs="$deplib $deplibs" done fi if test dlopen != "$pass"; then test conv = "$pass" || { # Make sure lib_search_path contains only 
unique directories. lib_search_path= for dir in $newlib_search_path; do case "$lib_search_path " in *" $dir "*) ;; *) func_append lib_search_path " $dir" ;; esac done newlib_search_path= } if test prog,link = "$linkmode,$pass"; then vars="compile_deplibs finalize_deplibs" else vars=deplibs fi for var in $vars dependency_libs; do # Add libraries to $var in reverse order eval tmp_libs=\"\$$var\" new_libs= for deplib in $tmp_libs; do # FIXME: Pedantically, this is the right thing to do, so # that some nasty dependency loop isn't accidentally # broken: #new_libs="$deplib $new_libs" # Pragmatically, this seems to cause very few problems in # practice: case $deplib in -L*) new_libs="$deplib $new_libs" ;; -R*) ;; *) # And here is the reason: when a library appears more # than once as an explicit dependence of a library, or # is implicitly linked in more than once by the # compiler, it is considered special, and multiple # occurrences thereof are not removed. Compare this # with having the same library being listed as a # dependency of multiple other libraries: in this case, # we know (pedantically, we assume) the library does not # need to be listed more than once, so we keep only the # last copy. This is not always right, but it is rare # enough that we require users that really mean to play # such unportable linking tricks to link the library # using -Wl,-lname, so that libtool does not consider it # for duplicate removal. 
case " $specialdeplibs " in *" $deplib "*) new_libs="$deplib $new_libs" ;; *) case " $new_libs " in *" $deplib "*) ;; *) new_libs="$deplib $new_libs" ;; esac ;; esac ;; esac done tmp_libs= for deplib in $new_libs; do case $deplib in -L*) case " $tmp_libs " in *" $deplib "*) ;; *) func_append tmp_libs " $deplib" ;; esac ;; *) func_append tmp_libs " $deplib" ;; esac done eval $var=\"$tmp_libs\" done # for var fi # Add Sun CC postdeps if required: test CXX = "$tagname" && { case $host_os in linux*) case `$CC -V 2>&1 | sed 5q` in *Sun\ C*) # Sun C++ 5.9 func_suncc_cstd_abi if test no != "$suncc_use_cstd_abi"; then func_append postdeps ' -library=Cstd -library=Crun' fi ;; esac ;; solaris*) func_cc_basename "$CC" case $func_cc_basename_result in CC* | sunCC*) func_suncc_cstd_abi if test no != "$suncc_use_cstd_abi"; then func_append postdeps ' -library=Cstd -library=Crun' fi ;; esac ;; esac } # Last step: remove runtime libs from dependency_libs # (they stay in deplibs) tmp_libs= for i in $dependency_libs; do case " $predeps $postdeps $compiler_lib_search_path " in *" $i "*) i= ;; esac if test -n "$i"; then func_append tmp_libs " $i" fi done dependency_libs=$tmp_libs done # for pass if test prog = "$linkmode"; then dlfiles=$newdlfiles fi if test prog = "$linkmode" || test lib = "$linkmode"; then dlprefiles=$newdlprefiles fi case $linkmode in oldlib) if test -n "$dlfiles$dlprefiles" || test no != "$dlself"; then func_warning "'-dlopen' is ignored for archives" fi case " $deplibs" in *\ -l* | *\ -L*) func_warning "'-l' and '-L' are ignored for archives" ;; esac test -n "$rpath" && \ func_warning "'-rpath' is ignored for archives" test -n "$xrpath" && \ func_warning "'-R' is ignored for archives" test -n "$vinfo" && \ func_warning "'-version-info/-version-number' is ignored for archives" test -n "$release" && \ func_warning "'-release' is ignored for archives" test -n "$export_symbols$export_symbols_regex" && \ func_warning "'-export-symbols' is ignored for archives" # Now 
set the variables for building old libraries. build_libtool_libs=no oldlibs=$output func_append objs "$old_deplibs" ;; lib) # Make sure we only generate libraries of the form 'libNAME.la'. case $outputname in lib*) func_stripname 'lib' '.la' "$outputname" name=$func_stripname_result eval shared_ext=\"$shrext_cmds\" eval libname=\"$libname_spec\" ;; *) test no = "$module" \ && func_fatal_help "libtool library '$output' must begin with 'lib'" if test no != "$need_lib_prefix"; then # Add the "lib" prefix for modules if required func_stripname '' '.la' "$outputname" name=$func_stripname_result eval shared_ext=\"$shrext_cmds\" eval libname=\"$libname_spec\" else func_stripname '' '.la' "$outputname" libname=$func_stripname_result fi ;; esac if test -n "$objs"; then if test pass_all != "$deplibs_check_method"; then func_fatal_error "cannot build libtool library '$output' from non-libtool objects on this host:$objs" else echo $ECHO "*** Warning: Linking the shared library $output against the non-libtool" $ECHO "*** objects $objs is not portable!" func_append libobjs " $objs" fi fi test no = "$dlself" \ || func_warning "'-dlopen self' is ignored for libtool libraries" set dummy $rpath shift test 1 -lt "$#" \ && func_warning "ignoring multiple '-rpath's for a libtool library" install_libdir=$1 oldlibs= if test -z "$rpath"; then if test yes = "$build_libtool_libs"; then # Building a libtool convenience library. # Some compilers have problems with a '.al' extension so # convenience libraries should have the same extension an # archive normally would. oldlibs="$output_objdir/$libname.$libext $oldlibs" build_libtool_libs=convenience build_old_libs=yes fi test -n "$vinfo" && \ func_warning "'-version-info/-version-number' is ignored for convenience libraries" test -n "$release" && \ func_warning "'-release' is ignored for convenience libraries" else # Parse the version information argument. 
save_ifs=$IFS; IFS=: set dummy $vinfo 0 0 0 shift IFS=$save_ifs test -n "$7" && \ func_fatal_help "too many parameters to '-version-info'" # convert absolute version numbers to libtool ages # this retains compatibility with .la files and attempts # to make the code below a bit more comprehensible case $vinfo_number in yes) number_major=$1 number_minor=$2 number_revision=$3 # # There are really only two kinds -- those that # use the current revision as the major version # and those that subtract age and use age as # a minor version. But, then there is irix # that has an extra 1 added just for fun # case $version_type in # correct linux to gnu/linux during the next big refactor darwin|freebsd-elf|linux|osf|windows|none) func_arith $number_major + $number_minor current=$func_arith_result age=$number_minor revision=$number_revision ;; freebsd-aout|qnx|sunos) current=$number_major revision=$number_minor age=0 ;; irix|nonstopux) func_arith $number_major + $number_minor current=$func_arith_result age=$number_minor revision=$number_minor lt_irix_increment=no ;; *) func_fatal_configuration "$modename: unknown library version type '$version_type'" ;; esac ;; no) current=$1 revision=$2 age=$3 ;; esac # Check that each of the things are valid numbers. 
case $current in 0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;; *) func_error "CURRENT '$current' must be a nonnegative integer" func_fatal_error "'$vinfo' is not valid version information" ;; esac case $revision in 0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;; *) func_error "REVISION '$revision' must be a nonnegative integer" func_fatal_error "'$vinfo' is not valid version information" ;; esac case $age in 0|[1-9]|[1-9][0-9]|[1-9][0-9][0-9]|[1-9][0-9][0-9][0-9]|[1-9][0-9][0-9][0-9][0-9]) ;; *) func_error "AGE '$age' must be a nonnegative integer" func_fatal_error "'$vinfo' is not valid version information" ;; esac if test "$age" -gt "$current"; then func_error "AGE '$age' is greater than the current interface number '$current'" func_fatal_error "'$vinfo' is not valid version information" fi # Calculate the version variables. major= versuffix= verstring= case $version_type in none) ;; darwin) # Like Linux, but with the current version available in # verstring for coding it into the library header func_arith $current - $age major=.$func_arith_result versuffix=$major.$age.$revision # Darwin ld doesn't like 0 for these options... 
func_arith $current + 1 minor_current=$func_arith_result xlcverstring="$wl-compatibility_version $wl$minor_current $wl-current_version $wl$minor_current.$revision" verstring="-compatibility_version $minor_current -current_version $minor_current.$revision" # On Darwin other compilers case $CC in nagfor*) verstring="$wl-compatibility_version $wl$minor_current $wl-current_version $wl$minor_current.$revision" ;; *) verstring="-compatibility_version $minor_current -current_version $minor_current.$revision" ;; esac ;; freebsd-aout) major=.$current versuffix=.$current.$revision ;; freebsd-elf) func_arith $current - $age major=.$func_arith_result versuffix=$major.$age.$revision ;; irix | nonstopux) if test no = "$lt_irix_increment"; then func_arith $current - $age else func_arith $current - $age + 1 fi major=$func_arith_result case $version_type in nonstopux) verstring_prefix=nonstopux ;; *) verstring_prefix=sgi ;; esac verstring=$verstring_prefix$major.$revision # Add in all the interfaces that we are compatible with. loop=$revision while test 0 -ne "$loop"; do func_arith $revision - $loop iface=$func_arith_result func_arith $loop - 1 loop=$func_arith_result verstring=$verstring_prefix$major.$iface:$verstring done # Before this point, $major must not contain '.'. major=.$major versuffix=$major.$revision ;; linux) # correct to gnu/linux during the next big refactor func_arith $current - $age major=.$func_arith_result versuffix=$major.$age.$revision ;; osf) func_arith $current - $age major=.$func_arith_result versuffix=.$current.$age.$revision verstring=$current.$age.$revision # Add in all the interfaces that we are compatible with. loop=$age while test 0 -ne "$loop"; do func_arith $current - $loop iface=$func_arith_result func_arith $loop - 1 loop=$func_arith_result verstring=$verstring:$iface.0 done # Make executables depend on our current version. 
func_append verstring ":$current.0" ;; qnx) major=.$current versuffix=.$current ;; sco) major=.$current versuffix=.$current ;; sunos) major=.$current versuffix=.$current.$revision ;; windows) # Use '-' rather than '.', since we only want one # extension on DOS 8.3 file systems. func_arith $current - $age major=$func_arith_result versuffix=-$major ;; *) func_fatal_configuration "unknown library version type '$version_type'" ;; esac # Clear the version info if we defaulted, and they specified a release. if test -z "$vinfo" && test -n "$release"; then major= case $version_type in darwin) # we can't check for "0.0" in archive_cmds due to quoting # problems, so we reset it completely verstring= ;; *) verstring=0.0 ;; esac if test no = "$need_version"; then versuffix= else versuffix=.0.0 fi fi # Remove version info from name if versioning should be avoided if test yes,no = "$avoid_version,$need_version"; then major= versuffix= verstring= fi # Check to see if the archive will have undefined symbols. if test yes = "$allow_undefined"; then if test unsupported = "$allow_undefined_flag"; then if test yes = "$build_old_libs"; then func_warning "undefined symbols not allowed in $host shared libraries; building static only" build_libtool_libs=no else func_fatal_error "can't build $host shared library unless -no-undefined is specified" fi fi else # Don't allow undefined symbols. allow_undefined_flag=$no_undefined_flag fi fi func_generate_dlsyms "$libname" "$libname" : func_append libobjs " $symfileobj" test " " = "$libobjs" && libobjs= if test relink != "$opt_mode"; then # Remove our outputs, but don't remove object files since they # may have been created when compiling PIC objects. 
removelist= tempremovelist=`$ECHO "$output_objdir/*"` for p in $tempremovelist; do case $p in *.$objext | *.gcno) ;; $output_objdir/$outputname | $output_objdir/$libname.* | $output_objdir/$libname$release.*) if test -n "$precious_files_regex"; then if $ECHO "$p" | $EGREP -e "$precious_files_regex" >/dev/null 2>&1 then continue fi fi func_append removelist " $p" ;; *) ;; esac done test -n "$removelist" && \ func_show_eval "${RM}r \$removelist" fi # Now set the variables for building old libraries. if test yes = "$build_old_libs" && test convenience != "$build_libtool_libs"; then func_append oldlibs " $output_objdir/$libname.$libext" # Transform .lo files to .o files. oldobjs="$objs "`$ECHO "$libobjs" | $SP2NL | $SED "/\.$libext$/d; $lo2o" | $NL2SP` fi # Eliminate all temporary directories. #for path in $notinst_path; do # lib_search_path=`$ECHO "$lib_search_path " | $SED "s% $path % %g"` # deplibs=`$ECHO "$deplibs " | $SED "s% -L$path % %g"` # dependency_libs=`$ECHO "$dependency_libs " | $SED "s% -L$path % %g"` #done if test -n "$xrpath"; then # If the user specified any rpath flags, then add them. 
temp_xrpath= for libdir in $xrpath; do func_replace_sysroot "$libdir" func_append temp_xrpath " -R$func_replace_sysroot_result" case "$finalize_rpath " in *" $libdir "*) ;; *) func_append finalize_rpath " $libdir" ;; esac done if test yes != "$hardcode_into_libs" || test yes = "$build_old_libs"; then dependency_libs="$temp_xrpath $dependency_libs" fi fi # Make sure dlfiles contains only unique files that won't be dlpreopened old_dlfiles=$dlfiles dlfiles= for lib in $old_dlfiles; do case " $dlprefiles $dlfiles " in *" $lib "*) ;; *) func_append dlfiles " $lib" ;; esac done # Make sure dlprefiles contains only unique files old_dlprefiles=$dlprefiles dlprefiles= for lib in $old_dlprefiles; do case "$dlprefiles " in *" $lib "*) ;; *) func_append dlprefiles " $lib" ;; esac done if test yes = "$build_libtool_libs"; then if test -n "$rpath"; then case $host in *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-*-beos* | *-cegcc* | *-*-haiku*) # these systems don't actually have a c library (as such)! ;; *-*-rhapsody* | *-*-darwin1.[012]) # Rhapsody C library is in the System framework func_append deplibs " System.ltframework" ;; *-*-netbsd*) # Don't link with libc until the a.out ld.so is fixed. ;; *-*-openbsd* | *-*-freebsd* | *-*-dragonfly*) # Do not include libc due to us having libc/libc_r. ;; *-*-sco3.2v5* | *-*-sco5v6*) # Causes problems with __ctype ;; *-*-sysv4.2uw2* | *-*-sysv5* | *-*-unixware* | *-*-OpenUNIX*) # Compiler inserts libc in the correct place for threads to work ;; *) # Add libc to deplibs on all other systems if necessary. if test yes = "$build_libtool_need_lc"; then func_append deplibs " -lc" fi ;; esac fi # Transform deplibs into only deplibs that can be linked in shared. name_save=$name libname_save=$libname release_save=$release versuffix_save=$versuffix major_save=$major # I'm not sure if I'm treating the release correctly. I think # release should show up in the -l (ie -lgmp5) so we don't want to # add it in twice. Is that correct? 
release= versuffix= major= newdeplibs= droppeddeps=no case $deplibs_check_method in pass_all) # Don't check for shared/static. Everything works. # This might be a little naive. We might want to check # whether the library exists or not. But this is on # osf3 & osf4 and I'm not really sure... Just # implementing what was already the behavior. newdeplibs=$deplibs ;; test_compile) # This code stresses the "libraries are programs" paradigm to its # limits. Maybe even breaks it. We compile a program, linking it # against the deplibs as a proxy for the library. Then we can check # whether they linked in statically or dynamically with ldd. $opt_dry_run || $RM conftest.c cat > conftest.c </dev/null` $nocaseglob else potential_libs=`ls $i/$libnameglob[.-]* 2>/dev/null` fi for potent_lib in $potential_libs; do # Follow soft links. if ls -lLd "$potent_lib" 2>/dev/null | $GREP " -> " >/dev/null; then continue fi # The statement above tries to avoid entering an # endless loop below, in case of cyclic links. # We might still enter an endless loop, since a link # loop can be closed while we follow links, # but so what? potlib=$potent_lib while test -h "$potlib" 2>/dev/null; do potliblink=`ls -ld $potlib | $SED 's/.* -> //'` case $potliblink in [\\/]* | [A-Za-z]:[\\/]*) potlib=$potliblink;; *) potlib=`$ECHO "$potlib" | $SED 's|[^/]*$||'`"$potliblink";; esac done if eval $file_magic_cmd \"\$potlib\" 2>/dev/null | $SED -e 10q | $EGREP "$file_magic_regex" > /dev/null; then func_append newdeplibs " $a_deplib" a_deplib= break 2 fi done done fi if test -n "$a_deplib"; then droppeddeps=yes echo $ECHO "*** Warning: linker path does not have real file for library $a_deplib." echo "*** I have the capability to make that library automatically link in when" echo "*** you link to this library. 
But I can only do this if you have a" echo "*** shared version of the library, which you do not appear to have" echo "*** because I did check the linker path looking for a file starting" if test -z "$potlib"; then $ECHO "*** with $libname but no candidates were found. (...for file magic test)" else $ECHO "*** with $libname and none of the candidates passed a file format test" $ECHO "*** using a file magic. Last file checked: $potlib" fi fi ;; *) # Add a -L argument. func_append newdeplibs " $a_deplib" ;; esac done # Gone through all deplibs. ;; match_pattern*) set dummy $deplibs_check_method; shift match_pattern_regex=`expr "$deplibs_check_method" : "$1 \(.*\)"` for a_deplib in $deplibs; do case $a_deplib in -l*) func_stripname -l '' "$a_deplib" name=$func_stripname_result if test yes = "$allow_libtool_libs_with_static_runtimes"; then case " $predeps $postdeps " in *" $a_deplib "*) func_append newdeplibs " $a_deplib" a_deplib= ;; esac fi if test -n "$a_deplib"; then libname=`eval "\\$ECHO \"$libname_spec\""` for i in $lib_search_path $sys_lib_search_path $shlib_search_path; do potential_libs=`ls $i/$libname[.-]* 2>/dev/null` for potent_lib in $potential_libs; do potlib=$potent_lib # see symlink-check above in file_magic test if eval "\$ECHO \"$potent_lib\"" 2>/dev/null | $SED 10q | \ $EGREP "$match_pattern_regex" > /dev/null; then func_append newdeplibs " $a_deplib" a_deplib= break 2 fi done done fi if test -n "$a_deplib"; then droppeddeps=yes echo $ECHO "*** Warning: linker path does not have real file for library $a_deplib." echo "*** I have the capability to make that library automatically link in when" echo "*** you link to this library. But I can only do this if you have a" echo "*** shared version of the library, which you do not appear to have" echo "*** because I did check the linker path looking for a file starting" if test -z "$potlib"; then $ECHO "*** with $libname but no candidates were found. 
(...for regex pattern test)" else $ECHO "*** with $libname and none of the candidates passed a file format test" $ECHO "*** using a regex pattern. Last file checked: $potlib" fi fi ;; *) # Add a -L argument. func_append newdeplibs " $a_deplib" ;; esac done # Gone through all deplibs. ;; none | unknown | *) newdeplibs= tmp_deplibs=`$ECHO " $deplibs" | $SED 's/ -lc$//; s/ -[LR][^ ]*//g'` if test yes = "$allow_libtool_libs_with_static_runtimes"; then for i in $predeps $postdeps; do # can't use Xsed below, because $i might contain '/' tmp_deplibs=`$ECHO " $tmp_deplibs" | $SED "s|$i||"` done fi case $tmp_deplibs in *[!\ \ ]*) echo if test none = "$deplibs_check_method"; then echo "*** Warning: inter-library dependencies are not supported in this platform." else echo "*** Warning: inter-library dependencies are not known to be supported." fi echo "*** All declared inter-library dependencies are being dropped." droppeddeps=yes ;; esac ;; esac versuffix=$versuffix_save major=$major_save release=$release_save libname=$libname_save name=$name_save case $host in *-*-rhapsody* | *-*-darwin1.[012]) # On Rhapsody replace the C library with the System framework newdeplibs=`$ECHO " $newdeplibs" | $SED 's/ -lc / System.ltframework /'` ;; esac if test yes = "$droppeddeps"; then if test yes = "$module"; then echo echo "*** Warning: libtool could not satisfy all declared inter-library" $ECHO "*** dependencies of module $libname. Therefore, libtool will create" echo "*** a static module, that should work as long as the dlopening" echo "*** application is linked with the -dlopen flag." if test -z "$global_symbol_pipe"; then echo echo "*** However, this would only work if libtool was able to extract symbol" echo "*** lists from a program, using 'nm' or equivalent, but libtool could" echo "*** not find such a program. So, this module is probably useless." echo "*** 'nm' from GNU binutils and a full rebuild may help." 
fi if test no = "$build_old_libs"; then oldlibs=$output_objdir/$libname.$libext build_libtool_libs=module build_old_libs=yes else build_libtool_libs=no fi else echo "*** The inter-library dependencies that have been dropped here will be" echo "*** automatically added whenever a program is linked with this library" echo "*** or is declared to -dlopen it." if test no = "$allow_undefined"; then echo echo "*** Since this library must not contain undefined symbols," echo "*** because either the platform does not support them or" echo "*** it was explicitly requested with -no-undefined," echo "*** libtool will only create a static version of it." if test no = "$build_old_libs"; then oldlibs=$output_objdir/$libname.$libext build_libtool_libs=module build_old_libs=yes else build_libtool_libs=no fi fi fi fi # Done checking deplibs! deplibs=$newdeplibs fi # Time to change all our "foo.ltframework" stuff back to "-framework foo" case $host in *-*-darwin*) newdeplibs=`$ECHO " $newdeplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'` new_inherited_linker_flags=`$ECHO " $new_inherited_linker_flags" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'` deplibs=`$ECHO " $deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'` ;; esac # move library search paths that coincide with paths to not yet # installed libraries to the beginning of the library search list new_libs= for path in $notinst_path; do case " $new_libs " in *" -L$path/$objdir "*) ;; *) case " $deplibs " in *" -L$path/$objdir "*) func_append new_libs " -L$path/$objdir" ;; esac ;; esac done for deplib in $deplibs; do case $deplib in -L*) case " $new_libs " in *" $deplib "*) ;; *) func_append new_libs " $deplib" ;; esac ;; *) func_append new_libs " $deplib" ;; esac done deplibs=$new_libs # All the library-specific variables (install_libdir is set above). 
library_names= old_library= dlname= # Test again, we may have decided not to build it any more if test yes = "$build_libtool_libs"; then # Remove $wl instances when linking with ld. # FIXME: should test the right _cmds variable. case $archive_cmds in *\$LD\ *) wl= ;; esac if test yes = "$hardcode_into_libs"; then # Hardcode the library paths hardcode_libdirs= dep_rpath= rpath=$finalize_rpath test relink = "$opt_mode" || rpath=$compile_rpath$rpath for libdir in $rpath; do if test -n "$hardcode_libdir_flag_spec"; then if test -n "$hardcode_libdir_separator"; then func_replace_sysroot "$libdir" libdir=$func_replace_sysroot_result if test -z "$hardcode_libdirs"; then hardcode_libdirs=$libdir else # Just accumulate the unique libdirs. case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*) ;; *) func_append hardcode_libdirs "$hardcode_libdir_separator$libdir" ;; esac fi else eval flag=\"$hardcode_libdir_flag_spec\" func_append dep_rpath " $flag" fi elif test -n "$runpath_var"; then case "$perm_rpath " in *" $libdir "*) ;; *) func_append perm_rpath " $libdir" ;; esac fi done # Substitute the hardcoded libdirs into the rpath. if test -n "$hardcode_libdir_separator" && test -n "$hardcode_libdirs"; then libdir=$hardcode_libdirs eval "dep_rpath=\"$hardcode_libdir_flag_spec\"" fi if test -n "$runpath_var" && test -n "$perm_rpath"; then # We should set the runpath_var. rpath= for dir in $perm_rpath; do func_append rpath "$dir:" done eval "$runpath_var='$rpath\$$runpath_var'; export $runpath_var" fi test -n "$dep_rpath" && deplibs="$dep_rpath $deplibs" fi shlibpath=$finalize_shlibpath test relink = "$opt_mode" || shlibpath=$compile_shlibpath$shlibpath if test -n "$shlibpath"; then eval "$shlibpath_var='$shlibpath\$$shlibpath_var'; export $shlibpath_var" fi # Get the real and link names of the library. 
eval shared_ext=\"$shrext_cmds\" eval library_names=\"$library_names_spec\" set dummy $library_names shift realname=$1 shift if test -n "$soname_spec"; then eval soname=\"$soname_spec\" else soname=$realname fi if test -z "$dlname"; then dlname=$soname fi lib=$output_objdir/$realname linknames= for link do func_append linknames " $link" done # Use standard objects if they are pic test -z "$pic_flag" && libobjs=`$ECHO "$libobjs" | $SP2NL | $SED "$lo2o" | $NL2SP` test "X$libobjs" = "X " && libobjs= delfiles= if test -n "$export_symbols" && test -n "$include_expsyms"; then $opt_dry_run || cp "$export_symbols" "$output_objdir/$libname.uexp" export_symbols=$output_objdir/$libname.uexp func_append delfiles " $export_symbols" fi orig_export_symbols= case $host_os in cygwin* | mingw* | cegcc*) if test -n "$export_symbols" && test -z "$export_symbols_regex"; then # exporting using user supplied symfile func_dll_def_p "$export_symbols" || { # and it's NOT already a .def file. Must figure out # which of the given symbols are data symbols and tag # them as such. So, trigger use of export_symbols_cmds. # export_symbols gets reassigned inside the "prepare # the list of exported symbols" if statement, so the # include_expsyms logic still works. orig_export_symbols=$export_symbols export_symbols= always_export_symbols=yes } fi ;; esac # Prepare the list of exported symbols if test -z "$export_symbols"; then if test yes = "$always_export_symbols" || test -n "$export_symbols_regex"; then func_verbose "generating symbol list for '$libname.la'" export_symbols=$output_objdir/$libname.exp $opt_dry_run || $RM $export_symbols cmds=$export_symbols_cmds save_ifs=$IFS; IFS='~' for cmd1 in $cmds; do IFS=$save_ifs # Take the normal branch if the nm_file_list_spec branch # doesn't work or if tool conversion is not needed. 
case $nm_file_list_spec~$to_tool_file_cmd in *~func_convert_file_noop | *~func_convert_file_msys_to_w32 | ~*) try_normal_branch=yes eval cmd=\"$cmd1\" func_len " $cmd" len=$func_len_result ;; *) try_normal_branch=no ;; esac if test yes = "$try_normal_branch" \ && { test "$len" -lt "$max_cmd_len" \ || test "$max_cmd_len" -le -1; } then func_show_eval "$cmd" 'exit $?' skipped_export=false elif test -n "$nm_file_list_spec"; then func_basename "$output" output_la=$func_basename_result save_libobjs=$libobjs save_output=$output output=$output_objdir/$output_la.nm func_to_tool_file "$output" libobjs=$nm_file_list_spec$func_to_tool_file_result func_append delfiles " $output" func_verbose "creating $NM input file list: $output" for obj in $save_libobjs; do func_to_tool_file "$obj" $ECHO "$func_to_tool_file_result" done > "$output" eval cmd=\"$cmd1\" func_show_eval "$cmd" 'exit $?' output=$save_output libobjs=$save_libobjs skipped_export=false else # The command line is too long to execute in one step. func_verbose "using reloadable object file for export list..." skipped_export=: # Break out early, otherwise skipped_export may be # set to false by a later but shorter cmd. break fi done IFS=$save_ifs if test -n "$export_symbols_regex" && test : != "$skipped_export"; then func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"' func_show_eval '$MV "${export_symbols}T" "$export_symbols"' fi fi fi if test -n "$export_symbols" && test -n "$include_expsyms"; then tmp_export_symbols=$export_symbols test -n "$orig_export_symbols" && tmp_export_symbols=$orig_export_symbols $opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"' fi if test : != "$skipped_export" && test -n "$orig_export_symbols"; then # The given exports_symbols file has to be filtered, so filter it. 
func_verbose "filter symbol list for '$libname.la' to tag DATA exports" # FIXME: $output_objdir/$libname.filter potentially contains lots of # 's' commands, which not all seds can handle. GNU sed should be fine # though. Also, the filter scales superlinearly with the number of # global variables. join(1) would be nice here, but unfortunately # isn't a blessed tool. $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter func_append delfiles " $export_symbols $output_objdir/$libname.filter" export_symbols=$output_objdir/$libname.def $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols fi tmp_deplibs= for test_deplib in $deplibs; do case " $convenience " in *" $test_deplib "*) ;; *) func_append tmp_deplibs " $test_deplib" ;; esac done deplibs=$tmp_deplibs if test -n "$convenience"; then if test -n "$whole_archive_flag_spec" && test yes = "$compiler_needs_object" && test -z "$libobjs"; then # extract the archives, so we have objects to list. # TODO: could optimize this to just extract one archive. whole_archive_flag_spec= fi if test -n "$whole_archive_flag_spec"; then save_libobjs=$libobjs eval libobjs=\"\$libobjs $whole_archive_flag_spec\" test "X$libobjs" = "X " && libobjs= else gentop=$output_objdir/${outputname}x func_append generated " $gentop" func_extract_archives $gentop $convenience func_append libobjs " $func_extract_archives_result" test "X$libobjs" = "X " && libobjs= fi fi if test yes = "$thread_safe" && test -n "$thread_safe_flag_spec"; then eval flag=\"$thread_safe_flag_spec\" func_append linker_flags " $flag" fi # Make a backup of the uninstalled library when relinking if test relink = "$opt_mode"; then $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}U && $MV $realname ${realname}U)' || exit $? fi # Do each of the archive commands. 
if test yes = "$module" && test -n "$module_cmds"; then if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then eval test_cmds=\"$module_expsym_cmds\" cmds=$module_expsym_cmds else eval test_cmds=\"$module_cmds\" cmds=$module_cmds fi else if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then eval test_cmds=\"$archive_expsym_cmds\" cmds=$archive_expsym_cmds else eval test_cmds=\"$archive_cmds\" cmds=$archive_cmds fi fi if test : != "$skipped_export" && func_len " $test_cmds" && len=$func_len_result && test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then : else # The command line is too long to link in one step, link piecewise # or, if using GNU ld and skipped_export is not :, use a linker # script. # Save the value of $output and $libobjs because we want to # use them later. If we have whole_archive_flag_spec, we # want to use save_libobjs as it was before # whole_archive_flag_spec was expanded, because we can't # assume the linker understands whole_archive_flag_spec. # This may have to be revisited, in case too many # convenience libraries get linked in and end up exceeding # the spec. if test -z "$convenience" || test -z "$whole_archive_flag_spec"; then save_libobjs=$libobjs fi save_output=$output func_basename "$output" output_la=$func_basename_result # Clear the reloadable object creation command queue and # initialize k to one. 
test_cmds= concat_cmds= objlist= last_robj= k=1 if test -n "$save_libobjs" && test : != "$skipped_export" && test yes = "$with_gnu_ld"; then output=$output_objdir/$output_la.lnkscript func_verbose "creating GNU ld script: $output" echo 'INPUT (' > $output for obj in $save_libobjs do func_to_tool_file "$obj" $ECHO "$func_to_tool_file_result" >> $output done echo ')' >> $output func_append delfiles " $output" func_to_tool_file "$output" output=$func_to_tool_file_result elif test -n "$save_libobjs" && test : != "$skipped_export" && test -n "$file_list_spec"; then output=$output_objdir/$output_la.lnk func_verbose "creating linker input file list: $output" : > $output set x $save_libobjs shift firstobj= if test yes = "$compiler_needs_object"; then firstobj="$1 " shift fi for obj do func_to_tool_file "$obj" $ECHO "$func_to_tool_file_result" >> $output done func_append delfiles " $output" func_to_tool_file "$output" output=$firstobj\"$file_list_spec$func_to_tool_file_result\" else if test -n "$save_libobjs"; then func_verbose "creating reloadable object files..." output=$output_objdir/$output_la-$k.$objext eval test_cmds=\"$reload_cmds\" func_len " $test_cmds" len0=$func_len_result len=$len0 # Loop over the list of objects to be linked. for obj in $save_libobjs do func_len " $obj" func_arith $len + $func_len_result len=$func_arith_result if test -z "$objlist" || test "$len" -lt "$max_cmd_len"; then func_append objlist " $obj" else # The command $test_cmds is almost too long, add a # command to the queue. if test 1 -eq "$k"; then # The first file doesn't have a previous command to add. reload_objs=$objlist eval concat_cmds=\"$reload_cmds\" else # All subsequent reloadable object files will link in # the last one created. 
reload_objs="$objlist $last_robj" eval concat_cmds=\"\$concat_cmds~$reload_cmds~\$RM $last_robj\" fi last_robj=$output_objdir/$output_la-$k.$objext func_arith $k + 1 k=$func_arith_result output=$output_objdir/$output_la-$k.$objext objlist=" $obj" func_len " $last_robj" func_arith $len0 + $func_len_result len=$func_arith_result fi done # Handle the remaining objects by creating one last # reloadable object file. All subsequent reloadable object # files will link in the last one created. test -z "$concat_cmds" || concat_cmds=$concat_cmds~ reload_objs="$objlist $last_robj" eval concat_cmds=\"\$concat_cmds$reload_cmds\" if test -n "$last_robj"; then eval concat_cmds=\"\$concat_cmds~\$RM $last_robj\" fi func_append delfiles " $output" else output= fi ${skipped_export-false} && { func_verbose "generating symbol list for '$libname.la'" export_symbols=$output_objdir/$libname.exp $opt_dry_run || $RM $export_symbols libobjs=$output # Append the command to create the export file. test -z "$concat_cmds" || concat_cmds=$concat_cmds~ eval concat_cmds=\"\$concat_cmds$export_symbols_cmds\" if test -n "$last_robj"; then eval concat_cmds=\"\$concat_cmds~\$RM $last_robj\" fi } test -n "$save_libobjs" && func_verbose "creating a temporary reloadable object file: $output" # Loop through the commands generated above and execute them. save_ifs=$IFS; IFS='~' for cmd in $concat_cmds; do IFS=$save_ifs $opt_quiet || { func_quote_for_expand "$cmd" eval "func_echo $func_quote_for_expand_result" } $opt_dry_run || eval "$cmd" || { lt_exit=$? 
# Restore the uninstalled library and exit if test relink = "$opt_mode"; then ( cd "$output_objdir" && \ $RM "${realname}T" && \ $MV "${realname}U" "$realname" ) fi exit $lt_exit } done IFS=$save_ifs if test -n "$export_symbols_regex" && ${skipped_export-false}; then func_show_eval '$EGREP -e "$export_symbols_regex" "$export_symbols" > "${export_symbols}T"' func_show_eval '$MV "${export_symbols}T" "$export_symbols"' fi fi ${skipped_export-false} && { if test -n "$export_symbols" && test -n "$include_expsyms"; then tmp_export_symbols=$export_symbols test -n "$orig_export_symbols" && tmp_export_symbols=$orig_export_symbols $opt_dry_run || eval '$ECHO "$include_expsyms" | $SP2NL >> "$tmp_export_symbols"' fi if test -n "$orig_export_symbols"; then # The given exports_symbols file has to be filtered, so filter it. func_verbose "filter symbol list for '$libname.la' to tag DATA exports" # FIXME: $output_objdir/$libname.filter potentially contains lots of # 's' commands, which not all seds can handle. GNU sed should be fine # though. Also, the filter scales superlinearly with the number of # global variables. join(1) would be nice here, but unfortunately # isn't a blessed tool. $opt_dry_run || $SED -e '/[ ,]DATA/!d;s,\(.*\)\([ \,].*\),s|^\1$|\1\2|,' < $export_symbols > $output_objdir/$libname.filter func_append delfiles " $export_symbols $output_objdir/$libname.filter" export_symbols=$output_objdir/$libname.def $opt_dry_run || $SED -f $output_objdir/$libname.filter < $orig_export_symbols > $export_symbols fi } libobjs=$output # Restore the value of output. output=$save_output if test -n "$convenience" && test -n "$whole_archive_flag_spec"; then eval libobjs=\"\$libobjs $whole_archive_flag_spec\" test "X$libobjs" = "X " && libobjs= fi # Expand the library linking commands again to reset the # value of $libobjs for piecewise linking. # Do each of the archive commands. 
if test yes = "$module" && test -n "$module_cmds"; then if test -n "$export_symbols" && test -n "$module_expsym_cmds"; then cmds=$module_expsym_cmds else cmds=$module_cmds fi else if test -n "$export_symbols" && test -n "$archive_expsym_cmds"; then cmds=$archive_expsym_cmds else cmds=$archive_cmds fi fi fi if test -n "$delfiles"; then # Append the command to remove temporary files to $cmds. eval cmds=\"\$cmds~\$RM $delfiles\" fi # Add any objects from preloaded convenience libraries if test -n "$dlprefiles"; then gentop=$output_objdir/${outputname}x func_append generated " $gentop" func_extract_archives $gentop $dlprefiles func_append libobjs " $func_extract_archives_result" test "X$libobjs" = "X " && libobjs= fi save_ifs=$IFS; IFS='~' for cmd in $cmds; do IFS=$sp$nl eval cmd=\"$cmd\" IFS=$save_ifs $opt_quiet || { func_quote_for_expand "$cmd" eval "func_echo $func_quote_for_expand_result" } $opt_dry_run || eval "$cmd" || { lt_exit=$? # Restore the uninstalled library and exit if test relink = "$opt_mode"; then ( cd "$output_objdir" && \ $RM "${realname}T" && \ $MV "${realname}U" "$realname" ) fi exit $lt_exit } done IFS=$save_ifs # Restore the uninstalled library and exit if test relink = "$opt_mode"; then $opt_dry_run || eval '(cd $output_objdir && $RM ${realname}T && $MV $realname ${realname}T && $MV ${realname}U $realname)' || exit $? if test -n "$convenience"; then if test -z "$whole_archive_flag_spec"; then func_show_eval '${RM}r "$gentop"' fi fi exit $EXIT_SUCCESS fi # Create links to the real library. for linkname in $linknames; do if test "$realname" != "$linkname"; then func_show_eval '(cd "$output_objdir" && $RM "$linkname" && $LN_S "$realname" "$linkname")' 'exit $?' fi done # If -module or -export-dynamic was specified, set the dlname. if test yes = "$module" || test yes = "$export_dynamic"; then # On all known operating systems, these are identical. 
dlname=$soname fi fi ;; obj) if test -n "$dlfiles$dlprefiles" || test no != "$dlself"; then func_warning "'-dlopen' is ignored for objects" fi case " $deplibs" in *\ -l* | *\ -L*) func_warning "'-l' and '-L' are ignored for objects" ;; esac test -n "$rpath" && \ func_warning "'-rpath' is ignored for objects" test -n "$xrpath" && \ func_warning "'-R' is ignored for objects" test -n "$vinfo" && \ func_warning "'-version-info' is ignored for objects" test -n "$release" && \ func_warning "'-release' is ignored for objects" case $output in *.lo) test -n "$objs$old_deplibs" && \ func_fatal_error "cannot build library object '$output' from non-libtool objects" libobj=$output func_lo2o "$libobj" obj=$func_lo2o_result ;; *) libobj= obj=$output ;; esac # Delete the old objects. $opt_dry_run || $RM $obj $libobj # Objects from convenience libraries. This assumes # single-version convenience libraries. Whenever we create # different ones for PIC/non-PIC, this we'll have to duplicate # the extraction. reload_conv_objs= gentop= # if reload_cmds runs $LD directly, get rid of -Wl from # whole_archive_flag_spec and hope we can get by with turning comma # into space. case $reload_cmds in *\$LD[\ \$]*) wl= ;; esac if test -n "$convenience"; then if test -n "$whole_archive_flag_spec"; then eval tmp_whole_archive_flags=\"$whole_archive_flag_spec\" test -n "$wl" || tmp_whole_archive_flags=`$ECHO "$tmp_whole_archive_flags" | $SED 's|,| |g'` reload_conv_objs=$reload_objs\ $tmp_whole_archive_flags else gentop=$output_objdir/${obj}x func_append generated " $gentop" func_extract_archives $gentop $convenience reload_conv_objs="$reload_objs $func_extract_archives_result" fi fi # If we're not building shared, we need to use non_pic_objs test yes = "$build_libtool_libs" || libobjs=$non_pic_objects # Create the old-style object. 
reload_objs=$objs$old_deplibs' '`$ECHO "$libobjs" | $SP2NL | $SED "/\.$libext$/d; /\.lib$/d; $lo2o" | $NL2SP`' '$reload_conv_objs output=$obj func_execute_cmds "$reload_cmds" 'exit $?' # Exit if we aren't doing a library object file. if test -z "$libobj"; then if test -n "$gentop"; then func_show_eval '${RM}r "$gentop"' fi exit $EXIT_SUCCESS fi test yes = "$build_libtool_libs" || { if test -n "$gentop"; then func_show_eval '${RM}r "$gentop"' fi # Create an invalid libtool object if no PIC, so that we don't # accidentally link it into a program. # $show "echo timestamp > $libobj" # $opt_dry_run || eval "echo timestamp > $libobj" || exit $? exit $EXIT_SUCCESS } if test -n "$pic_flag" || test default != "$pic_mode"; then # Only do commands if we really have different PIC objects. reload_objs="$libobjs $reload_conv_objs" output=$libobj func_execute_cmds "$reload_cmds" 'exit $?' fi if test -n "$gentop"; then func_show_eval '${RM}r "$gentop"' fi exit $EXIT_SUCCESS ;; prog) case $host in *cygwin*) func_stripname '' '.exe' "$output" output=$func_stripname_result.exe;; esac test -n "$vinfo" && \ func_warning "'-version-info' is ignored for programs" test -n "$release" && \ func_warning "'-release' is ignored for programs" $preload \ && test unknown,unknown,unknown = "$dlopen_support,$dlopen_self,$dlopen_self_static" \ && func_warning "'LT_INIT([dlopen])' not used. Assuming no dlopen support." case $host in *-*-rhapsody* | *-*-darwin1.[012]) # On Rhapsody replace the C library is the System framework compile_deplibs=`$ECHO " $compile_deplibs" | $SED 's/ -lc / System.ltframework /'` finalize_deplibs=`$ECHO " $finalize_deplibs" | $SED 's/ -lc / System.ltframework /'` ;; esac case $host in *-*-darwin*) # Don't allow lazy linking, it breaks C++ global constructors # But is supposedly fixed on 10.4 or later (yay!). 
if test CXX = "$tagname"; then case ${MACOSX_DEPLOYMENT_TARGET-10.0} in 10.[0123]) func_append compile_command " $wl-bind_at_load" func_append finalize_command " $wl-bind_at_load" ;; esac fi # Time to change all our "foo.ltframework" stuff back to "-framework foo" compile_deplibs=`$ECHO " $compile_deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'` finalize_deplibs=`$ECHO " $finalize_deplibs" | $SED 's% \([^ $]*\).ltframework% -framework \1%g'` ;; esac # move library search paths that coincide with paths to not yet # installed libraries to the beginning of the library search list new_libs= for path in $notinst_path; do case " $new_libs " in *" -L$path/$objdir "*) ;; *) case " $compile_deplibs " in *" -L$path/$objdir "*) func_append new_libs " -L$path/$objdir" ;; esac ;; esac done for deplib in $compile_deplibs; do case $deplib in -L*) case " $new_libs " in *" $deplib "*) ;; *) func_append new_libs " $deplib" ;; esac ;; *) func_append new_libs " $deplib" ;; esac done compile_deplibs=$new_libs func_append compile_command " $compile_deplibs" func_append finalize_command " $finalize_deplibs" if test -n "$rpath$xrpath"; then # If the user specified any rpath flags, then add them. for libdir in $rpath $xrpath; do # This is the magic to use -rpath. case "$finalize_rpath " in *" $libdir "*) ;; *) func_append finalize_rpath " $libdir" ;; esac done fi # Now hardcode the library paths rpath= hardcode_libdirs= for libdir in $compile_rpath $finalize_rpath; do if test -n "$hardcode_libdir_flag_spec"; then if test -n "$hardcode_libdir_separator"; then if test -z "$hardcode_libdirs"; then hardcode_libdirs=$libdir else # Just accumulate the unique libdirs. 
case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*) ;; *) func_append hardcode_libdirs "$hardcode_libdir_separator$libdir" ;; esac fi else eval flag=\"$hardcode_libdir_flag_spec\" func_append rpath " $flag" fi elif test -n "$runpath_var"; then case "$perm_rpath " in *" $libdir "*) ;; *) func_append perm_rpath " $libdir" ;; esac fi case $host in *-*-cygwin* | *-*-mingw* | *-*-pw32* | *-*-os2* | *-cegcc*) testbindir=`$ECHO "$libdir" | $SED -e 's*/lib$*/bin*'` case :$dllsearchpath: in *":$libdir:"*) ;; ::) dllsearchpath=$libdir;; *) func_append dllsearchpath ":$libdir";; esac case :$dllsearchpath: in *":$testbindir:"*) ;; ::) dllsearchpath=$testbindir;; *) func_append dllsearchpath ":$testbindir";; esac ;; esac done # Substitute the hardcoded libdirs into the rpath. if test -n "$hardcode_libdir_separator" && test -n "$hardcode_libdirs"; then libdir=$hardcode_libdirs eval rpath=\" $hardcode_libdir_flag_spec\" fi compile_rpath=$rpath rpath= hardcode_libdirs= for libdir in $finalize_rpath; do if test -n "$hardcode_libdir_flag_spec"; then if test -n "$hardcode_libdir_separator"; then if test -z "$hardcode_libdirs"; then hardcode_libdirs=$libdir else # Just accumulate the unique libdirs. case $hardcode_libdir_separator$hardcode_libdirs$hardcode_libdir_separator in *"$hardcode_libdir_separator$libdir$hardcode_libdir_separator"*) ;; *) func_append hardcode_libdirs "$hardcode_libdir_separator$libdir" ;; esac fi else eval flag=\"$hardcode_libdir_flag_spec\" func_append rpath " $flag" fi elif test -n "$runpath_var"; then case "$finalize_perm_rpath " in *" $libdir "*) ;; *) func_append finalize_perm_rpath " $libdir" ;; esac fi done # Substitute the hardcoded libdirs into the rpath. 
if test -n "$hardcode_libdir_separator" && test -n "$hardcode_libdirs"; then libdir=$hardcode_libdirs eval rpath=\" $hardcode_libdir_flag_spec\" fi finalize_rpath=$rpath if test -n "$libobjs" && test yes = "$build_old_libs"; then # Transform all the library objects into standard objects. compile_command=`$ECHO "$compile_command" | $SP2NL | $SED "$lo2o" | $NL2SP` finalize_command=`$ECHO "$finalize_command" | $SP2NL | $SED "$lo2o" | $NL2SP` fi func_generate_dlsyms "$outputname" "@PROGRAM@" false # template prelinking step if test -n "$prelink_cmds"; then func_execute_cmds "$prelink_cmds" 'exit $?' fi wrappers_required=: case $host in *cegcc* | *mingw32ce*) # Disable wrappers for cegcc and mingw32ce hosts, we are cross compiling anyway. wrappers_required=false ;; *cygwin* | *mingw* ) test yes = "$build_libtool_libs" || wrappers_required=false ;; *) if test no = "$need_relink" || test yes != "$build_libtool_libs"; then wrappers_required=false fi ;; esac $wrappers_required || { # Replace the output file specification. compile_command=`$ECHO "$compile_command" | $SED 's%@OUTPUT@%'"$output"'%g'` link_command=$compile_command$compile_rpath # We have no uninstalled library dependencies, so finalize right now. exit_status=0 func_show_eval "$link_command" 'exit_status=$?' if test -n "$postlink_cmds"; then func_to_tool_file "$output" postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'` func_execute_cmds "$postlink_cmds" 'exit $?' fi # Delete the generated files. 
if test -f "$output_objdir/${outputname}S.$objext"; then func_show_eval '$RM "$output_objdir/${outputname}S.$objext"' fi exit $exit_status } if test -n "$compile_shlibpath$finalize_shlibpath"; then compile_command="$shlibpath_var=\"$compile_shlibpath$finalize_shlibpath\$$shlibpath_var\" $compile_command" fi if test -n "$finalize_shlibpath"; then finalize_command="$shlibpath_var=\"$finalize_shlibpath\$$shlibpath_var\" $finalize_command" fi compile_var= finalize_var= if test -n "$runpath_var"; then if test -n "$perm_rpath"; then # We should set the runpath_var. rpath= for dir in $perm_rpath; do func_append rpath "$dir:" done compile_var="$runpath_var=\"$rpath\$$runpath_var\" " fi if test -n "$finalize_perm_rpath"; then # We should set the runpath_var. rpath= for dir in $finalize_perm_rpath; do func_append rpath "$dir:" done finalize_var="$runpath_var=\"$rpath\$$runpath_var\" " fi fi if test yes = "$no_install"; then # We don't need to create a wrapper script. link_command=$compile_var$compile_command$compile_rpath # Replace the output file specification. link_command=`$ECHO "$link_command" | $SED 's%@OUTPUT@%'"$output"'%g'` # Delete the old output file. $opt_dry_run || $RM $output # Link the executable and exit func_show_eval "$link_command" 'exit $?' if test -n "$postlink_cmds"; then func_to_tool_file "$output" postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'` func_execute_cmds "$postlink_cmds" 'exit $?' 
fi exit $EXIT_SUCCESS fi case $hardcode_action,$fast_install in relink,*) # Fast installation is not supported link_command=$compile_var$compile_command$compile_rpath relink_command=$finalize_var$finalize_command$finalize_rpath func_warning "this platform does not like uninstalled shared libraries" func_warning "'$output' will be relinked during installation" ;; *,yes) link_command=$finalize_var$compile_command$finalize_rpath relink_command=`$ECHO "$compile_var$compile_command$compile_rpath" | $SED 's%@OUTPUT@%\$progdir/\$file%g'` ;; *,no) link_command=$compile_var$compile_command$compile_rpath relink_command=$finalize_var$finalize_command$finalize_rpath ;; *,needless) link_command=$finalize_var$compile_command$finalize_rpath relink_command= ;; esac # Replace the output file specification. link_command=`$ECHO "$link_command" | $SED 's%@OUTPUT@%'"$output_objdir/$outputname"'%g'` # Delete the old output files. $opt_dry_run || $RM $output $output_objdir/$outputname $output_objdir/lt-$outputname func_show_eval "$link_command" 'exit $?' if test -n "$postlink_cmds"; then func_to_tool_file "$output_objdir/$outputname" postlink_cmds=`func_echo_all "$postlink_cmds" | $SED -e 's%@OUTPUT@%'"$output_objdir/$outputname"'%g' -e 's%@TOOL_OUTPUT@%'"$func_to_tool_file_result"'%g'` func_execute_cmds "$postlink_cmds" 'exit $?' fi # Now create the wrapper script. func_verbose "creating $output" # Quote the relink command for shipping. 
if test -n "$relink_command"; then # Preserve any variables that may affect compiler behavior for var in $variables_saved_for_relink; do if eval test -z \"\${$var+set}\"; then relink_command="{ test -z \"\${$var+set}\" || $lt_unset $var || { $var=; export $var; }; }; $relink_command" elif eval var_value=\$$var; test -z "$var_value"; then relink_command="$var=; export $var; $relink_command" else func_quote_for_eval "$var_value" relink_command="$var=$func_quote_for_eval_result; export $var; $relink_command" fi done relink_command="(cd `pwd`; $relink_command)" relink_command=`$ECHO "$relink_command" | $SED "$sed_quote_subst"` fi # Only actually do things if not in dry run mode. $opt_dry_run || { # win32 will think the script is a binary if it has # a .exe suffix, so we strip it off here. case $output in *.exe) func_stripname '' '.exe' "$output" output=$func_stripname_result ;; esac # test for cygwin because mv fails w/o .exe extensions case $host in *cygwin*) exeext=.exe func_stripname '' '.exe' "$outputname" outputname=$func_stripname_result ;; *) exeext= ;; esac case $host in *cygwin* | *mingw* ) func_dirname_and_basename "$output" "" "." output_name=$func_basename_result output_path=$func_dirname_result cwrappersource=$output_path/$objdir/lt-$output_name.c cwrapper=$output_path/$output_name.exe $RM $cwrappersource $cwrapper trap "$RM $cwrappersource $cwrapper; exit $EXIT_FAILURE" 1 2 15 func_emit_cwrapperexe_src > $cwrappersource # The wrapper executable is built using the $host compiler, # because it contains $host paths and files. If cross- # compiling, it, like the target executable, must be # executed on the $host or under an emulation environment. 
$opt_dry_run || { $LTCC $LTCFLAGS -o $cwrapper $cwrappersource $STRIP $cwrapper } # Now, create the wrapper script for func_source use: func_ltwrapper_scriptname $cwrapper $RM $func_ltwrapper_scriptname_result trap "$RM $func_ltwrapper_scriptname_result; exit $EXIT_FAILURE" 1 2 15 $opt_dry_run || { # note: this script will not be executed, so do not chmod. if test "x$build" = "x$host"; then $cwrapper --lt-dump-script > $func_ltwrapper_scriptname_result else func_emit_wrapper no > $func_ltwrapper_scriptname_result fi } ;; * ) $RM $output trap "$RM $output; exit $EXIT_FAILURE" 1 2 15 func_emit_wrapper no > $output chmod +x $output ;; esac } exit $EXIT_SUCCESS ;; esac # See if we need to build an old-fashioned archive. for oldlib in $oldlibs; do case $build_libtool_libs in convenience) oldobjs="$libobjs_save $symfileobj" addlibs=$convenience build_libtool_libs=no ;; module) oldobjs=$libobjs_save addlibs=$old_convenience build_libtool_libs=no ;; *) oldobjs="$old_deplibs $non_pic_objects" $preload && test -f "$symfileobj" \ && func_append oldobjs " $symfileobj" addlibs=$old_convenience ;; esac if test -n "$addlibs"; then gentop=$output_objdir/${outputname}x func_append generated " $gentop" func_extract_archives $gentop $addlibs func_append oldobjs " $func_extract_archives_result" fi # Do each command in the archive commands. if test -n "$old_archive_from_new_cmds" && test yes = "$build_libtool_libs"; then cmds=$old_archive_from_new_cmds else # Add any objects from preloaded convenience libraries if test -n "$dlprefiles"; then gentop=$output_objdir/${outputname}x func_append generated " $gentop" func_extract_archives $gentop $dlprefiles func_append oldobjs " $func_extract_archives_result" fi # POSIX demands no paths to be encoded in archives. 
We have # to avoid creating archives with duplicate basenames if we # might have to extract them afterwards, e.g., when creating a # static archive out of a convenience library, or when linking # the entirety of a libtool archive into another (currently # not supported by libtool). if (for obj in $oldobjs do func_basename "$obj" $ECHO "$func_basename_result" done | sort | sort -uc >/dev/null 2>&1); then : else echo "copying selected object files to avoid basename conflicts..." gentop=$output_objdir/${outputname}x func_append generated " $gentop" func_mkdir_p "$gentop" save_oldobjs=$oldobjs oldobjs= counter=1 for obj in $save_oldobjs do func_basename "$obj" objbase=$func_basename_result case " $oldobjs " in " ") oldobjs=$obj ;; *[\ /]"$objbase "*) while :; do # Make sure we don't pick an alternate name that also # overlaps. newobj=lt$counter-$objbase func_arith $counter + 1 counter=$func_arith_result case " $oldobjs " in *[\ /]"$newobj "*) ;; *) if test ! -f "$gentop/$newobj"; then break; fi ;; esac done func_show_eval "ln $obj $gentop/$newobj || cp $obj $gentop/$newobj" func_append oldobjs " $gentop/$newobj" ;; *) func_append oldobjs " $obj" ;; esac done fi func_to_tool_file "$oldlib" func_convert_file_msys_to_w32 tool_oldlib=$func_to_tool_file_result eval cmds=\"$old_archive_cmds\" func_len " $cmds" len=$func_len_result if test "$len" -lt "$max_cmd_len" || test "$max_cmd_len" -le -1; then cmds=$old_archive_cmds elif test -n "$archiver_list_spec"; then func_verbose "using command file archive linking..." for obj in $oldobjs do func_to_tool_file "$obj" $ECHO "$func_to_tool_file_result" done > $output_objdir/$libname.libcmd func_to_tool_file "$output_objdir/$libname.libcmd" oldobjs=" $archiver_list_spec$func_to_tool_file_result" cmds=$old_archive_cmds else # the command line is too long to link in one step, link in parts func_verbose "using piecewise archive linking..." 
save_RANLIB=$RANLIB RANLIB=: objlist= concat_cmds= save_oldobjs=$oldobjs oldobjs= # Is there a better way of finding the last object in the list? for obj in $save_oldobjs do last_oldobj=$obj done eval test_cmds=\"$old_archive_cmds\" func_len " $test_cmds" len0=$func_len_result len=$len0 for obj in $save_oldobjs do func_len " $obj" func_arith $len + $func_len_result len=$func_arith_result func_append objlist " $obj" if test "$len" -lt "$max_cmd_len"; then : else # the above command should be used before it gets too long oldobjs=$objlist if test "$obj" = "$last_oldobj"; then RANLIB=$save_RANLIB fi test -z "$concat_cmds" || concat_cmds=$concat_cmds~ eval concat_cmds=\"\$concat_cmds$old_archive_cmds\" objlist= len=$len0 fi done RANLIB=$save_RANLIB oldobjs=$objlist if test -z "$oldobjs"; then eval cmds=\"\$concat_cmds\" else eval cmds=\"\$concat_cmds~\$old_archive_cmds\" fi fi fi func_execute_cmds "$cmds" 'exit $?' done test -n "$generated" && \ func_show_eval "${RM}r$generated" # Now create the libtool archive. case $output in *.la) old_library= test yes = "$build_old_libs" && old_library=$libname.$libext func_verbose "creating $output" # Preserve any variables that may affect compiler behavior for var in $variables_saved_for_relink; do if eval test -z \"\${$var+set}\"; then relink_command="{ test -z \"\${$var+set}\" || $lt_unset $var || { $var=; export $var; }; }; $relink_command" elif eval var_value=\$$var; test -z "$var_value"; then relink_command="$var=; export $var; $relink_command" else func_quote_for_eval "$var_value" relink_command="$var=$func_quote_for_eval_result; export $var; $relink_command" fi done # Quote the link command for shipping. relink_command="(cd `pwd`; $SHELL \"$progpath\" $preserve_args --mode=relink $libtool_args @inst_prefix_dir@)" relink_command=`$ECHO "$relink_command" | $SED "$sed_quote_subst"` if test yes = "$hardcode_automatic"; then relink_command= fi # Only create the output if not a dry run. 
$opt_dry_run || { for installed in no yes; do if test yes = "$installed"; then if test -z "$install_libdir"; then break fi output=$output_objdir/${outputname}i # Replace all uninstalled libtool libraries with the installed ones newdependency_libs= for deplib in $dependency_libs; do case $deplib in *.la) func_basename "$deplib" name=$func_basename_result func_resolve_sysroot "$deplib" eval libdir=`$SED -n -e 's/^libdir=\(.*\)$/\1/p' $func_resolve_sysroot_result` test -z "$libdir" && \ func_fatal_error "'$deplib' is not a valid libtool archive" func_append newdependency_libs " ${lt_sysroot:+=}$libdir/$name" ;; -L*) func_stripname -L '' "$deplib" func_replace_sysroot "$func_stripname_result" func_append newdependency_libs " -L$func_replace_sysroot_result" ;; -R*) func_stripname -R '' "$deplib" func_replace_sysroot "$func_stripname_result" func_append newdependency_libs " -R$func_replace_sysroot_result" ;; *) func_append newdependency_libs " $deplib" ;; esac done dependency_libs=$newdependency_libs newdlfiles= for lib in $dlfiles; do case $lib in *.la) func_basename "$lib" name=$func_basename_result eval libdir=`$SED -n -e 's/^libdir=\(.*\)$/\1/p' $lib` test -z "$libdir" && \ func_fatal_error "'$lib' is not a valid libtool archive" func_append newdlfiles " ${lt_sysroot:+=}$libdir/$name" ;; *) func_append newdlfiles " $lib" ;; esac done dlfiles=$newdlfiles newdlprefiles= for lib in $dlprefiles; do case $lib in *.la) # Only pass preopened files to the pseudo-archive (for # eventual linking with the app. 
that links it) if we # didn't already link the preopened objects directly into # the library: func_basename "$lib" name=$func_basename_result eval libdir=`$SED -n -e 's/^libdir=\(.*\)$/\1/p' $lib` test -z "$libdir" && \ func_fatal_error "'$lib' is not a valid libtool archive" func_append newdlprefiles " ${lt_sysroot:+=}$libdir/$name" ;; esac done dlprefiles=$newdlprefiles else newdlfiles= for lib in $dlfiles; do case $lib in [\\/]* | [A-Za-z]:[\\/]*) abs=$lib ;; *) abs=`pwd`"/$lib" ;; esac func_append newdlfiles " $abs" done dlfiles=$newdlfiles newdlprefiles= for lib in $dlprefiles; do case $lib in [\\/]* | [A-Za-z]:[\\/]*) abs=$lib ;; *) abs=`pwd`"/$lib" ;; esac func_append newdlprefiles " $abs" done dlprefiles=$newdlprefiles fi $RM $output # place dlname in correct position for cygwin # In fact, it would be nice if we could use this code for all target # systems that can't hard-code library paths into their executables # and that have no shared library path variable independent of PATH, # but it turns out we can't easily determine that from inspecting # libtool variables, so we have to hard-code the OSs to which it # applies here; at the moment, that means platforms that use the PE # object format with DLL files. See the long comment at the top of # tests/bindir.at for full details. tdlname=$dlname case $host,$output,$installed,$module,$dlname in *cygwin*,*lai,yes,no,*.dll | *mingw*,*lai,yes,no,*.dll | *cegcc*,*lai,yes,no,*.dll) # If a -bindir argument was supplied, place the dll there. if test -n "$bindir"; then func_relative_path "$install_libdir" "$bindir" tdlname=$func_relative_path_result/$dlname else # Otherwise fall back on heuristic. tdlname=../bin/$dlname fi ;; esac $ECHO > $output "\ # $outputname - a libtool library file # Generated by $PROGRAM (GNU $PACKAGE) $VERSION # # Please DO NOT delete this file! # It is necessary for linking the library. # The name that we can dlopen(3). dlname='$tdlname' # Names of this library. 
library_names='$library_names' # The name of the static archive. old_library='$old_library' # Linker flags that cannot go in dependency_libs. inherited_linker_flags='$new_inherited_linker_flags' # Libraries that this one depends upon. dependency_libs='$dependency_libs' # Names of additional weak libraries provided by this library weak_library_names='$weak_libs' # Version information for $libname. current=$current age=$age revision=$revision # Is this an already installed library? installed=$installed # Should we warn about portability when linking against -modules? shouldnotlink=$module # Files to dlopen/dlpreopen dlopen='$dlfiles' dlpreopen='$dlprefiles' # Directory that this library needs to be installed in: libdir='$install_libdir'" if test no,yes = "$installed,$need_relink"; then $ECHO >> $output "\ relink_command=\"$relink_command\"" fi done } # Do a symbolic link so that the libtool archive can be found in # LD_LIBRARY_PATH before the program is installed. func_show_eval '( cd "$output_objdir" && $RM "$outputname" && $LN_S "../$outputname" "$outputname" )' 'exit $?' ;; esac exit $EXIT_SUCCESS } if test link = "$opt_mode" || test relink = "$opt_mode"; then func_mode_link ${1+"$@"} fi # func_mode_uninstall arg... func_mode_uninstall () { $debug_cmd RM=$nonopt files= rmforce=false exit_status=0 # This variable tells wrapper scripts just to set variables rather # than running their programs. libtool_install_magic=$magic for arg do case $arg in -f) func_append RM " $arg"; rmforce=: ;; -*) func_append RM " $arg" ;; *) func_append files " $arg" ;; esac done test -z "$RM" && \ func_fatal_help "you must specify an RM program" rmdirs= for file in $files; do func_dirname "$file" "" "." dir=$func_dirname_result if test . 
= "$dir"; then odir=$objdir else odir=$dir/$objdir fi func_basename "$file" name=$func_basename_result test uninstall = "$opt_mode" && odir=$dir # Remember odir for removal later, being careful to avoid duplicates if test clean = "$opt_mode"; then case " $rmdirs " in *" $odir "*) ;; *) func_append rmdirs " $odir" ;; esac fi # Don't error if the file doesn't exist and rm -f was used. if { test -L "$file"; } >/dev/null 2>&1 || { test -h "$file"; } >/dev/null 2>&1 || test -f "$file"; then : elif test -d "$file"; then exit_status=1 continue elif $rmforce; then continue fi rmfiles=$file case $name in *.la) # Possibly a libtool archive, so verify it. if func_lalib_p "$file"; then func_source $dir/$name # Delete the libtool libraries and symlinks. for n in $library_names; do func_append rmfiles " $odir/$n" done test -n "$old_library" && func_append rmfiles " $odir/$old_library" case $opt_mode in clean) case " $library_names " in *" $dlname "*) ;; *) test -n "$dlname" && func_append rmfiles " $odir/$dlname" ;; esac test -n "$libdir" && func_append rmfiles " $odir/$name $odir/${name}i" ;; uninstall) if test -n "$library_names"; then # Do each command in the postuninstall commands. func_execute_cmds "$postuninstall_cmds" '$rmforce || exit_status=1' fi if test -n "$old_library"; then # Do each command in the old_postuninstall commands. func_execute_cmds "$old_postuninstall_cmds" '$rmforce || exit_status=1' fi # FIXME: should reinstall the best remaining shared library. ;; esac fi ;; *.lo) # Possibly a libtool object, so verify it. if func_lalib_p "$file"; then # Read the .lo file func_source $dir/$name # Add PIC object to the list of files to remove. if test -n "$pic_object" && test none != "$pic_object"; then func_append rmfiles " $dir/$pic_object" fi # Add non-PIC object to the list of files to remove. 
if test -n "$non_pic_object" && test none != "$non_pic_object"; then func_append rmfiles " $dir/$non_pic_object" fi fi ;; *) if test clean = "$opt_mode"; then noexename=$name case $file in *.exe) func_stripname '' '.exe' "$file" file=$func_stripname_result func_stripname '' '.exe' "$name" noexename=$func_stripname_result # $file with .exe has already been added to rmfiles, # add $file without .exe func_append rmfiles " $file" ;; esac # Do a test to see if this is a libtool program. if func_ltwrapper_p "$file"; then if func_ltwrapper_executable_p "$file"; then func_ltwrapper_scriptname "$file" relink_command= func_source $func_ltwrapper_scriptname_result func_append rmfiles " $func_ltwrapper_scriptname_result" else relink_command= func_source $dir/$noexename fi # note $name still contains .exe if it was in $file originally # as does the version of $file that was added into $rmfiles func_append rmfiles " $odir/$name $odir/${name}S.$objext" if test yes = "$fast_install" && test -n "$relink_command"; then func_append rmfiles " $odir/lt-$name" fi if test "X$noexename" != "X$name"; then func_append rmfiles " $odir/lt-$noexename.c" fi fi fi ;; esac func_show_eval "$RM $rmfiles" 'exit_status=1' done # Try to remove the $objdir's in the directories where we deleted files for dir in $rmdirs; do if test -d "$dir"; then func_show_eval "rmdir $dir >/dev/null 2>&1" fi done exit $exit_status } if test uninstall = "$opt_mode" || test clean = "$opt_mode"; then func_mode_uninstall ${1+"$@"} fi test -z "$opt_mode" && { help=$generic_help func_fatal_help "you must specify a MODE" } test -z "$exec_cmd" && \ func_fatal_help "invalid operation mode '$opt_mode'" if test -n "$exec_cmd"; then eval exec "$exec_cmd" exit $EXIT_FAILURE fi exit $exit_status # The TAGs below are defined such that we never get into a situation # where we disable both kinds of libraries. 
Given conflicting # choices, we go for a static library, that is the most portable, # since we can't tell whether shared libraries were disabled because # the user asked for that or because the platform doesn't support # them. This is particularly important on AIX, because we don't # support having both static and shared libraries enabled at the same # time on that platform, so we default to a shared-only configuration. # If a disable-shared tag is given, we'll fallback to a static-only # configuration. But we'll never go from static-only to shared-only. # ### BEGIN LIBTOOL TAG CONFIG: disable-shared build_libtool_libs=no build_old_libs=yes # ### END LIBTOOL TAG CONFIG: disable-shared # ### BEGIN LIBTOOL TAG CONFIG: disable-static build_old_libs=`case $build_libtool_libs in yes) echo no;; *) echo yes;; esac` # ### END LIBTOOL TAG CONFIG: disable-static # Local Variables: # mode:shell-script # sh-indentation:2 # End: dar-2.6.8/COPYING0000644000175000017520000004325413606427316010273 00000000000000 GNU GENERAL PUBLIC LICENSE Version 2, June 1991 Copyright (C) 1989, 1991 Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA Everyone is permitted to copy and distribute verbatim copies of this license document, but changing it is not allowed. Preamble The licenses for most software are designed to take away your freedom to share and change it. By contrast, the GNU General Public License is intended to guarantee your freedom to share and change free software--to make sure the software is free for all its users. This General Public License applies to most of the Free Software Foundation's software and to any other program whose authors commit to using it. (Some other Free Software Foundation software is covered by the GNU Lesser General Public License instead.) You can apply it to your programs, too. When we speak of free software, we are referring to freedom, not price. 
Our General Public Licenses are designed to make sure that you have the freedom to distribute copies of free software (and charge for this service if you wish), that you receive source code or can get it if you want it, that you can change the software or use pieces of it in new free programs; and that you know you can do these things. To protect your rights, we need to make restrictions that forbid anyone to deny you these rights or to ask you to surrender the rights. These restrictions translate to certain responsibilities for you if you distribute copies of the software, or if you modify it. For example, if you distribute copies of such a program, whether gratis or for a fee, you must give the recipients all the rights that you have. You must make sure that they, too, receive or can get the source code. And you must show them these terms so they know their rights. We protect your rights with two steps: (1) copyright the software, and (2) offer you this license which gives you legal permission to copy, distribute and/or modify the software. Also, for each author's protection and ours, we want to make certain that everyone understands that there is no warranty for this free software. If the software is modified by someone else and passed on, we want its recipients to know that what they have is not the original, so that any problems introduced by others will not reflect on the original authors' reputations. Finally, any free program is threatened constantly by software patents. We wish to avoid the danger that redistributors of a free program will individually obtain patent licenses, in effect making the program proprietary. To prevent this, we have made it clear that any patent must be licensed for everyone's free use or not licensed at all. The precise terms and conditions for copying, distribution and modification follow. GNU GENERAL PUBLIC LICENSE TERMS AND CONDITIONS FOR COPYING, DISTRIBUTION AND MODIFICATION 0. 
This License applies to any program or other work which contains a notice placed by the copyright holder saying it may be distributed under the terms of this General Public License. The "Program", below, refers to any such program or work, and a "work based on the Program" means either the Program or any derivative work under copyright law: that is to say, a work containing the Program or a portion of it, either verbatim or with modifications and/or translated into another language. (Hereinafter, translation is included without limitation in the term "modification".) Each licensee is addressed as "you". Activities other than copying, distribution and modification are not covered by this License; they are outside its scope. The act of running the Program is not restricted, and the output from the Program is covered only if its contents constitute a work based on the Program (independent of having been made by running the Program). Whether that is true depends on what the Program does. 1. You may copy and distribute verbatim copies of the Program's source code as you receive it, in any medium, provided that you conspicuously and appropriately publish on each copy an appropriate copyright notice and disclaimer of warranty; keep intact all the notices that refer to this License and to the absence of any warranty; and give any other recipients of the Program a copy of this License along with the Program. You may charge a fee for the physical act of transferring a copy, and you may at your option offer warranty protection in exchange for a fee. 2. You may modify your copy or copies of the Program or any portion of it, thus forming a work based on the Program, and copy and distribute such modifications or work under the terms of Section 1 above, provided that you also meet all of these conditions: a) You must cause the modified files to carry prominent notices stating that you changed the files and the date of any change. 
b) You must cause any work that you distribute or publish, that in whole or in part contains or is derived from the Program or any part thereof, to be licensed as a whole at no charge to all third parties under the terms of this License. c) If the modified program normally reads commands interactively when run, you must cause it, when started running for such interactive use in the most ordinary way, to print or display an announcement including an appropriate copyright notice and a notice that there is no warranty (or else, saying that you provide a warranty) and that users may redistribute the program under these conditions, and telling the user how to view a copy of this License. (Exception: if the Program itself is interactive but does not normally print such an announcement, your work based on the Program is not required to print an announcement.) These requirements apply to the modified work as a whole. If identifiable sections of that work are not derived from the Program, and can be reasonably considered independent and separate works in themselves, then this License, and its terms, do not apply to those sections when you distribute them as separate works. But when you distribute the same sections as part of a whole which is a work based on the Program, the distribution of the whole must be on the terms of this License, whose permissions for other licensees extend to the entire whole, and thus to each and every part regardless of who wrote it. Thus, it is not the intent of this section to claim rights or contest your rights to work written entirely by you; rather, the intent is to exercise the right to control the distribution of derivative or collective works based on the Program. In addition, mere aggregation of another work not based on the Program with the Program (or with a work based on the Program) on a volume of a storage or distribution medium does not bring the other work under the scope of this License. 3. 
You may copy and distribute the Program (or a work based on it, under Section 2) in object code or executable form under the terms of Sections 1 and 2 above provided that you also do one of the following: a) Accompany it with the complete corresponding machine-readable source code, which must be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, b) Accompany it with a written offer, valid for at least three years, to give any third party, for a charge no more than your cost of physically performing source distribution, a complete machine-readable copy of the corresponding source code, to be distributed under the terms of Sections 1 and 2 above on a medium customarily used for software interchange; or, c) Accompany it with the information you received as to the offer to distribute corresponding source code. (This alternative is allowed only for noncommercial distribution and only if you received the program in object code or executable form with such an offer, in accord with Subsection b above.) The source code for a work means the preferred form of the work for making modifications to it. For an executable work, complete source code means all the source code for all modules it contains, plus any associated interface definition files, plus the scripts used to control compilation and installation of the executable. However, as a special exception, the source code distributed need not include anything that is normally distributed (in either source or binary form) with the major components (compiler, kernel, and so on) of the operating system on which the executable runs, unless that component itself accompanies the executable. 
If distribution of executable or object code is made by offering access to copy from a designated place, then offering equivalent access to copy the source code from the same place counts as distribution of the source code, even though third parties are not compelled to copy the source along with the object code. 4. You may not copy, modify, sublicense, or distribute the Program except as expressly provided under this License. Any attempt otherwise to copy, modify, sublicense or distribute the Program is void, and will automatically terminate your rights under this License. However, parties who have received copies, or rights, from you under this License will not have their licenses terminated so long as such parties remain in full compliance. 5. You are not required to accept this License, since you have not signed it. However, nothing else grants you permission to modify or distribute the Program or its derivative works. These actions are prohibited by law if you do not accept this License. Therefore, by modifying or distributing the Program (or any work based on the Program), you indicate your acceptance of this License to do so, and all its terms and conditions for copying, distributing or modifying the Program or works based on it. 6. Each time you redistribute the Program (or any work based on the Program), the recipient automatically receives a license from the original licensor to copy, distribute or modify the Program subject to these terms and conditions. You may not impose any further restrictions on the recipients' exercise of the rights granted herein. You are not responsible for enforcing compliance by third parties to this License. 7. If, as a consequence of a court judgment or allegation of patent infringement or for any other reason (not limited to patent issues), conditions are imposed on you (whether by court order, agreement or otherwise) that contradict the conditions of this License, they do not excuse you from the conditions of this License. 
If you cannot distribute so as to satisfy simultaneously your obligations under this License and any other pertinent obligations, then as a consequence you may not distribute the Program at all. For example, if a patent license would not permit royalty-free redistribution of the Program by all those who receive copies directly or indirectly through you, then the only way you could satisfy both it and this License would be to refrain entirely from distribution of the Program. If any portion of this section is held invalid or unenforceable under any particular circumstance, the balance of the section is intended to apply and the section as a whole is intended to apply in other circumstances. It is not the purpose of this section to induce you to infringe any patents or other property right claims or to contest validity of any such claims; this section has the sole purpose of protecting the integrity of the free software distribution system, which is implemented by public license practices. Many people have made generous contributions to the wide range of software distributed through that system in reliance on consistent application of that system; it is up to the author/donor to decide if he or she is willing to distribute software through any other system and a licensee cannot impose that choice. This section is intended to make thoroughly clear what is believed to be a consequence of the rest of this License. 8. If the distribution and/or use of the Program is restricted in certain countries either by patents or by copyrighted interfaces, the original copyright holder who places the Program under this License may add an explicit geographical distribution limitation excluding those countries, so that distribution is permitted only in or among countries not thus excluded. In such case, this License incorporates the limitation as if written in the body of this License. 9. 
The Free Software Foundation may publish revised and/or new versions of the General Public License from time to time. Such new versions will be similar in spirit to the present version, but may differ in detail to address new problems or concerns. Each version is given a distinguishing version number. If the Program specifies a version number of this License which applies to it and "any later version", you have the option of following the terms and conditions either of that version or of any later version published by the Free Software Foundation. If the Program does not specify a version number of this License, you may choose any version ever published by the Free Software Foundation. 10. If you wish to incorporate parts of the Program into other free programs whose distribution conditions are different, write to the author to ask for permission. For software which is copyrighted by the Free Software Foundation, write to the Free Software Foundation; we sometimes make exceptions for this. Our decision will be guided by the two goals of preserving the free status of all derivatives of our free software and of promoting the sharing and reuse of software generally. NO WARRANTY 11. BECAUSE THE PROGRAM IS LICENSED FREE OF CHARGE, THERE IS NO WARRANTY FOR THE PROGRAM, TO THE EXTENT PERMITTED BY APPLICABLE LAW. EXCEPT WHEN OTHERWISE STATED IN WRITING THE COPYRIGHT HOLDERS AND/OR OTHER PARTIES PROVIDE THE PROGRAM "AS IS" WITHOUT WARRANTY OF ANY KIND, EITHER EXPRESSED OR IMPLIED, INCLUDING, BUT NOT LIMITED TO, THE IMPLIED WARRANTIES OF MERCHANTABILITY AND FITNESS FOR A PARTICULAR PURPOSE. THE ENTIRE RISK AS TO THE QUALITY AND PERFORMANCE OF THE PROGRAM IS WITH YOU. SHOULD THE PROGRAM PROVE DEFECTIVE, YOU ASSUME THE COST OF ALL NECESSARY SERVICING, REPAIR OR CORRECTION. 12. 
IN NO EVENT UNLESS REQUIRED BY APPLICABLE LAW OR AGREED TO IN WRITING WILL ANY COPYRIGHT HOLDER, OR ANY OTHER PARTY WHO MAY MODIFY AND/OR REDISTRIBUTE THE PROGRAM AS PERMITTED ABOVE, BE LIABLE TO YOU FOR DAMAGES, INCLUDING ANY GENERAL, SPECIAL, INCIDENTAL OR CONSEQUENTIAL DAMAGES ARISING OUT OF THE USE OR INABILITY TO USE THE PROGRAM (INCLUDING BUT NOT LIMITED TO LOSS OF DATA OR DATA BEING RENDERED INACCURATE OR LOSSES SUSTAINED BY YOU OR THIRD PARTIES OR A FAILURE OF THE PROGRAM TO OPERATE WITH ANY OTHER PROGRAMS), EVEN IF SUCH HOLDER OR OTHER PARTY HAS BEEN ADVISED OF THE POSSIBILITY OF SUCH DAMAGES. END OF TERMS AND CONDITIONS How to Apply These Terms to Your New Programs If you develop a new program, and you want it to be of the greatest possible use to the public, the best way to achieve this is to make it free software which everyone can redistribute and change under these terms. To do so, attach the following notices to the program. It is safest to attach them to the start of each source file to most effectively convey the exclusion of warranty; and each file should have at least the "copyright" line and a pointer to where the full notice is found. Copyright (C) This program is free software; you can redistribute it and/or modify it under the terms of the GNU General Public License as published by the Free Software Foundation; either version 2 of the License, or (at your option) any later version. This program is distributed in the hope that it will be useful, but WITHOUT ANY WARRANTY; without even the implied warranty of MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the GNU General Public License for more details. You should have received a copy of the GNU General Public License along with this program; if not, write to the Free Software Foundation, Inc., 51 Franklin Street, Fifth Floor, Boston, MA 02110-1301 USA. Also add information on how to contact you by electronic and paper mail. 
If the program is interactive, make it output a short notice like this when it starts in an interactive mode: Gnomovision version 69, Copyright (C) year name of author Gnomovision comes with ABSOLUTELY NO WARRANTY; for details type `show w'. This is free software, and you are welcome to redistribute it under certain conditions; type `show c' for details. The hypothetical commands `show w' and `show c' should show the appropriate parts of the General Public License. Of course, the commands you use may be called something other than `show w' and `show c'; they could even be mouse-clicks or menu items--whatever suits your program. You should also get your employer (if you work as a programmer) or your school, if any, to sign a "copyright disclaimer" for the program, if necessary. Here is a sample; alter the names: Yoyodyne, Inc., hereby disclaims all copyright interest in the program `Gnomovision' (which makes passes at compilers) written by James Hacker. , 1 April 1989 Ty Coon, President of Vice This General Public License does not permit incorporating your program into proprietary programs. If your program is a subroutine library, you may consider it more useful to permit linking proprietary applications with the library. If this is what you want to do, use the GNU Lesser General Public License instead of this License. dar-2.6.8/po/0000755000175000017520000000000013617552353007730 500000000000000dar-2.6.8/po/en@boldquot.header0000644000175000017520000000247113617552277013307 00000000000000# All this catalog "translates" are quotation characters. # The msgids must be ASCII and therefore cannot contain real quotation # characters, only substitutes like grave accent (0x60), apostrophe (0x27) # and double quote (0x22). These substitutes look strange; see # http://www.cl.cam.ac.uk/~mgk25/ucs/quotes.html # # This catalog translates grave accent (0x60) and apostrophe (0x27) to # left single quotation mark (U+2018) and right single quotation mark (U+2019). 
# It also translates pairs of apostrophe (0x27) to # left single quotation mark (U+2018) and right single quotation mark (U+2019) # and pairs of quotation mark (0x22) to # left double quotation mark (U+201C) and right double quotation mark (U+201D). # # When output to an UTF-8 terminal, the quotation characters appear perfectly. # When output to an ISO-8859-1 terminal, the single quotation marks are # transliterated to apostrophes (by iconv in glibc 2.2 or newer) or to # grave/acute accent (by libiconv), and the double quotation marks are # transliterated to 0x22. # When output to an ASCII terminal, the single quotation marks are # transliterated to apostrophes, and the double quotation marks are # transliterated to 0x22. # # This catalog furthermore displays the text between the quotation marks in # bold face, assuming the VT100/XTerm escape sequences. # dar-2.6.8/po/quot.sed0000644000175000017520000000023113617552277011336 00000000000000s/"\([^"]*\)"/“\1â€/g s/`\([^`']*\)'/‘\1’/g s/ '\([^`']*\)' / ‘\1’ /g s/ '\([^`']*\)'$/ ‘\1’/g s/^'\([^`']*\)' /‘\1’ /g s/“â€/""/g dar-2.6.8/po/POTFILES.in0000644000175000017520000002572713617552301011433 00000000000000# List of source files containing translatable strings. # Copyright (C) 1995 Free Software Foundation, Inc. 
./src/libdar/cat_signature.cpp ./src/libdar/cat_nomme.hpp ./src/libdar/archive_summary.hpp ./src/libdar/compression.cpp ./src/libdar/tuyau.hpp ./src/libdar/header_version.hpp ./src/libdar/filesystem_tools.cpp ./src/libdar/trivial_sar.hpp ./src/libdar/fsa_family.cpp ./src/libdar/candidates.hpp ./src/libdar/statistics.cpp ./src/libdar/generic_file_overlay_for_gpgme.hpp ./src/libdar/tronconneuse.cpp ./src/libdar/archive_listing_callback.hpp ./src/libdar/entrepot_libcurl.cpp ./src/libdar/compile_time_features.hpp ./src/libdar/cat_blockdev.cpp ./src/libdar/cat_delta_signature.cpp ./src/libdar/sar.cpp ./src/libdar/gf_mode.cpp ./src/libdar/user_interaction_blind.hpp ./src/libdar/int_tools.cpp ./src/libdar/erreurs.cpp ./src/libdar/cat_blockdev.hpp ./src/libdar/generic_rsync.cpp ./src/libdar/ea_filesystem.hpp ./src/libdar/nls_swap.hpp ./src/libdar/tools.hpp ./src/libdar/entree_stats.cpp ./src/libdar/wrapperlib.cpp ./src/libdar/fsa_family.hpp ./src/libdar/fichier_libcurl.cpp ./src/libdar/macro_tools.cpp ./src/libdar/database_header.hpp ./src/libdar/libdar_slave.cpp ./src/libdar/fichier_local.cpp ./src/libdar/user_interaction.hpp ./src/libdar/cat_ignored.cpp ./src/libdar/compressor.hpp ./src/libdar/compression.hpp ./src/libdar/erreurs.hpp ./src/libdar/data_tree.hpp ./src/libdar/filesystem_tools.hpp ./src/libdar/memory_file.cpp ./src/libdar/archive_options.hpp ./src/libdar/generic_file_overlay_for_gpgme.cpp ./src/libdar/tronconneuse.hpp ./src/libdar/sar_tools.cpp ./src/libdar/fichier_libcurl.hpp ./src/libdar/slave_thread.cpp ./src/libdar/filesystem_hard_link_write.cpp ./src/libdar/entrepot.cpp ./src/libdar/sparse_file.hpp ./src/libdar/real_infinint.hpp ./src/libdar/elastic.cpp ./src/libdar/fichier_global.cpp ./src/libdar/user_interaction_callback5.hpp ./src/libdar/erreurs_ext.cpp ./src/libdar/mycurl_easyhandle_node.cpp ./src/libdar/etage.hpp ./src/libdar/crypto_sym.hpp ./src/libdar/user_interaction5.cpp ./src/libdar/trontextual.hpp ./src/libdar/filesystem_restore.cpp 
./src/libdar/tlv.cpp ./src/libdar/entrepot_libcurl5.hpp ./src/libdar/cat_lien.cpp ./src/libdar/hash_fichier.cpp ./src/libdar/escape_catalogue.cpp ./src/libdar/messaging.cpp ./src/libdar/cat_entree.hpp ./src/libdar/cache_global.hpp ./src/libdar/trontextual.cpp ./src/libdar/cat_entree.cpp ./src/libdar/cat_delta_signature.hpp ./src/libdar/mask_list.hpp ./src/libdar/cat_eod.hpp ./src/libdar/filtre.hpp ./src/libdar/list_entry.hpp ./src/libdar/datetime.hpp ./src/libdar/shell_interaction.cpp ./src/libdar/cat_status.hpp ./src/libdar/cat_all_entrees.hpp ./src/libdar/entrepot_local.cpp ./src/libdar/null_file.hpp ./src/libdar/filesystem_restore.hpp ./src/libdar/cat_file.hpp ./src/libdar/cat_door.hpp ./src/libdar/i_libdar_xform.cpp ./src/libdar/list_entry.cpp ./src/libdar/mem_ui.cpp ./src/libdar/cat_detruit.cpp ./src/libdar/crit_action.cpp ./src/libdar/erreurs_ext.hpp ./src/libdar/cat_ignored_dir.cpp ./src/libdar/crypto.hpp ./src/libdar/cache.cpp ./src/libdar/archive_summary.cpp ./src/libdar/cygwin_adapt.hpp ./src/libdar/ea_filesystem.cpp ./src/libdar/smart_pointer.hpp ./src/libdar/database_header.cpp ./src/libdar/wrapperlib.hpp ./src/libdar/semaphore.hpp ./src/libdar/cat_prise.cpp ./src/libdar/i_entrepot_libcurl.hpp ./src/libdar/cat_tube.cpp ./src/libdar/user_interaction_callback5.cpp ./src/libdar/cat_mirage.hpp ./src/libdar/slave_zapette.hpp ./src/libdar/defile.hpp ./src/libdar/entrepot_libcurl.hpp ./src/libdar/archive.hpp ./src/libdar/real_infinint.cpp ./src/libdar/limitint.hpp ./src/libdar/cat_device.hpp ./src/libdar/mycurl_shared_handle.cpp ./src/libdar/cat_signature.hpp ./src/libdar/cat_chardev.hpp ./src/libdar/terminateur.cpp ./src/libdar/criterium.hpp ./src/libdar/header_version.cpp ./src/libdar/crypto_asym.cpp ./src/libdar/macro_tools.hpp ./src/libdar/user_interaction_callback.hpp ./src/libdar/pile.hpp ./src/libdar/libdar_xform.cpp ./src/libdar/candidates.cpp ./src/libdar/ea.cpp ./src/libdar/i_entrepot_libcurl.cpp ./src/libdar/sar.hpp ./src/libdar/cat_mirage.cpp 
./src/libdar/deci.hpp ./src/libdar/datetime.cpp ./src/libdar/slice_layout.cpp ./src/libdar/mask.hpp ./src/libdar/compile_time_features.cpp ./src/libdar/archive_version.cpp ./src/libdar/cat_device.cpp ./src/libdar/shell_interaction_emulator.hpp ./src/libdar/fichier_global.hpp ./src/libdar/slice_layout.hpp ./src/libdar/tuyau.cpp ./src/libdar/contextual.cpp ./src/libdar/archive_options_listing_shell.hpp ./src/libdar/cat_inode.cpp ./src/libdar/header.cpp ./src/libdar/cat_file.cpp ./src/libdar/libdar_slave.hpp ./src/libdar/semaphore.cpp ./src/libdar/archive_options_listing_shell.cpp ./src/libdar/filesystem_backup.cpp ./src/libdar/pile_descriptor.cpp ./src/libdar/filesystem_backup.hpp ./src/libdar/archive5.hpp ./src/libdar/catalogue.hpp ./src/libdar/user_group_bases.hpp ./src/libdar/database5.hpp ./src/libdar/generic_thread.cpp ./src/libdar/data_dir.cpp ./src/libdar/archive5.cpp ./src/libdar/crypto_asym.hpp ./src/libdar/label.cpp ./src/libdar/integers.hpp ./src/libdar/entrepot.hpp ./src/libdar/data_dir.hpp ./src/libdar/zapette.hpp ./src/libdar/crypto_sym.cpp ./src/libdar/memory_file.hpp ./src/libdar/trivial_sar.cpp ./src/libdar/scrambler.hpp ./src/libdar/cat_directory.hpp ./src/libdar/cat_nomme.cpp ./src/libdar/compressor.cpp ./src/libdar/database_options.hpp ./src/libdar/generic_rsync.hpp ./src/libdar/tronc.cpp ./src/libdar/limitint.cpp ./src/libdar/deci.cpp ./src/libdar/cat_etoile.cpp ./src/libdar/secu_string.cpp ./src/libdar/defile.cpp ./src/libdar/database_listing_callback.hpp ./src/libdar/statistics.hpp ./src/libdar/generic_file.cpp ./src/libdar/user_interaction_callback.cpp ./src/libdar/generic_thread.hpp ./src/libdar/archive_num.hpp ./src/libdar/path.hpp ./src/libdar/tools.cpp ./src/libdar/pile_descriptor.hpp ./src/libdar/slave_thread.hpp ./src/libdar/libdar_xform.hpp ./src/libdar/archive_options.cpp ./src/libdar/get_version.cpp ./src/libdar/cache_global.cpp ./src/libdar/i_archive.cpp ./src/libdar/escape_catalogue.hpp ./src/libdar/sar_tools.hpp 
./src/libdar/cat_door.cpp ./src/libdar/capabilities.hpp ./src/libdar/mycurl_easyhandle_node.hpp ./src/libdar/archive_options5.hpp ./src/libdar/storage.hpp ./src/libdar/data_tree.cpp ./src/libdar/slave_zapette.cpp ./src/libdar/op_tools.hpp ./src/libdar/database.cpp ./src/libdar/infinint.hpp ./src/libdar/delta_sig_block_size.cpp ./src/libdar/mycurl_shared_handle.hpp ./src/libdar/database.hpp ./src/libdar/user_interaction.cpp ./src/libdar/fichier_local.hpp ./src/libdar/crit_action.hpp ./src/libdar/mem_ui.hpp ./src/libdar/gf_mode.hpp ./src/libdar/filesystem_specific_attribute.hpp ./src/libdar/filesystem_specific_attribute.cpp ./src/libdar/escape.hpp ./src/libdar/shell_interaction_emulator.cpp ./src/libdar/proto_generic_file.hpp ./src/libdar/op_tools.cpp ./src/libdar/database5.cpp ./src/libdar/tronc.hpp ./src/libdar/mask.cpp ./src/libdar/cat_directory.cpp ./src/libdar/cat_inode.hpp ./src/libdar/contextual.hpp ./src/libdar/cat_tube.hpp ./src/libdar/shell_interaction.hpp ./src/libdar/scrambler.cpp ./src/libdar/path.cpp ./src/libdar/crc.cpp ./src/libdar/archive_version.hpp ./src/libdar/mycurl_easyhandle_sharing.cpp ./src/libdar/secu_memory_file.cpp ./src/libdar/i_database.cpp ./src/libdar/messaging.hpp ./src/libdar/catalogue.cpp ./src/libdar/archive_aux.cpp ./src/libdar/cat_ignored.hpp ./src/libdar/delta_sig_block_size.hpp ./src/libdar/integers.cpp ./src/libdar/elastic.hpp ./src/libdar/i_archive.hpp ./src/libdar/secu_string.hpp ./src/libdar/tlv.hpp ./src/libdar/cat_lien.hpp ./src/libdar/storage.cpp ./src/libdar/etage.cpp ./src/libdar/filesystem_hard_link_read.hpp ./src/libdar/zapette_protocol.cpp ./src/libdar/cat_ignored_dir.hpp ./src/libdar/mycurl_easyhandle_sharing.hpp ./src/libdar/user_group_bases.cpp ./src/libdar/header.hpp ./src/libdar/libdar.hpp ./src/libdar/label.hpp ./src/libdar/filesystem_diff.cpp ./src/libdar/entree_stats.hpp ./src/libdar/generic_to_global_file.hpp ./src/libdar/archive_num.cpp ./src/libdar/libdar5.cpp ./src/libdar/cat_detruit.hpp 
./src/libdar/zapette.cpp ./src/libdar/terminateur.hpp ./src/libdar/cat_chardev.cpp ./src/libdar/zapette_protocol.hpp ./src/libdar/cat_etoile.hpp ./src/libdar/cache.hpp ./src/libdar/cat_prise.hpp ./src/libdar/user_interaction5.hpp ./src/libdar/i_libdar_xform.hpp ./src/libdar/archive.cpp ./src/libdar/filesystem_diff.hpp ./src/libdar/mask_list.cpp ./src/libdar/filesystem_hard_link_write.hpp ./src/libdar/tlv_list.cpp ./src/libdar/secu_memory_file.hpp ./src/libdar/range.cpp ./src/libdar/range.hpp ./src/libdar/filesystem_hard_link_read.cpp ./src/libdar/capabilities.cpp ./src/libdar/tlv_list.hpp ./src/libdar/crypto.cpp ./src/libdar/get_version.hpp ./src/libdar/int_tools.hpp ./src/libdar/ea.hpp ./src/libdar/generic_file.hpp ./src/libdar/hash_fichier.hpp ./src/libdar/mycurl_protocol.hpp ./src/libdar/libdar5.hpp ./src/libdar/pile.cpp ./src/libdar/mycurl_protocol.cpp ./src/libdar/thread_cancellation.hpp ./src/libdar/user_interaction_blind.cpp ./src/libdar/thread_cancellation.cpp ./src/libdar/crc.hpp ./src/libdar/database_aux.hpp ./src/libdar/filtre.cpp ./src/libdar/database_archives.hpp ./src/libdar/i_database.hpp ./src/libdar/escape.cpp ./src/libdar/entrepot_local.hpp ./src/libdar/archive_aux.hpp ./src/libdar/sparse_file.cpp ./src/libdar/criterium.cpp ./src/testing/test_compressor.cpp ./src/testing/test_deci.cpp ./src/testing/test_mask_list.cpp ./src/testing/test_datetime.cpp ./src/testing/test_escape.cpp ./src/testing/test_hide_file.cpp ./src/testing/test_libdar.cpp ./src/testing/test_range.cpp ./src/testing/test_tuyau.cpp ./src/testing/test_erreurs.cpp ./src/testing/test_elastic.cpp ./src/testing/test_mask.cpp ./src/testing/test_cache.cpp ./src/testing/test_smart_pointer.cpp ./src/testing/test_sar.cpp ./src/testing/test_generic_thread.cpp ./src/testing/test_tronconneuse.cpp ./src/testing/test_generic_file.cpp ./src/testing/test_catalogue.cpp ./src/testing/test_rsync.cpp ./src/testing/testtools.cpp ./src/testing/test_limitint.cpp ./src/testing/test_crypto_asym.cpp 
./src/testing/test_scrambler.cpp ./src/testing/test_storage.cpp ./src/testing/test_hash_fichier.cpp ./src/testing/testtools.hpp ./src/testing/moving_file.cpp ./src/testing/test_blowfish.cpp ./src/testing/test_terminateur.cpp ./src/testing/test_tronc.cpp ./src/testing/test_infinint.cpp ./src/testing/test_filesystem.cpp ./src/testing/test_entrepot_libcurl.cpp ./src/testing/test_path.cpp ./src/examples/prime.cpp ./src/examples/formula.cpp ./src/examples/factoriel.cpp ./src/dar_suite/dar.hpp ./src/dar_suite/dar_slave.cpp ./src/dar_suite/line_tools.hpp ./src/dar_suite/dar_cp.cpp ./src/dar_suite/dar_manager.cpp ./src/dar_suite/no_comment.hpp ./src/dar_suite/line_tools.cpp ./src/dar_suite/dar_suite.cpp ./src/dar_suite/crit_action_cmd_line.hpp ./src/dar_suite/dar.cpp ./src/dar_suite/crit_action_cmd_line.cpp ./src/dar_suite/hide_file.hpp ./src/dar_suite/no_comment.cpp ./src/dar_suite/hide_file.cpp ./src/dar_suite/dar_suite.hpp ./src/dar_suite/config_file.hpp ./src/dar_suite/config_file.cpp ./src/dar_suite/command_line.hpp ./src/dar_suite/command_line.cpp ./src/dar_suite/dar_xform.cpp ./src/python/pybind11_libdar.cpp ./src/check/all_features.cpp ./src/check/padder.cpp dar-2.6.8/po/Makefile.in.in0000644000175000017520000004225113617552277012333 00000000000000# Makefile for PO directory in any package using GNU gettext. # Copyright (C) 1995-1997, 2000-2007, 2009-2010 by Ulrich Drepper # # Copying and distribution of this file, with or without modification, # are permitted in any medium without royalty provided the copyright # notice and this notice are preserved. This file is offered as-is, # without any warranty. 
# # Origin: gettext-0.19.8 GETTEXT_MACRO_VERSION = 0.19 PACKAGE = @PACKAGE@ VERSION = @VERSION@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ SED = @SED@ SHELL = /bin/sh @SET_MAKE@ srcdir = @srcdir@ top_srcdir = @top_srcdir@ VPATH = @srcdir@ prefix = @prefix@ exec_prefix = @exec_prefix@ datarootdir = @datarootdir@ datadir = @datadir@ localedir = @localedir@ gettextsrcdir = $(datadir)/gettext/po INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ # We use $(mkdir_p). # In automake <= 1.9.x, $(mkdir_p) is defined either as "mkdir -p --" or as # "$(mkinstalldirs)" or as "$(install_sh) -d". For these automake versions, # @install_sh@ does not start with $(SHELL), so we add it. # In automake >= 1.10, @mkdir_p@ is derived from ${MKDIR_P}, which is defined # either as "/path/to/mkdir -p" or ".../install-sh -c -d". For these automake # versions, $(mkinstalldirs) and $(install_sh) are unused. mkinstalldirs = $(SHELL) @install_sh@ -d install_sh = $(SHELL) @install_sh@ MKDIR_P = @MKDIR_P@ mkdir_p = @mkdir_p@ # When building gettext-tools, we prefer to use the built programs # rather than installed programs. However, we can't do that when we # are cross compiling. 
CROSS_COMPILING = @CROSS_COMPILING@ GMSGFMT_ = @GMSGFMT@ GMSGFMT_no = @GMSGFMT@ GMSGFMT_yes = @GMSGFMT_015@ GMSGFMT = $(GMSGFMT_$(USE_MSGCTXT)) MSGFMT_ = @MSGFMT@ MSGFMT_no = @MSGFMT@ MSGFMT_yes = @MSGFMT_015@ MSGFMT = $(MSGFMT_$(USE_MSGCTXT)) XGETTEXT_ = @XGETTEXT@ XGETTEXT_no = @XGETTEXT@ XGETTEXT_yes = @XGETTEXT_015@ XGETTEXT = $(XGETTEXT_$(USE_MSGCTXT)) MSGMERGE = msgmerge MSGMERGE_UPDATE = @MSGMERGE@ --update MSGINIT = msginit MSGCONV = msgconv MSGFILTER = msgfilter POFILES = @POFILES@ GMOFILES = @GMOFILES@ UPDATEPOFILES = @UPDATEPOFILES@ DUMMYPOFILES = @DUMMYPOFILES@ DISTFILES.common = Makefile.in.in remove-potcdate.sin \ $(DISTFILES.common.extra1) $(DISTFILES.common.extra2) $(DISTFILES.common.extra3) DISTFILES = $(DISTFILES.common) Makevars POTFILES.in \ $(POFILES) $(GMOFILES) \ $(DISTFILES.extra1) $(DISTFILES.extra2) $(DISTFILES.extra3) POTFILES = \ CATALOGS = @CATALOGS@ POFILESDEPS_ = $(srcdir)/$(DOMAIN).pot POFILESDEPS_yes = $(POFILESDEPS_) POFILESDEPS_no = POFILESDEPS = $(POFILESDEPS_$(PO_DEPENDS_ON_POT)) DISTFILESDEPS_ = update-po DISTFILESDEPS_yes = $(DISTFILESDEPS_) DISTFILESDEPS_no = DISTFILESDEPS = $(DISTFILESDEPS_$(DIST_DEPENDS_ON_UPDATE_PO)) # Makevars gets inserted here. (Don't remove this line!) .SUFFIXES: .SUFFIXES: .po .gmo .mo .sed .sin .nop .po-create .po-update .po.mo: @echo "$(MSGFMT) -c -o $@ $<"; \ $(MSGFMT) -c -o t-$@ $< && mv t-$@ $@ .po.gmo: @lang=`echo $* | sed -e 's,.*/,,'`; \ test "$(srcdir)" = . && cdcmd="" || cdcmd="cd $(srcdir) && "; \ echo "$${cdcmd}rm -f $${lang}.gmo && $(GMSGFMT) -c --statistics --verbose -o $${lang}.gmo $${lang}.po"; \ cd $(srcdir) && rm -f $${lang}.gmo && $(GMSGFMT) -c --statistics --verbose -o t-$${lang}.gmo $${lang}.po && mv t-$${lang}.gmo $${lang}.gmo .sin.sed: sed -e '/^#/d' $< > t-$@ mv t-$@ $@ all: all-@USE_NLS@ all-yes: stamp-po all-no: # Ensure that the gettext macros and this Makefile.in.in are in sync. 
CHECK_MACRO_VERSION = \ test "$(GETTEXT_MACRO_VERSION)" = "@GETTEXT_MACRO_VERSION@" \ || { echo "*** error: gettext infrastructure mismatch: using a Makefile.in.in from gettext version $(GETTEXT_MACRO_VERSION) but the autoconf macros are from gettext version @GETTEXT_MACRO_VERSION@" 1>&2; \ exit 1; \ } # $(srcdir)/$(DOMAIN).pot is only created when needed. When xgettext finds no # internationalized messages, no $(srcdir)/$(DOMAIN).pot is created (because # we don't want to bother translators with empty POT files). We assume that # LINGUAS is empty in this case, i.e. $(POFILES) and $(GMOFILES) are empty. # In this case, stamp-po is a nop (i.e. a phony target). # stamp-po is a timestamp denoting the last time at which the CATALOGS have # been loosely updated. Its purpose is that when a developer or translator # checks out the package via CVS, and the $(DOMAIN).pot file is not in CVS, # "make" will update the $(DOMAIN).pot and the $(CATALOGS), but subsequent # invocations of "make" will do nothing. This timestamp would not be necessary # if updating the $(CATALOGS) would always touch them; however, the rule for # $(POFILES) has been designed to not touch files that don't need to be # changed. stamp-po: $(srcdir)/$(DOMAIN).pot @$(CHECK_MACRO_VERSION) test ! -f $(srcdir)/$(DOMAIN).pot || \ test -z "$(GMOFILES)" || $(MAKE) $(GMOFILES) @test ! -f $(srcdir)/$(DOMAIN).pot || { \ echo "touch stamp-po" && \ echo timestamp > stamp-poT && \ mv stamp-poT stamp-po; \ } # Note: Target 'all' must not depend on target '$(DOMAIN).pot-update', # otherwise packages like GCC can not be built if only parts of the source # have been downloaded. # This target rebuilds $(DOMAIN).pot; it is an expensive operation. # Note that $(DOMAIN).pot is not touched if it doesn't need to be changed. # The determination of whether the package xyz is a GNU one is based on the # heuristic whether some file in the top level directory mentions "GNU xyz". 
# If GNU 'find' is available, we avoid grepping through monster files. $(DOMAIN).pot-update: $(POTFILES) $(srcdir)/POTFILES.in remove-potcdate.sed package_gnu="$(PACKAGE_GNU)"; \ test -n "$$package_gnu" || { \ if { if (LC_ALL=C find --version) 2>/dev/null | grep GNU >/dev/null; then \ LC_ALL=C find -L $(top_srcdir) -maxdepth 1 -type f \ -size -10000000c -exec grep 'GNU @PACKAGE@' \ /dev/null '{}' ';' 2>/dev/null; \ else \ LC_ALL=C grep 'GNU @PACKAGE@' $(top_srcdir)/* 2>/dev/null; \ fi; \ } | grep -v 'libtool:' >/dev/null; then \ package_gnu=yes; \ else \ package_gnu=no; \ fi; \ }; \ if test "$$package_gnu" = "yes"; then \ package_prefix='GNU '; \ else \ package_prefix=''; \ fi; \ if test -n '$(MSGID_BUGS_ADDRESS)' || test '$(PACKAGE_BUGREPORT)' = '@'PACKAGE_BUGREPORT'@'; then \ msgid_bugs_address='$(MSGID_BUGS_ADDRESS)'; \ else \ msgid_bugs_address='$(PACKAGE_BUGREPORT)'; \ fi; \ case `$(XGETTEXT) --version | sed 1q | sed -e 's,^[^0-9]*,,'` in \ '' | 0.[0-9] | 0.[0-9].* | 0.1[0-5] | 0.1[0-5].* | 0.16 | 0.16.[0-1]*) \ $(XGETTEXT) --default-domain=$(DOMAIN) --directory=$(top_srcdir) \ --add-comments=TRANSLATORS: $(XGETTEXT_OPTIONS) @XGETTEXT_EXTRA_OPTIONS@ \ --files-from=$(srcdir)/POTFILES.in \ --copyright-holder='$(COPYRIGHT_HOLDER)' \ --msgid-bugs-address="$$msgid_bugs_address" \ ;; \ *) \ $(XGETTEXT) --default-domain=$(DOMAIN) --directory=$(top_srcdir) \ --add-comments=TRANSLATORS: $(XGETTEXT_OPTIONS) @XGETTEXT_EXTRA_OPTIONS@ \ --files-from=$(srcdir)/POTFILES.in \ --copyright-holder='$(COPYRIGHT_HOLDER)' \ --package-name="$${package_prefix}@PACKAGE@" \ --package-version='@VERSION@' \ --msgid-bugs-address="$$msgid_bugs_address" \ ;; \ esac test ! 
-f $(DOMAIN).po || { \ if test -f $(srcdir)/$(DOMAIN).pot-header; then \ sed -e '1,/^#$$/d' < $(DOMAIN).po > $(DOMAIN).1po && \ cat $(srcdir)/$(DOMAIN).pot-header $(DOMAIN).1po > $(DOMAIN).po; \ rm -f $(DOMAIN).1po; \ fi; \ if test -f $(srcdir)/$(DOMAIN).pot; then \ sed -f remove-potcdate.sed < $(srcdir)/$(DOMAIN).pot > $(DOMAIN).1po && \ sed -f remove-potcdate.sed < $(DOMAIN).po > $(DOMAIN).2po && \ if cmp $(DOMAIN).1po $(DOMAIN).2po >/dev/null 2>&1; then \ rm -f $(DOMAIN).1po $(DOMAIN).2po $(DOMAIN).po; \ else \ rm -f $(DOMAIN).1po $(DOMAIN).2po $(srcdir)/$(DOMAIN).pot && \ mv $(DOMAIN).po $(srcdir)/$(DOMAIN).pot; \ fi; \ else \ mv $(DOMAIN).po $(srcdir)/$(DOMAIN).pot; \ fi; \ } # This rule has no dependencies: we don't need to update $(DOMAIN).pot at # every "make" invocation, only create it when it is missing. # Only "make $(DOMAIN).pot-update" or "make dist" will force an update. $(srcdir)/$(DOMAIN).pot: $(MAKE) $(DOMAIN).pot-update # This target rebuilds a PO file if $(DOMAIN).pot has changed. # Note that a PO file is not touched if it doesn't need to be changed. $(POFILES): $(POFILESDEPS) @lang=`echo $@ | sed -e 's,.*/,,' -e 's/\.po$$//'`; \ if test -f "$(srcdir)/$${lang}.po"; then \ test -f $(srcdir)/$(DOMAIN).pot || $(MAKE) $(srcdir)/$(DOMAIN).pot; \ test "$(srcdir)" = . 
&& cdcmd="" || cdcmd="cd $(srcdir) && "; \ echo "$${cdcmd}$(MSGMERGE_UPDATE) $(MSGMERGE_OPTIONS) --lang=$${lang} $${lang}.po $(DOMAIN).pot"; \ cd $(srcdir) \ && { case `$(MSGMERGE) --version | sed 1q | sed -e 's,^[^0-9]*,,'` in \ '' | 0.[0-9] | 0.[0-9].* | 0.1[0-7] | 0.1[0-7].*) \ $(MSGMERGE_UPDATE) $(MSGMERGE_OPTIONS) $${lang}.po $(DOMAIN).pot;; \ *) \ $(MSGMERGE_UPDATE) $(MSGMERGE_OPTIONS) --lang=$${lang} $${lang}.po $(DOMAIN).pot;; \ esac; \ }; \ else \ $(MAKE) $${lang}.po-create; \ fi install: install-exec install-data install-exec: install-data: install-data-@USE_NLS@ if test "$(PACKAGE)" = "gettext-tools"; then \ $(mkdir_p) $(DESTDIR)$(gettextsrcdir); \ for file in $(DISTFILES.common) Makevars.template; do \ $(INSTALL_DATA) $(srcdir)/$$file \ $(DESTDIR)$(gettextsrcdir)/$$file; \ done; \ for file in Makevars; do \ rm -f $(DESTDIR)$(gettextsrcdir)/$$file; \ done; \ else \ : ; \ fi install-data-no: all install-data-yes: all @catalogs='$(CATALOGS)'; \ for cat in $$catalogs; do \ cat=`basename $$cat`; \ lang=`echo $$cat | sed -e 's/\.gmo$$//'`; \ dir=$(localedir)/$$lang/LC_MESSAGES; \ $(mkdir_p) $(DESTDIR)$$dir; \ if test -r $$cat; then realcat=$$cat; else realcat=$(srcdir)/$$cat; fi; \ $(INSTALL_DATA) $$realcat $(DESTDIR)$$dir/$(DOMAIN).mo; \ echo "installing $$realcat as $(DESTDIR)$$dir/$(DOMAIN).mo"; \ for lc in '' $(EXTRA_LOCALE_CATEGORIES); do \ if test -n "$$lc"; then \ if (cd $(DESTDIR)$(localedir)/$$lang && LC_ALL=C ls -l -d $$lc 2>/dev/null) | grep ' -> ' >/dev/null; then \ link=`cd $(DESTDIR)$(localedir)/$$lang && LC_ALL=C ls -l -d $$lc | sed -e 's/^.* -> //'`; \ mv $(DESTDIR)$(localedir)/$$lang/$$lc $(DESTDIR)$(localedir)/$$lang/$$lc.old; \ mkdir $(DESTDIR)$(localedir)/$$lang/$$lc; \ (cd $(DESTDIR)$(localedir)/$$lang/$$lc.old && \ for file in *; do \ if test -f $$file; then \ ln -s ../$$link/$$file $(DESTDIR)$(localedir)/$$lang/$$lc/$$file; \ fi; \ done); \ rm -f $(DESTDIR)$(localedir)/$$lang/$$lc.old; \ else \ if test -d 
$(DESTDIR)$(localedir)/$$lang/$$lc; then \ :; \ else \ rm -f $(DESTDIR)$(localedir)/$$lang/$$lc; \ mkdir $(DESTDIR)$(localedir)/$$lang/$$lc; \ fi; \ fi; \ rm -f $(DESTDIR)$(localedir)/$$lang/$$lc/$(DOMAIN).mo; \ ln -s ../LC_MESSAGES/$(DOMAIN).mo $(DESTDIR)$(localedir)/$$lang/$$lc/$(DOMAIN).mo 2>/dev/null || \ ln $(DESTDIR)$(localedir)/$$lang/LC_MESSAGES/$(DOMAIN).mo $(DESTDIR)$(localedir)/$$lang/$$lc/$(DOMAIN).mo 2>/dev/null || \ cp -p $(DESTDIR)$(localedir)/$$lang/LC_MESSAGES/$(DOMAIN).mo $(DESTDIR)$(localedir)/$$lang/$$lc/$(DOMAIN).mo; \ echo "installing $$realcat link as $(DESTDIR)$(localedir)/$$lang/$$lc/$(DOMAIN).mo"; \ fi; \ done; \ done install-strip: install installdirs: installdirs-exec installdirs-data installdirs-exec: installdirs-data: installdirs-data-@USE_NLS@ if test "$(PACKAGE)" = "gettext-tools"; then \ $(mkdir_p) $(DESTDIR)$(gettextsrcdir); \ else \ : ; \ fi installdirs-data-no: installdirs-data-yes: @catalogs='$(CATALOGS)'; \ for cat in $$catalogs; do \ cat=`basename $$cat`; \ lang=`echo $$cat | sed -e 's/\.gmo$$//'`; \ dir=$(localedir)/$$lang/LC_MESSAGES; \ $(mkdir_p) $(DESTDIR)$$dir; \ for lc in '' $(EXTRA_LOCALE_CATEGORIES); do \ if test -n "$$lc"; then \ if (cd $(DESTDIR)$(localedir)/$$lang && LC_ALL=C ls -l -d $$lc 2>/dev/null) | grep ' -> ' >/dev/null; then \ link=`cd $(DESTDIR)$(localedir)/$$lang && LC_ALL=C ls -l -d $$lc | sed -e 's/^.* -> //'`; \ mv $(DESTDIR)$(localedir)/$$lang/$$lc $(DESTDIR)$(localedir)/$$lang/$$lc.old; \ mkdir $(DESTDIR)$(localedir)/$$lang/$$lc; \ (cd $(DESTDIR)$(localedir)/$$lang/$$lc.old && \ for file in *; do \ if test -f $$file; then \ ln -s ../$$link/$$file $(DESTDIR)$(localedir)/$$lang/$$lc/$$file; \ fi; \ done); \ rm -f $(DESTDIR)$(localedir)/$$lang/$$lc.old; \ else \ if test -d $(DESTDIR)$(localedir)/$$lang/$$lc; then \ :; \ else \ rm -f $(DESTDIR)$(localedir)/$$lang/$$lc; \ mkdir $(DESTDIR)$(localedir)/$$lang/$$lc; \ fi; \ fi; \ fi; \ done; \ done # Define this as empty until I found a useful application. 
installcheck: uninstall: uninstall-exec uninstall-data uninstall-exec: uninstall-data: uninstall-data-@USE_NLS@ if test "$(PACKAGE)" = "gettext-tools"; then \ for file in $(DISTFILES.common) Makevars.template; do \ rm -f $(DESTDIR)$(gettextsrcdir)/$$file; \ done; \ else \ : ; \ fi uninstall-data-no: uninstall-data-yes: catalogs='$(CATALOGS)'; \ for cat in $$catalogs; do \ cat=`basename $$cat`; \ lang=`echo $$cat | sed -e 's/\.gmo$$//'`; \ for lc in LC_MESSAGES $(EXTRA_LOCALE_CATEGORIES); do \ rm -f $(DESTDIR)$(localedir)/$$lang/$$lc/$(DOMAIN).mo; \ done; \ done check: all info dvi ps pdf html tags TAGS ctags CTAGS ID: mostlyclean: rm -f remove-potcdate.sed rm -f stamp-poT rm -f core core.* $(DOMAIN).po $(DOMAIN).1po $(DOMAIN).2po *.new.po rm -fr *.o clean: mostlyclean distclean: clean rm -f Makefile Makefile.in POTFILES *.mo maintainer-clean: distclean @echo "This command is intended for maintainers to use;" @echo "it deletes files that may require special tools to rebuild." rm -f stamp-po $(GMOFILES) distdir = $(top_builddir)/$(PACKAGE)-$(VERSION)/$(subdir) dist distdir: test -z "$(DISTFILESDEPS)" || $(MAKE) $(DISTFILESDEPS) @$(MAKE) dist2 # This is a separate target because 'update-po' must be executed before. 
dist2: stamp-po $(DISTFILES) dists="$(DISTFILES)"; \ if test "$(PACKAGE)" = "gettext-tools"; then \ dists="$$dists Makevars.template"; \ fi; \ if test -f $(srcdir)/$(DOMAIN).pot; then \ dists="$$dists $(DOMAIN).pot stamp-po"; \ fi; \ if test -f $(srcdir)/ChangeLog; then \ dists="$$dists ChangeLog"; \ fi; \ for i in 0 1 2 3 4 5 6 7 8 9; do \ if test -f $(srcdir)/ChangeLog.$$i; then \ dists="$$dists ChangeLog.$$i"; \ fi; \ done; \ if test -f $(srcdir)/LINGUAS; then dists="$$dists LINGUAS"; fi; \ for file in $$dists; do \ if test -f $$file; then \ cp -p $$file $(distdir) || exit 1; \ else \ cp -p $(srcdir)/$$file $(distdir) || exit 1; \ fi; \ done update-po: Makefile $(MAKE) $(DOMAIN).pot-update test -z "$(UPDATEPOFILES)" || $(MAKE) $(UPDATEPOFILES) $(MAKE) update-gmo # General rule for creating PO files. .nop.po-create: @lang=`echo $@ | sed -e 's/\.po-create$$//'`; \ echo "File $$lang.po does not exist. If you are a translator, you can create it through 'msginit'." 1>&2; \ exit 1 # General rule for updating PO files. .nop.po-update: @lang=`echo $@ | sed -e 's/\.po-update$$//'`; \ if test "$(PACKAGE)" = "gettext-tools" && test "$(CROSS_COMPILING)" != "yes"; then PATH=`pwd`/../src:$$PATH; fi; \ tmpdir=`pwd`; \ echo "$$lang:"; \ test "$(srcdir)" = . 
&& cdcmd="" || cdcmd="cd $(srcdir) && "; \ echo "$${cdcmd}$(MSGMERGE) $(MSGMERGE_OPTIONS) --lang=$$lang $$lang.po $(DOMAIN).pot -o $$lang.new.po"; \ cd $(srcdir); \ if { case `$(MSGMERGE) --version | sed 1q | sed -e 's,^[^0-9]*,,'` in \ '' | 0.[0-9] | 0.[0-9].* | 0.1[0-7] | 0.1[0-7].*) \ $(MSGMERGE) $(MSGMERGE_OPTIONS) -o $$tmpdir/$$lang.new.po $$lang.po $(DOMAIN).pot;; \ *) \ $(MSGMERGE) $(MSGMERGE_OPTIONS) --lang=$$lang -o $$tmpdir/$$lang.new.po $$lang.po $(DOMAIN).pot;; \ esac; \ }; then \ if cmp $$lang.po $$tmpdir/$$lang.new.po >/dev/null 2>&1; then \ rm -f $$tmpdir/$$lang.new.po; \ else \ if mv -f $$tmpdir/$$lang.new.po $$lang.po; then \ :; \ else \ echo "msgmerge for $$lang.po failed: cannot move $$tmpdir/$$lang.new.po to $$lang.po" 1>&2; \ exit 1; \ fi; \ fi; \ else \ echo "msgmerge for $$lang.po failed!" 1>&2; \ rm -f $$tmpdir/$$lang.new.po; \ fi $(DUMMYPOFILES): update-gmo: Makefile $(GMOFILES) @: # Recreate Makefile by invoking config.status. Explicitly invoke the shell, # because execution permission bits may not work on the current file system. # Use @SHELL@, which is the shell determined by autoconf for the use by its # scripts, not $(SHELL) which is hardwired to /bin/sh and may be deficient. Makefile: Makefile.in.in Makevars $(top_builddir)/config.status @POMAKEFILEDEPS@ cd $(top_builddir) \ && @SHELL@ ./config.status $(subdir)/$@.in po-directories force: # Tell versions [3.59,3.63) of GNU make not to export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: dar-2.6.8/po/dar.pot0000644000175000017520000055346713617551261011163 00000000000000# SOME DESCRIPTIVE TITLE. # Copyright (C) YEAR Free Software Foundation, Inc. # This file is distributed under the same license as the dar package. # FIRST AUTHOR , YEAR. # #, fuzzy msgid "" msgstr "" "Project-Id-Version: dar 2.6.8\n" "Report-Msgid-Bugs-To: http://sourceforge.net/tracker/?" 
"group_id=65612MSGID_BUGS_ADDRESS =atid=511612\n" "POT-Creation-Date: 2020-02-08 16:12+0100\n" "PO-Revision-Date: YEAR-MO-DA HO:MI+ZONE\n" "Last-Translator: FULL NAME \n" "Language-Team: LANGUAGE \n" "Language: \n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=CHARSET\n" "Content-Transfer-Encoding: 8bit\n" #: src/libdar/cat_signature.cpp:72 src/libdar/catalogue.cpp:143 #: src/libdar/catalogue.cpp:162 src/libdar/catalogue.cpp:164 msgid "incoherent catalogue structure" msgstr "" #: src/libdar/compression.cpp:56 src/libdar/compression.cpp:79 #: src/libdar/compression.cpp:102 msgid "unknown compression" msgstr "" #: src/libdar/compression.cpp:129 #, c-format msgid "unknown compression algorithm: %S" msgstr "" #: src/libdar/filesystem_tools.cpp:164 msgid "Cannot get inode information about file to remove " msgstr "" #: src/libdar/filesystem_tools.cpp:177 msgid "Cannot remove directory " msgstr "" #: src/libdar/filesystem_tools.cpp:251 msgid "" "uid value is too high for this system for libdar be able to restore it " "properly" msgstr "" #: src/libdar/filesystem_tools.cpp:255 msgid "" "gid value is too high for this system for libdar be able to restore it " "properly" msgstr "" #: src/libdar/filesystem_tools.cpp:259 src/libdar/filesystem_tools.cpp:263 msgid "Could not restore original file ownership: " msgstr "" #: src/libdar/filesystem_tools.cpp:285 #, c-format msgid "Cannot restore permissions of %s : %s" msgstr "" #: src/libdar/filesystem_tools.cpp:372 #, c-format msgid "Failed to open %S while checking for nodump flag: %s" msgstr "" #: src/libdar/filesystem_tools.cpp:386 #, c-format msgid "Cannot get ext2 attributes (and nodump flag value) for %S : %s" msgstr "" #: src/libdar/filesystem_tools.cpp:418 #, c-format msgid "Cannot get inode information for %s : %s" msgstr "" #: src/libdar/filesystem_tools.cpp:447 #, c-format msgid "" "Replacing %s in the -R option by the directory pointed to by this symbolic " "link: " msgstr "" #: 
src/libdar/filesystem_tools.cpp:450 #, c-format msgid "" "The given path %s must be a directory (or symbolic link to an existing " "directory)" msgstr "" #: src/libdar/filesystem_tools.cpp:577 msgid "" "File the patch is about to be applied to is not the expected one, aborting " "the patch operation" msgstr "" #: src/libdar/filesystem_tools.cpp:594 msgid "Patch data does not match its CRC, archive corruption took place" msgstr "" #: src/libdar/filesystem_tools.cpp:625 msgid "Error met while checking the resulting patched file: " msgstr "" #: src/libdar/filesystem_tools.cpp:752 msgid "Copied data does not match expected CRC" msgstr "" #: src/libdar/fsa_family.cpp:58 msgid "creation date" msgstr "" #: src/libdar/fsa_family.cpp:60 msgid "append only" msgstr "" #: src/libdar/fsa_family.cpp:62 msgid "compressed" msgstr "" #: src/libdar/fsa_family.cpp:64 msgid "no dump flag" msgstr "" #: src/libdar/fsa_family.cpp:66 msgid "immutable" msgstr "" #: src/libdar/fsa_family.cpp:68 msgid "journalized" msgstr "" #: src/libdar/fsa_family.cpp:70 msgid "secure deletion" msgstr "" #: src/libdar/fsa_family.cpp:72 msgid "no tail merging" msgstr "" #: src/libdar/fsa_family.cpp:74 msgid "undeletable" msgstr "" #: src/libdar/fsa_family.cpp:76 msgid "no atime update" msgstr "" #: src/libdar/fsa_family.cpp:78 msgid "synchronous directory" msgstr "" #: src/libdar/fsa_family.cpp:80 msgid "synchronous update" msgstr "" #: src/libdar/fsa_family.cpp:82 msgid "top of directory hierarchy" msgstr "" #: src/libdar/statistics.cpp:81 msgid "Error while initializing \"mutex\" for class \"statistics\": " msgstr "" #: src/libdar/tronconneuse.cpp:61 #, c-format msgid "%d is not a valid block size" msgstr "" #: src/libdar/tronconneuse.cpp:464 src/libdar/crypto_sym.cpp:349 msgid "Data corruption may have occurred, cannot decrypt data" msgstr "" #: src/libdar/cat_delta_signature.cpp:332 msgid "data corrupted when attempting to read delta signature block size" msgstr "" #: 
src/libdar/cat_delta_signature.cpp:354 msgid "" "Error while reading CRC of delta signature data. Data corruption occurred" msgstr "" #: src/libdar/cat_delta_signature.cpp:356 msgid "CRC error met while reading delta signature: data corruption." msgstr "" #: src/libdar/sar.cpp:165 #, c-format msgid "" "Error met while opening the last slice: %S. Trying to open the archive using " "the first slice..." msgstr "" #: src/libdar/sar.cpp:213 msgid "File size too small" msgstr "" #: src/libdar/sar.cpp:217 msgid "First file size too small" msgstr "" #: src/libdar/sar.cpp:689 msgid "LAX MODE: Caught exception: " msgstr "" #: src/libdar/sar.cpp:690 #, c-format msgid "" "LAX MODE: %S is missing, You have the possibility to create a zero byte " "length file under the name of this slice, to replace this missing file. This " "will of course generate error messages about the information that is missing " "in this slice, but at least libdar will be able to continue. Can we continue " "now?" msgstr "" #: src/libdar/sar.cpp:702 #, c-format msgid "%S is required for further operation, please provide the file." msgstr "" #: src/libdar/sar.cpp:704 #, c-format msgid "" "%S is required for further operation, please provide the file if you have it." msgstr "" #: src/libdar/sar.cpp:709 #, c-format msgid "Failed reading slice %S: " msgstr "" #: src/libdar/sar.cpp:752 #, c-format msgid "%S has a bad or corrupted header, please provide the correct file." msgstr "" #: src/libdar/sar.cpp:756 #, c-format msgid "" "LAX MODE: %S has a bad or corrupted header, trying to guess original values " "and continuing if possible" msgstr "" #: src/libdar/sar.cpp:766 #, c-format msgid "" "%S is not a valid file (wrong magic number), please provide the good file." 
msgstr "" #: src/libdar/sar.cpp:770 #, c-format msgid "" "LAX MODE: In spite of its name, %S does not appear to be a dar slice, " "assuming a data corruption took place and continuing" msgstr "" #: src/libdar/sar.cpp:774 msgid "" "This is an old archive, it can only be opened starting by the first slice" msgstr "" #: src/libdar/sar.cpp:801 msgid "" "LAX MODE: Due to probable data corruption, dar could not determine the " "correct size of slices in this archive. For recent archive, this information " "is duplicated in each slice, do you want to try opening another slice to get " "this value if present?" msgstr "" #: src/libdar/sar.cpp:805 msgid "LAX MODE: Please provide the slice number to read: " msgstr "" #: src/libdar/sar.cpp:813 msgid "LAX MODE: Please provide an strictly positive integer number" msgstr "" #: src/libdar/sar.cpp:819 #, c-format msgid "LAX MODE: opening slice %i to read its slice header" msgstr "" #: src/libdar/sar.cpp:821 #, c-format msgid "LAX MODE: closing slice %i, header properly fetched" msgstr "" #: src/libdar/sar.cpp:827 msgid "" "LAX MODE: In spite of a the absence of a known slice size, continuing anyway" msgstr "" #: src/libdar/sar.cpp:834 msgid "Incoherent slice header: First slice size too small" msgstr "" #: src/libdar/sar.cpp:836 msgid "incoherent slice header: Slice size too small" msgstr "" #: src/libdar/sar.cpp:842 #, c-format msgid "Error opening %S : " msgstr "" #: src/libdar/sar.cpp:842 msgid " . Retry ?" msgstr "" #: src/libdar/sar.cpp:853 msgid " is a slice from another backup, please provide the correct slice." msgstr "" #: src/libdar/sar.cpp:858 msgid "" "LAX MODE: internal name of the slice leads dar to consider it is not member " "of the same archive. 
Assuming data corruption occurred and relying on the " "filename of this slice as proof of its membership to the archive" msgstr "" #: src/libdar/sar.cpp:886 msgid "" "Data corruption met at end of slice, forbidden flag found at this position" msgstr "" #: src/libdar/sar.cpp:892 msgid "Data corruption met at end of slice, unknown flag found" msgstr "" #: src/libdar/sar.cpp:912 #, c-format msgid "" "Two different slices (%i and %i) are marked as the last slice of the backup!" msgstr "" #: src/libdar/sar.cpp:915 #, c-format msgid "" "LAX MODE: slices %i and %i are both recorded as last slice of the archive, " "keeping the higher number as the real last slice" msgstr "" #: src/libdar/sar.cpp:938 #, c-format msgid "Slice %S has an unknown flag (neither terminal nor non_terminal file)." msgstr "" #: src/libdar/sar.cpp:948 #, c-format msgid "" "Due to data corruption, it is not possible to know if slice %S is the last " "slice of the archive or not. I need your help to figure out this. At the " "following prompt please answer either one of the following words: \"last\" " "or \"notlast\" according to the nature of this slice (you can also answer " "with \"abort\" to abort the program immediately): " msgstr "" #: src/libdar/sar.cpp:950 src/libdar/sar.cpp:954 msgid "last" msgstr "" #: src/libdar/sar.cpp:950 msgid "notlast" msgstr "" #: src/libdar/sar.cpp:950 src/libdar/sar.cpp:952 msgid "abort" msgstr "" #: src/libdar/sar.cpp:966 msgid "" "LAX MODE: Slice flag corrupted, but a slice of higher number has been seen, " "thus the header flag was surely not indicating this slice as the last of the " "archive. Continuing" msgstr "" #: src/libdar/sar.cpp:1010 #, c-format msgid "failed openning slice %S: %S. 
Will try to erase it first, if allowed" msgstr "" #: src/libdar/sar.cpp:1076 src/libdar/sar.cpp:1147 src/libdar/sar.cpp:1158 #: src/libdar/trivial_sar.cpp:224 src/libdar/trivial_sar.cpp:242 #, c-format msgid "Failed creating slice %S: " msgstr "" #: src/libdar/sar.cpp:1097 msgid "file exists, and DONT_ERASE option is set." msgstr "" #: src/libdar/sar.cpp:1102 msgid " is about to be overwritten." msgstr "" #: src/libdar/sar.cpp:1182 msgid "" "First slice size is too small to even just be able to drop the slice header" msgstr "" #: src/libdar/sar.cpp:1184 msgid "Slice size is too small to even just be able to drop the slice header" msgstr "" #: src/libdar/sar.cpp:1247 msgid "Finished writing to file " msgstr "" #: src/libdar/sar.cpp:1247 msgid ", ready to continue ? " msgstr "" #: src/libdar/sar.cpp:1252 msgid "" "If you really want to abort the archive creation hit CTRL-C, then press " "enter." msgstr "" #: src/libdar/sar.cpp:1284 msgid "file not open" msgstr "" #: src/libdar/sar.cpp:1318 msgid "The last file of the set is not present in " msgstr "" #: src/libdar/sar.cpp:1318 msgid " , please provide it." msgstr "" #: src/libdar/sar.cpp:1332 #, c-format msgid "" "No backup file is present in %S for archive %S, please provide the last file " "of the set." msgstr "" #: src/libdar/gf_mode.cpp:43 msgid "read only" msgstr "" #: src/libdar/gf_mode.cpp:46 msgid "write only" msgstr "" #: src/libdar/gf_mode.cpp:49 msgid "read and write" msgstr "" #: src/libdar/int_tools.cpp:66 msgid "a binary digit is either 0 or 1" msgstr "" #: src/libdar/erreurs.cpp:123 #, c-format msgid "File %S line %d" msgstr "" #: src/libdar/erreurs.cpp:123 msgid "it seems to be a bug here" msgstr "" #: src/libdar/erreurs.cpp:152 #, c-format msgid "in file %S line %S" msgstr "" #: src/libdar/erreurs.cpp:171 msgid "# UNEXPECTED EXCEPTION, #" msgstr "" #: src/libdar/erreurs.cpp:172 src/libdar/erreurs.cpp:185 msgid "# E X I T I N G ! 
#" msgstr "" #: src/libdar/erreurs.cpp:175 src/libdar/erreurs.cpp:188 msgid "" " THANKS TO REPORT THE PREVIOUS OUTPUT TO MAINTAINER\n" " GIVING A DESCRIPTION OF THE CIRCUMSTANCES." msgstr "" #: src/libdar/erreurs.cpp:176 msgid "" " IF POSSIBLE TRY TO REPRODUCE THIS ERROR, A\n" " SCENARIO THAT CAN REPRODUCE IT WOULD HELP MUCH\n" " IN SOLVING THIS PROBLEM. THANKS" msgstr "" #: src/libdar/erreurs.cpp:184 msgid "# NOT CAUGHT EXCEPTION, #" msgstr "" #: src/libdar/erreurs.cpp:189 msgid "" " IF POSSIBLE TRY TO PRODUCE THIS ERROR, A\n" " SCENARIO THAT CAN REPRODUCE IT WOULD HELP MUCH\n" " IN SOLVING THIS PROBLEM. THANKS" msgstr "" #: src/libdar/generic_rsync.cpp:210 msgid "Error met building the rsync hash table: " msgstr "" #: src/libdar/generic_rsync.cpp:487 msgid "Error met while feeding data to librsync: " msgstr "" #: src/libdar/generic_rsync.cpp:509 msgid "Error releasing librsync job: " msgstr "" #: src/libdar/tools.hpp:697 #, c-format msgid "Found negative date (%s) for inode %s ." msgstr "" #: src/libdar/tools.hpp:701 #, c-format msgid "%S Can we read it as if it was zero (1st January 1970 at 00:00:00 UTC)?" 
msgstr "" #: src/libdar/tools.hpp:704 msgid "Considering date as if it was zero (Jan 1970)" msgstr "" #: src/libdar/entree_stats.cpp:140 #, c-format msgid "CATALOGUE CONTENTS :" msgstr "" #: src/libdar/entree_stats.cpp:142 #, c-format msgid "total number of inode : %i" msgstr "" #: src/libdar/entree_stats.cpp:143 #, c-format msgid "fully saved : %i" msgstr "" #: src/libdar/entree_stats.cpp:144 #, c-format msgid "binay delta patch : %i" msgstr "" #: src/libdar/entree_stats.cpp:145 #, c-format msgid "inode metadata only : %i" msgstr "" #: src/libdar/entree_stats.cpp:146 #, c-format msgid "distribution of inode(s)" msgstr "" #: src/libdar/entree_stats.cpp:147 #, c-format msgid " - directories : %i" msgstr "" #: src/libdar/entree_stats.cpp:148 #, c-format msgid " - plain files : %i" msgstr "" #: src/libdar/entree_stats.cpp:149 #, c-format msgid " - symbolic links : %i" msgstr "" #: src/libdar/entree_stats.cpp:150 #, c-format msgid " - named pipes : %i" msgstr "" #: src/libdar/entree_stats.cpp:151 #, c-format msgid " - unix sockets : %i" msgstr "" #: src/libdar/entree_stats.cpp:152 #, c-format msgid " - character devices : %i" msgstr "" #: src/libdar/entree_stats.cpp:153 #, c-format msgid " - block devices : %i" msgstr "" #: src/libdar/entree_stats.cpp:154 #, c-format msgid " - Door entries : %i" msgstr "" #: src/libdar/entree_stats.cpp:155 #, c-format msgid "hard links information" msgstr "" #: src/libdar/entree_stats.cpp:156 #, c-format msgid " - number of inode with hard link : %i" msgstr "" #: src/libdar/entree_stats.cpp:157 #, c-format msgid " - number of reference to hard linked inodes: %i" msgstr "" #: src/libdar/entree_stats.cpp:158 #, c-format msgid "destroyed entries information" msgstr "" #: src/libdar/entree_stats.cpp:159 #, c-format msgid " %i file(s) have been record as destroyed since backup of reference" msgstr "" #: src/libdar/wrapperlib.cpp:468 msgid "compressReset called but compressInit never called before" msgstr "" #: 
src/libdar/fichier_libcurl.cpp:75 #, c-format msgid "Error met while resetting URL to handle: %s" msgstr "" #: src/libdar/fichier_libcurl.cpp:84 src/libdar/fichier_libcurl.cpp:619 #: src/libdar/fichier_libcurl.cpp:676 #, c-format msgid "Error met while setting libcurl for reading data file: %s" msgstr "" #: src/libdar/fichier_libcurl.cpp:91 src/libdar/fichier_libcurl.cpp:96 #: src/libdar/fichier_libcurl.cpp:646 src/libdar/fichier_libcurl.cpp:683 #, c-format msgid "Error met while setting libcurl for writing data file: %s" msgstr "" #: src/libdar/fichier_libcurl.cpp:139 src/libdar/fichier_libcurl.cpp:148 #, c-format msgid "%s: %s" msgstr "" #: src/libdar/fichier_libcurl.cpp:184 src/libdar/fichier_libcurl.cpp:192 #: src/libdar/fichier_libcurl.cpp:203 #, c-format msgid "Error met while fetching file size: %s" msgstr "" #: src/libdar/fichier_libcurl.cpp:259 msgid "libcurl does not allow skipping in write mode" msgstr "" #: src/libdar/fichier_libcurl.cpp:491 msgid "Error met during network transfer: " msgstr "" #: src/libdar/fichier_libcurl.cpp:514 #, c-format msgid "Error met while reading a block of data: %s" msgstr "" #: src/libdar/fichier_libcurl.cpp:590 src/libdar/fichier_libcurl.cpp:598 #: src/libdar/fichier_libcurl.cpp:636 src/libdar/fichier_libcurl.cpp:776 #, c-format msgid "Error while seeking in file on remote repository: %s" msgstr "" #: src/libdar/fichier_libcurl.cpp:631 msgid "" "Integer too large for libcurl, cannot skip at the requested offset in the " "remote repository" msgstr "" #: src/libdar/fichier_libcurl.cpp:654 #, c-format msgid "Error while setting write append mode for libcurl: %s" msgstr "" #: src/libdar/fichier_libcurl.cpp:934 src/libdar/fichier_libcurl.cpp:940 #, c-format msgid "%S: %s, aborting" msgstr "" #: src/libdar/fichier_libcurl.cpp:955 #, c-format msgid "%S: %s, retrying in %d seconds" msgstr "" #: src/libdar/fichier_libcurl.cpp:962 #, c-format msgid "%S: %s, do we retry network operation?" 
msgstr "" #: src/libdar/macro_tools.cpp:165 msgid "Locating archive contents..." msgstr "" #: src/libdar/macro_tools.cpp:177 msgid "Reading archive contents..." msgstr "" #: src/libdar/macro_tools.cpp:213 msgid "Missing catalogue in file." msgstr "" #: src/libdar/macro_tools.cpp:312 msgid "" "LAX MODE: catalogue computed hash does not match the signed hash of the " "archive, ignoring" msgstr "" #: src/libdar/macro_tools.cpp:314 msgid "" "Catalogue computed hash does not match the signed hash of the archive, " "archive has been modified since it was signed!" msgstr "" #: src/libdar/macro_tools.cpp:343 msgid "Cannot open catalogue: " msgstr "" #: src/libdar/macro_tools.cpp:426 msgid "Opening standard input to read the archive..." msgstr "" #: src/libdar/macro_tools.cpp:434 #, c-format msgid "Opening named pipe %S as input to read the archive..." msgstr "" #: src/libdar/macro_tools.cpp:447 #, c-format msgid "" "Opening a pair of pipes to read the archive, expecting dar_slave at the " "other ends..." msgstr "" #: src/libdar/macro_tools.cpp:477 msgid "Opening the archive using the multi-slice abstraction layer..." msgstr "" #: src/libdar/macro_tools.cpp:511 msgid "Reading the archive header..." msgstr "" #: src/libdar/macro_tools.cpp:513 msgid "Reading the archive trailer..." msgstr "" #: src/libdar/macro_tools.cpp:532 #, c-format msgid "" "Error while reading archive's header, this may be because this archive is an " "old encrypted archive or that data corruption took place, Assuming it is an " "old archive, we have to read the header at the beginning of the first " "slice..." msgstr "" #: src/libdar/macro_tools.cpp:543 msgid "" "Found a correct archive header at the beginning of the archive, which does " "not stands to be an old archive, the end of the archive is corrupted and " "thus the catalogue is not readable, aborting. 
Either retry providing in " "addition an isolated catalogue of that archive to perform the operation, or " "try reading the archive in sequential mode or try in lax mode or, last " "chance, try both lax and sequential read mode at the same time" msgstr "" #: src/libdar/macro_tools.cpp:545 msgid "" "Found a correct archive header at the beginning of the archive, which does " "not stands to be an old archive, the end of the archive is thus corrupted. " "Without external catalogue provided and as we do not read the archive in " "sequential mode, there is very little chance to retreive something from this " "corrupted archive. Do we continue anyway ?" msgstr "" #: src/libdar/macro_tools.cpp:556 #, c-format msgid "Opening construction layer..." msgstr "" #: src/libdar/macro_tools.cpp:573 msgid "Considering cyphering layer..." msgstr "" #: src/libdar/macro_tools.cpp:582 msgid "" "LAX MODE: Archive seems to be ciphered, but you did not have provided any " "encryption algorithm, assuming data corruption and considering that the " "archive is not ciphered" msgstr "" #: src/libdar/macro_tools.cpp:585 #, c-format msgid "" "The archive %S is encrypted and no encryption cipher has been given, cannot " "open archive." msgstr "" #: src/libdar/macro_tools.cpp:616 msgid "" "WARNING: support for secure memory was not available at compilation time, in " "case of heavy memory load, this may lead the password you are about to " "provide to be wrote to disk (swap space) in clear. You have been warned!" 
msgstr "" #: src/libdar/macro_tools.cpp:617 src/libdar/macro_tools.cpp:1359 #, c-format msgid "Archive %S requires a password: " msgstr "" #: src/libdar/macro_tools.cpp:626 msgid "No cyphering layer opened, adding cache layer for better performance" msgstr "" #: src/libdar/macro_tools.cpp:632 msgid "" "Failed opening the cache layer, lack of memory, archive read performances " "will not be optimized" msgstr "" #: src/libdar/macro_tools.cpp:637 msgid "No cyphering layer opened" msgstr "" #: src/libdar/macro_tools.cpp:646 src/libdar/macro_tools.cpp:696 msgid "Opening cyphering layer..." msgstr "" #: src/libdar/macro_tools.cpp:703 msgid "Unknown encryption algorithm" msgstr "" #: src/libdar/macro_tools.cpp:723 src/libdar/macro_tools.cpp:1481 msgid "Creating a new thread to run the previously created layers..." msgstr "" #: src/libdar/macro_tools.cpp:743 msgid "Opening escape sequence abstraction layer..." msgstr "" #: src/libdar/macro_tools.cpp:748 msgid "" "LAX MODE: Archive is flagged as having escape sequence (which is normal in " "recent archive versions). However if this is not expected, shall I assume a " "data corruption occurred in this field and that this flag should be ignored? " "(If unsure, refuse)" msgstr "" #: src/libdar/macro_tools.cpp:763 msgid "" "Sequential read asked, but this archive is flagged to not have the necessary " "embedded escape sequences for that operation, aborting" msgstr "" #: src/libdar/macro_tools.cpp:768 msgid "" "LAX MODE: the requested sequential read mode relies on escape sequence which " "seem to be absent from this archive. Assuming data corruption occurred. 
" "However, if no data corruption occurred and thus no escape sequence are " "present in this archive, do not use sequential reading mode to explore this " "archive else you will just get nothing usable from it" msgstr "" #: src/libdar/macro_tools.cpp:774 msgid "" "LAX MODE: Archive is flagged to not have escape sequence which is not the " "case by default since archive format 8 (release 2.4.x). If corruption " "occurred and an escape sequence is present, this may lead data restoration " "to fail, answering no at this question will let me consider that an escape " "sequence layer has to be added in spite of the archive flags. Do you want to " "continue as suggested by the archive flag, thus without escape sequence " "layer?" msgstr "" #: src/libdar/macro_tools.cpp:805 src/libdar/macro_tools.cpp:1527 msgid "Creating a new thread to run the escape layer..." msgstr "" #: src/libdar/macro_tools.cpp:823 msgid "" "Opening the compression abstraction layer (compression algorithm used is " "none)..." msgstr "" #: src/libdar/macro_tools.cpp:825 msgid "Opening the compression layer..." msgstr "" #: src/libdar/macro_tools.cpp:849 src/libdar/macro_tools.cpp:1559 msgid "Creating a new thread to run the compression layer..." msgstr "" #: src/libdar/macro_tools.cpp:863 src/libdar/macro_tools.cpp:1569 msgid "All layers have been created successfully" msgstr "" #: src/libdar/macro_tools.cpp:866 #, c-format msgid "" "Warning, the archive %S has been encrypted. A wrong key is not possible to " "detect, it would cause DAR to report the archive as corrupted" msgstr "" #: src/libdar/macro_tools.cpp:905 msgid "" "LAX MODE: The catalogue (table of contents) usually takes a few percents of " "the archive at its end, which percentage do you want me to scan (answer by " "an *integer* number between 0 and 100)? 
" msgstr "" #: src/libdar/macro_tools.cpp:911 #, c-format msgid "LAX MODE: %i is not a valid percent value" msgstr "" #: src/libdar/macro_tools.cpp:915 #, c-format msgid "%S is not a valid number" msgstr "" #: src/libdar/macro_tools.cpp:921 #, c-format msgid "" "LAX MODE: Beginning search of the catalogue (from the end toward the " "beginning of the archive, on %i %% of its length), this may take a while..." msgstr "" #: src/libdar/macro_tools.cpp:930 msgid "" "LAX MODE: Cannot skip at the end of the archive! Using current position to " "start the catalogue search" msgstr "" #: src/libdar/macro_tools.cpp:935 msgid "LAX MODE: Failed to read the catalogue (no data to inspect)" msgstr "" #: src/libdar/macro_tools.cpp:938 msgid "" "LAX MODE: Failed to read the catalogue (0 bytes of the archive length asked " "to look for the catalogue)" msgstr "" #: src/libdar/macro_tools.cpp:949 msgid "" "LAX MODE: Escape sequence seems present in this archive. I have thus two " "different methods, either I look for the escape sequence indicating the " "start of the catalogue or I try each position in turn in the hope it will " "not be data that look like a catalogue" msgstr "" #: src/libdar/macro_tools.cpp:952 msgid "LAX MODE: Trying to locate the escape sequence (safer choice) ?" msgstr "" #: src/libdar/macro_tools.cpp:957 msgid "" "LAX MODE: Good point! I could find the escape sequence marking the beginning " "of the catalogue, now trying to read it..." 
msgstr "" #: src/libdar/macro_tools.cpp:966 msgid "" "LAX MODE: Escape sequence could not be found, it may have been corrupted or " "out of the scanned portion of the archive, trying to find the catalogue the " "other way" msgstr "" #: src/libdar/macro_tools.cpp:989 #, c-format msgid "LAX MODE: %i %% remaining" msgstr "" #: src/libdar/macro_tools.cpp:1001 #, c-format msgid "" "Could read a catalogue data structure at offset %i, it contains the " "following:" msgstr "" #: src/libdar/macro_tools.cpp:1003 msgid "Do you want to use it for the operation?" msgstr "" #: src/libdar/macro_tools.cpp:1035 msgid "" "LAX MODE: Reached the end of the area to scan, FAILED to find any catalogue" msgstr "" #: src/libdar/macro_tools.cpp:1045 msgid "LAX MODE: Failed to read the catalogue" msgstr "" #: src/libdar/macro_tools.cpp:1130 msgid "" "Creating low layer: Writing archive into a black hole object (equivalent to /" "dev/null)..." msgstr "" #: src/libdar/macro_tools.cpp:1139 msgid "Creating low layer: Writing archive into standard output object..." msgstr "" #: src/libdar/macro_tools.cpp:1152 msgid "Creating low layer: Writing archive into a plain file object..." msgstr "" #: src/libdar/macro_tools.cpp:1173 msgid "" "Creating low layer: Writing archive into a sar object (Segmentation and " "Reassembly) for slicing..." msgstr "" #: src/libdar/macro_tools.cpp:1211 msgid "Adding cache layer over pipe to provide limited skippability..." msgstr "" #: src/libdar/macro_tools.cpp:1241 msgid "" "WARNING: support for secure memory was not available at compilation time, in " "case of heavy memory load, this may lead the password/passphrase provided to " "be wrote to disk (swap space) in clear. You have been warned!" msgstr "" #: src/libdar/macro_tools.cpp:1256 msgid "Generating random key for symmetric encryption..." 
msgstr "" #: src/libdar/macro_tools.cpp:1264 msgid "" "Scrambling is a very weak encryption algorithm, this is a non-sens to use " "with asymmetric encryption" msgstr "" #: src/libdar/macro_tools.cpp:1304 #, c-format msgid "" "For your information, this is the iteration %d for which the randomly " "generated key is reported to be weak by libgcrypt, continuing generating " "another random key... patience" msgstr "" #: src/libdar/macro_tools.cpp:1308 #, c-format msgid "" "... A strong randomly generated key could be found after %d iteration(s)" msgstr "" #: src/libdar/macro_tools.cpp:1315 msgid "Key generated" msgstr "" #: src/libdar/macro_tools.cpp:1360 msgid "Please confirm your password: " msgstr "" #: src/libdar/macro_tools.cpp:1364 msgid "The two passwords are not identical. Aborting" msgstr "" #: src/libdar/macro_tools.cpp:1406 msgid "Writing down the archive header..." msgstr "" #: src/libdar/macro_tools.cpp:1422 msgid "Adding a new layer on top: scrambler object..." msgstr "" #: src/libdar/macro_tools.cpp:1434 msgid "Adding a new layer on top: Strong encryption object..." msgstr "" #: src/libdar/macro_tools.cpp:1454 msgid "Adding a new layer on top: Caching layer for better performances..." msgstr "" #: src/libdar/macro_tools.cpp:1499 msgid "Writing down the initial elastic buffer through the encryption layer..." msgstr "" #: src/libdar/macro_tools.cpp:1511 msgid "Adding a new layer on top: Escape layer to allow sequential reading..." msgstr "" #: src/libdar/macro_tools.cpp:1541 msgid "Adding a new layer on top: compression..." msgstr "" #: src/libdar/macro_tools.cpp:1583 msgid "Error creating archive layers: " msgstr "" #: src/libdar/macro_tools.cpp:1692 msgid "Writing down archive contents..." msgstr "" #: src/libdar/macro_tools.cpp:1716 msgid "Calculating the signature of the catalogue hash..." msgstr "" #: src/libdar/macro_tools.cpp:1732 msgid "Writing down the signed hash of the catalogue..." 
msgstr "" #: src/libdar/macro_tools.cpp:1782 msgid "Closing the compression layer..." msgstr "" #: src/libdar/macro_tools.cpp:1797 msgid "Closing the escape layer..." msgstr "" #: src/libdar/macro_tools.cpp:1817 msgid "Writing down the first archive terminator..." msgstr "" #: src/libdar/macro_tools.cpp:1823 msgid "writing down the final elastic buffer through the encryption layer..." msgstr "" #: src/libdar/macro_tools.cpp:1862 msgid "Closing the encryption layer..." msgstr "" #: src/libdar/macro_tools.cpp:1884 msgid "Writing down archive trailer..." msgstr "" #: src/libdar/macro_tools.cpp:1889 msgid "Writing down the second archive terminator..." msgstr "" #: src/libdar/macro_tools.cpp:1900 msgid "Closing archive low layer..." msgstr "" #: src/libdar/macro_tools.cpp:1917 msgid "Archive is closed." msgstr "" #: src/libdar/macro_tools.cpp:2084 msgid "" "The format version of the archive is too high for that software version, try " "reading anyway?" msgstr "" #: src/libdar/fichier_local.cpp:152 msgid "Error getting size of file: " msgstr "" #: src/libdar/fichier_local.cpp:262 msgid "Error getting file reading position: " msgstr "" #: src/libdar/fichier_local.cpp:294 src/libdar/fichier_local.cpp:296 msgid "Error while reading from file: " msgstr "" #: src/libdar/fichier_local.cpp:345 src/libdar/fichier_local.cpp:351 msgid "Error while writing to file: " msgstr "" #: src/libdar/fichier_local.cpp:411 src/libdar/archive_options.cpp:474 #: src/libdar/archive_options.cpp:1729 msgid "Furtive read mode" msgstr "" #: src/libdar/fichier_local.cpp:429 msgid "" "No space left for inode, you have the opportunity to make some room now. " "When done : can we continue ?" 
msgstr "" #: src/libdar/fichier_local.cpp:440 msgid "Cannot open file : " msgstr "" #: src/libdar/fichier_local.cpp:463 #, c-format msgid "Cannot dup() filedescriptor while copying \"fichier_local\" object: %s" msgstr "" #: src/libdar/erreurs.hpp:129 msgid "Lack of Memory" msgstr "" #: src/libdar/erreurs.hpp:146 msgid "Lack of Secured Memory" msgstr "" #: src/libdar/erreurs.hpp:204 msgid "" "Cannot handle such a too large integer. Use a full version of libdar " "(compiled to rely on the \"infinint\" integer type) to solve this problem" msgstr "" #: src/libdar/erreurs.hpp:385 msgid "Thread cancellation requested, aborting as soon as possible" msgstr "" #: src/libdar/erreurs.hpp:385 msgid "Thread cancellation requested, aborting as properly as possible" msgstr "" #: src/libdar/generic_file_overlay_for_gpgme.cpp:55 #, c-format msgid "Error creating data buffer overlay for GPGME: %s" msgstr "" #: src/libdar/generic_file_overlay_for_gpgme.cpp:107 msgid "File offset too large to be stored in off_t type" msgstr "" #: src/libdar/fichier_libcurl.hpp:100 msgid "user/group ownership not supported for this repository" msgstr "" #: src/libdar/filesystem_hard_link_write.cpp:292 #, c-format msgid "" "Error creating hard link %s : %s\n" " Trying to duplicate the inode" msgstr "" #: src/libdar/filesystem_hard_link_write.cpp:306 #, c-format msgid "" "Error creating hard link : %s , the inode to link with [ %s ] has " "disappeared, re-creating it" msgstr "" #: src/libdar/filesystem_hard_link_write.cpp:313 #, c-format msgid "" "Error creating hard link : %s , the inode to link with [ %s ] is not " "present, cannot restore this hard link" msgstr "" #: src/libdar/filesystem_hard_link_write.cpp:385 msgid "Bad CRC, data corruption occurred" msgstr "" #: src/libdar/filesystem_hard_link_write.cpp:435 #, c-format msgid "" "error restoring Unix socket %s, path too long to be stored properly, socket " "will be created as %s instead, do you confirm?" 
msgstr "" #: src/libdar/filesystem_hard_link_write.cpp:437 msgid "Error creating Unix socket file: " msgstr "" #: src/libdar/filesystem_hard_link_write.cpp:457 msgid "Could not create inode: " msgstr "" #: src/libdar/filesystem_hard_link_write.cpp:459 msgid "Cannot create inode: " msgstr "" #: src/libdar/filesystem_hard_link_write.cpp:459 msgid " Ready to continue ?" msgstr "" #: src/libdar/entrepot.cpp:69 msgid "root's entrepot must be an absolute path: " msgstr "" #: src/libdar/entrepot.cpp:156 msgid "Error met while creating the hash file: " msgstr "" #: src/libdar/elastic.cpp:80 msgid "Zero is not a valid size for an elastic buffer" msgstr "" #: src/libdar/elastic.cpp:82 msgid "Size too large for an elastic buffer" msgstr "" #: src/libdar/elastic.cpp:99 src/libdar/elastic.cpp:105 #: src/libdar/elastic.cpp:135 src/libdar/elastic.cpp:141 #: src/libdar/elastic.cpp:157 src/libdar/elastic.cpp:165 #: src/libdar/elastic.cpp:194 src/libdar/elastic.cpp:202 #: src/libdar/elastic.cpp:212 msgid "elastic buffer incoherent structure" msgstr "" #: src/libdar/elastic.cpp:131 src/libdar/elastic.cpp:190 msgid "too large elastic buffer or elastic buffer incoherent structure" msgstr "" #: src/libdar/elastic.cpp:219 msgid "not enough space provided to dump the elastic buffer" msgstr "" #: src/libdar/fichier_global.cpp:110 src/libdar/tuyau.cpp:368 msgid "" "No space left on device, you have the opportunity to make room now. When " "ready : can we continue ?" msgstr "" #: src/libdar/mycurl_easyhandle_node.cpp:39 msgid "Error met while creating a libcurl handle" msgstr "" #: src/libdar/mycurl_easyhandle_node.cpp:51 msgid "Error met while duplicating libcurl handle" msgstr "" #: src/libdar/user_interaction5.cpp:167 src/libdar/shell_interaction.cpp:405 msgid "Continue? 
" msgstr "" #: src/libdar/filesystem_restore.cpp:277 msgid "" "Cannot restore a delta binary patch without a file to patch on filesystem" msgstr "" #: src/libdar/filesystem_restore.cpp:285 msgid "" "Cannot restore a inode metadata only without an existing file on filesystem" msgstr "" #: src/libdar/filesystem_restore.cpp:302 #, c-format msgid "Cannot remove non-existent file from filesystem: %S" msgstr "" #: src/libdar/filesystem_restore.cpp:307 src/libdar/filesystem_restore.cpp:751 msgid "Restoring file's data: " msgstr "" #: src/libdar/filesystem_restore.cpp:328 src/libdar/filesystem_restore.cpp:970 msgid "Restoring file's EA: " msgstr "" #: src/libdar/filesystem_restore.cpp:339 src/libdar/filesystem_restore.cpp:522 #, c-format msgid "Restoration of EA for %S aborted: " msgstr "" #: src/libdar/filesystem_restore.cpp:351 src/libdar/filesystem_restore.cpp:1120 msgid "Restoring file's FSA: " msgstr "" #: src/libdar/filesystem_restore.cpp:366 src/libdar/filesystem_restore.cpp:536 #, c-format msgid "Restoration of FSA for %S aborted: " msgstr "" #: src/libdar/filesystem_restore.cpp:424 src/libdar/filesystem_restore.cpp:619 msgid "Restoring linux immutable FSA for " msgstr "" #: src/libdar/filesystem_restore.cpp:429 #, c-format msgid "Restoration of linux immutable FSA for %S aborted: " msgstr "" #: src/libdar/filesystem_restore.cpp:484 msgid "Restoring file's data using a delta patching: " msgstr "" #: src/libdar/filesystem_restore.cpp:495 #, c-format msgid "Cannot restore delta diff for %S as exsiting inode is not a plain file" msgstr "" #: src/libdar/filesystem_restore.cpp:670 #, c-format msgid "%S is about to be removed from filesystem, continue?" msgstr "" #: src/libdar/filesystem_restore.cpp:675 src/libdar/filesystem_restore.cpp:684 #, c-format msgid "Removing file (reason is file recorded as removed in archive): %S" msgstr "" #: src/libdar/filesystem_restore.cpp:682 #, c-format msgid "" "%S must be removed, but does not match expected type, remove it anyway ?" 
msgstr "" #: src/libdar/filesystem_restore.cpp:690 #, c-format msgid "" "%S: Overwriting policy (Data) is undefined for that file, do not know " "whether removal is allowed or not!" msgstr "" #: src/libdar/filesystem_restore.cpp:727 #, c-format msgid "" "Directory %S cannot be restored: overwriting not allowed and a non-directory " "inode of that name already exists, all files in that directory will be " "skipped for restoration:" msgstr "" #: src/libdar/filesystem_restore.cpp:736 #, c-format msgid "%S is about to be overwritten, OK?" msgstr "" #: src/libdar/filesystem_restore.cpp:768 msgid "" "Existing file is of a different nature, cannot only restore inode metadata" msgstr "" #: src/libdar/filesystem_restore.cpp:786 #, c-format msgid "Existing EA for %S could not be read and preserved: " msgstr "" #: src/libdar/filesystem_restore.cpp:806 #, c-format msgid "Existing FSA for %S could not be read and preserved: " msgstr "" #: src/libdar/filesystem_restore.cpp:820 #, c-format msgid "Removing existing immutable flag in order to restore data for %S" msgstr "" #: src/libdar/filesystem_restore.cpp:846 #, c-format msgid "Existing EA for %S could not be preserved : " msgstr "" #: src/libdar/filesystem_restore.cpp:863 #, c-format msgid "Existing FSA for %S could not be preserved : " msgstr "" #: src/libdar/filesystem_restore.cpp:878 #, c-format msgid "" "%S is about to be deleted (required by overwriting policy), do you agree?" msgstr "" #: src/libdar/filesystem_restore.cpp:880 #, c-format msgid "Removing file (reason is overwriting policy): %S" msgstr "" #: src/libdar/filesystem_restore.cpp:886 #, c-format msgid "" "%S: Overwriting policy (Data) is undefined for that file, do not know " "whether overwriting is allowed or not!" msgstr "" #: src/libdar/filesystem_restore.cpp:949 #, c-format msgid "EA for %S are about to be overwritten, OK?" 
msgstr "" #: src/libdar/filesystem_restore.cpp:964 #, c-format msgid "" "EA for %S have not been overwritten because this file is a hard link " "pointing to an already restored inode" msgstr "" #: src/libdar/filesystem_restore.cpp:984 #, c-format msgid "EA for %S are about to be removed, OK?" msgstr "" #: src/libdar/filesystem_restore.cpp:995 #, c-format msgid "" "EA for %S have not been cleared as requested by the overwriting policy " "because this file is a hard link pointing to an already restored inode" msgstr "" #: src/libdar/filesystem_restore.cpp:1001 msgid "Clearing file's EA (requested by overwriting policy): " msgstr "" #: src/libdar/filesystem_restore.cpp:1016 #, c-format msgid "EA for %S are about to be merged, OK?" msgstr "" #: src/libdar/filesystem_restore.cpp:1047 #, c-format msgid "" "%S: Overwriting policy (EA) is undefined for that file, do not know whether " "overwriting is allowed or not!" msgstr "" #: src/libdar/filesystem_restore.cpp:1099 #: src/libdar/filesystem_restore.cpp:1145 #, c-format msgid "FSA for %S are about to be overwritten, OK?" msgstr "" #: src/libdar/filesystem_restore.cpp:1114 #, c-format msgid "" "FSA for %S have not been overwritten because this file is a hard link " "pointing to an already restored inode" msgstr "" #: src/libdar/filesystem_restore.cpp:1171 #, c-format msgid "" "%S: Overwriting policy (FSA) is undefined for that file, do not know whether " "overwriting is allowed or not!" 
msgstr "" #: src/libdar/tlv.cpp:66 msgid "Missing data to initiate a TLV object" msgstr "" #: src/libdar/cat_lien.cpp:98 msgid "symbolic link does not point to the same target: " msgstr "" #: src/libdar/hash_fichier.cpp:63 #, c-format msgid "" "Error while initializing hash: Hash algorithm not available in libgcrypt: %s/" "%s" msgstr "" #: src/libdar/hash_fichier.cpp:67 #, c-format msgid "Error while creating hash handle: %s/%s" msgstr "" #: src/libdar/hash_fichier.cpp:69 src/libdar/hash_fichier.cpp:107 #: src/libdar/hash_fichier.cpp:122 msgid "" "Missing hashing algorithms support (which is part of strong encryption " "support, using libgcrypt)" msgstr "" #: src/libdar/hash_fichier.cpp:155 msgid "Failed writing down the hash: " msgstr "" #: src/libdar/escape_catalogue.cpp:89 msgid "" "incoherent data after escape sequence, cannot read internal data set label" msgstr "" #: src/libdar/escape_catalogue.cpp:99 msgid "Could not find tape mark for the internal catalogue" msgstr "" #: src/libdar/escape_catalogue.cpp:489 msgid "" "Escape sequences used for reading lead the archive to place some files out " "of the specified root. To overcome this problem, try reading the archive in " "direct mode (not using sequential reading), try repairing the archive using " "Parchive if redundancy data has been created or in last resort try using the " "lax mode" msgstr "" #: src/libdar/escape_catalogue.cpp:492 msgid "" "LAX MODE: Archive directory structure is corrupted, it would lead to place " "some files out of the specified root directory. Restoring different " "directory contents at the root not out of it, which will put files of " "different directories in the specified root directory" msgstr "" #: src/libdar/escape_catalogue.cpp:507 msgid "" "LAX MODE: found unknown catalogue entry, assuming data corruption occurred. 
" "Skipping to the next entry, this may lead to improper directory structure " "being restored, if the corrupted data was a directory" msgstr "" #: src/libdar/escape_catalogue.cpp:514 msgid "Corrupted entry following an escape mark in the archive" msgstr "" #: src/libdar/escape_catalogue.cpp:543 msgid "" "Uncompleted archive! Assuming it has been interrupted during the backup " "process. If an error has been reported just above, simply ignore it, this is " "about the file that was saved at the time of the interruption." msgstr "" #: src/libdar/escape_catalogue.cpp:578 msgid "Cannot extract from the internal catalogue the list of files to remove" msgstr "" #: src/libdar/escape_catalogue.cpp:639 src/libdar/i_archive.cpp:270 #: src/libdar/i_archive.cpp:304 msgid "" "Archive internal catalogue is not identically signed as the archive itself, " "this might be the sign the archive has been compromised" msgstr "" #: src/libdar/escape_catalogue.cpp:655 msgid "" "Archive internal catalogue is properly signed but its content does not match " "the tape marks used so far for sequentially reading. Possible data " "corruption or archive compromission occurred! 
if data extracted in " "sequential read mode does not match the data extracted in direct access " "mode, consider the sequential data has been been modified after the archive " "has been generated" msgstr "" #: src/libdar/escape_catalogue.cpp:795 msgid "" "Resetting the sequential reading process of the archive contents while it is " "not finished, will make all data unread so far becoming inaccessible" msgstr "" #: src/libdar/cat_entree.cpp:84 src/libdar/cat_entree.cpp:132 #: src/libdar/cat_entree.cpp:142 msgid "corrupted file" msgstr "" #: src/libdar/cat_entree.cpp:134 msgid "" "LAX MODE: Unexpected saved status for end of directory entry, assuming data " "corruption occurred, ignoring and continuing" msgstr "" #: src/libdar/cat_entree.cpp:144 msgid "" "LAX MODE: Unexpected saved status for class \"cat_detruit\" object, assuming " "data corruption occurred, ignoring and continuing" msgstr "" #: src/libdar/cat_entree.cpp:153 msgid "unknown type of data in catalogue" msgstr "" #: src/libdar/cat_entree.cpp:156 msgid "" "LAX MODE: found unknown catalogue entry, assuming data corruption occurred, " "cannot read further the catalogue as I do not know the length of this type " "of entry" msgstr "" #: src/libdar/cat_entree.cpp:207 msgid "unknown entry" msgstr "" #: src/libdar/cat_entree.cpp:208 #, c-format msgid "Entry information CRC failure for %S. Ignore the failure?" 
msgstr "" #: src/libdar/cat_entree.cpp:214 #, c-format msgid "Entry information CRC failure for %S" msgstr "" #: src/libdar/cat_entree.cpp:216 msgid "Entry information CRC failure" msgstr "" #: src/libdar/cat_entree.cpp:468 msgid "directory" msgstr "" #: src/libdar/cat_entree.cpp:472 msgid "hard linked inode" msgstr "" #: src/libdar/cat_entree.cpp:474 msgid "plain file" msgstr "" #: src/libdar/cat_entree.cpp:476 msgid "soft link" msgstr "" #: src/libdar/cat_entree.cpp:478 msgid "char device" msgstr "" #: src/libdar/cat_entree.cpp:480 msgid "block device" msgstr "" #: src/libdar/cat_entree.cpp:482 msgid "named pipe" msgstr "" #: src/libdar/cat_entree.cpp:484 msgid "unix socket" msgstr "" #: src/libdar/cat_entree.cpp:486 msgid "deleted entry" msgstr "" #: src/libdar/cat_entree.cpp:488 msgid "door inode" msgstr "" #: src/libdar/shell_interaction.cpp:175 msgid "" "No terminal found for user interaction. All questions will be assumed a " "negative answer (less destructive choice), which most of the time will abort " "the program." 
msgstr "" #: src/libdar/shell_interaction.cpp:239 msgid "Error reading character: " msgstr "" #: src/libdar/shell_interaction.cpp:265 #, c-format msgid "" "[Data ][D][ EA ][FSA][Compr][S]| Permission | User | Group | Size " "| Date | filename" msgstr "" #: src/libdar/shell_interaction.cpp:270 #, c-format msgid "" "Access mode | User | Group | Size | Date |" "[Data ][D][ EA ][FSA][Compr][S]| Filename" msgstr "" #: src/libdar/shell_interaction.cpp:315 src/libdar/database5.cpp:78 #, c-format msgid "dar path : %S" msgstr "" #: src/libdar/shell_interaction.cpp:316 src/libdar/database5.cpp:79 #, c-format msgid "dar options : %S" msgstr "" #: src/libdar/shell_interaction.cpp:317 src/libdar/database5.cpp:80 #, c-format msgid "database version: %S" msgstr "" #: src/libdar/shell_interaction.cpp:318 src/libdar/database5.cpp:81 #, c-format msgid "compression used: %S" msgstr "" #: src/libdar/shell_interaction.cpp:320 src/libdar/database5.cpp:83 #, c-format msgid "archive # | path | basename" msgstr "" #: src/libdar/shell_interaction.cpp:328 src/libdar/database5.cpp:97 msgid "" msgstr "" #: src/libdar/shell_interaction.cpp:378 src/libdar/database5.cpp:148 #, c-format msgid " archive # | most recent/total data | most recent/total EA" msgstr "" #: src/libdar/shell_interaction.cpp:379 src/libdar/database5.cpp:149 #, c-format msgid "--------------+-------------------------+-----------------------" msgstr "" #: src/libdar/shell_interaction.cpp:442 msgid " [return = YES | Esc = NO]" msgstr "" #: src/libdar/shell_interaction.cpp:461 msgid "Error while reading user answer from terminal: " msgstr "" #: src/libdar/shell_interaction.cpp:466 msgid "Continuing..." msgstr "" #: src/libdar/shell_interaction.cpp:468 msgid "Escaping..." 
msgstr "" #: src/libdar/shell_interaction.cpp:533 msgid "Secured string can only be read from a terminal" msgstr "" #: src/libdar/shell_interaction.cpp:558 msgid "provided password is too long for the allocated memory" msgstr "" #: src/libdar/shell_interaction.cpp:594 msgid "Error while changing user terminal properties: " msgstr "" #: src/libdar/shell_interaction.cpp:636 #, c-format msgid "%S [%c] [ REMOVED ENTRY ] (%S) %S" msgstr "" #: src/libdar/shell_interaction.cpp:666 src/libdar/shell_interaction.cpp:718 msgid " Extended Attribute: [" msgstr "" #: src/libdar/shell_interaction.cpp:934 src/libdar/database5.cpp:181 msgid "[ Saved ]" msgstr "" #: src/libdar/shell_interaction.cpp:936 src/libdar/shell_interaction.cpp:941 #: src/libdar/database5.cpp:183 src/libdar/database5.cpp:188 msgid "[ ]" msgstr "" #: src/libdar/shell_interaction.cpp:939 src/libdar/database5.cpp:186 msgid "[ EA ]" msgstr "" #: src/libdar/shell_interaction.cpp:955 src/libdar/database5.cpp:203 msgid "removed " msgstr "" #: src/libdar/shell_interaction.cpp:956 src/libdar/database5.cpp:204 msgid "present " msgstr "" #: src/libdar/shell_interaction.cpp:957 src/libdar/database5.cpp:205 msgid "saved " msgstr "" #: src/libdar/shell_interaction.cpp:958 src/libdar/database5.cpp:206 msgid "absent " msgstr "" #: src/libdar/shell_interaction.cpp:959 src/libdar/database5.cpp:207 msgid "patch " msgstr "" #: src/libdar/shell_interaction.cpp:960 src/libdar/database5.cpp:208 msgid "BROKEN " msgstr "" #: src/libdar/shell_interaction.cpp:961 src/libdar/database5.cpp:209 msgid "inode " msgstr "" #: src/libdar/entrepot_local.cpp:169 #, c-format msgid "Cannot remove file %s: %s" msgstr "" #: src/libdar/i_libdar_xform.cpp:263 msgid "Error transforming the archive :" msgstr "" #: src/libdar/list_entry.cpp:50 msgid "[DIRTY]" msgstr "" #: src/libdar/list_entry.cpp:52 src/libdar/list_entry.cpp:71 msgid "[Saved]" msgstr "" #: src/libdar/list_entry.cpp:54 msgid "[Inode]" msgstr "" #: src/libdar/list_entry.cpp:56 
src/libdar/list_entry.cpp:73 msgid "[InRef]" msgstr "" #: src/libdar/cat_detruit.cpp:46 msgid "missing data to build" msgstr "" #: src/libdar/crit_action.cpp:145 msgid "cannot evaluate an empty chain in an overwriting policy" msgstr "" #: src/libdar/cache.cpp:69 msgid "wrong value given as initial_size argument while initializing cache" msgstr "" #: src/libdar/ea_filesystem.cpp:189 #, c-format msgid "Error while adding EA %s : %s" msgstr "" #: src/libdar/ea_filesystem.cpp:226 #, c-format msgid "Error while removing %s : %s" msgstr "" #: src/libdar/ea_filesystem.cpp:258 src/libdar/ea_filesystem.cpp:282 #, c-format msgid "Error reading attribute %s of file %s : %s" msgstr "" #: src/libdar/ea_filesystem.cpp:332 src/libdar/ea_filesystem.cpp:347 #, c-format msgid "Error retrieving EA list for %s : %s" msgstr "" #: src/libdar/database_header.cpp:102 msgid "" "The format version of this database is too high for that software version, " "use a more recent software to read or modify this database" msgstr "" #: src/libdar/database_header.cpp:105 msgid "Unknown header option in database, aborting\n" msgstr "" #: src/libdar/database_header.cpp:148 msgid "Cannot create database, file exists" msgstr "" #: src/libdar/database_header.cpp:191 #, c-format msgid "Error reading database %S : " msgstr "" #: src/libdar/user_interaction_callback5.cpp:93 #: src/libdar/user_interaction_callback.cpp:98 msgid "nullptr given as argument of user_interaction_callback()" msgstr "" #: src/libdar/user_interaction_callback5.cpp:130 #: src/libdar/user_interaction_callback5.cpp:135 #: src/libdar/user_interaction_callback5.cpp:152 #: src/libdar/user_interaction_callback5.cpp:156 #: src/libdar/user_interaction_callback5.cpp:173 #: src/libdar/user_interaction_callback5.cpp:177 #: src/libdar/user_interaction_callback5.cpp:198 #: src/libdar/user_interaction_callback5.cpp:202 #: src/libdar/user_interaction_callback5.cpp:225 #: src/libdar/user_interaction_callback5.cpp:229 #: 
src/libdar/user_interaction_callback5.cpp:246 #: src/libdar/user_interaction_callback5.cpp:250 #: src/libdar/user_interaction_callback5.cpp:267 #: src/libdar/user_interaction_callback5.cpp:271 #: src/libdar/user_interaction_callback5.cpp:290 #: src/libdar/user_interaction_callback5.cpp:294 #: src/libdar/user_interaction_callback5.cpp:313 #: src/libdar/user_interaction_callback5.cpp:317 msgid "No exception allowed from libdar callbacks" msgstr "" #: src/libdar/real_infinint.cpp:70 src/libdar/limitint.hpp:272 msgid "Reached end of file before all data could be read" msgstr "" #: src/libdar/real_infinint.cpp:83 src/libdar/limitint.hpp:285 msgid "Badly formed \"infinint\" or not supported format" msgstr "" #: src/libdar/real_infinint.cpp:247 src/libdar/limitint.hpp:423 msgid "" "Subtracting an \"infinint\" greater than the first, \"infinint\" cannot be " "negative" msgstr "" #: src/libdar/real_infinint.cpp:840 src/libdar/limitint.hpp:464 #: src/libdar/limitint.hpp:473 msgid "Division by zero" msgstr "" #: src/libdar/terminateur.cpp:133 msgid "Badly formatted terminator, cannot extract catalogue location: " msgstr "" #: src/libdar/header_version.cpp:79 msgid "LAX MODE: Failed to read the archive header's format version." msgstr "" #: src/libdar/header_version.cpp:82 #, c-format msgid "" "LAX MODE: Please provide the archive format: You can use the table at %s to " "find the archive format depending on the release version, (for example if " "this archive has been created using dar release 2.3.4 to 2.3.7 answer \"6\" " "without the quotes here): " msgstr "" #: src/libdar/header_version.cpp:87 #, c-format msgid "LAX MODE: \"%S\" is not a valid archive format" msgstr "" #: src/libdar/header_version.cpp:93 #, c-format msgid "LAX MODE: Using archive format \"%d\"?" msgstr "" #: src/libdar/header_version.cpp:122 msgid "" "LAX MODE: Unknown compression algorithm used, assuming data corruption " "occurred. 
Please help me, answering with one of the following words \"none" "\", \"gzip\", \"bzip2\", \"lzo\" or \"xz\" at the next prompt:" msgstr "" #: src/libdar/header_version.cpp:123 src/libdar/header_version.cpp:498 #: src/libdar/crypto.cpp:42 msgid "none" msgstr "" #: src/libdar/header_version.cpp:125 msgid "gzip" msgstr "" #: src/libdar/header_version.cpp:127 msgid "bzip2" msgstr "" #: src/libdar/header_version.cpp:129 msgid "lzo" msgstr "" #: src/libdar/header_version.cpp:131 msgid "xz" msgstr "" #: src/libdar/header_version.cpp:138 src/libdar/header_version.cpp:146 #: src/libdar/header_version.cpp:157 src/libdar/header_version.cpp:175 msgid "Reached End of File while reading archive header_version data structure" msgstr "" #: src/libdar/header_version.cpp:208 msgid "Corruption met while reading header_version data structure" msgstr "" #: src/libdar/header_version.cpp:225 msgid "Missing data for encrypted symmetrical key" msgstr "" #: src/libdar/header_version.cpp:242 msgid "" "Error met while reading archive of reference slicing layout, ignoring this " "field and continuing" msgstr "" #: src/libdar/header_version.cpp:266 msgid "valid hash algoritm needed for key derivation function" msgstr "" #: src/libdar/header_version.cpp:278 msgid "" "please indicate the hash algoritm to use for key derivation function '1' for " "sha1, '5' for sha512, 'm' for md5, or 'q' to abort: " msgstr "" #: src/libdar/header_version.cpp:296 msgid "please answer with a single character" msgstr "" #: src/libdar/header_version.cpp:328 src/libdar/header_version.cpp:330 #: src/libdar/header_version.cpp:352 src/libdar/header_version.cpp:354 msgid "Consistency check failed for archive header" msgstr "" #: src/libdar/header_version.cpp:480 msgid "invalid hash algorithm provided for key derivation function" msgstr "" #: src/libdar/header_version.cpp:489 src/libdar/header_version.cpp:506 #: src/libdar/op_tools.cpp:41 src/libdar/op_tools.cpp:113 #: src/libdar/op_tools.cpp:193 msgid "yes" msgstr "" #: 
src/libdar/header_version.cpp:489 src/libdar/header_version.cpp:506 msgid "no" msgstr "" #: src/libdar/header_version.cpp:510 #, c-format msgid "Archive version format : %s" msgstr "" #: src/libdar/header_version.cpp:511 #, c-format msgid "Compression algorithm used : %S" msgstr "" #: src/libdar/header_version.cpp:512 #, c-format msgid "Symmetric key encryption used : %S" msgstr "" #: src/libdar/header_version.cpp:513 #, c-format msgid "Asymmetric key encryption used : %S" msgstr "" #: src/libdar/header_version.cpp:514 #, c-format msgid "Archive is signed : %S" msgstr "" #: src/libdar/header_version.cpp:515 #, c-format msgid "Sequential reading marks : %s" msgstr "" #: src/libdar/header_version.cpp:515 msgid "present" msgstr "" #: src/libdar/header_version.cpp:515 msgid "absent" msgstr "" #: src/libdar/header_version.cpp:516 #, c-format msgid "User comment : %S" msgstr "" #: src/libdar/header_version.cpp:519 #, c-format msgid "KDF iteration count : %S" msgstr "" #: src/libdar/header_version.cpp:520 #, c-format msgid "KDF hash algorithm : %S" msgstr "" #: src/libdar/header_version.cpp:522 #, c-format msgid "Salt size : %d byte%c" msgstr "" #: src/libdar/crypto_asym.cpp:86 src/libdar/crypto_asym.cpp:139 #: src/libdar/crypto_asym.cpp:175 src/libdar/crypto_asym.cpp:233 #: src/libdar/crypto_asym.cpp:292 msgid "Unexpected error reported by GPGME: " msgstr "" #: src/libdar/crypto_asym.cpp:137 msgid "Key found but users are not all trusted" msgstr "" #: src/libdar/crypto_asym.cpp:169 msgid "No data to decrypt" msgstr "" #: src/libdar/crypto_asym.cpp:171 msgid "Invalid Cipher text" msgstr "" #: src/libdar/crypto_asym.cpp:173 msgid "Failed retreiving passphrase" msgstr "" #: src/libdar/crypto_asym.cpp:188 msgid "Failed creating GPGME context: " msgstr "" #: src/libdar/crypto_asym.cpp:192 msgid "Failed setting GPGME context with OpenPGP protocol: " msgstr "" #: src/libdar/crypto_asym.cpp:306 #, c-format msgid "No valid signing key could be find for %S" msgstr "" #: 
src/libdar/crypto_asym.cpp:308 #, c-format msgid "No valid encryption key could be find for %S" msgstr "" #: src/libdar/crypto_asym.cpp:319 msgid "No signatory remain with a valid key, signing is impossible, aborting" msgstr "" #: src/libdar/crypto_asym.cpp:321 msgid "" "No recipient remain with a valid key, encryption is impossible, aborting" msgstr "" #: src/libdar/crypto_asym.cpp:394 #, c-format msgid "Passphrase required for key %s :" msgstr "" #: src/libdar/crypto_asym.cpp:415 msgid "Error, invalid passphrase given, try again:" msgstr "" #: src/libdar/crypto_asym.cpp:423 msgid "Error, while sending the passphrase to GPGME:" msgstr "" #: src/libdar/crypto_asym.cpp:425 msgid "Failed sending the totality of the passphrase to GPGME" msgstr "" #: src/libdar/crypto_asym.cpp:432 msgid "Failed sending CR after the passphrase" msgstr "" #: src/libdar/i_entrepot_libcurl.cpp:69 #, c-format msgid "protocol %S is not supported by libcurl, aborting" msgstr "" #: src/libdar/i_entrepot_libcurl.cpp:85 #, c-format msgid "Error met while setting verbosity on handle: %s" msgstr "" #: src/libdar/i_entrepot_libcurl.cpp:125 #, c-format msgid "Error met while preparing directory listing: %s" msgstr "" #: src/libdar/i_entrepot_libcurl.cpp:135 #, c-format msgid "Error met while listing FTP/SFTP directory %s" msgstr "" #: src/libdar/i_entrepot_libcurl.cpp:312 src/libdar/i_entrepot_libcurl.cpp:317 #, c-format msgid "Error met while setting up connection for file %S removal: %s" msgstr "" #: src/libdar/i_entrepot_libcurl.cpp:325 #, c-format msgid "Error met while removing file %S" msgstr "" #: src/libdar/i_entrepot_libcurl.cpp:332 #, c-format msgid "Error met while removing file %S: %s" msgstr "" #: src/libdar/i_entrepot_libcurl.cpp:391 #, c-format msgid "Failed assigning URL to libcurl: %s" msgstr "" #: src/libdar/i_entrepot_libcurl.cpp:421 #, c-format msgid "Error met while setting known_hosts file: %s" msgstr "" #: src/libdar/i_entrepot_libcurl.cpp:432 #, c-format msgid "Error met 
while assigning public key file: %s" msgstr "" #: src/libdar/i_entrepot_libcurl.cpp:440 #, c-format msgid "Error met while assigning private key file: %s" msgstr "" #: src/libdar/i_entrepot_libcurl.cpp:448 #, c-format msgid "Error met while assigning sftp authentication methods: %s" msgstr "" #: src/libdar/i_entrepot_libcurl.cpp:465 #, c-format msgid "Error met while passing username to libcurl: %s" msgstr "" #: src/libdar/i_entrepot_libcurl.cpp:471 #, c-format msgid "" "Error met while asking libcurl to consider ~/.netrc for authentication: %s" msgstr "" #: src/libdar/i_entrepot_libcurl.cpp:478 #, c-format msgid "Please provide the password for login %S at host %S: " msgstr "" #: src/libdar/i_entrepot_libcurl.cpp:493 #, c-format msgid "Error met while setting libcurl authentication: %s" msgstr "" #: src/libdar/cat_mirage.cpp:141 msgid "Incoherent catalogue structure: hard linked inode's data not found" msgstr "" #: src/libdar/cat_mirage.cpp:178 msgid "Incoherent catalogue structure: hard linked data is not an inode" msgstr "" #: src/libdar/cat_mirage.cpp:215 msgid "Incoherent catalogue structure: duplicated hard linked inode's data" msgstr "" #: src/libdar/cat_mirage.cpp:229 msgid "" "Incoherent catalogue structure: unknown status flag for hard linked inode" msgstr "" #: src/libdar/datetime.cpp:405 msgid "Unknown time unit" msgstr "" #: src/libdar/slice_layout.cpp:52 msgid "Missing data while reading slice_layout object" msgstr "" #: src/libdar/mask.hpp:118 msgid "TRUE" msgstr "" #: src/libdar/mask.hpp:118 msgid "FALSE" msgstr "" #: src/libdar/mask.hpp:309 msgid "AND" msgstr "" #: src/libdar/mask.hpp:342 msgid "No mask in the list of mask to operate on" msgstr "" #: src/libdar/mask.hpp:372 msgid "OR" msgstr "" #: src/libdar/mask.hpp:382 msgid "No mask to operate on in the list of mask" msgstr "" #: src/libdar/archive_version.cpp:47 msgid "Archive version too high, use a more recent version of libdar" msgstr "" #: src/libdar/archive_version.cpp:77 msgid "Reached 
End of File while reading archive version" msgstr "" #: src/libdar/archive_version.cpp:90 src/libdar/archive_version.cpp:99 msgid "Unexpected value while reading archive version" msgstr "" #: src/libdar/archive_version.cpp:97 msgid "Reached premature end of file while reading archive version" msgstr "" #: src/libdar/cat_device.cpp:95 src/libdar/cat_device.cpp:98 msgid "missing data to build a special device" msgstr "" #: src/libdar/cat_device.cpp:146 #, c-format msgid "devices have not the same major number: %d <--> %d" msgstr "" #: src/libdar/cat_device.cpp:148 #, c-format msgid "devices have not the same minor number: %d <--> %d" msgstr "" #: src/libdar/tuyau.cpp:143 msgid "Error while creating anonymous pipe: " msgstr "" #: src/libdar/tuyau.cpp:171 msgid "Pipe's other end is not known, cannot provide a filedescriptor on it" msgstr "" #: src/libdar/tuyau.cpp:185 msgid "" "Pipe's other end is not known, cannot close any filedescriptor pointing on it" msgstr "" #: src/libdar/tuyau.cpp:312 msgid "Error while reading from pipe: " msgstr "" #: src/libdar/tuyau.cpp:366 src/libdar/tuyau.cpp:371 msgid "Error while writing data to pipe: " msgstr "" #: src/libdar/tuyau.cpp:422 msgid "Error opening pipe: " msgstr "" #: src/libdar/tuyau.cpp:511 msgid "File mode is neither read nor write" msgstr "" #: src/libdar/cat_inode.cpp:163 msgid "badly structured inode: unknown inode flag" msgstr "" #: src/libdar/cat_inode.cpp:174 src/libdar/cat_inode.cpp:177 #: src/libdar/cat_inode.cpp:187 msgid "missing data to build an inode" msgstr "" #: src/libdar/cat_inode.cpp:278 msgid "badly structured inode: unknown inode flag for FSA" msgstr "" #: src/libdar/cat_inode.cpp:392 msgid "different file type" msgstr "" #: src/libdar/cat_inode.cpp:397 #, c-format msgid "different owner (uid): %i <--> %i" msgstr "" #: src/libdar/cat_inode.cpp:403 #, c-format msgid "different owner group (gid): %i <--> %i" msgstr "" #: src/libdar/cat_inode.cpp:409 #, c-format msgid "different permission: %S <--> %S" 
msgstr "" #: src/libdar/cat_inode.cpp:417 src/libdar/cat_file.cpp:1213 #, c-format msgid "difference of last modification date: %S <--> %S" msgstr "" #: src/libdar/cat_inode.cpp:432 msgid "different Extended Attributes" msgstr "" #: src/libdar/cat_inode.cpp:438 msgid "no Extended Attribute to compare with" msgstr "" #: src/libdar/cat_inode.cpp:440 src/libdar/cat_inode.cpp:461 msgid "" "Cannot compare EA: EA support has not been activated at compilation time" msgstr "" #: src/libdar/cat_inode.cpp:454 msgid "inode last change date (ctime) greater, EA might be different" msgstr "" #: src/libdar/cat_inode.cpp:459 msgid "no Extended Attributes to compare with" msgstr "" #: src/libdar/cat_inode.cpp:488 msgid "different Filesystem Specific Attributes" msgstr "" #: src/libdar/cat_inode.cpp:494 msgid "No Filesystem Specific Attribute to compare with" msgstr "" #: src/libdar/cat_inode.cpp:502 msgid "inode last change date (ctime) greater, FSA might be different" msgstr "" #: src/libdar/cat_inode.cpp:505 msgid "Filesystem Specific Attribute are missing" msgstr "" #: src/libdar/cat_inode.cpp:792 msgid "CRC error detected while reading EA" msgstr "" #: src/libdar/cat_inode.cpp:936 msgid "" "Error while reading CRC for EA from the archive: No escape mark found for " "that file" msgstr "" #: src/libdar/cat_inode.cpp:1163 msgid "CRC error detected while reading FSA" msgstr "" #: src/libdar/cat_inode.cpp:1287 msgid "" "Error while reading CRC for FSA from the archive: No escape mark found for " "that file" msgstr "" #: src/libdar/header.cpp:103 src/libdar/header.cpp:111 #: src/libdar/header.cpp:114 src/libdar/header.cpp:116 msgid "Reached end of file while reading slice header" msgstr "" #: src/libdar/header.cpp:130 src/libdar/header.cpp:152 msgid "" "LAX MODE: slice size is not possible to read, (lack of virtual memory?), " "continuing anyway..." 
msgstr "" #: src/libdar/header.cpp:167 msgid "" "LAX MODE: first slice size is not possible to read, (lack of virtual " "memory?), continuing anyway..." msgstr "" #: src/libdar/header.cpp:180 msgid "" "Archive format older than \"08\" (release 2.4.0) cannot be read through a " "single pipe. It only can be read using dar_slave or normal plain file (slice)" msgstr "" #: src/libdar/header.cpp:182 msgid "" "LAX MODE: first slice size is not possible to read, continuing anyway..." msgstr "" #: src/libdar/header.cpp:197 msgid "Badly formatted SAR header (unknown TLV type in slice header)" msgstr "" #: src/libdar/header.cpp:200 msgid "LAX MODE: Unknown data in slice header, ignoring and continuing" msgstr "" #: src/libdar/header.cpp:379 msgid "incomplete data set name found in a slice header" msgstr "" #: src/libdar/header.cpp:383 #, c-format msgid "" "Unknown entry found in slice header (type = %d), option not supported. The " "archive you are reading may have been generated by a more recent version of " "libdar, ignore this entry and continue anyway?" 
msgstr "" #: src/libdar/cat_file.cpp:470 msgid "cannot provide data from a \"not saved\" file object" msgstr "" #: src/libdar/cat_file.cpp:481 msgid "data has been cleaned, object is now empty" msgstr "" #: src/libdar/cat_file.cpp:877 msgid "can't read data CRC: No escape mark found for that file" msgstr "" #: src/libdar/cat_file.cpp:1080 msgid "can't find mark for delta signature" msgstr "" #: src/libdar/cat_file.cpp:1097 msgid "Error while retrieving delta signature from the archive: " msgstr "" #: src/libdar/cat_file.cpp:1206 #, c-format msgid "not same size: %i <--> %i" msgstr "" #: src/libdar/cat_file.cpp:1274 #, c-format msgid "different file data, offset of first difference is: %i" msgstr "" #: src/libdar/cat_file.cpp:1282 msgid "" "Same data but CRC value could not be verified because we did not guessed " "properly its width (sequential read restriction)" msgstr "" #: src/libdar/cat_file.cpp:1284 msgid "Same data but stored CRC does not match the data!?!" msgstr "" #: src/libdar/cat_file.cpp:1325 msgid "Delta signature do not match" msgstr "" #: src/libdar/cat_file.cpp:1367 #, c-format msgid "Delta signature do not have the same size: %i <--> %i" msgstr "" #: src/libdar/cat_file.cpp:1369 msgid "Delta signature have the same size but do not match" msgstr "" #: src/libdar/cat_file.cpp:1412 msgid "CRC difference concerning file's data" msgstr "" #: src/libdar/semaphore.cpp:127 msgid "Error while converting UID/GID to string for backup hook file: " msgstr "" #: src/libdar/filesystem_backup.cpp:215 src/libdar/filesystem_diff.cpp:193 msgid "Non existent file: " msgstr "" #: src/libdar/filesystem_backup.cpp:217 src/libdar/filesystem_diff.cpp:195 msgid "File must be a directory: " msgstr "" #: src/libdar/filesystem_backup.cpp:302 #, c-format msgid "Cannot read directory contents: %s : " msgstr "" #: src/libdar/filesystem_backup.cpp:339 msgid "Ignoring file with NODUMP flag set: " msgstr "" #: src/libdar/filesystem_backup.cpp:347 src/libdar/filesystem_backup.cpp:352 
msgid "Error reading directory contents: " msgstr "" #: src/libdar/filesystem_backup.cpp:347 src/libdar/filesystem_backup.cpp:352 msgid " . Ignoring file or directory" msgstr "" #: src/libdar/data_dir.cpp:74 msgid "Unexpected end of file" msgstr "" #: src/libdar/data_dir.cpp:327 msgid "" "This database has been corrupted probably due to a bug in release 2.4.0 to " "2.4.9, and it has not been possible to cleanup this corruption, please " "rebuild the database from archives or extracted \"catalogues\", if the " "database has never been used by one of the previously mentioned released, " "you are welcome to open a bug report and provide as much as possible details " "about the circumstances" msgstr "" #: src/libdar/data_dir.cpp:614 msgid "Unknown record type" msgstr "" #: src/libdar/archive5.cpp:57 msgid "listing() method must be given" msgstr "" #: src/libdar/label.cpp:123 msgid "Incomplete label" msgstr "" #: src/libdar/crypto_sym.cpp:77 msgid "" "Current implementation of blowfish encryption is not compatible with old " "(weak) implementation, use dar-2.3.x software or later (or other software " "based on libdar-4.4.x or greater) to read this archive" msgstr "" #: src/libdar/crypto_sym.cpp:80 msgid "cannot use 'none' as hashing algorithm for key derivation function" msgstr "" #: src/libdar/crypto_sym.cpp:91 src/libdar/crypto_sym.cpp:164 #, c-format msgid "Cyphering algorithm not available in libgcrypt: %s/%s" msgstr "" #: src/libdar/crypto_sym.cpp:97 src/libdar/crypto_sym.cpp:631 #, c-format msgid "" "Failed retrieving from libgcrypt the block size used by the cyphering " "algorithm: %s/%s" msgstr "" #: src/libdar/crypto_sym.cpp:114 msgid "Too large value give for key derivation interation count" msgstr "" #: src/libdar/crypto_sym.cpp:125 #, c-format msgid "Error while opening libgcrypt key handle: %s/%s" msgstr "" #: src/libdar/crypto_sym.cpp:133 #, c-format msgid "Error while assigning key to libgcrypt key handle: %s/%s" msgstr "" #: src/libdar/crypto_sym.cpp:150 
msgid "Missing strong encryption support (libgcrypt)" msgstr "" #: src/libdar/crypto_sym.cpp:169 msgid "Failed retrieving from libgcrypt the maximum key length" msgstr "" #: src/libdar/crypto_sym.cpp:202 #, c-format msgid "" "Error while opening libgcrypt key handle to check password strength: %s/%s" msgstr "" #: src/libdar/crypto_sym.cpp:214 #, c-format msgid "" "Error while assigning key to libgcrypt key handle to check password " "strength: %s/%s" msgstr "" #: src/libdar/crypto_sym.cpp:315 #, c-format msgid "Error while resetting encryption key for a new block: %s/%s" msgstr "" #: src/libdar/crypto_sym.cpp:319 src/libdar/crypto_sym.cpp:343 #, c-format msgid "Error while setting IV for current block: %s/%s" msgstr "" #: src/libdar/crypto_sym.cpp:322 #, c-format msgid "Error while cyphering data: %s/%s" msgstr "" #: src/libdar/crypto_sym.cpp:328 src/libdar/crypto_sym.cpp:352 msgid "blowfish strong encryption support" msgstr "" #: src/libdar/crypto_sym.cpp:346 #, c-format msgid "Error while decyphering data: %s/%s" msgstr "" #: src/libdar/crypto_sym.cpp:387 #, c-format msgid "Error while generating IV: %s/%s" msgstr "" #: src/libdar/crypto_sym.cpp:429 #, c-format msgid "Error! 
SHA1 not available in libgcrypt: %s/%s" msgstr "" #: src/libdar/crypto_sym.cpp:435 #, c-format msgid "Error while derivating key from password (HMAC open): %s/%s" msgstr "" #: src/libdar/crypto_sym.cpp:441 #, c-format msgid "Error while derivating key from password (HMAC set key): %s/%s" msgstr "" #: src/libdar/crypto_sym.cpp:583 #, c-format msgid "Error while creating ESSIV handle: %s/%s" msgstr "" #: src/libdar/crypto_sym.cpp:591 #, c-format msgid "" "Failed retrieving from libgcrypt the key length to use (essiv key): %s/%s" msgstr "" #: src/libdar/crypto_sym.cpp:612 #, c-format msgid "Error while assigning key to libgcrypt key handle (essiv): %s/%s" msgstr "" #: src/libdar/crypto_sym.cpp:621 #, c-format msgid "" "Failed retrieving from libgcrypt the block size used by the cyphering " "algorithm (essiv): %s/%s" msgstr "" #: src/libdar/crypto_sym.cpp:676 src/libdar/crypto_sym.cpp:682 #: src/libdar/crypto_sym.cpp:691 src/libdar/crypto_sym.cpp:699 #: src/libdar/crypto_sym.cpp:732 msgid "Library used for blowfish encryption does not respect RFC 3962" msgstr "" #: src/libdar/trivial_sar.cpp:191 #, c-format msgid "%S already exists, and overwritten is forbidden, aborting" msgstr "" #: src/libdar/trivial_sar.cpp:193 #, c-format msgid "%S is about to be overwritten, continue ?" 
msgstr "" #: src/libdar/trivial_sar.cpp:219 src/libdar/trivial_sar.cpp:237 #, c-format msgid "" "Directory component in %S does not exist or is a dangling symbolic link: " msgstr "" #: src/libdar/trivial_sar.cpp:510 msgid "This archive has slices and is not possible to read from a pipe" msgstr "" #: src/libdar/trivial_sar.cpp:547 msgid "" "This archive is not single sliced, more data exists in the next slices but " "cannot be read from the current pipe, aborting" msgstr "" #: src/libdar/trivial_sar.cpp:585 msgid "Cannot skip to a valid position in file" msgstr "" #: src/libdar/compressor.cpp:171 src/libdar/compressor.cpp:199 msgid "" "incompatible compression library version or unsupported feature required " "from compression library" msgstr "" #: src/libdar/compressor.cpp:344 msgid "compressed data is corrupted" msgstr "" #: src/libdar/compressor.cpp:484 msgid "compressed data CRC error" msgstr "" #: src/libdar/compressor.cpp:571 src/libdar/compressor.cpp:601 #: src/libdar/compressor.cpp:753 src/libdar/compressor.cpp:818 msgid "lzo compression" msgstr "" #: src/libdar/compressor.cpp:739 #, c-format msgid "Probable bug in liblzo2: lzo1x_*_compress returned unexpected code %d" msgstr "" #: src/libdar/compressor.cpp:774 msgid "data corruption detected: Incoherence in LZO compressed data" msgstr "" #: src/libdar/compressor.cpp:778 src/libdar/compressor.cpp:800 #: src/libdar/compressor.cpp:814 msgid "compressed data corruption detected" msgstr "" #: src/libdar/compressor.cpp:788 msgid "data corruption detected: Too large block of compressed data" msgstr "" #: src/libdar/compressor.cpp:790 msgid "" "Too large block of compressed data: Either due to data corruption or current " "system limitation where SSIZE_MAX value implied smaller buffers than required" msgstr "" #: src/libdar/tronc.cpp:256 src/libdar/tronc.cpp:298 msgid "Cannot skip to the current position in \"tronc\"" msgstr "" #: src/libdar/tronc.cpp:310 msgid "Tried to write out of size limited file" msgstr "" #: 
src/libdar/deci.cpp:46 msgid "invalid decimal digit" msgstr "" #: src/libdar/deci.cpp:132 msgid "an empty string is an invalid argument" msgstr "" #: src/libdar/cat_etoile.cpp:44 msgid "Hard links of directories are not supported" msgstr "" #: src/libdar/secu_string.cpp:102 src/libdar/secu_string.cpp:141 msgid "Error while reading data for a secure memory:" msgstr "" #: src/libdar/secu_string.cpp:119 msgid "appending data over secure_memory its end" msgstr "" #: src/libdar/secu_string.cpp:133 msgid "appending data after the end of a secure_memory" msgstr "" #: src/libdar/secu_string.cpp:136 msgid "Cannot receive that much data in regard to the allocated memory" msgstr "" #: src/libdar/secu_string.cpp:154 msgid "Cannot reduce the string to a size that is larger than its current size" msgstr "" #: src/libdar/secu_string.cpp:164 msgid "secu_string randomization requested exceeds storage capacity" msgstr "" #: src/libdar/secu_string.cpp:177 msgid "Out of range index requested for a secu_string" msgstr "" #: src/libdar/generic_file.cpp:145 msgid "Reading ahead a write only generic_file" msgstr "" #: src/libdar/generic_file.cpp:160 msgid "Reading a write only generic_file" msgstr "" #: src/libdar/generic_file.cpp:170 msgid "Writing to a read only generic_file" msgstr "" #: src/libdar/generic_file.cpp:340 msgid "Cannot compare files in write only mode" msgstr "" #: src/libdar/generic_file.cpp:427 msgid "Cannot sync write on a read-only generic_file" msgstr "" #: src/libdar/generic_file.cpp:438 msgid "Cannot flush read a write-only generic_file" msgstr "" #: src/libdar/tools.cpp:220 msgid "Not a zero terminated string in file" msgstr "" #: src/libdar/tools.cpp:258 #, c-format msgid "Cannot get file size: %s" msgstr "" #: src/libdar/tools.cpp:344 msgid "Cannot read \"fcntl\" file's flags : " msgstr "" #: src/libdar/tools.cpp:350 msgid "Cannot set \"fcntl\" file's flags : " msgstr "" #: src/libdar/tools.cpp:490 src/libdar/tools.cpp:495 #: src/dar_suite/line_tools.cpp:1139 
src/dar_suite/line_tools.cpp:1144 msgid "Invalid number: " msgstr "" #: src/libdar/tools.cpp:529 src/libdar/tools.cpp:620 msgid "Error while calling fork() to launch dar: " msgstr "" #: src/libdar/tools.cpp:543 src/libdar/tools.cpp:650 msgid "Unexpected error while waiting for dar to terminate: " msgstr "" #: src/libdar/tools.cpp:549 src/libdar/tools.cpp:656 msgid "DAR terminated upon signal reception: " msgstr "" #: src/libdar/tools.cpp:555 src/libdar/tools.cpp:662 msgid " . Retry to launch dar as previously ?" msgstr "" #: src/libdar/tools.cpp:560 src/libdar/tools.cpp:567 src/libdar/tools.cpp:667 #: src/libdar/tools.cpp:674 msgid " Continue anyway ?" msgstr "" #: src/libdar/tools.cpp:565 src/libdar/tools.cpp:672 msgid "DAR sub-process has terminated with exit code " msgstr "" #: src/libdar/tools.cpp:770 msgid "nullptr argument given to tools_readlink()" msgstr "" #: src/libdar/tools.cpp:772 msgid "Empty string given as argument to tools_readlink()" msgstr "" #: src/libdar/tools.cpp:799 #, c-format msgid "Cannot read file information for %s : %s" msgstr "" #: src/libdar/tools.cpp:886 msgid "Cannot set birth time: " msgstr "" #: src/libdar/tools.cpp:916 msgid "Cannot set last access and last modification time: " msgstr "" #: src/libdar/tools.cpp:994 msgid "Aborting program: child process died unexpectedly" msgstr "" #: src/libdar/tools.cpp:1002 #, c-format msgid "Error trying to run %s: %s" msgstr "" #: src/libdar/tools.cpp:1005 msgid "execvp() failed but did not returned error code" msgstr "" #: src/libdar/tools.cpp:1136 #, c-format msgid "Removing file %s" msgstr "" #: src/libdar/tools.cpp:1157 #, c-format msgid "Error removing file %s: %S" msgstr "" #: src/libdar/tools.cpp:1191 #, c-format msgid "" "Overwriting not allowed while a slice of a previous archive with the same " "basename has been found in the %s directory, Operation aborted" msgstr "" #: src/libdar/tools.cpp:1197 #, c-format msgid "" "At least one slice of an old archive with the same name remains 
in the " "directory %s. It is advised to remove all the old archive's slices before " "creating an archive of same name. Can I remove these old slices?" msgstr "" #: src/libdar/tools.cpp:1217 src/libdar/tools.cpp:1224 #, c-format msgid "Cannot get inode information for %s: %s" msgstr "" #: src/libdar/tools.cpp:1234 msgid "Current Working Directory cannot be a relative path" msgstr "" #: src/libdar/tools.cpp:1251 msgid "Cannot block signals: " msgstr "" #: src/libdar/tools.cpp:1261 msgid "Cannot unblock signals: " msgstr "" #: src/libdar/tools.cpp:1292 src/libdar/tools.cpp:1325 #, c-format msgid "Cannot get last modification date: %s" msgstr "" #: src/libdar/tools.cpp:1329 #, c-format msgid "Cannot get size of %S: not a plain file" msgstr "" #: src/libdar/tools.cpp:1380 #, c-format msgid "Unknown suffix [%c] in string %S" msgstr "" #: src/libdar/tools.cpp:1407 msgid "Unknown substitution string: %" msgstr "" #: src/libdar/tools.cpp:1414 msgid "" "last char of user command-line to execute is '%', (use '%%' instead to avoid " "this message)" msgstr "" #: src/libdar/tools.cpp:1470 msgid "execve() failed. (process table is full ?)" msgstr "" #: src/libdar/tools.cpp:1472 msgid "system() call failed: " msgstr "" #: src/libdar/tools.cpp:1474 #, c-format msgid "execution of [ %S ] returned error code: %d" msgstr "" #: src/libdar/tools.cpp:1481 msgid "Error during user command line execution: " msgstr "" #: src/libdar/tools.cpp:1481 msgid " . Retry command-line ?" msgstr "" #: src/libdar/tools.cpp:1486 msgid "Ignore previous error on user command line and continue ?" 
msgstr "" #: src/libdar/tools.cpp:1528 msgid "Fatal error on user command line: " msgstr "" #: src/libdar/tools.cpp:1610 src/libdar/tools.cpp:1616 msgid "Badly formated octal number" msgstr "" #: src/libdar/tools.cpp:1721 msgid "Cannot get effective permission given a file descriptor: " msgstr "" #: src/libdar/tools.cpp:1737 #, c-format msgid "Error while setting file permission: %s" msgstr "" #: src/libdar/tools.cpp:1758 msgid "An empty string is not a valid user name" msgstr "" #: src/libdar/tools.cpp:1796 src/libdar/tools.cpp:1818 msgid "Unknown user" msgstr "" #: src/libdar/tools.cpp:1798 src/libdar/tools.cpp:1820 #, c-format msgid "Error found while looking for UID of user %s: %S" msgstr "" #: src/libdar/tools.cpp:1828 src/libdar/tools.cpp:1922 msgid "" "Cannot convert username to uid in statically linked binary, either directly " "provide the UID or run libdar from a dynamically linked executable" msgstr "" #: src/libdar/tools.cpp:1853 msgid "An empty string is not a valid group name" msgstr "" #: src/libdar/tools.cpp:1889 src/libdar/tools.cpp:1912 msgid "Unknown group" msgstr "" #: src/libdar/tools.cpp:1891 #, c-format msgid "Error found while looking fo GID of group %s: %S" msgstr "" #: src/libdar/tools.cpp:1914 #, c-format msgid "Error found while looking for GID of group %s: %S" msgstr "" #: src/libdar/tools.cpp:1951 #, c-format msgid "Error while setting file user ownership: %s" msgstr "" #: src/libdar/tools.cpp:2077 src/dar_suite/line_tools.cpp:1497 msgid "Cannot get full path of current working directory: " msgstr "" #: src/libdar/tools.cpp:2110 msgid "Worse" msgstr "" #: src/libdar/tools.cpp:2130 #, c-format msgid "Error code %d to message conversion failed" msgstr "" #: src/libdar/tools.cpp:2185 msgid "Invalid wide-char found in string: " msgstr "" #: src/libdar/tools.cpp:2298 #, c-format msgid "Error unlinking %S: %s" msgstr "" #: src/libdar/tools.cpp:2345 msgid "" "Cannot convert negative floating point value to unsigned (positive) integer" msgstr 
"" #: src/libdar/archive_options.cpp:169 msgid "Cannot get catalogue of reference as it has not been provided" msgstr "" #: src/libdar/archive_options.cpp:187 msgid "Error, catalogue of reference has not been provided" msgstr "" #: src/libdar/archive_options.cpp:1518 src/libdar/archive_options.cpp:1525 msgid "No mask available" msgstr "" #: src/libdar/get_version.cpp:142 msgid "Initialization problem for liblzo2 library" msgstr "" #: src/libdar/get_version.cpp:158 #, c-format msgid "Error while activating libgcrypt's memory guard: %s/%s" msgstr "" #: src/libdar/get_version.cpp:165 src/libdar/get_version.cpp:183 #, c-format msgid "Too old version for libgcrypt, minimum required version is %s" msgstr "" #: src/libdar/get_version.cpp:174 #, c-format msgid "Error while telling libgcrypt that initialization is finished: %s/%s" msgstr "" #: src/libdar/get_version.cpp:179 msgid "libgcrypt not initialized and libdar not allowed to do so" msgstr "" #: src/libdar/get_version.cpp:194 #, c-format msgid "GPGME version requirement is not satisfied, requires version > %s" msgstr "" #: src/libdar/get_version.cpp:198 #, c-format msgid "GPGME engine not available: %s" msgstr "" #: src/libdar/get_version.cpp:208 src/libdar/get_version.cpp:212 #: src/libdar/get_version.cpp:214 #, c-format msgid "libcurl initialization failed: %s" msgstr "" #: src/libdar/i_archive.cpp:110 #, c-format msgid "Opening archive %s ..." msgstr "" #: src/libdar/i_archive.cpp:140 msgid "header only mode asked" msgstr "" #: src/libdar/i_archive.cpp:153 #, c-format msgid "" "Opening the archive of reference %s to retreive the isolated catalog ... 
" msgstr "" #: src/libdar/i_archive.cpp:164 msgid "" "Reading the archive of reference from pipe or standard input is not possible" msgstr "" #: src/libdar/i_archive.cpp:166 msgid "" "The basename '+' is reserved for special a purpose that has no meaning in " "this context" msgstr "" #: src/libdar/i_archive.cpp:209 msgid "Error while opening the archive of reference: " msgstr "" #: src/libdar/i_archive.cpp:224 msgid "Loading isolated catalogue in memory..." msgstr "" #: src/libdar/i_archive.cpp:236 msgid "" "Archive of reference is not signed properly (no the same signatories for the " "archive and the internal catalogue), do we continue?" msgstr "" #: src/libdar/i_archive.cpp:243 msgid "" "The archive and the isolated catalogue do not correspond to the same data, " "they are thus incompatible between them" msgstr "" #: src/libdar/i_archive.cpp:259 msgid "Loading catalogue into memory..." msgstr "" #: src/libdar/i_archive.cpp:284 msgid "" "No data found in that archive, sequentially reading the catalogue found at " "the end of the archive..." msgstr "" #: src/libdar/i_archive.cpp:314 msgid "" "The catalogue will be filled while sequentially reading the archive, " "preparing the data structure..." msgstr "" #: src/libdar/i_archive.cpp:354 #, c-format msgid "" "LAX MODE: The end of the archive is corrupted, cannot get the archive " "contents (the \"catalogue\")" msgstr "" #: src/libdar/i_archive.cpp:355 msgid "" "LAX MODE: Do you want to bypass some sanity checks and try again reading the " "archive contents (this may take some time, this may also fail)?" msgstr "" #: src/libdar/i_archive.cpp:370 #, c-format msgid "" "LAX MODE: Could not find a whole catalogue in the archive. If you have an " "isolated catalogue, stop here and use it as backup of the internal " "catalogue, else continue but be advised that all data will not be able to be " "retrieved..." 
msgstr "" #: src/libdar/i_archive.cpp:371 msgid "" "LAX MODE: Do you want to try finding portions of the original catalogue if " "some remain (this may take even more time and in any case, it will only " "permit to recover some files, at most)?" msgstr "" #: src/libdar/i_archive.cpp:549 src/libdar/i_archive.cpp:1947 msgid "Compression_level must be between 1 and 9 included" msgstr "" #: src/libdar/i_archive.cpp:551 src/libdar/i_archive.cpp:1949 msgid "" "\"first_file_size\" cannot be different from zero if \"file_size\" is equal " "to zero" msgstr "" #: src/libdar/i_archive.cpp:553 src/libdar/i_archive.cpp:1951 msgid "Crypto block size must be greater than 10 bytes" msgstr "" #: src/libdar/i_archive.cpp:581 msgid "Both reference archive are nullptr, cannot merge archive from nothing" msgstr "" #: src/libdar/i_archive.cpp:613 msgid "" "the \"Keep file compressed\" feature is not possible when merging two " "archives using different compression algorithms (This is for a future " "version of dar). You can still merge these two archives but without keeping " "file compressed (thus you will probably like to use compression (-z or -y " "options) for the resulting archive" msgstr "" #: src/libdar/i_archive.cpp:637 msgid "" "Cannot calculate delta signature when merging if keep compressed is asked" msgstr "" #: src/libdar/i_archive.cpp:639 msgid "" "To calculate delta signatures of files saved as sparse files, you need to " "activate sparse file detection mechanism with merging operation" msgstr "" #: src/libdar/i_archive.cpp:874 #, c-format msgid "" "Archive repairing completed. WARNING! 
it is strongly advised to test the " "resulting archive before removing the damaged one" msgstr "" #: src/libdar/i_archive.cpp:901 src/libdar/i_archive.cpp:1309 msgid "This archive is not exploitable, check documentation for more" msgstr "" #: src/libdar/i_archive.cpp:915 msgid "" "File ownership will not be restored du to the lack of privilege, you can " "disable this message by asking not to restore file ownership" msgstr "" #: src/libdar/i_archive.cpp:964 msgid "Error while restoring data: " msgstr "" #: src/libdar/i_archive.cpp:992 #, c-format msgid "Catalogue size in archive : %i bytes" msgstr "" #: src/libdar/i_archive.cpp:994 #, c-format msgid "Catalogue size in archive : N/A" msgstr "" #: src/libdar/i_archive.cpp:1000 #, c-format msgid "Archive is composed of %i file(s)" msgstr "" #: src/libdar/i_archive.cpp:1004 #, c-format msgid "File size: %i bytes" msgstr "" #: src/libdar/i_archive.cpp:1013 #, c-format msgid "First file size : %i bytes" msgstr "" #: src/libdar/i_archive.cpp:1014 #, c-format msgid "File size : %i bytes" msgstr "" #: src/libdar/i_archive.cpp:1015 #, c-format msgid "Last file size : %i bytes" msgstr "" #: src/libdar/i_archive.cpp:1016 #, c-format msgid "Archive total size is : %i bytes" msgstr "" #: src/libdar/i_archive.cpp:1024 #, c-format msgid "Archive size is: %i bytes" msgstr "" #: src/libdar/i_archive.cpp:1025 #, c-format msgid "Previous archive size does not include headers present in each slice" msgstr "" #: src/libdar/i_archive.cpp:1028 #, c-format msgid "Archive size is unknown (reading from a pipe)" msgstr "" #: src/libdar/i_archive.cpp:1034 #, c-format msgid "" "The overall archive size includes %i byte(s) wasted due to bad compression " "ratio" msgstr "" #: src/libdar/i_archive.cpp:1039 msgid "The global data compression ratio is: " msgstr "" #: src/libdar/i_archive.cpp:1046 #, c-format msgid "" "\n" "WARNING! 
This archive only contains the catalogue of another archive, it can " "only be used as reference for differential backup or as rescue in case of " "corruption of the original archive's content. You cannot restore any data " "from this archive alone\n" msgstr "" #: src/libdar/i_archive.cpp:1072 src/libdar/i_archive.cpp:1387 msgid "" "This archive is not exploitable, check the archive class usage in the API " "documentation" msgstr "" #: src/libdar/i_archive.cpp:1157 src/libdar/i_archive.cpp:1637 #: src/libdar/i_archive.cpp:1670 msgid "" "Fetching EA value while listing an archive is not possible in sequential " "read mode" msgstr "" #: src/libdar/i_archive.cpp:1163 msgid "slicing focused output is not available in sequential-read mode" msgstr "" #: src/libdar/i_archive.cpp:1171 #, c-format msgid "" "Using user provided modified slicing (first slice = %i bytes, other slices = " "%i bytes)" msgstr "" #: src/libdar/i_archive.cpp:1174 msgid "" "No slice layout of the archive of reference for the current isolated " "catalogue is available, cannot provide slicing information, aborting" msgstr "" #: src/libdar/i_archive.cpp:1247 #, c-format msgid "Exception caught from archive_listing_callback execution: %s" msgstr "" #: src/libdar/i_archive.cpp:1252 msgid "Exception caught from archive_listing_callback execution" msgstr "" #: src/libdar/i_archive.cpp:1281 msgid "Error while listing archive contents: " msgstr "" #: src/libdar/i_archive.cpp:1357 msgid "Error while comparing archive with filesystem: " msgstr "" #: src/libdar/i_archive.cpp:1399 msgid "" "WARNING! 
This is an isolated catalogue, no data or EA is present in this " "archive, only the catalogue structure can be checked" msgstr "" #: src/libdar/i_archive.cpp:1433 msgid "A problem occurred while reading this archive contents: " msgstr "" #: src/libdar/i_archive.cpp:1448 msgid "Error while testing archive: " msgstr "" #: src/libdar/i_archive.cpp:1497 msgid "" "Isolation with delta signature is not possible on a just created archive (on-" "fly isolation)" msgstr "" #: src/libdar/i_archive.cpp:1954 msgid "" "nodump flag feature has not been activated at compilation time, it is thus " "not available" msgstr "" #: src/libdar/i_archive.cpp:1981 msgid "" "Cannot sequentially read an archive of reference when delta signature or " "delta patch is requested" msgstr "" #: src/libdar/i_archive.cpp:2017 #, c-format msgid "" "WARNING! The archive is located in the directory to backup, this may create " "an endless loop when the archive will try to save itself. You can either add " "-X \"%S.*.%S\" on the command line, or change the location of the archive " "(see -h for help). Do you really want to continue?" msgstr "" #: src/libdar/i_archive.cpp:2203 msgid "Ready to start writing down the archive?" msgstr "" #: src/libdar/i_archive.cpp:2249 msgid "Building the catalog object..." msgstr "" #: src/libdar/i_archive.cpp:2267 #, c-format msgid "Error while fetching information for %S: " msgstr "" #: src/libdar/i_archive.cpp:2320 msgid "Processing files for backup..." msgstr "" #: src/libdar/i_archive.cpp:2383 msgid "Processing files for merging..." msgstr "" #: src/libdar/i_archive.cpp:2415 msgid "Processing files for fixing..." msgstr "" #: src/libdar/i_archive.cpp:2497 msgid "" "Adding reference to files that have been destroyed since reference backup..." msgstr "" #: src/libdar/i_archive.cpp:2552 msgid "Error while saving data: " msgstr "" #: src/libdar/i_archive.cpp:2578 msgid "WARNING! Incorrect signature found for archive, continue anyway?" 
msgstr "" #: src/libdar/i_archive.cpp:2621 msgid "" "Cannot get data name of the archive, this archive is not completely " "initialized" msgstr "" #: src/libdar/i_archive.cpp:2646 msgid "" "This archive contains an isolated catalogue, it cannot be used for this " "operation. It can only be used as reference for a incremental/differential " "backup or as backup of the original archive's catalogue" msgstr "" #: src/libdar/i_archive.cpp:2653 msgid "" "LAX MODE: Archive seems to be only an isolated catalogue (no data in it), " "Can I assume data corruption occurred and consider the archive as being a " "real archive?" msgstr "" #: src/libdar/i_archive.cpp:2658 msgid "Error while fetching archive properties: " msgstr "" #: src/libdar/i_archive.cpp:2683 msgid "Sorry, file size is unknown at this step of the program." msgstr "" #: src/libdar/i_archive.cpp:2782 #, c-format msgid "" "libgcrypt version < %s. Ligcrypt used has a bug that leads md5 and sha1 hash " "results to be erroneous for files larger than 256 Gio (gibioctet), do you " "really want to spend CPU cycles calculating a useless hash?" msgstr "" #: src/libdar/data_tree.cpp:94 msgid "reached End of File before all expected data could be read" msgstr "" #: src/libdar/data_tree.cpp:119 msgid "Unexpected value found in database" msgstr "" #: src/libdar/data_tree.cpp:928 #, c-format msgid "" "Dates of file's %S are not increasing when database's archive number grows. " "Concerned file is: %S" msgstr "" #: src/libdar/data_tree.cpp:931 msgid "" "Dates are not increasing for all files when database's archive number grows, " "working with this database may lead to improper file's restored version. " "Please reorder the archive within the database in the way that the older is " "the first archive and so on up to the most recent archive being the last of " "the database" msgstr "" #: src/libdar/data_tree.cpp:934 msgid "Do you want to ignore the same type of error for other files?" 
msgstr "" #: src/libdar/slave_zapette.cpp:66 msgid "Input cannot be read" msgstr "" #: src/libdar/slave_zapette.cpp:68 msgid "Cannot write to output" msgstr "" #: src/libdar/slave_zapette.cpp:70 msgid "Data should be read-only" msgstr "" #: src/libdar/slave_zapette.cpp:146 msgid "Cannot skip at end of file" msgstr "" #: src/libdar/slave_zapette.cpp:199 msgid "Received unknown special order" msgstr "" #: src/libdar/delta_sig_block_size.cpp:66 msgid "Invalid divisor used for delta signature block len calculation" msgstr "" #: src/libdar/delta_sig_block_size.cpp:68 msgid "" "minimum size should be lesser or equal than maximum size when specifying " "delta signature block size formula" msgstr "" #: src/libdar/filesystem_specific_attribute.hpp:273 msgid "true" msgstr "" #: src/libdar/filesystem_specific_attribute.hpp:273 msgid "false" msgstr "" #: src/libdar/filesystem_specific_attribute.cpp:233 #: src/libdar/filesystem_specific_attribute.cpp:1166 msgid "invalid length for FSA family flag" msgstr "" #: src/libdar/filesystem_specific_attribute.cpp:239 msgid "invalid length for FSA nature flag" msgstr "" #: src/libdar/filesystem_specific_attribute.cpp:716 msgid "Failed setting (opening) extX family FSA: " msgstr "" #: src/libdar/filesystem_specific_attribute.cpp:729 msgid "Failed reading existing extX family FSA: " msgstr "" #: src/libdar/filesystem_specific_attribute.cpp:758 #: src/libdar/filesystem_specific_attribute.cpp:773 #: src/libdar/filesystem_specific_attribute.cpp:788 #: src/libdar/filesystem_specific_attribute.cpp:803 #: src/libdar/filesystem_specific_attribute.cpp:824 #: src/libdar/filesystem_specific_attribute.cpp:840 #: src/libdar/filesystem_specific_attribute.cpp:855 #: src/libdar/filesystem_specific_attribute.cpp:870 #: src/libdar/filesystem_specific_attribute.cpp:885 #: src/libdar/filesystem_specific_attribute.cpp:900 #: src/libdar/filesystem_specific_attribute.cpp:915 #: src/libdar/filesystem_specific_attribute.cpp:930 #, c-format msgid "" "Warning: FSA 
%s/%s support has not been found at compilation time, cannot " "restore it for inode %s" msgstr "" #: src/libdar/filesystem_specific_attribute.cpp:975 msgid "Failed set extX family FSA: " msgstr "" #: src/libdar/filesystem_specific_attribute.cpp:1002 #, c-format msgid "" "Not setting FSA extX IMMUTABLE flags for %s due to of lack of capability" msgstr "" #: src/libdar/filesystem_specific_attribute.cpp:1030 #, c-format msgid "" "Not setting FSA extX SYSTEM RESOURCE flags for %s due to of lack of " "capability" msgstr "" #: src/libdar/filesystem_specific_attribute.cpp:1048 #, c-format msgid "" "Warning! %s Filesystem Specific Attribute support have not been activated at " "compilation time and could not be restored for %s" msgstr "" #: src/libdar/filesystem_specific_attribute.cpp:1068 #, c-format msgid "" "Birth Time attribute cannot be restored for %s because no FSA familly able " "to carry that attribute could be activated at compilation time." msgstr "" #: src/libdar/filesystem_specific_attribute.cpp:1172 #: src/libdar/filesystem_specific_attribute.cpp:1173 msgid "invalid FSA family flag" msgstr "" #: src/libdar/filesystem_specific_attribute.cpp:1207 #: src/libdar/filesystem_specific_attribute.cpp:1209 msgid "invalid FSA nature flag" msgstr "" #: src/libdar/filesystem_specific_attribute.cpp:1233 msgid "Unexepected value for boolean FSA, data corruption may have occurred" msgstr "" #: src/libdar/filesystem_specific_attribute.cpp:1237 msgid "Error while reading FSA: " msgstr "" #: src/libdar/op_tools.cpp:48 src/libdar/op_tools.cpp:119 #: src/libdar/op_tools.cpp:199 #, c-format msgid "" "Conflict found while selecting the file to retain in the resulting archive:" msgstr "" #: src/libdar/op_tools.cpp:49 #, c-format msgid "User Decision requested for data of file %S" msgstr "" #: src/libdar/op_tools.cpp:52 msgid "" "\n" "Your decision about file's data:\n" "[P]reserve\n" "[O]verwrite\n" "mark [S]aved and preserve\n" "mark saved and overwri[T]e\n" "[R]emove\n" "[*] keep 
undefined\n" "[A]bort\n" " Your Choice? " msgstr "" #: src/libdar/op_tools.cpp:54 src/libdar/op_tools.cpp:125 #: src/libdar/op_tools.cpp:205 msgid "" "Please answer by the character between brackets ('[' and ']') and press " "return" msgstr "" #: src/libdar/op_tools.cpp:84 src/libdar/op_tools.cpp:163 #: src/libdar/op_tools.cpp:231 #, c-format msgid "" "Warning, are you sure you want to abort (please answer \"%S\" to confirm)? " msgstr "" #: src/libdar/op_tools.cpp:88 src/libdar/op_tools.cpp:167 #: src/libdar/op_tools.cpp:235 msgid "Cancellation no confirmed" msgstr "" #: src/libdar/op_tools.cpp:91 src/libdar/op_tools.cpp:170 #: src/libdar/op_tools.cpp:238 msgid "Unknown choice: " msgstr "" #: src/libdar/op_tools.cpp:120 #, c-format msgid "User Decision requested for EA of file %S" msgstr "" #: src/libdar/op_tools.cpp:123 msgid "" "\n" "Your decision about file's EA:\n" "[p]reserve\n" "[o]verwrite\n" "mark [s]aved and preserve\n" "mark saved and overwri[t]e\n" "[m]erge EA and preserve\n" "merge EA a[n]d overwrite\n" "[r]emove\n" "[*] keep undefined\n" "[a]bort\n" " Your choice? " msgstr "" #: src/libdar/op_tools.cpp:200 #, c-format msgid "User Decision requested for FSA of file %S" msgstr "" #: src/libdar/op_tools.cpp:203 msgid "" "\n" "Your decision about file's FSA:\n" "[p]reserve\n" "[o]verwrite\n" "mark [s]aved and preserve\n" "mark saved and overwri[t]e\n" "[*] keep undefined\n" "[a]bort\n" " Your choice? 
" msgstr "" #: src/libdar/op_tools.cpp:259 src/dar_suite/line_tools.cpp:58 #: src/dar_suite/dar_suite.cpp:279 msgid "YES" msgstr "" #: src/libdar/op_tools.cpp:260 src/dar_suite/line_tools.cpp:58 #: src/dar_suite/dar_suite.cpp:281 msgid "NO" msgstr "" #: src/libdar/op_tools.cpp:272 #, c-format msgid "Entry information:\t\"in place\"\t\"to be added\"" msgstr "" #: src/libdar/op_tools.cpp:273 #, c-format msgid "Is inode :\t %S \t\t %S" msgstr "" #: src/libdar/op_tools.cpp:274 #, c-format msgid "Is directory :\t %S \t\t %S" msgstr "" #: src/libdar/op_tools.cpp:275 #, c-format msgid "Is plain file :\t %S \t\t %S" msgstr "" #: src/libdar/op_tools.cpp:276 #, c-format msgid "Is hard linked :\t %S \t\t %S" msgstr "" #: src/libdar/op_tools.cpp:277 #, c-format msgid "Entry type :\t %s \t %s" msgstr "" #: src/libdar/op_tools.cpp:283 msgid "me" msgstr "" #: src/libdar/op_tools.cpp:292 #, c-format msgid "Data more recent :\t %S \t\t %S" msgstr "" #: src/libdar/op_tools.cpp:302 #, c-format msgid "Data size :\t %i \t\t %i" msgstr "" #: src/libdar/op_tools.cpp:303 #, c-format msgid "Sparse file :\t %S \t\t %S" msgstr "" #: src/libdar/op_tools.cpp:304 #, c-format msgid "Dirty file :\t %S \t\t %S" msgstr "" #: src/libdar/op_tools.cpp:306 #, c-format msgid "Data full saved :\t %S \t\t %S" msgstr "" #: src/libdar/op_tools.cpp:307 #, c-format msgid "EA full saved :\t %S \t\t %S" msgstr "" #: src/libdar/op_tools.cpp:309 #, c-format msgid "EA more recent :\t %S \t\t %S" msgstr "" #: src/libdar/op_tools.cpp:310 #, c-format msgid "FSA full saved :\t %S \t\t %S" msgstr "" #: src/libdar/op_tools.cpp:315 #, c-format msgid "FSA familly :\t %S \t\t %S" msgstr "" #: src/libdar/op_tools.cpp:324 #, c-format msgid "EA number :\t %i \t\t %i" msgstr "" #: src/libdar/op_tools.cpp:327 #, c-format msgid "EA size :\t %i \t\t %i" msgstr "" #: src/libdar/mask.cpp:69 #, c-format msgid "%Sglob expression: %S [%S]" msgstr "" #: src/libdar/mask.cpp:110 #, c-format msgid "%Sregular expression: %S [%S]" msgstr "" 
#: src/libdar/mask.cpp:160 #, c-format msgid "" "%Snot(\n" "%S\n" "%S)" msgstr "" #: src/libdar/mask.cpp:279 #, c-format msgid "%SIs subdir of: %S [%S]" msgstr "" #: src/libdar/mask.cpp:297 #, c-format msgid "%SPath is: %S [%S]" msgstr "" #: src/libdar/mask.cpp:307 #, c-format msgid "%SPath leads to: %S [%S]" msgstr "" #: src/libdar/mask.cpp:315 msgid "case sensitive" msgstr "" #: src/libdar/mask.cpp:315 msgid "case in-sensitive" msgstr "" #: src/libdar/cat_directory.cpp:142 msgid "" "LAX MODE: Error met building a catalogue entry, skipping this entry and " "continuing. Skipped error is: " msgstr "" #: src/libdar/cat_directory.cpp:185 msgid "missing data to build a cat_directory" msgstr "" #: src/libdar/cat_directory.cpp:443 #, c-format msgid "Cannot remove nonexistent entry %S from catalogue" msgstr "" #: src/libdar/scrambler.cpp:34 msgid "Key cannot be an empty string" msgstr "" #: src/libdar/path.cpp:65 src/libdar/path.cpp:87 msgid "Empty string is not a valid path" msgstr "" #: src/libdar/path.cpp:95 #, c-format msgid "%S is an not a valid path: %S" msgstr "" #: src/libdar/path.cpp:199 msgid "Cannot add an absolute path" msgstr "" #: src/libdar/path.cpp:357 msgid "Empty string as subdirectory does not make a valid path" msgstr "" #: src/libdar/crc.cpp:210 src/libdar/crc.cpp:311 msgid "Invalid size for CRC width" msgstr "" #: src/libdar/i_database.cpp:122 msgid "Badly formatted database" msgstr "" #: src/libdar/i_database.cpp:171 msgid "Cannot write down a read-only database" msgstr "" #: src/libdar/i_database.cpp:231 msgid "Empty string is an invalid archive basename" msgstr "" #: src/libdar/i_database.cpp:257 src/libdar/i_database.cpp:259 msgid "Incorrect archive range in database" msgstr "" #: src/libdar/i_database.cpp:286 src/libdar/i_database.cpp:305 #: src/libdar/i_database.cpp:408 msgid "Non existent archive in database" msgstr "" #: src/libdar/i_database.cpp:325 src/libdar/i_database.cpp:327 msgid "Invalid archive number: " msgstr "" #: 
src/libdar/i_database.cpp:434 msgid "Invalid path, path must be relative" msgstr "" #: src/libdar/i_database.cpp:440 src/libdar/i_database.cpp:445 #: src/libdar/i_database.cpp:449 msgid "Non existent file in database" msgstr "" #: src/libdar/i_database.cpp:513 msgid "Checking chronological ordering of files between the archives..." msgstr "" #: src/libdar/i_database.cpp:539 msgid "File recorded as removed at this date in database: " msgstr "" #: src/libdar/i_database.cpp:543 msgid "File not found in database: " msgstr "" #: src/libdar/i_database.cpp:547 msgid "" "File found in database but impossible to restore (only found \"unchanged\" " "in differential backups, or delta patch without reference to base it on in " "any previous archive of the base): " msgstr "" #: src/libdar/i_database.cpp:566 msgid "" "Extended Attribute of file found in database but impossible to restore (only " "found \"unchanged\" in differential backups): " msgstr "" #: src/libdar/i_database.cpp:579 #, c-format msgid "%S did not exist before specified date and cannot be restored" msgstr "" #: src/libdar/i_database.cpp:608 #, c-format msgid "" "Either archives in database are not properly tidied, or file last " "modification date has been artificially set to an more ancient date. 
This " "may lead improper Extended Attribute restoration for inode %S" msgstr "" #: src/libdar/i_database.cpp:627 #, c-format msgid "Cannot restore file %S : non existent file in database" msgstr "" #: src/libdar/i_database.cpp:683 msgid "Error while restoring the following files: " msgstr "" #: src/libdar/i_database.cpp:692 msgid "Cannot restore any file, nothing done" msgstr "" #: src/libdar/i_database.cpp:705 src/libdar/i_database.cpp:713 #, c-format msgid "Invalid archive number: %d" msgstr "" #: src/libdar/catalogue.cpp:154 msgid "" "LAX MODE: catalogue label does not match archive label, as if it was an " "extracted catalogue, assuming data corruption occurred and fixing the " "catalogue to be considered an a plain internal catalogue" msgstr "" #: src/libdar/catalogue.cpp:210 msgid "CRC failed for the catalogue" msgstr "" #: src/libdar/catalogue.cpp:212 msgid "" "LAX MODE: CRC failed for catalogue, the archive contents is corrupted. This " "may even lead dar to see files in the archive that never existed, but this " "will most probably lead to other failures in restoring files. Shall we " "proceed anyway?" 
msgstr "" #: src/libdar/catalogue.cpp:262 msgid "root does not have a parent directory" msgstr "" #: src/libdar/catalogue.cpp:300 msgid "no current directory defined" msgstr "" #: src/libdar/catalogue.cpp:304 msgid "root directory has no parent directory" msgstr "" #: src/libdar/catalogue.cpp:326 msgid "no current reading directory defined" msgstr "" #: src/libdar/catalogue.cpp:399 src/libdar/catalogue.cpp:413 msgid " is not present in the archive" msgstr "" #: src/libdar/catalogue.cpp:491 msgid "Cannot recurs in a non directory entry" msgstr "" #: src/libdar/catalogue.cpp:494 msgid "" "The entry to recurs in does not exist, cannot add further entry to that " "absent subdirectory" msgstr "" #: src/libdar/catalogue.cpp:559 msgid "root has no parent directory" msgstr "" #: src/libdar/archive_aux.cpp:134 #, c-format msgid "unknown hash algorithm corresponding to char `%c'" msgstr "" #: src/libdar/integers.cpp:36 #, c-format msgid "%s type length is not %d byte(s) but %d" msgstr "" #: src/libdar/integers.cpp:44 #, c-format msgid "%s type is not a signed type as expected" msgstr "" #: src/libdar/integers.cpp:46 #, c-format msgid "%s type is not an unsigned type as expected" msgstr "" #: src/libdar/integers.cpp:101 #, c-format msgid "" "type %s is neither big nor little endian! 
Do not know how to handle integer " "in a portable manner on this host, aborting" msgstr "" #: src/libdar/integers.cpp:154 msgid "incoherent endian between U_16 and U_32" msgstr "" #: src/libdar/integers.cpp:156 msgid "incoherent endian between U_16 and U_64" msgstr "" #: src/libdar/integers.cpp:158 msgid "incoherent endian between U_16 and U_I" msgstr "" #: src/libdar/storage.cpp:85 msgid "Not enough data to initialize storage field" msgstr "" #: src/libdar/storage.cpp:110 msgid "Asking for an element out of array" msgstr "" #: src/libdar/storage.cpp:162 msgid "The iterator is not indexing the object it has been asked to write to" msgstr "" #: src/libdar/storage.cpp:196 msgid "The iterator is not indexing the object it has been asked to read from" msgstr "" #: src/libdar/storage.cpp:515 msgid "The iterator is not indexing the object it has been defined for" msgstr "" #: src/libdar/storage.cpp:767 src/libdar/storage.cpp:832 msgid "Iterator does not point to data" msgstr "" #: src/libdar/storage.cpp:826 msgid "Reference storage of the iterator is empty or non existent" msgstr "" #: src/libdar/storage.cpp:843 msgid "The iterator position is not inside the storage of reference" msgstr "" #: src/libdar/etage.cpp:118 msgid "Error opening directory in furtive read mode: " msgstr "" #: src/libdar/etage.cpp:122 #, c-format msgid "" "Could not open directory %s in furtive read mode (%s), using normal mode" msgstr "" #: src/libdar/etage.cpp:147 msgid "Error opening directory: " msgstr "" #: src/libdar/etage.cpp:165 #, c-format msgid "" "Filename provided by the operating system seems truncated in directory %s, " "storing filename as is: %s" msgstr "" #: src/libdar/etage.cpp:197 #, c-format msgid "" "Detected Cache Directory Tagging Standard for %s, the contents of that " "directory will not be saved" msgstr "" #: src/libdar/zapette_protocol.cpp:69 msgid "Partial request received, aborting\n" msgstr "" #: src/libdar/zapette_protocol.cpp:142 msgid "Corrupted data read on pipe" 
msgstr "" #: src/libdar/libdar5.cpp:162 msgid "Caught an unknown Egeneric exception: " msgstr "" #: src/libdar/libdar5.cpp:167 msgid "Caught a none libdar exception" msgstr "" #: src/libdar/libdar5.cpp:256 msgid "Invald nullptr argument given to 'ptr'" msgstr "" #: src/libdar/libdar5.cpp:303 msgid "Invalid nullptr pointer given to close_archive" msgstr "" #: src/libdar/libdar5.cpp:326 src/libdar/libdar5.cpp:346 #: src/libdar/libdar5.cpp:365 src/libdar/libdar5.cpp:387 #: src/libdar/libdar5.cpp:407 msgid "Invalid nullptr argument given to 'ptr'" msgstr "" #: src/libdar/zapette.cpp:65 msgid "Cannot read on input" msgstr "" #: src/libdar/zapette.cpp:67 msgid "Cannot write on output" msgstr "" #: src/libdar/zapette.cpp:97 #, c-format msgid "" "Failed driving dar_slave to the end of archive: %S. Trying to open the " "archive from the first bytes" msgstr "" #: src/libdar/zapette.cpp:213 msgid "Uncomplete answer received from peer" msgstr "" #: src/libdar/zapette.cpp:297 msgid "Communication problem with peer, retry ?" msgstr "" #: src/libdar/zapette.cpp:313 src/libdar/zapette.cpp:327 #: src/libdar/zapette.cpp:347 src/libdar/zapette.cpp:352 msgid "Incoherent answer from peer" msgstr "" #: src/libdar/zapette.cpp:322 msgid "Bad answer from peer, while closing connection" msgstr "" #: src/libdar/zapette.cpp:332 src/libdar/zapette.cpp:337 #: src/libdar/zapette.cpp:342 msgid "" "Unexpected answer from slave, communication problem or bug may hang the " "operation" msgstr "" #: src/libdar/zapette.cpp:355 msgid "Corrupted data read from pipe" msgstr "" #: src/libdar/mask_list.cpp:97 #, c-format msgid "Cannot allocate memory for buffer while reading %S" msgstr "" #: src/libdar/mask_list.cpp:122 msgid "Found '" msgstr "" #: src/libdar/mask_list.cpp:146 #, c-format msgid "" "line exceeding the maximum of %d characters in listing file %S, aborting. 
" "Concerned line starts with: %s" msgstr "" #: src/libdar/mask_list.cpp:180 msgid "" "Mask_list's prefix must be an absolute path or start with \"\" string " "for archive merging" msgstr "" #: src/libdar/mask_list.cpp:202 #, c-format msgid "" "Error met while reading line\n" "\t%S\n" " from file %S: %S" msgstr "" #: src/libdar/mask_list.cpp:219 #, c-format msgid "Too much line in file %S (integer overflow)" msgstr "" #: src/libdar/mask_list.cpp:223 #, c-format msgid "Error met while opening %S: " msgstr "" #: src/libdar/filesystem_hard_link_read.cpp:158 #, c-format msgid "Error reading inode of file %s : %s" msgstr "" #: src/libdar/filesystem_hard_link_read.cpp:163 #, c-format msgid "Failed reading inode information for %s: " msgstr "" #: src/libdar/filesystem_hard_link_read.cpp:171 msgid "Cannot read inode for " msgstr "" #: src/libdar/filesystem_hard_link_read.cpp:182 #: src/libdar/filesystem_hard_link_read.cpp:211 msgid "atime, data access time" msgstr "" #: src/libdar/filesystem_hard_link_read.cpp:188 #: src/libdar/filesystem_hard_link_read.cpp:217 msgid "mtime, data modification time" msgstr "" #: src/libdar/filesystem_hard_link_read.cpp:194 #: src/libdar/filesystem_hard_link_read.cpp:223 msgid "ctime, inode change time" msgstr "" #: src/libdar/filesystem_hard_link_read.cpp:305 msgid "Unknown file type! 
file name is: " msgstr "" #: src/libdar/filesystem_hard_link_read.cpp:338 msgid "Error reading EA for " msgstr "" #: src/libdar/capabilities.cpp:114 #, c-format msgid "Error met while checking for capability %S: %s" msgstr "" #: src/libdar/capabilities.cpp:144 src/libdar/capabilities.cpp:151 #, c-format msgid "Error met while setting capability %S: %s" msgstr "" #: src/libdar/crypto.cpp:44 msgid "scrambling (weak encryption)" msgstr "" #: src/libdar/crypto.cpp:102 msgid "Unknown crypto algorithm" msgstr "" #: src/libdar/mycurl_protocol.cpp:44 #, c-format msgid "Unknown protocol: %S" msgstr "" #: src/libdar/filtre.cpp:258 src/libdar/filtre.cpp:638 #: src/libdar/filtre.cpp:1249 src/libdar/filtre.cpp:1430 #: src/libdar/filtre.cpp:2711 msgid "Inspecting directory " msgstr "" #: src/libdar/filtre.cpp:294 #, c-format msgid "" "File %S has changed during backup and is probably not saved in a valid state " "(\"dirty file\"), do you want to consider it for restoration anyway?" msgstr "" #: src/libdar/filtre.cpp:304 msgid "" "File had changed during backup and had been copied another time, restoring " "the next copy of file: " msgstr "" #: src/libdar/filtre.cpp:375 #, c-format msgid "" "The just restored file %S has been marked as dirty (sequential reading can " "only detect the dirty status after restoration), do we remove this just " "restored dirty file?" msgstr "" #: src/libdar/filtre.cpp:382 #, c-format msgid "" "The just restored file %S has been marked as dirty (sequential reading can " "only detect the dirty status after restoration), removing the just restored " "dirty file as it is asked to ignore this type of file" msgstr "" #: src/libdar/filtre.cpp:384 #, c-format msgid "Removing the dirty file %S" msgstr "" #: src/libdar/filtre.cpp:457 msgid " not restored (user choice)" msgstr "" #: src/libdar/filtre.cpp:461 msgid "No file in this directory will be restored." 
msgstr "" #: src/libdar/filtre.cpp:479 msgid "Error while restoring " msgstr "" #: src/libdar/filtre.cpp:484 msgid "Warning! No file in that directory will be restored: " msgstr "" #: src/libdar/filtre.cpp:679 msgid "Recording hard link into the archive: " msgstr "" #: src/libdar/filtre.cpp:746 #, c-format msgid "" "SECURITY WARNING! SUSPICIOUS FILE %S: ctime changed since archive of " "reference was done, while no other inode information changed" msgstr "" #: src/libdar/filtre.cpp:1097 msgid "Error while saving " msgstr "" #: src/libdar/filtre.cpp:1112 msgid "NO FILE IN THAT DIRECTORY CAN BE SAVED." msgstr "" #: src/libdar/filtre.cpp:1117 msgid "Cannot write down the archive: " msgstr "" #: src/libdar/filtre.cpp:1134 msgid ", compression ratio " msgstr "" #: src/libdar/filtre.cpp:1140 #, c-format msgid "Finished Inspecting directory %S , saved %S%S" msgstr "" #: src/libdar/filtre.cpp:1241 msgid "SKIPPED (hard link in sequential read mode): " msgstr "" #: src/libdar/filtre.cpp:1277 msgid "OK " msgstr "" #: src/libdar/filtre.cpp:1288 src/libdar/filtre.cpp:1320 msgid "DIFF " msgstr "" #: src/libdar/filtre.cpp:1320 msgid ": file not present in filesystem" msgstr "" #: src/libdar/filtre.cpp:1372 msgid "ERR " msgstr "" #: src/libdar/filtre.cpp:1455 msgid "Data" msgstr "" #: src/libdar/filtre.cpp:1464 msgid "Can't read saved data." msgstr "" #: src/libdar/filtre.cpp:1510 msgid "CRC error: data corruption." msgstr "" #: src/libdar/filtre.cpp:1611 msgid "OK " msgstr "" #: src/libdar/filtre.cpp:1646 msgid "ERR " msgstr "" #: src/libdar/filtre.cpp:1768 msgid "" "Decremental mode is useless when merging is not applied to both an archive " "of reference and an auxiliary archive of reference. Ignore decremental mode " "and continue?" 
msgstr "" #: src/libdar/filtre.cpp:1902 msgid "first" msgstr "" #: src/libdar/filtre.cpp:1905 msgid "second" msgstr "" #: src/libdar/filtre.cpp:1908 msgid "next" msgstr "" #: src/libdar/filtre.cpp:1911 #, c-format msgid "Merging/filtering files from the %s archive..." msgstr "" #: src/libdar/filtre.cpp:1993 msgid "overwritten" msgstr "" #: src/libdar/filtre.cpp:1997 src/libdar/filtre.cpp:2041 msgid "dropped from the archive and marked as already saved" msgstr "" #: src/libdar/filtre.cpp:2000 msgid "removed" msgstr "" #: src/libdar/filtre.cpp:2005 #, c-format msgid "Data of file %S is about to be %S, proceed?" msgstr "" #: src/libdar/filtre.cpp:2034 msgid "replaced" msgstr "" #: src/libdar/filtre.cpp:2037 msgid "removed from the archive" msgstr "" #: src/libdar/filtre.cpp:2044 msgid "merged with possible overwriting" msgstr "" #: src/libdar/filtre.cpp:2049 #, c-format msgid "EA and FSA of file %S are about to be %S, proceed?" msgstr "" #: src/libdar/filtre.cpp:2115 #, c-format msgid "" "EA and FSA of file %S from first archive have been updated with those of " "same named file of the auxiliary archive" msgstr "" #: src/libdar/filtre.cpp:2125 #, c-format msgid "" "EA of file %S from first archive have been dropped and marked as already " "saved" msgstr "" #: src/libdar/filtre.cpp:2131 #, c-format msgid "" "FSA of file %S from first archive have been dropped and marked as already " "saved" msgstr "" #: src/libdar/filtre.cpp:2141 src/libdar/filtre.cpp:2279 #, c-format msgid "EA of file %S from first archive have been removed" msgstr "" #: src/libdar/filtre.cpp:2149 #, c-format msgid "FSA of file %S from first archive have been removed" msgstr "" #: src/libdar/filtre.cpp:2163 #, c-format msgid "Data of file %S from first archive has been preserved from overwriting" msgstr "" #: src/libdar/filtre.cpp:2214 #, c-format msgid "" "Data of file %S taken from the first archive of reference has been removed" msgstr "" #: src/libdar/filtre.cpp:2217 #, c-format msgid "" "Data of 
file %S taken from the first archive of reference has been " "overwritten" msgstr "" #: src/libdar/filtre.cpp:2252 #, c-format msgid "EA of file %S has been overwritten" msgstr "" #: src/libdar/filtre.cpp:2256 src/libdar/filtre.cpp:2272 #, c-format msgid "EA of file %S has been overwritten and marked as already saved" msgstr "" #: src/libdar/filtre.cpp:2262 src/libdar/filtre.cpp:2267 #, c-format msgid "" "EA of file %S from first archive have been updated with those of the same " "named file of the auxiliary archive" msgstr "" #: src/libdar/filtre.cpp:2551 msgid " not merged (user choice)" msgstr "" #: src/libdar/filtre.cpp:2555 msgid "No file in this directory will be considered for merging." msgstr "" #: src/libdar/filtre.cpp:2575 msgid "Error while considering file " msgstr "" #: src/libdar/filtre.cpp:2579 msgid "Warning! No file in this directory will be considered for merging: " msgstr "" #: src/libdar/filtre.cpp:2616 msgid "" "File selection has been aborted. Now building the resulting archive with the " "already selected files" msgstr "" #: src/libdar/filtre.cpp:2795 #, c-format msgid "" "Need to activate sparse file detection in order to calculate delta signature " "for sparse file %S" msgstr "" #: src/libdar/filtre.cpp:2882 msgid "Adding Hard link to archive: " msgstr "" #: src/libdar/filtre.cpp:2898 #, c-format msgid "error met while creating archive: %S" msgstr "" #: src/libdar/filtre.cpp:2966 msgid "failed reading CRC from file: " msgstr "" #: src/libdar/filtre.cpp:2994 msgid "Failed reading CRC for EA and FSA: " msgstr "" #: src/libdar/filtre.cpp:3005 msgid "Error met while reading next entry: " msgstr "" #: src/libdar/filtre.cpp:3116 #, c-format msgid "Adding only inode metadata to archive: %S" msgstr "" #: src/libdar/filtre.cpp:3128 msgid "Resaving file without compression: " msgstr "" #: src/libdar/filtre.cpp:3132 msgid "Delta saving file to archive: " msgstr "" #: src/libdar/filtre.cpp:3136 #, c-format msgid "Adding %S to archive: %S" msgstr "" #: 
src/libdar/filtre.cpp:3176 src/libdar/filtre.cpp:3188 #, c-format msgid "building delta signature with block size of %d bytes" msgstr "" #: src/libdar/filtre.cpp:3280 #, c-format msgid "" "Failed reading data CRC for %S, file may be damaged and will be marked dirty" msgstr "" #: src/libdar/filtre.cpp:3300 #, c-format msgid "" "Failed setting storage size to zero for this file with missing data CRC, CRC " "error will be reported for that file while reading the repaired archive" msgstr "" #: src/libdar/filtre.cpp:3337 msgid "Copied data does not match CRC" msgstr "" #: src/libdar/filtre.cpp:3484 msgid " : Failed resaving uncompressed the inode data" msgstr "" #: src/libdar/filtre.cpp:3494 msgid "" " : Resaving uncompressed the inode data to gain space is not possible, " "keeping data compressed" msgstr "" #: src/libdar/filtre.cpp:3518 #, c-format msgid "" "File has disappeared while we were reading it, cannot check whether it has " "changed during its backup: %S" msgstr "" #: src/libdar/filtre.cpp:3553 #, c-format msgid "" "WARNING! File modified while reading it for backup. Performing retry %i of %i" msgstr "" #: src/libdar/filtre.cpp:3570 msgid "" "WARNING! File modified while reading it for backup. No more retry for that " "file to not exceed the wasted byte limit. File is " msgstr "" #: src/libdar/filtre.cpp:3577 msgid "" "WARNING! 
File modified while reading it for backup, but no more retry " "allowed: " msgstr "" #: src/libdar/filtre.cpp:3599 msgid "Dumping delta signature structure for saved file: " msgstr "" #: src/libdar/filtre.cpp:3755 msgid "Saving Extended Attributes for " msgstr "" #: src/libdar/filtre.cpp:3790 #, c-format msgid "" "Computed EA CRC for file %S differs from what was stored in the archive, " "this file's EA may have been corrupted" msgstr "" #: src/libdar/filtre.cpp:3837 msgid "Error saving Extended Attributes for " msgstr "" #: src/libdar/filtre.cpp:3841 msgid "" "be advised that a CRC error will be reported for the EA of that file while " "sequentially reading the repaired archive" msgstr "" #: src/libdar/filtre.cpp:3876 msgid "Saving Filesystem Specific Attributes for " msgstr "" #: src/libdar/filtre.cpp:3906 #, c-format msgid "" "Computed FSA CRC for file %S differs from what was stored in the archive, " "this file's EA may have been corrupted" msgstr "" #: src/libdar/filtre.cpp:3951 msgid "Error saving Filesystem Specific Attributes for " msgstr "" #: src/libdar/filtre.cpp:3955 msgid "" "be advised that a CRC error will be reported for the FSA of that file while " "sequentially reading the repaired archive" msgstr "" #: src/libdar/filtre.cpp:4513 msgid "Copying delta signature structure from the archive of reference: " msgstr "" #: src/libdar/filtre.cpp:4552 msgid "Calculating delta signature from filesystem: " msgstr "" #: src/libdar/filtre.cpp:4632 #, c-format msgid "" "Furtive read mode requires either root permission and FOWNER capability, " "falling back to normal filesystem read" msgstr "" #: src/libdar/escape.cpp:138 msgid "" "Adding an explicit escape sequence of type seqt_not_a_sequence is forbidden" msgstr "" #: src/libdar/escape.cpp:898 msgid "Unknown escape sequence type" msgstr "" #: src/libdar/sparse_file.cpp:157 msgid "Incoherent structure in data carrying sparse files: unknown mark" msgstr "" #: src/libdar/sparse_file.cpp:311 msgid "Cannot skip 
forward to restore a hole" msgstr "" #: src/libdar/sparse_file.cpp:325 msgid "Data corruption or unknown sparse_file mark found in file's data" msgstr "" #: src/libdar/criterium.cpp:412 msgid "" "Cannot evaluate this crit_and criterium as no criterium has been added to it" msgstr "" #: src/libdar/criterium.cpp:481 msgid "" "Cannot evaluate this crit_or criterium as no criterium has been added to it" msgstr "" #: src/dar_suite/dar_slave.cpp:150 msgid "Cannot read arguments on command line, aborting" msgstr "" #: src/dar_suite/dar_slave.cpp:160 msgid "Missing argument to -i option" msgstr "" #: src/dar_suite/dar_slave.cpp:168 msgid "Missing argument to -o option" msgstr "" #: src/dar_suite/dar_slave.cpp:182 msgid "Missing argument to -E option" msgstr "" #: src/dar_suite/dar_slave.cpp:192 src/dar_suite/command_line.cpp:1834 #: src/dar_suite/dar_xform.cpp:347 msgid "Missing argument to --min-digits" msgstr "" #: src/dar_suite/dar_slave.cpp:200 src/dar_suite/dar_xform.cpp:355 #, c-format msgid "Missing parameter to option -%c" msgstr "" #: src/dar_suite/dar_slave.cpp:202 src/dar_suite/dar_slave.cpp:204 #: src/dar_suite/dar_manager.cpp:514 src/dar_suite/dar_manager.cpp:516 #: src/dar_suite/dar_xform.cpp:357 #, c-format msgid "Ignoring unknown option -%c" msgstr "" #: src/dar_suite/dar_slave.cpp:210 msgid "Missing archive basename, see -h option for help" msgstr "" #: src/dar_suite/dar_slave.cpp:216 src/dar_suite/dar_xform.cpp:371 msgid "Too many argument on command line, see -h option for help" msgstr "" #: src/dar_suite/dar_slave.cpp:256 src/dar_suite/dar_slave.cpp:261 #: src/dar_suite/dar_manager.cpp:906 src/dar_suite/dar_manager.cpp:930 #: src/dar_suite/dar_manager.cpp:933 src/dar_suite/command_line.cpp:2062 #: src/dar_suite/command_line.cpp:2072 src/dar_suite/command_line.cpp:2075 #: src/dar_suite/command_line.cpp:2113 src/dar_suite/command_line.cpp:2139 #: src/dar_suite/command_line.cpp:2144 src/dar_suite/command_line.cpp:2148 #: src/dar_suite/dar_xform.cpp:418 
src/dar_suite/dar_xform.cpp:420 #: src/dar_suite/dar_xform.cpp:422 src/dar_suite/dar_xform.cpp:437 #, c-format msgid "\n" msgstr "" #: src/dar_suite/dar_slave.cpp:257 src/dar_suite/command_line.cpp:2076 #: src/dar_suite/dar_xform.cpp:423 #, c-format msgid "Common options:\n" msgstr "" #: src/dar_suite/dar_slave.cpp:258 #, c-format msgid "" " -i pipe to use instead of std input to read orders from dar\n" msgstr "" #: src/dar_suite/dar_slave.cpp:259 #, c-format msgid "" " -o pipe to use instead of std output to write data to dar\n" msgstr "" #: src/dar_suite/dar_slave.cpp:260 #, c-format msgid "" " -E \t command line to execute between slices of the archive\n" msgstr "" #: src/dar_suite/dar_slave.cpp:262 src/dar_suite/dar_manager.cpp:934 #: src/dar_suite/dar_xform.cpp:438 #, c-format msgid "See man page for more options.\n" msgstr "" #: src/dar_suite/dar_slave.cpp:275 src/dar_suite/dar_manager.cpp:948 #: src/dar_suite/dar_xform.cpp:452 #, c-format msgid " Using libdar %u.%u.%u built with compilation time options:\n" msgstr "" #: src/dar_suite/dar_slave.cpp:277 src/dar_suite/dar_manager.cpp:950 #: src/dar_suite/dar_xform.cpp:454 #, c-format msgid " Using libdar %u.%u built with compilation time options:\n" msgstr "" #: src/dar_suite/dar_slave.cpp:280 src/dar_suite/dar_cp.cpp:145 #: src/dar_suite/dar_manager.cpp:953 src/dar_suite/command_line.cpp:2533 #: src/dar_suite/dar_xform.cpp:457 #, c-format msgid " compiled the %s with %s version %s\n" msgstr "" #: src/dar_suite/dar_slave.cpp:281 src/dar_suite/dar_cp.cpp:146 #: src/dar_suite/dar_manager.cpp:954 src/dar_suite/command_line.cpp:2534 #: src/dar_suite/dar_xform.cpp:458 #, c-format msgid " %s is part of the Disk ARchive suite (Release %s)\n" msgstr "" #: src/dar_suite/dar_slave.cpp:282 #, c-format msgid " %s comes with ABSOLUTELY NO WARRANTY;" msgstr "" #: src/dar_suite/dar_slave.cpp:283 msgid "" " for details\n" " type `dar -W'." 
msgstr "" #: src/dar_suite/dar_slave.cpp:284 src/dar_suite/dar_manager.cpp:956 #: src/dar_suite/command_line.cpp:2536 src/dar_suite/dar_xform.cpp:460 msgid "" " This is free software, and you are welcome\n" " to redistribute it under certain conditions;" msgstr "" #: src/dar_suite/dar_slave.cpp:285 msgid "" " type `dar -L | more'\n" " for details.\n" "\n" msgstr "" #: src/dar_suite/line_tools.hpp:387 #, c-format msgid "Parse error: Unmatched `%c'" msgstr "" #: src/dar_suite/dar_cp.cpp:133 #, c-format msgid "usage : %s \n" msgstr "" #: src/dar_suite/dar_cp.cpp:147 #, c-format msgid " %s comes with ABSOLUTELY NO WARRANTY; for details type `dar -W'." msgstr "" #: src/dar_suite/dar_cp.cpp:148 msgid " This is free software, and you are welcome to redistribute it under" msgstr "" #: src/dar_suite/dar_cp.cpp:149 msgid "" " certain conditions; type `dar -L | more' for details.\n" "\n" msgstr "" #: src/dar_suite/dar_cp.cpp:153 msgid "Unexpected exception from libdar" msgstr "" #: src/dar_suite/dar_cp.cpp:170 #, c-format msgid "Memory allocation failed : %s" msgstr "" #: src/dar_suite/dar_cp.cpp:186 #, c-format msgid "Cannot open source file : %s" msgstr "" #: src/dar_suite/dar_cp.cpp:197 #, c-format msgid "Cannot open destination file : %s" msgstr "" #: src/dar_suite/dar_cp.cpp:222 #, c-format msgid "Starting the copy of %u byte(s)" msgstr "" #: src/dar_suite/dar_cp.cpp:232 #, c-format msgid "" "Error reading source file (we are at %.2f %% of data copied), trying to read " "further: %s\n" msgstr "" #: src/dar_suite/dar_cp.cpp:236 #, c-format msgid "" "Skipping done (missing %.0f byte(s)), found correct data to read, continuing " "the copy...\n" msgstr "" #: src/dar_suite/dar_cp.cpp:242 msgid "" "Reached End of File, no correct data could be found after the last error\n" msgstr "" #: src/dar_suite/dar_cp.cpp:250 #, c-format msgid "Copy finished. 
Missing %.0f byte(s) of data\n" msgstr "" #: src/dar_suite/dar_cp.cpp:251 #, c-format msgid "Which is %.2f %% of the total amount of data\n" msgstr "" #: src/dar_suite/dar_cp.cpp:260 msgid "Cannot seek back one char" msgstr "" #: src/dar_suite/dar_cp.cpp:271 src/dar_suite/dar_cp.cpp:356 msgid "Cannot write to destination, aborting" msgstr "" #: src/dar_suite/dar_cp.cpp:315 msgid "Cannot seek in file" msgstr "" #: src/dar_suite/dar_cp.cpp:359 #, c-format msgid "Non fatal error while writing to destination file, retrying\n" msgstr "" #: src/dar_suite/dar_manager.cpp:244 msgid "Decompressing and loading database header to memory..." msgstr "" #: src/dar_suite/dar_manager.cpp:246 msgid "Decompressing and loading database to memory..." msgstr "" #: src/dar_suite/dar_manager.cpp:258 msgid "Error met while processing operation: " msgstr "" #: src/dar_suite/dar_manager.cpp:331 msgid "-B option cannot be given inside a batch file" msgstr "" #: src/dar_suite/dar_manager.cpp:469 msgid "Running batch file from a batch file is not allowed" msgstr "" #: src/dar_suite/dar_manager.cpp:495 #, c-format msgid "invalid number given to -9 option: %s" msgstr "" #: src/dar_suite/dar_manager.cpp:541 msgid "-e option is only available when using -r option, aborting" msgstr "" #: src/dar_suite/dar_manager.cpp:552 msgid "-w option is only valid with -r option, ignoring it" msgstr "" #: src/dar_suite/dar_manager.cpp:559 msgid "-9 option is only valid with -A option, ignoring it" msgstr "" #: src/dar_suite/dar_manager.cpp:570 msgid "No action specified, aborting" msgstr "" #: src/dar_suite/dar_manager.cpp:582 src/dar_suite/dar_manager.cpp:586 msgid "Ignoring extra arguments on command line" msgstr "" #: src/dar_suite/dar_manager.cpp:592 src/dar_suite/dar_manager.cpp:609 msgid "Missing argument to command line, aborting" msgstr "" #: src/dar_suite/dar_manager.cpp:601 msgid "Arguments to -r must be relative path (never begin by '/')" msgstr "" #: src/dar_suite/dar_manager.cpp:623 msgid "No 
database specified, aborting" msgstr "" #: src/dar_suite/dar_manager.cpp:629 msgid "Parse error on command line (or included files): " msgstr "" #: src/dar_suite/dar_manager.cpp:643 msgid "Creating file..." msgstr "" #: src/dar_suite/dar_manager.cpp:644 msgid "Formatting file as an empty database..." msgstr "" #: src/dar_suite/dar_manager.cpp:648 msgid "Database has been successfully created empty." msgstr "" #: src/dar_suite/dar_manager.cpp:663 src/dar_suite/dar_manager.cpp:1136 msgid "Reading catalogue of the archive to add..." msgstr "" #: src/dar_suite/dar_manager.cpp:677 src/dar_suite/dar_manager.cpp:1145 msgid "Updating database with catalogue..." msgstr "" #: src/dar_suite/dar_manager.cpp:684 src/dar_suite/dar_manager.cpp:877 #: src/dar_suite/dar_manager.cpp:1148 src/dar_suite/dar_manager.cpp:1178 #: src/dar_suite/dar_manager.cpp:1272 msgid "Checking date ordering of files between archives..." msgstr "" #: src/dar_suite/dar_manager.cpp:697 src/dar_suite/dar_manager.cpp:881 #: src/dar_suite/dar_manager.cpp:1274 msgid "" "Some files do not follow chronological order when archive index increases " "withing the database, this can lead dar_manager to restored a wrong version " "of these files" msgstr "" #: src/dar_suite/dar_manager.cpp:724 src/dar_suite/dar_manager.cpp:1167 msgid "Removing information from the database..." msgstr "" #: src/dar_suite/dar_manager.cpp:744 src/dar_suite/dar_manager.cpp:764 #: src/dar_suite/dar_manager.cpp:778 src/dar_suite/dar_manager.cpp:792 msgid "Changing database header information..." msgstr "" #: src/dar_suite/dar_manager.cpp:809 msgid "" "Looking in archives for requested files, classifying files archive by " "archive..." msgstr "" #: src/dar_suite/dar_manager.cpp:856 src/dar_suite/dar_manager.cpp:1197 msgid "Computing statistics..." 
msgstr "" #: src/dar_suite/dar_manager.cpp:866 msgid "" "Negative number or zero not allowed when moving an archive inside a database" msgstr "" #: src/dar_suite/dar_manager.cpp:873 msgid "Changing database information..." msgstr "" #: src/dar_suite/dar_manager.cpp:907 #, c-format msgid "Commands:\n" msgstr "" #: src/dar_suite/dar_manager.cpp:908 #, c-format msgid " -C creates an empty database\n" msgstr "" #: src/dar_suite/dar_manager.cpp:909 #, c-format msgid " -B specify the database to use (read or modify)\n" msgstr "" #: src/dar_suite/dar_manager.cpp:910 #, c-format msgid " -A add an archive to the database\n" msgstr "" #: src/dar_suite/dar_manager.cpp:911 #, c-format msgid "" " -l\t\t gives information about the archive compiled in the database\n" msgstr "" #: src/dar_suite/dar_manager.cpp:912 #, c-format msgid " -D delete an archive from the database\n" msgstr "" #: src/dar_suite/dar_manager.cpp:913 #, c-format msgid "" " -b \t change the basename to use for the give archive number\n" msgstr "" #: src/dar_suite/dar_manager.cpp:914 #, c-format msgid "" " -p \t change the path to use for the given archive number\n" msgstr "" #: src/dar_suite/dar_manager.cpp:915 #, c-format msgid " -o specify a list of option to always pass to dar\n" msgstr "" #: src/dar_suite/dar_manager.cpp:916 #, c-format msgid " -d specify the path to dar\n" msgstr "" #: src/dar_suite/dar_manager.cpp:917 #, c-format msgid " -r \t restores the given files\n" msgstr "" #: src/dar_suite/dar_manager.cpp:918 #, c-format msgid "" " -w \t only with -r, restores in state just before the given date\n" msgstr "" #: src/dar_suite/dar_manager.cpp:919 #, c-format msgid "" " \t date format: [[[year/]month]/day-]hour:minute[:second]\n" msgstr "" #: src/dar_suite/dar_manager.cpp:920 #, c-format msgid "" " -u \t list the most recent files contained in the given " "archive\n" msgstr "" #: src/dar_suite/dar_manager.cpp:921 #, c-format msgid " -f \t list the archives where the given file is present\n" msgstr 
"" #: src/dar_suite/dar_manager.cpp:922 #, c-format msgid " -s\t\t shows the number of most recent file by archive\n" msgstr "" #: src/dar_suite/dar_manager.cpp:923 #, c-format msgid " -m \t move an archive within a given database.\n" msgstr "" #: src/dar_suite/dar_manager.cpp:924 #, c-format msgid " -i\t\t user interactive mode\n" msgstr "" #: src/dar_suite/dar_manager.cpp:925 #, c-format msgid " -c\t\t check database for dates order\n" msgstr "" #: src/dar_suite/dar_manager.cpp:926 #, c-format msgid "" " -L execute on a given database a batch of action as defined " "by\n" msgstr "" #: src/dar_suite/dar_manager.cpp:927 #, c-format msgid "\t\t the provided file.\n" msgstr "" #: src/dar_suite/dar_manager.cpp:928 src/dar_suite/dar_xform.cpp:424 #, c-format msgid " -h\t\t displays this help information\n" msgstr "" #: src/dar_suite/dar_manager.cpp:929 #, c-format msgid " -V\t\t displays software version\n" msgstr "" #: src/dar_suite/dar_manager.cpp:931 #, c-format msgid "Options:\n" msgstr "" #: src/dar_suite/dar_manager.cpp:932 #, c-format msgid " -v\t\t display more information about what is going on\n" msgstr "" #: src/dar_suite/dar_manager.cpp:955 src/dar_suite/command_line.cpp:2535 #: src/dar_suite/dar_xform.cpp:459 #, c-format msgid "" " %s comes with ABSOLUTELY NO WARRANTY; for details\n" " type `%s -W'." 
msgstr "" #: src/dar_suite/dar_manager.cpp:957 src/dar_suite/command_line.cpp:2537 #: src/dar_suite/dar_xform.cpp:461 #, c-format msgid "" " type `%s -L | more'\n" " for details.\n" "\n" msgstr "" #: src/dar_suite/dar_manager.cpp:1015 msgid "Corrupted database :" msgstr "" #: src/dar_suite/dar_manager.cpp:1067 #, c-format msgid "" "\n" "\n" "\t Dar Manager Database used [%s] : %S\n" msgstr "" #: src/dar_suite/dar_manager.cpp:1067 msgid "Saved" msgstr "" #: src/dar_suite/dar_manager.cpp:1067 msgid "Not Saved" msgstr "" #: src/dar_suite/dar_manager.cpp:1069 #, c-format msgid "" "\t Pause each %d line of output\n" "\n" msgstr "" #: src/dar_suite/dar_manager.cpp:1071 #, c-format msgid "" "\t No pause in output\n" "\n" msgstr "" #: src/dar_suite/dar_manager.cpp:1072 #, c-format msgid " l : list database contents \t A : Add an archive\n" msgstr "" #: src/dar_suite/dar_manager.cpp:1073 #, c-format msgid " u : list archive contents \t D : Remove an archive\n" msgstr "" #: src/dar_suite/dar_manager.cpp:1074 #, c-format msgid " f : give file localization \t m : modify archive order\n" msgstr "" #: src/dar_suite/dar_manager.cpp:1075 #, c-format msgid " p : modify path of archives \t b : modify basename of archives\n" msgstr "" #: src/dar_suite/dar_manager.cpp:1076 #, c-format msgid " d : path to dar \t o : options to dar\n" msgstr "" #: src/dar_suite/dar_manager.cpp:1077 #, c-format msgid " w : write changes to file \t s : database statistics\n" msgstr "" #: src/dar_suite/dar_manager.cpp:1078 #, c-format msgid "" " a : Save as \t n : pause each 'n' line (zero for no pause)\n" msgstr "" #: src/dar_suite/dar_manager.cpp:1079 #, c-format msgid "" " c : check date order\n" "\n" msgstr "" #: src/dar_suite/dar_manager.cpp:1080 #, c-format msgid "" " q : quit\n" "\n" msgstr "" #: src/dar_suite/dar_manager.cpp:1081 #, c-format msgid " Choice: " msgstr "" #: src/dar_suite/dar_manager.cpp:1098 msgid "Archive number: " msgstr "" #: src/dar_suite/dar_manager.cpp:1105 msgid "File to look 
for: " msgstr "" #: src/dar_suite/dar_manager.cpp:1109 msgid "Archive number to modify: " msgstr "" #: src/dar_suite/dar_manager.cpp:1113 #, c-format msgid "New basename for archive number %d: " msgstr "" #: src/dar_suite/dar_manager.cpp:1118 msgid "Path to dar (empty string to use the default from PATH variable): " msgstr "" #: src/dar_suite/dar_manager.cpp:1123 src/dar_suite/dar_manager.cpp:1129 #: src/dar_suite/dar_manager.cpp:1419 msgid "Compressing and writing back database to file..." msgstr "" #: src/dar_suite/dar_manager.cpp:1128 msgid "New database name: " msgstr "" #: src/dar_suite/dar_manager.cpp:1135 msgid "Archive basename (or extracted catalogue basename) to add: " msgstr "" #: src/dar_suite/dar_manager.cpp:1162 msgid "Archive number to remove: " msgstr "" #: src/dar_suite/dar_manager.cpp:1166 #, c-format msgid "Are you sure to remove archive number %d ?" msgstr "" #: src/dar_suite/dar_manager.cpp:1172 msgid "Archive number to move: " msgstr "" #: src/dar_suite/dar_manager.cpp:1174 msgid "In which position to insert this archive: " msgstr "" #: src/dar_suite/dar_manager.cpp:1183 msgid "Archive number who's path to modify: " msgstr "" #: src/dar_suite/dar_manager.cpp:1187 #, c-format msgid "New path to give to archive number %d: " msgstr "" #: src/dar_suite/dar_manager.cpp:1201 msgid "How much line to display at once: " msgstr "" #: src/dar_suite/dar_manager.cpp:1205 msgid "Checking file's dates ordering..." msgstr "" #: src/dar_suite/dar_manager.cpp:1213 src/dar_suite/dar_manager.cpp:1234 msgid "Database not saved, Do you really want to quit ?" msgstr "" #: src/dar_suite/dar_manager.cpp:1214 src/dar_suite/dar_manager.cpp:1235 #, c-format msgid "Continuing the action under process which is to exit... so we exit!" 
msgstr "" #: src/dar_suite/dar_manager.cpp:1223 #, c-format msgid "Unknown choice\n" msgstr "" #: src/dar_suite/dar_manager.cpp:1249 #, c-format msgid "re-enabling all signal handlers and continuing\n" msgstr "" #: src/dar_suite/dar_manager.cpp:1256 #, c-format msgid "Error performing the requested action: %S" msgstr "" #: src/dar_suite/dar_manager.cpp:1276 msgid "No problem found" msgstr "" #: src/dar_suite/dar_manager.cpp:1310 msgid "Opening and reading the batch file..." msgstr "" #: src/dar_suite/dar_manager.cpp:1334 #, c-format msgid "" "\n" "\tExecuting batch file line: %S\n" " " msgstr "" #: src/dar_suite/dar_manager.cpp:1358 #, c-format msgid "Syntax error in batch file: %S" msgstr "" #: src/dar_suite/dar_manager.cpp:1361 msgid "Syntax error in batch file: -C option not allowed" msgstr "" #: src/dar_suite/dar_manager.cpp:1364 msgid "Syntax error in batch file: -i option not allowed" msgstr "" #: src/dar_suite/dar_manager.cpp:1372 msgid "Aborting batch operation: " msgstr "" #: src/dar_suite/dar_manager.cpp:1383 #, c-format msgid "Enter each argument line by line, press return at the end\n" msgstr "" #: src/dar_suite/dar_manager.cpp:1384 #, c-format msgid "To terminate enter an empty line\n" msgstr "" #: src/dar_suite/line_tools.cpp:101 src/dar_suite/line_tools.cpp:119 msgid "Index out of range" msgstr "" #: src/dar_suite/line_tools.cpp:244 msgid "Syntax error in --retry-on-change argument: " msgstr "" #: src/dar_suite/line_tools.cpp:259 msgid "Unknown TLV record type" msgstr "" #: src/dar_suite/line_tools.cpp:300 src/dar_suite/line_tools.cpp:330 msgid "Unbalanced parenthesis in expression: " msgstr "" #: src/dar_suite/line_tools.cpp:398 #, c-format msgid "Unknown macro %%%d in user comment" msgstr "" #: src/dar_suite/line_tools.cpp:590 #, c-format msgid "Invalid number in string: %S" msgstr "" #: src/dar_suite/line_tools.cpp:815 msgid "unknown cryptographic algorithm: " msgstr "" #: src/dar_suite/line_tools.cpp:835 #, c-format msgid "" "| Signature Status| 
Key Status | Finger Print " "| Signature Date |" msgstr "" #: src/dar_suite/line_tools.cpp:1186 msgid "date before 1970 is not allowed" msgstr "" #: src/dar_suite/line_tools.cpp:1191 msgid "Incorrect month" msgstr "" #: src/dar_suite/line_tools.cpp:1196 msgid "Incorrect day of month" msgstr "" #: src/dar_suite/line_tools.cpp:1201 msgid "Incorrect hour" msgstr "" #: src/dar_suite/line_tools.cpp:1206 msgid "Incorrect minute" msgstr "" #: src/dar_suite/line_tools.cpp:1211 msgid "Incorrect second" msgstr "" #: src/dar_suite/line_tools.cpp:1215 msgid "Bad formatted date expression" msgstr "" #: src/dar_suite/line_tools.cpp:1330 msgid "Given date must be in the past" msgstr "" #: src/dar_suite/line_tools.cpp:1345 #, c-format msgid " Libz compression (gzip) : %s" msgstr "" #: src/dar_suite/line_tools.cpp:1346 #, c-format msgid " Libbz2 compression (bzip2) : %s" msgstr "" #: src/dar_suite/line_tools.cpp:1347 #, c-format msgid " Liblzo2 compression (lzo) : %s" msgstr "" #: src/dar_suite/line_tools.cpp:1348 #, c-format msgid " Liblzma compression (xz) : %s" msgstr "" #: src/dar_suite/line_tools.cpp:1349 #, c-format msgid " Strong encryption (libgcrypt): %s" msgstr "" #: src/dar_suite/line_tools.cpp:1350 #, c-format msgid " Public key ciphers (gpgme) : %s" msgstr "" #: src/dar_suite/line_tools.cpp:1351 #, c-format msgid " Extended Attributes support : %s" msgstr "" #: src/dar_suite/line_tools.cpp:1352 #, c-format msgid " Large files support (> 2GB) : %s" msgstr "" #: src/dar_suite/line_tools.cpp:1353 #, c-format msgid " ext2fs NODUMP flag support : %s" msgstr "" #: src/dar_suite/line_tools.cpp:1355 #, c-format msgid " Integer size used : unlimited" msgstr "" #: src/dar_suite/line_tools.cpp:1357 #, c-format msgid " Integer size used : %d bits" msgstr "" #: src/dar_suite/line_tools.cpp:1358 #, c-format msgid " Thread safe support : %s" msgstr "" #: src/dar_suite/line_tools.cpp:1359 #, c-format msgid " Furtive read mode support : %s" msgstr "" #: 
src/dar_suite/line_tools.cpp:1360 #, c-format msgid " Linux ext2/3/4 FSA support : %s" msgstr "" #: src/dar_suite/line_tools.cpp:1361 #, c-format msgid " Mac OS X HFS+ FSA support : %s" msgstr "" #: src/dar_suite/line_tools.cpp:1366 msgid "big" msgstr "" #: src/dar_suite/line_tools.cpp:1369 msgid "little" msgstr "" #: src/dar_suite/line_tools.cpp:1372 msgid "error!" msgstr "" #: src/dar_suite/line_tools.cpp:1377 #, c-format msgid " Detected system/CPU endian : %s" msgstr "" #: src/dar_suite/line_tools.cpp:1378 #, c-format msgid " Posix fadvise support : %s" msgstr "" #: src/dar_suite/line_tools.cpp:1379 #, c-format msgid " Large dir. speed optimi. : %s" msgstr "" #: src/dar_suite/line_tools.cpp:1384 #, c-format msgid " Timestamp read accuracy : %S" msgstr "" #: src/dar_suite/line_tools.cpp:1389 #, c-format msgid " Timestamp write accuracy : %S" msgstr "" #: src/dar_suite/line_tools.cpp:1390 #, c-format msgid " Restores dates of symlinks : %s" msgstr "" #: src/dar_suite/line_tools.cpp:1395 #, c-format msgid " Multiple threads (libthreads): %s %s" msgstr "" #: src/dar_suite/line_tools.cpp:1396 #, c-format msgid " Delta compression support : %s" msgstr "" #: src/dar_suite/line_tools.cpp:1397 #, c-format msgid " Remote repository support : %s" msgstr "" #: src/dar_suite/line_tools.cpp:1457 #, c-format msgid "" "Warning, %S seems more to be a slice name than a base name. Do you want to " "replace it by %S ?" msgstr "" #: src/dar_suite/line_tools.cpp:1462 #, c-format msgid "OK, keeping %S as basename" msgstr "" #: src/dar_suite/line_tools.cpp:1610 msgid "Error while fetching hostname: " msgstr "" #: src/dar_suite/line_tools.cpp:1677 msgid "unknown name give for delta signature block len function" msgstr "" #: src/dar_suite/line_tools.cpp:1751 src/dar_suite/line_tools.cpp:1759 #, c-format msgid "Error met while retrieving current time: %S" msgstr "" #: src/dar_suite/dar_suite.cpp:146 #, c-format msgid "" "We have linked with an incompatible version of libdar. 
Expecting version %d." "%d.x but having linked with version %d.%d.%d" msgstr "" #: src/dar_suite/dar_suite.cpp:158 src/python/pybind11_libdar.cpp:206 msgid "NOT YET IMPLEMENTED FEATURE has been used: " msgstr "" #: src/dar_suite/dar_suite.cpp:159 msgid "Please check documentation or upgrade your software if available" msgstr "" #: src/dar_suite/dar_suite.cpp:164 src/python/pybind11_libdar.cpp:210 msgid "SEEMS TO BE A HARDWARE PROBLEM: " msgstr "" #: src/dar_suite/dar_suite.cpp:165 msgid "Please check your hardware" msgstr "" #: src/dar_suite/dar_suite.cpp:170 src/python/pybind11_libdar.cpp:214 msgid "Lack of SECURED memory to achieve the operation, aborting operation" msgstr "" #: src/dar_suite/dar_suite.cpp:175 src/dar_suite/dar_suite.cpp:180 #: src/python/pybind11_libdar.cpp:218 src/python/pybind11_libdar.cpp:222 msgid "Lack of memory to achieve the operation, aborting operation" msgstr "" #: src/dar_suite/dar_suite.cpp:185 src/dar_suite/dar_suite.cpp:230 #: src/python/pybind11_libdar.cpp:226 msgid "FATAL error, aborting operation: " msgstr "" #: src/dar_suite/dar_suite.cpp:190 src/python/pybind11_libdar.cpp:230 msgid "Aborting program. User refused to continue while asking: " msgstr "" #: src/dar_suite/dar_suite.cpp:195 src/python/pybind11_libdar.cpp:234 msgid "Program has been aborted for the following reason: " msgstr "" #: src/dar_suite/dar_suite.cpp:205 src/python/pybind11_libdar.cpp:242 msgid "Aborting program. An error occurred concerning user command execution: " msgstr "" #: src/dar_suite/dar_suite.cpp:210 src/python/pybind11_libdar.cpp:246 msgid "Aborting program. An error occurred while calling libdar: " msgstr "" #: src/dar_suite/dar_suite.cpp:215 src/dar_suite/dar_suite.cpp:220 #: src/python/pybind11_libdar.cpp:250 src/python/pybind11_libdar.cpp:254 msgid "Aborting program. " msgstr "" #: src/dar_suite/dar_suite.cpp:225 src/python/pybind11_libdar.cpp:258 msgid "" "Aborting program. 
The requested operation needs a feature that has been " "disabled at compilation time: " msgstr "" #: src/dar_suite/dar_suite.cpp:235 src/python/pybind11_libdar.cpp:266 msgid "FATAL error during network communication, aborting operation: " msgstr "" #: src/dar_suite/dar_suite.cpp:241 src/dar_suite/dar_suite.cpp:261 msgid "INTERNAL ERROR, PLEASE REPORT THE PREVIOUS OUTPUT TO MAINTAINER" msgstr "" #: src/dar_suite/dar_suite.cpp:260 msgid "CAUGHT A NON (LIB)DAR EXCEPTION" msgstr "" #: src/dar_suite/dar_suite.cpp:268 msgid "" "SANITY CHECK: AT LEAST ONE THREAD_CANCELLATION OBJECT HAS NOT BEEN DESTROYED " "AND REMAINS IN MEMORY WHILE THE PROGRAM REACHED ITS END" msgstr "" #: src/dar_suite/dar_suite.cpp:284 #, c-format msgid "Long options support : %s\n" msgstr "" #: src/dar_suite/dar_suite.cpp:300 #, c-format msgid "Received signal: %s" msgstr "" #: src/dar_suite/dar_suite.cpp:302 #, c-format msgid "Received signal: %d" msgstr "" #: src/dar_suite/dar_suite.cpp:308 msgid "Archive fast termination engaged" msgstr "" #: src/dar_suite/dar_suite.cpp:312 msgid "Archive delayed termination engaged" msgstr "" #: src/dar_suite/dar_suite.cpp:316 msgid "" "Disabling signal handler, the next time this signal is received the program " "will abort immediately" msgstr "" #: src/dar_suite/dar_suite.cpp:320 msgid "" "Cannot cleanly abort the operation, thread-safe support is missing, will " "thus abruptly stop the program, generated archive may be unusable" msgstr "" #: src/dar_suite/dar.cpp:267 msgid "Considering the (first) archive of reference:" msgstr "" #: src/dar_suite/dar.cpp:269 msgid "" "Sequential reading of the archive of reference is not possible when delta " "difference is requested, you need to read the archive of reference in direct " "access mode (default mode)" msgstr "" #: src/dar_suite/dar.cpp:292 src/dar_suite/dar.cpp:343 msgid "" "Using sequential reading mode for archive source is not possible for merging " "operation" msgstr "" #: src/dar_suite/dar.cpp:320 msgid 
"Considering the second (alias auxiliary) archive of reference:" msgstr "" #: src/dar_suite/dar.cpp:401 src/dar_suite/dar.cpp:504 #: src/dar_suite/dar.cpp:562 src/dar_suite/dar.cpp:642 #: src/dar_suite/dar.cpp:733 msgid "Archive signature is only possible with gnupg encryption" msgstr "" #: src/dar_suite/dar.cpp:594 msgid "" "Making room in memory (releasing memory used by archive of reference)..." msgstr "" #: src/dar_suite/dar.cpp:613 msgid "Now performing on-fly isolation..." msgstr "" #: src/dar_suite/dar.cpp:871 msgid "All files asked could not be restored" msgstr "" #: src/dar_suite/dar.cpp:954 msgid "Some file comparisons failed" msgstr "" #: src/dar_suite/dar.cpp:1029 msgid "" "Some files are corrupted in the archive and it will not be possible to " "restore them" msgstr "" #: src/dar_suite/dar.cpp:1082 msgid "Continue listing archive contents?" msgstr "" #: src/dar_suite/dar.cpp:1104 src/dar_suite/dar.cpp:1109 msgid "Final memory cleanup..." msgstr "" #: src/dar_suite/dar.cpp:1131 #, c-format msgid " %i inode(s) saved\n" msgstr "" #: src/dar_suite/dar.cpp:1132 #, c-format msgid " including %i hard link(s) treated\n" msgstr "" #: src/dar_suite/dar.cpp:1133 #, c-format msgid "" " %i inode(s) changed at the moment of the backup and could not be saved " "properly\n" msgstr "" #: src/dar_suite/dar.cpp:1134 #, c-format msgid " %i byte(s) have been wasted in the archive to resave changing files" msgstr "" #: src/dar_suite/dar.cpp:1135 #, c-format msgid " %i inode(s) with only metadata changed\n" msgstr "" #: src/dar_suite/dar.cpp:1136 #, c-format msgid " %i inode(s) not saved (no inode/file change)\n" msgstr "" #: src/dar_suite/dar.cpp:1137 #, c-format msgid " %i inode(s) failed to be saved (filesystem error)\n" msgstr "" #: src/dar_suite/dar.cpp:1138 src/dar_suite/dar.cpp:1168 #: src/dar_suite/dar.cpp:1228 #, c-format msgid " %i inode(s) ignored (excluded by filters)\n" msgstr "" #: src/dar_suite/dar.cpp:1139 #, c-format msgid " %i inode(s) recorded as deleted 
from reference backup\n" msgstr "" #: src/dar_suite/dar.cpp:1141 src/dar_suite/dar.cpp:1172 #: src/dar_suite/dar.cpp:1236 #, c-format msgid " Total number of inode(s) considered: %i\n" msgstr "" #: src/dar_suite/dar.cpp:1144 src/dar_suite/dar.cpp:1232 #, c-format msgid " EA saved for %i inode(s)\n" msgstr "" #: src/dar_suite/dar.cpp:1146 src/dar_suite/dar.cpp:1234 #, c-format msgid " FSA saved for %i inode(s)\n" msgstr "" #: src/dar_suite/dar.cpp:1164 #, c-format msgid " %i inode(s) restored\n" msgstr "" #: src/dar_suite/dar.cpp:1165 #, c-format msgid " including %i hard link(s)\n" msgstr "" #: src/dar_suite/dar.cpp:1166 #, c-format msgid " %i inode(s) not restored (not saved in archive)\n" msgstr "" #: src/dar_suite/dar.cpp:1167 #, c-format msgid " %i inode(s) not restored (overwriting policy decision)\n" msgstr "" #: src/dar_suite/dar.cpp:1169 #, c-format msgid " %i inode(s) failed to restore (filesystem error)\n" msgstr "" #: src/dar_suite/dar.cpp:1170 #, c-format msgid " %i inode(s) deleted\n" msgstr "" #: src/dar_suite/dar.cpp:1175 #, c-format msgid " EA restored for %i inode(s)\n" msgstr "" #: src/dar_suite/dar.cpp:1177 #, c-format msgid " FSA restored for %i inode(s)\n" msgstr "" #: src/dar_suite/dar.cpp:1189 src/dar_suite/dar.cpp:1206 #, c-format msgid " %i item(s) treated\n" msgstr "" #: src/dar_suite/dar.cpp:1190 #, c-format msgid " %i item(s) do not match those on filesystem\n" msgstr "" #: src/dar_suite/dar.cpp:1191 src/dar_suite/dar.cpp:1208 #, c-format msgid " %i item(s) ignored (excluded by filters)\n" msgstr "" #: src/dar_suite/dar.cpp:1193 src/dar_suite/dar.cpp:1210 #, c-format msgid " Total number of items considered: %i\n" msgstr "" #: src/dar_suite/dar.cpp:1207 #, c-format msgid " %i item(s) with error\n" msgstr "" #: src/dar_suite/dar.cpp:1226 #, c-format msgid " %i inode(s) added to archive\n" msgstr "" #: src/dar_suite/dar.cpp:1227 #, c-format msgid " with %i hard link(s) recorded\n" msgstr "" #: src/dar_suite/dar.cpp:1229 #, c-format msgid " 
%i inode(s) recorded as deleted\n" msgstr "" #: src/dar_suite/crit_action_cmd_line.cpp:115 msgid "Missing } in conditional statement: " msgstr "" #: src/dar_suite/crit_action_cmd_line.cpp:119 msgid "Missing [ after } in conditional statement: " msgstr "" #: src/dar_suite/crit_action_cmd_line.cpp:122 msgid "Missing ] in conditional statement: " msgstr "" #: src/dar_suite/crit_action_cmd_line.cpp:200 #, c-format msgid "Unknown policy for data '%c' in expression %S" msgstr "" #: src/dar_suite/crit_action_cmd_line.cpp:233 #, c-format msgid "Unknown policy for EA '%c' in expression %S" msgstr "" #: src/dar_suite/crit_action_cmd_line.cpp:243 msgid "Unknown expression in overwriting policy: " msgstr "" #: src/dar_suite/crit_action_cmd_line.cpp:426 msgid "Unknown atomic operator, or atomic not allowed with an argument: " msgstr "" #: src/dar_suite/crit_action_cmd_line.cpp:508 msgid "Unknown character found while parsing conditional string: " msgstr "" #: src/dar_suite/crit_action_cmd_line.cpp:517 msgid "Unknown expression found while parsing conditional string: " msgstr "" #: src/dar_suite/hide_file.cpp:32 msgid "hide_file cannot be initialized with write-only file" msgstr "" #: src/dar_suite/command_line.cpp:364 msgid "User target found on command line or included file(s):" msgstr "" #: src/dar_suite/command_line.cpp:373 msgid "No user target found on command line" msgstr "" #: src/dar_suite/command_line.cpp:391 #, c-format msgid "Given user target(s) could not be found: %S" msgstr "" #: src/dar_suite/command_line.cpp:395 #, c-format msgid "Missing -c -x -d -t -l -C -+ option, see `%S -h' for help" msgstr "" #: src/dar_suite/command_line.cpp:397 msgid "" "Slicing (-s option), is not compatible with archive on standard output (\"-" "\" as filename)" msgstr "" #: src/dar_suite/command_line.cpp:418 msgid "-af option is only available with -c" msgstr "" #: src/dar_suite/command_line.cpp:420 msgid "-A option is not available with -l" msgstr "" #: 
src/dar_suite/command_line.cpp:422 msgid "-T option is only available with -l" msgstr "" #: src/dar_suite/command_line.cpp:424 msgid "with -C option, -A option is mandatory" msgstr "" #: src/dar_suite/command_line.cpp:426 msgid "with -+ option, -A option is mandatory" msgstr "" #: src/dar_suite/command_line.cpp:428 msgid "-wa is only useful with -x option" msgstr "" #: src/dar_suite/command_line.cpp:431 msgid "-o is mandatory when using \"-A -\" with \"-c -\" \"-C -\" or \"-+ -\"" msgstr "" #: src/dar_suite/command_line.cpp:441 msgid "-z option needs only to be used with -c -C or -+ options" msgstr "" #: src/dar_suite/command_line.cpp:443 msgid "-S option requires the use of -s" msgstr "" #: src/dar_suite/command_line.cpp:445 msgid "ignoring -O option, as it is useless in this situation" msgstr "" #: src/dar_suite/command_line.cpp:448 msgid "-F is only useful with -A option, for the archive of reference" msgstr "" #: src/dar_suite/command_line.cpp:451 msgid "-J is only useful with -A option, for the archive of reference" msgstr "" #: src/dar_suite/command_line.cpp:454 msgid "-f in only available with -x option, ignoring" msgstr "" #: src/dar_suite/command_line.cpp:456 msgid "-m is only useful with -c" msgstr "" #: src/dar_suite/command_line.cpp:462 msgid "-H is only useful with -A option when making a backup" msgstr "" #: src/dar_suite/command_line.cpp:468 msgid "-H is only useful with -r option when extracting" msgstr "" #: src/dar_suite/command_line.cpp:472 msgid "-H is only useful with -c, -d or -x" msgstr "" #: src/dar_suite/command_line.cpp:476 msgid "-as is only available with -l, ignoring -as option" msgstr "" #: src/dar_suite/command_line.cpp:478 msgid "-e is only useful with -x, -c or -+ options" msgstr "" #: src/dar_suite/command_line.cpp:480 msgid "-ac is only useful with -c or -d" msgstr "" #: src/dar_suite/command_line.cpp:482 msgid "-M is only useful with -c" msgstr "" #: src/dar_suite/command_line.cpp:484 msgid "The snapshot backup (-A +) is only 
available with -c option, ignoring" msgstr "" #: src/dar_suite/command_line.cpp:486 msgid "" "The Cache Directory Tagging Standard is only useful while performing a " "backup, ignoring it here" msgstr "" #: src/dar_suite/command_line.cpp:489 msgid "-@ is only available with -+ and -c options" msgstr "" #: src/dar_suite/command_line.cpp:491 msgid "-$ is only available with -+ option and -c options" msgstr "" #: src/dar_suite/command_line.cpp:493 msgid "-~ is only available with -+ and -c options" msgstr "" #: src/dar_suite/command_line.cpp:495 #, c-format msgid "-%% is only available with -+ option" msgstr "" #: src/dar_suite/command_line.cpp:498 msgid "" "-$ is only useful with -@ option, for the auxiliary archive of reference" msgstr "" #: src/dar_suite/command_line.cpp:500 #, c-format msgid "" "-%% is only useful with -@ option, for the auxiliary archive of reference" msgstr "" #: src/dar_suite/command_line.cpp:502 msgid "" "-~ is only useful with -@ option, for the auxiliary archive of reference" msgstr "" #: src/dar_suite/command_line.cpp:505 msgid "-ak is only available while merging (operation -+), ignoring -ak" msgstr "" #: src/dar_suite/command_line.cpp:510 msgid "" "Compression option (-z option) is useless and ignored when using -ak option" msgstr "" #: src/dar_suite/command_line.cpp:517 msgid "" "--sparse-file-min-size only available while saving or merging archives, " "ignoring" msgstr "" #: src/dar_suite/command_line.cpp:520 msgid "" "To use --sparse-file-min-size while merging archive, you need to use -ah " "option too, please check man page for details" msgstr "" #: src/dar_suite/command_line.cpp:526 msgid "-k option is only useful with -x option" msgstr "" #: src/dar_suite/command_line.cpp:529 msgid "-konly and -kignore cannot be used at the same time" msgstr "" #: src/dar_suite/command_line.cpp:532 msgid "-p and -Q options are mutually exclusives" msgstr "" #: src/dar_suite/command_line.cpp:535 msgid "-vf is only useful with -c option" msgstr "" #: 
src/dar_suite/command_line.cpp:540 msgid "-A option is required with -y option" msgstr "" #: src/dar_suite/command_line.cpp:542 msgid "'-A +' is not possible with -y option" msgstr "" #: src/dar_suite/command_line.cpp:544 msgid "-af is not possible with -y option" msgstr "" #: src/dar_suite/command_line.cpp:546 msgid "-k option is not possible with -y option" msgstr "" #: src/dar_suite/command_line.cpp:548 msgid "" "-X, -I, -P, -g, -], -[ and any other file selection relative commands are " "not possible with -y option" msgstr "" #: src/dar_suite/command_line.cpp:550 msgid "-D option is useless with -y option" msgstr "" #: src/dar_suite/command_line.cpp:552 msgid "-r option is useless with -y option" msgstr "" #: src/dar_suite/command_line.cpp:554 msgid "" "-u, -U, -P, -g, -], -[ and any other EA selection relative commands are not " "possible with -y option" msgstr "" #: src/dar_suite/command_line.cpp:556 msgid "-O option is not possible with -y option" msgstr "" #: src/dar_suite/command_line.cpp:558 msgid "-H option is useless with -y option" msgstr "" #: src/dar_suite/command_line.cpp:560 msgid "-as option is useless with -y option" msgstr "" #: src/dar_suite/command_line.cpp:562 msgid "-ae option is useless with -y option" msgstr "" #: src/dar_suite/command_line.cpp:564 msgid "-ad option is useless with -y option" msgstr "" #: src/dar_suite/command_line.cpp:566 msgid "-asecu option is useless with -y option" msgstr "" #: src/dar_suite/command_line.cpp:568 msgid "-ai option is useless with -y option" msgstr "" #: src/dar_suite/command_line.cpp:570 msgid "--alter=do-not-compare-symlink-mtime option is useless with -y option" msgstr "" #: src/dar_suite/command_line.cpp:572 msgid "-M option is useless with -y option" msgstr "" #: src/dar_suite/command_line.cpp:574 msgid "-@ option is useless with -y option" msgstr "" #: src/dar_suite/command_line.cpp:576 msgid "-/ option is useless with -y option" msgstr "" #: src/dar_suite/command_line.cpp:578 msgid "-< and -> 
options are useless with -y option" msgstr "" #: src/dar_suite/command_line.cpp:580 msgid "-5 option is useless with -y option" msgstr "" #: src/dar_suite/command_line.cpp:582 msgid "-8 option is useless with -y option" msgstr "" #: src/dar_suite/command_line.cpp:584 msgid "-{ and -} options are useless with -y option" msgstr "" #: src/dar_suite/command_line.cpp:586 msgid "-\\ option is useless with -y option" msgstr "" #: src/dar_suite/command_line.cpp:588 msgid "compression (-z option) cannot be changed with -y option" msgstr "" #: src/dar_suite/command_line.cpp:590 msgid "-ak option is useless with -y option" msgstr "" #: src/dar_suite/command_line.cpp:592 msgid "-ah option is useless with -y option" msgstr "" #: src/dar_suite/command_line.cpp:594 msgid "--sequential-read is useless with -y option" msgstr "" #: src/dar_suite/command_line.cpp:596 msgid "--alter=tape-marks is impossible with -y option" msgstr "" #: src/dar_suite/command_line.cpp:641 msgid "" "-Y and -Z are only useful with compression (-z option), ignoring any -Y and -" "Z option" msgstr "" #: src/dar_suite/command_line.cpp:643 msgid "-m is only useful with compression (-z option), ignoring -m" msgstr "" #: src/dar_suite/command_line.cpp:693 msgid "" "-= option is valid only while saving files, thus in conjunction with -c " "option, ignoring" msgstr "" #: src/dar_suite/command_line.cpp:695 msgid "" "-= option will be ignored as it is useless if you do not specify to which " "files or directories this backup hook is to be applied, thanks to -< and -> " "options. See man page for more details." msgstr "" #: src/dar_suite/command_line.cpp:701 msgid "" "backup hook feature (-<, -> or -= options) is only available when saving " "files, ignoring" msgstr "" #: src/dar_suite/command_line.cpp:769 msgid "-/ option is only useful with -+ option, ignoring" msgstr "" #: src/dar_suite/command_line.cpp:780 msgid "" "-. 
option is only useful when merging, creating or isolating an archive, " "ignoring" msgstr "" #: src/dar_suite/command_line.cpp:785 #, c-format msgid "" "The following user comment will be placed in clear text in the archive: %S" msgstr "" #: src/dar_suite/command_line.cpp:804 msgid "" "Warning: libdar multi-threading is an experimental and unsupported feature, " "read man page about -G option for more information" msgstr "" #: src/dar_suite/command_line.cpp:809 msgid "Parse error: " msgstr "" #: src/dar_suite/command_line.cpp:854 msgid " Only one option of -c -d -t -l -C -x or -+ is allowed" msgstr "" #: src/dar_suite/command_line.cpp:907 msgid "Only one -A option is allowed" msgstr "" #: src/dar_suite/command_line.cpp:934 msgid "Error while parsing -A argument as a date: " msgstr "" #: src/dar_suite/command_line.cpp:1015 msgid "Choose only one compression algorithm" msgstr "" #: src/dar_suite/command_line.cpp:1021 msgid "-w option is useless with -n" msgstr "" #: src/dar_suite/command_line.cpp:1033 msgid "Unknown argument given to -w: " msgstr "" #: src/dar_suite/command_line.cpp:1053 src/dar_suite/command_line.cpp:1060 #: src/dar_suite/command_line.cpp:1067 msgid "\"-k\" (or \"-kignore\") and \"-konly\" are not compatible" msgstr "" #: src/dar_suite/command_line.cpp:1071 #, c-format msgid "Unknown argument given to -k : %s" msgstr "" #: src/dar_suite/command_line.cpp:1075 msgid "Only one -R option is allowed" msgstr "" #: src/dar_suite/command_line.cpp:1102 src/dar_suite/dar_xform.cpp:228 msgid "Only one -s option is allowed" msgstr "" #: src/dar_suite/command_line.cpp:1127 src/dar_suite/command_line.cpp:1145 #: src/dar_suite/dar_xform.cpp:253 src/dar_suite/dar_xform.cpp:271 msgid "Only one -S option is allowed" msgstr "" #: src/dar_suite/command_line.cpp:1135 msgid "" "Giving to -S option the same value as the one given to -s option is useless" msgstr "" #: src/dar_suite/command_line.cpp:1210 msgid "-r is useless with -n" msgstr "" #: 
src/dar_suite/command_line.cpp:1291 msgid "Invalid argument given to -T option, expecting [:]" msgstr "" #: src/dar_suite/command_line.cpp:1307 #, c-format msgid "Invalid hash algorithm provided to -T opton: %s" msgstr "" #: src/dar_suite/command_line.cpp:1401 #, c-format msgid "" "File inclusion loop detected. The file %s includes itself directly or " "through other files (-B option)" msgstr "" #: src/dar_suite/command_line.cpp:1418 #, c-format msgid "Error reading included file (%s): " msgstr "" #: src/dar_suite/command_line.cpp:1423 #, c-format msgid "Error in included file (%s): " msgstr "" #: src/dar_suite/command_line.cpp:1439 src/dar_suite/command_line.cpp:2841 #: src/dar_suite/command_line.cpp:2923 #, c-format msgid "In included file %S: " msgstr "" #: src/dar_suite/command_line.cpp:1482 #, c-format msgid "" "%d is the default value for -m, no need to specify it on command line, " "ignoring" msgstr "" #: src/dar_suite/command_line.cpp:1499 msgid "" "--nodump feature has not been activated at compilation time, it is thus not " "available" msgstr "" #: src/dar_suite/command_line.cpp:1515 msgid "Argument given to -H is not a positive integer number" msgstr "" #: src/dar_suite/command_line.cpp:1521 src/dar_suite/dar_xform.cpp:318 msgid "-a option requires an argument" msgstr "" #: src/dar_suite/command_line.cpp:1568 msgid "" "-ak option need not be specified more than once, ignoring extra -ak options" msgstr "" #: src/dar_suite/command_line.cpp:1574 msgid "-af must be present before -A option not after!" 
msgstr "" #: src/dar_suite/command_line.cpp:1576 msgid "" "-af option need not be specified more than once, ignoring extra -af options" msgstr "" #: src/dar_suite/command_line.cpp:1614 #, c-format msgid "Unknown argument given to -a : %s" msgstr "" #: src/dar_suite/command_line.cpp:1631 msgid "libthreadar required for multithreaded execution" msgstr "" #: src/dar_suite/command_line.cpp:1659 msgid "Only one -@ option is allowed" msgstr "" #: src/dar_suite/command_line.cpp:1737 msgid "Syntax error in overwriting policy: " msgstr "" #: src/dar_suite/command_line.cpp:1770 #, c-format msgid "" "%d is the default value for --sparse-file-min-size, no need to specify it on " "command line, ignoring" msgstr "" #: src/dar_suite/command_line.cpp:1791 #, c-format msgid "Unknown argument given to -2 : %s" msgstr "" #: src/dar_suite/command_line.cpp:1828 src/dar_suite/dar_xform.cpp:336 msgid "Missing argument to --hash" msgstr "" #: src/dar_suite/command_line.cpp:1830 src/dar_suite/dar_xform.cpp:343 msgid "Unknown parameter given to --hash option: " msgstr "" #: src/dar_suite/command_line.cpp:1843 msgid "Error while parsing --min-digits option: " msgstr "" #: src/dar_suite/command_line.cpp:1849 msgid "Missing argument to --backup-hook-execute" msgstr "" #: src/dar_suite/command_line.cpp:1920 msgid "Missing argument to --delta" msgstr "" #: src/dar_suite/command_line.cpp:1942 msgid "missing function name argument in string" msgstr "" #: src/dar_suite/command_line.cpp:1948 msgid "missing multiplier argument in string" msgstr "" #: src/dar_suite/command_line.cpp:1966 src/dar_suite/command_line.cpp:1977 msgid "too large value provided for the min block size" msgstr "" #: src/dar_suite/command_line.cpp:1982 msgid "unexpected extra argument in string" msgstr "" #: src/dar_suite/command_line.cpp:1988 msgid "Unknown parameter given to --delta option: " msgstr "" #: src/dar_suite/command_line.cpp:2035 msgid "Unknown parameter given to --modified-data-detection option: " msgstr "" #: 
src/dar_suite/command_line.cpp:2040 src/dar_suite/command_line.cpp:2042 #, c-format msgid "Unknown option -%c" msgstr "" #: src/dar_suite/command_line.cpp:2059 #, c-format msgid "" "usage: %s [ -c | -x | -d | -t | -l | -C | -+ ] [/] " "[options...]\n" msgstr "" #: src/dar_suite/command_line.cpp:2063 #, c-format msgid "Commands are:\n" msgstr "" #: src/dar_suite/command_line.cpp:2064 #, c-format msgid " -c creates an archive\n" msgstr "" #: src/dar_suite/command_line.cpp:2065 #, c-format msgid " -x extracts files from the archive\n" msgstr "" #: src/dar_suite/command_line.cpp:2066 #, c-format msgid " -d compares the archive with the existing filesystem\n" msgstr "" #: src/dar_suite/command_line.cpp:2067 #, c-format msgid " -t tests the archive integrity\n" msgstr "" #: src/dar_suite/command_line.cpp:2068 #, c-format msgid " -l lists the contents of the archive\n" msgstr "" #: src/dar_suite/command_line.cpp:2069 #, c-format msgid " -C isolates the catalogue from an archive\n" msgstr "" #: src/dar_suite/command_line.cpp:2070 #, c-format msgid " -+ merge two archives / create a sub archive\n" msgstr "" #: src/dar_suite/command_line.cpp:2071 #, c-format msgid " -y repair a truncated archive\n" msgstr "" #: src/dar_suite/command_line.cpp:2073 #, c-format msgid " -h displays this help information\n" msgstr "" #: src/dar_suite/command_line.cpp:2074 #, c-format msgid " -V displays version information\n" msgstr "" #: src/dar_suite/command_line.cpp:2077 #, c-format msgid " -v[s|t|d|m|f|a] verbose output\n" msgstr "" #: src/dar_suite/command_line.cpp:2078 #, c-format msgid " -q\t\t suppress final statistics report\n" msgstr "" #: src/dar_suite/command_line.cpp:2079 #, c-format msgid " -vs\t\t display skipped files\n" msgstr "" #: src/dar_suite/command_line.cpp:2080 #, c-format msgid " -R \t filesystem root directory (current dir by default)\n" msgstr "" #: src/dar_suite/command_line.cpp:2081 #, c-format msgid "" " -X \t files to exclude from the operation (none by default)\n" 
msgstr "" #: src/dar_suite/command_line.cpp:2082 #, c-format msgid " -I \t files to include in the operation (all by default)\n" msgstr "" #: src/dar_suite/command_line.cpp:2083 #, c-format msgid " -P \t subdirectory to exclude from the operation\n" msgstr "" #: src/dar_suite/command_line.cpp:2084 #, c-format msgid " -g \t subdirectory to include in the operation\n" msgstr "" #: src/dar_suite/command_line.cpp:2085 #, c-format msgid " -[ filename contains a list of files to include\n" msgstr "" #: src/dar_suite/command_line.cpp:2086 #, c-format msgid " -] \t filename contains a list of files to exclude\n" msgstr "" #: src/dar_suite/command_line.cpp:2087 src/dar_suite/dar_xform.cpp:429 #, c-format msgid " -n\t\t don't overwrite files\n" msgstr "" #: src/dar_suite/command_line.cpp:2088 src/dar_suite/dar_xform.cpp:430 #, c-format msgid " -w\t\t don't warn before overwriting files\n" msgstr "" #: src/dar_suite/command_line.cpp:2089 #, c-format msgid " -wa\t\t don't warn before overwriting and removing files\n" msgstr "" #: src/dar_suite/command_line.cpp:2090 src/dar_suite/dar_xform.cpp:431 #, c-format msgid " -b\t\t ring the terminal bell when user action is required\n" msgstr "" #: src/dar_suite/command_line.cpp:2091 #, c-format msgid "" " -O[ignore-owner | mtime | inode-type] do not consider user and group\n" msgstr "" #: src/dar_suite/command_line.cpp:2092 #, c-format msgid "\t\t ownership\n" msgstr "" #: src/dar_suite/command_line.cpp:2093 #, c-format msgid " -H [N]\t ignore shift in dates of an exact number of hours\n" msgstr "" #: src/dar_suite/command_line.cpp:2094 #, c-format msgid " -E \t command to execute between slices\n" msgstr "" #: src/dar_suite/command_line.cpp:2095 #, c-format msgid " -F \t same as -E but for the archive of reference\n" msgstr "" #: src/dar_suite/command_line.cpp:2096 #, c-format msgid " -u \t mask to ignore certain EA\n" msgstr "" #: src/dar_suite/command_line.cpp:2097 #, c-format msgid " -U \t mask to allow certain EA\n" msgstr "" #: 
src/dar_suite/command_line.cpp:2098 #, c-format msgid " -K \t use as key to encrypt/decrypt\n" msgstr "" #: src/dar_suite/command_line.cpp:2099 #, c-format msgid "" " -J \t same as -K but it does concern the archive of reference\n" msgstr "" #: src/dar_suite/command_line.cpp:2100 #, c-format msgid " -# encryption block size\n" msgstr "" #: src/dar_suite/command_line.cpp:2101 #, c-format msgid " -* same as -# but for archive of reference\n" msgstr "" #: src/dar_suite/command_line.cpp:2102 #, c-format msgid " -B read options from given file\n" msgstr "" #: src/dar_suite/command_line.cpp:2103 #, c-format msgid " -N\t\t do not read ~/.darrc nor /etc/darrc configuration file\n" msgstr "" #: src/dar_suite/command_line.cpp:2104 #, c-format msgid " -e\t\t dry run, fake execution, nothing is produced\n" msgstr "" #: src/dar_suite/command_line.cpp:2105 #, c-format msgid " -Q\t\t suppress the initial warning when not launched from a tty\n" msgstr "" #: src/dar_suite/command_line.cpp:2106 #, c-format msgid " -aa\t\t do not try to preserve atime of file open for reading.\n" msgstr "" #: src/dar_suite/command_line.cpp:2107 #, c-format msgid " -ac\t\t do not try to preserve ctime (default behavior).\n" msgstr "" #: src/dar_suite/command_line.cpp:2108 #, c-format msgid " -am\t\t set ordered mode for all filters\n" msgstr "" #: src/dar_suite/command_line.cpp:2109 #, c-format msgid " -an\t\t the masks that follow are now case insensitive\n" msgstr "" #: src/dar_suite/command_line.cpp:2110 #, c-format msgid " -acase\t the masks that follow are now case sensitive\n" msgstr "" #: src/dar_suite/command_line.cpp:2111 #, c-format msgid " -ar\t\t set the following masks to be regex expressions\n" msgstr "" #: src/dar_suite/command_line.cpp:2112 #, c-format msgid " -ag\t\t set the following masks to be glob expressions\n" msgstr "" #: src/dar_suite/command_line.cpp:2114 #, c-format msgid "" "Saving/Isolation/merging/repairing options (to use with -c, -C, -+ or -y):\n" msgstr "" #: 
src/dar_suite/command_line.cpp:2115 #, c-format msgid " -A [path/] archive to take as reference\n" msgstr "" #: src/dar_suite/command_line.cpp:2116 #, c-format msgid " -@ [path/] auxiliary archive of reference for merging\n" msgstr "" #: src/dar_suite/command_line.cpp:2117 #, c-format msgid " -$ \t encryption key for auxiliary archive\n" msgstr "" #: src/dar_suite/command_line.cpp:2118 #, c-format msgid " -~ \t command between slices of the auxiliary archive\n" msgstr "" #: src/dar_suite/command_line.cpp:2119 #, c-format msgid " -z [[algo:]level]\t compress data in archive. -z = -z9 = -zgzip:9\n" msgstr "" #: src/dar_suite/command_line.cpp:2120 #, c-format msgid "" " Available algo: gzip,bzip2,lzo,xz. Exemples: -zlzo -zxz:5 -z1 -z\n" msgstr "" #: src/dar_suite/command_line.cpp:2121 src/dar_suite/dar_xform.cpp:426 #, c-format msgid "" " -s split the archive in several files of size \n" msgstr "" #: src/dar_suite/command_line.cpp:2122 #, c-format msgid " -S first file size (if different from following ones)\n" msgstr "" #: src/dar_suite/command_line.cpp:2123 #, c-format msgid " -aSI \t slice size suffixes k, M, T, G, etc. are powers of 10\n" msgstr "" #: src/dar_suite/command_line.cpp:2124 #, c-format msgid " -abinary\t slice size suffixes k, M, T, G, etc. 
are powers of 2\n" msgstr "" #: src/dar_suite/command_line.cpp:2125 src/dar_suite/dar_xform.cpp:428 #, c-format msgid " -p\t\t pauses before writing to a new file\n" msgstr "" #: src/dar_suite/command_line.cpp:2126 #, c-format msgid " -D\t\t excluded directories are stored as empty directories\n" msgstr "" #: src/dar_suite/command_line.cpp:2127 #, c-format msgid " -Z \t do not compress the matching filenames\n" msgstr "" #: src/dar_suite/command_line.cpp:2128 #, c-format msgid " -Y \t do only compress the matching filenames\n" msgstr "" #: src/dar_suite/command_line.cpp:2129 #, c-format msgid " -m \t do not compress file smaller than \n" msgstr "" #: src/dar_suite/command_line.cpp:2130 #, c-format msgid " --nodump\t do not backup, files having the nodump 'd' flag set\n" msgstr "" #: src/dar_suite/command_line.cpp:2131 #, c-format msgid "" " -@ [path/] Do on-fly catalogue isolation of the resulting " "archive\n" msgstr "" #: src/dar_suite/command_line.cpp:2132 #, c-format msgid " -M\t\t stay in the same filesystem while scanning directories\n" msgstr "" #: src/dar_suite/command_line.cpp:2133 #, c-format msgid " -,\t\t ignore directories that follow the Directory Tagging\n" msgstr "" #: src/dar_suite/command_line.cpp:2134 #, c-format msgid "\t\t Standard\n" msgstr "" #: src/dar_suite/command_line.cpp:2135 #, c-format msgid "" " -/ \t which way dar can overwrite files at archive merging or\n" msgstr "" #: src/dar_suite/command_line.cpp:2136 #, c-format msgid "\t\t extraction time\n" msgstr "" #: src/dar_suite/command_line.cpp:2137 src/dar_suite/dar_xform.cpp:436 #, c-format msgid " -^ \t permission[:user[:group]] of created slices\n" msgstr "" #: src/dar_suite/command_line.cpp:2138 #, c-format msgid "" " -8 sig\t add delta signature to perform binary delta if used as ref." 
msgstr "" #: src/dar_suite/command_line.cpp:2140 #, c-format msgid "Restoring options (to use with -x) :\n" msgstr "" #: src/dar_suite/command_line.cpp:2141 #, c-format msgid " -k\t\t do not remove files destroyed since the reference backup\n" msgstr "" #: src/dar_suite/command_line.cpp:2142 #, c-format msgid " -r\t\t do not restore file older than those on filesystem\n" msgstr "" #: src/dar_suite/command_line.cpp:2143 #, c-format msgid " -f\t\t do not restore directory structure\n" msgstr "" #: src/dar_suite/command_line.cpp:2145 #, c-format msgid "Reading options (to use with -x, -d, -t, -l, -A)\n" msgstr "" #: src/dar_suite/command_line.cpp:2146 #, c-format msgid "" " -i pipe to use instead of std input to read data from " "dar_slave\n" msgstr "" #: src/dar_suite/command_line.cpp:2147 #, c-format msgid "" " -o pipe to use instead of std output to orders dar_slave\n" msgstr "" #: src/dar_suite/command_line.cpp:2149 #, c-format msgid "Listing options (to use with -l):\n" msgstr "" #: src/dar_suite/command_line.cpp:2150 #, c-format msgid " -T\t\t tree output format\n" msgstr "" #: src/dar_suite/command_line.cpp:2151 #, c-format msgid " -as\t\t only list files saved in the archive\n" msgstr "" #: src/dar_suite/command_line.cpp:2152 #, c-format msgid "" "\n" "\n" msgstr "" #: src/dar_suite/command_line.cpp:2153 #, c-format msgid "" "Type \"man dar\" for more details and for all other available options.\n" msgstr "" #: src/dar_suite/command_line.cpp:2529 #, c-format msgid " Using libdar %u.%u.%u built with compilation time options:" msgstr "" #: src/dar_suite/command_line.cpp:2530 #, c-format msgid " Using libdar %u.%u built with compilation time options:" msgstr "" #: src/dar_suite/command_line.cpp:2753 #, c-format msgid "Arguments read from %S :" msgstr "" #: src/dar_suite/command_line.cpp:2875 #, c-format msgid "Failed reading %S: " msgstr "" #: src/dar_suite/command_line.cpp:2956 #, c-format msgid "Warning: Failed reading %S: " msgstr "" #: 
src/dar_suite/command_line.cpp:3246 #, c-format msgid "" "%s does not name a compression \"[algorithm][:][level]\" , like for examples " "\"gzip\", \"lzo\", \"bzip2\", \"lzo:3\", \"gzip:2\", \"8\" or \"1\". Please " "review the man page about -z option" msgstr "" #: src/dar_suite/command_line.cpp:3265 msgid "Compression level must be between 1 and 9, included" msgstr "" #: src/dar_suite/command_line.cpp:3294 msgid "unknown FSA family: " msgstr "" #: src/dar_suite/command_line.cpp:3326 #, c-format msgid "" "User target named \"%s\" is not allowed (reserved word for conditional " "syntax)" msgstr "" #: src/dar_suite/dar_xform.cpp:230 msgid "Missing argument to -s" msgstr "" #: src/dar_suite/dar_xform.cpp:241 msgid "Invalid size for option -s" msgstr "" #: src/dar_suite/dar_xform.cpp:248 msgid "Missing argument to -S" msgstr "" #: src/dar_suite/dar_xform.cpp:261 msgid "Giving -S option the same value as the one given to -s is useless" msgstr "" #: src/dar_suite/dar_xform.cpp:265 msgid "Invalid size for option -S" msgstr "" #: src/dar_suite/dar_xform.cpp:302 msgid "Missing argument to -E" msgstr "" #: src/dar_suite/dar_xform.cpp:310 msgid "Missing argument to -F" msgstr "" #: src/dar_suite/dar_xform.cpp:325 msgid "Unknown parameter given to -a option: " msgstr "" #: src/dar_suite/dar_xform.cpp:331 msgid "Missing argument to -^" msgstr "" #: src/dar_suite/dar_xform.cpp:366 msgid "" "Missing source or destination argument on command line, see -h option for " "help" msgstr "" #: src/dar_suite/dar_xform.cpp:381 msgid "Invalid argument as source archive" msgstr "" #: src/dar_suite/dar_xform.cpp:388 msgid "Invalid argument as destination archive" msgstr "" #: src/dar_suite/dar_xform.cpp:394 msgid "Archive on stdout is not compatible with slicing (-s option)" msgstr "" #: src/dar_suite/dar_xform.cpp:419 #, c-format msgid "\t\t the first non options argument is the archive to read\n" msgstr "" #: src/dar_suite/dar_xform.cpp:421 #, c-format msgid "\t\t the second non option 
argument is the archive to create\n" msgstr "" #: src/dar_suite/dar_xform.cpp:425 #, c-format msgid " -V\t\t displays version information\n" msgstr "" #: src/dar_suite/dar_xform.cpp:427 #, c-format msgid " -S first file size\n" msgstr "" #: src/dar_suite/dar_xform.cpp:432 #, c-format msgid "" " -E \t command to execute between slices of destination archive\n" msgstr "" #: src/dar_suite/dar_xform.cpp:433 #, c-format msgid " -F \t command to execute between slice of source archive\n" msgstr "" #: src/dar_suite/dar_xform.cpp:434 #, c-format msgid " -aSI \t slice size suffixes k, M, T, G, etc. are power of 10\n" msgstr "" #: src/dar_suite/dar_xform.cpp:435 #, c-format msgid " -abinary\t slice size suffixes k, M, T, G, etc. are power of 2\n" msgstr "" #: src/python/pybind11_libdar.cpp:262 msgid "FATAL error from operating system, aborting operation: " msgstr "" #: src/check/all_features.cpp:54 msgid "-" msgstr "" dar-2.6.8/po/fr.po0000644000175000017520000111717513617551261010631 00000000000000# French translations for DAR package. # Copyright (C) 2011 Free Software Foundation, Inc. # This file is distributed under the same license as the DAR package. # Denis Corbin , 2011. # msgid "" msgstr "" "Project-Id-Version: DAR 2.6.0\n" "Report-Msgid-Bugs-To: http://sourceforge.net/tracker/?" 
"group_id=65612MSGID_BUGS_ADDRESS =atid=511612\n" "POT-Creation-Date: 2020-02-08 16:12+0100\n" "PO-Revision-Date: 2020-01-12 15:30+0100\n" "Last-Translator: Denis Corbin \n" "Language-Team: French \n" "Language: fr\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=ISO-8859-1\n" "Content-Transfer-Encoding: 8bit\n" "Plural-Forms: nplurals=2; plural=(n > 1);\n" #: src/libdar/cat_signature.cpp:72 src/libdar/catalogue.cpp:143 #: src/libdar/catalogue.cpp:162 src/libdar/catalogue.cpp:164 msgid "incoherent catalogue structure" msgstr "structure incohérente du catalogue" #: src/libdar/compression.cpp:56 src/libdar/compression.cpp:79 #: src/libdar/compression.cpp:102 msgid "unknown compression" msgstr "compression inconnue" #: src/libdar/compression.cpp:129 #, c-format msgid "unknown compression algorithm: %S" msgstr "algorithme de compression inconnue : %S" #: src/libdar/filesystem_tools.cpp:164 msgid "Cannot get inode information about file to remove " msgstr "Impossible d'obtenir d'information sur l'inode d'un fichier à effacer " #: src/libdar/filesystem_tools.cpp:177 msgid "Cannot remove directory " msgstr "Suppression de répertoire impossible " #: src/libdar/filesystem_tools.cpp:251 msgid "" "uid value is too high for this system for libdar be able to restore it " "properly" msgstr "" "valeur d'UID trop grande pour ce système pour que libdar puisse le restaurer " "correctement" #: src/libdar/filesystem_tools.cpp:255 msgid "" "gid value is too high for this system for libdar be able to restore it " "properly" msgstr "" "valeur de GID trop grande pour ce système pour que libdar puisse le " "restaurer correctement" #: src/libdar/filesystem_tools.cpp:259 src/libdar/filesystem_tools.cpp:263 msgid "Could not restore original file ownership: " msgstr "Affection du fichier à son propriétaire d'origine impossible : " #: src/libdar/filesystem_tools.cpp:285 #, c-format msgid "Cannot restore permissions of %s : %s" msgstr "Restauration des droits d'accès de %s impossible 
: %s" #: src/libdar/filesystem_tools.cpp:372 #, c-format msgid "Failed to open %S while checking for nodump flag: %s" msgstr "" "Impossible d'ouvrir %S pendant la consultation du drapeau \"nodump\" : %s" #: src/libdar/filesystem_tools.cpp:386 #, c-format msgid "Cannot get ext2 attributes (and nodump flag value) for %S : %s" msgstr "Impossible d'accéder aux attributs ext2 (drapeau nodump) pour %S : %s" #: src/libdar/filesystem_tools.cpp:418 #, c-format msgid "Cannot get inode information for %s : %s" msgstr "Impossible d'obtenir des informations sur l'inode %s : %s" #: src/libdar/filesystem_tools.cpp:447 #, c-format msgid "" "Replacing %s in the -R option by the directory pointed to by this symbolic " "link: " msgstr "" "Remplacement de %s dans l'option -R par le répertoire pointé par ce lien " "symbolique" #: src/libdar/filesystem_tools.cpp:450 #, c-format msgid "" "The given path %s must be a directory (or symbolic link to an existing " "directory)" msgstr "Le chemin %s doit être un répertoire (ou un lien sur un répertoire)" #: src/libdar/filesystem_tools.cpp:577 msgid "" "File the patch is about to be applied to is not the expected one, aborting " "the patch operation" msgstr "" "Le fichier sur lequel le patch est sur le point d'être appliqué ne " "correspond pas à celui attendu, Abandon de l'opération" #: src/libdar/filesystem_tools.cpp:594 msgid "Patch data does not match its CRC, archive corruption took place" msgstr "Le patch ne correspond pas à son CRC, l'archive est corrompue" #: src/libdar/filesystem_tools.cpp:625 msgid "Error met while checking the resulting patched file: " msgstr "" "Erreur lors du contrôle du fichier résultant de l'application du patch : " #: src/libdar/filesystem_tools.cpp:752 msgid "Copied data does not match expected CRC" msgstr "Les données copiées ne correspondent pas au CRC" #: src/libdar/fsa_family.cpp:58 msgid "creation date" msgstr "date de création" #: src/libdar/fsa_family.cpp:60 msgid "append only" msgstr "ajout seul" #: 
src/libdar/fsa_family.cpp:62 msgid "compressed" msgstr "compressé" #: src/libdar/fsa_family.cpp:64 msgid "no dump flag" msgstr "drapeau no dump" #: src/libdar/fsa_family.cpp:66 msgid "immutable" msgstr "immutable" #: src/libdar/fsa_family.cpp:68 msgid "journalized" msgstr "journalisé" #: src/libdar/fsa_family.cpp:70 msgid "secure deletion" msgstr "suppression sécurisée" #: src/libdar/fsa_family.cpp:72 msgid "no tail merging" msgstr "pas d'ajout en fin" #: src/libdar/fsa_family.cpp:74 msgid "undeletable" msgstr "insuppressible" #: src/libdar/fsa_family.cpp:76 msgid "no atime update" msgstr "pas de mise à jour de atime" #: src/libdar/fsa_family.cpp:78 msgid "synchronous directory" msgstr "répertoire synchrone" #: src/libdar/fsa_family.cpp:80 msgid "synchronous update" msgstr "mise à jour synchrone" #: src/libdar/fsa_family.cpp:82 msgid "top of directory hierarchy" msgstr "sommet de hiérarchie de répertoires" #: src/libdar/statistics.cpp:81 msgid "Error while initializing \"mutex\" for class \"statistics\": " msgstr "" "Erreur lors de l'initialisation de \"mutex\" pour la classe \"statistics\" : " #: src/libdar/tronconneuse.cpp:61 #, c-format msgid "%d is not a valid block size" msgstr "%d n'est pas une taille valide" #: src/libdar/tronconneuse.cpp:464 src/libdar/crypto_sym.cpp:349 msgid "Data corruption may have occurred, cannot decrypt data" msgstr "" "Une corruption de données semble avoir eu lieu, impossible de déchiffrer les " "données" #: src/libdar/cat_delta_signature.cpp:332 msgid "data corrupted when attempting to read delta signature block size" msgstr "" "corruption de donnée rencontrée lors de la lecture de la taille des blocks " "de la signature delta" #: src/libdar/cat_delta_signature.cpp:354 msgid "" "Error while reading CRC of delta signature data. Data corruption occurred" msgstr "" "Erreur de CRC lors de la lecture de la signature de différence binaire : " "Corruption de données." 
#: src/libdar/cat_delta_signature.cpp:356 msgid "CRC error met while reading delta signature: data corruption." msgstr "" "Erreur de CRC lors de la lecture de la signature de différence binaire : " "Corruption de données." #: src/libdar/sar.cpp:165 #, c-format msgid "" "Error met while opening the last slice: %S. Trying to open the archive using " "the first slice..." msgstr "" "Erreur rencontrée lors de l'ouverture de la dernière tranche : %S . " "Tentative d'ouverture à partir de la première tranche ..." #: src/libdar/sar.cpp:213 msgid "File size too small" msgstr "Taille de fichier trop petite" #: src/libdar/sar.cpp:217 msgid "First file size too small" msgstr "Taille du premier fichier trop petite" #: src/libdar/sar.cpp:689 msgid "LAX MODE: Caught exception: " msgstr "MODE RELAX : Exception attrapée : " #: src/libdar/sar.cpp:690 #, c-format msgid "" "LAX MODE: %S is missing, You have the possibility to create a zero byte " "length file under the name of this slice, to replace this missing file. This " "will of course generate error messages about the information that is missing " "in this slice, but at least libdar will be able to continue. Can we continue " "now?" msgstr "" "MODE RELAX : %S est manquant. Vous avez la possibilité de créer un fichier " "de taille nulle de ce nom pour remplacer le fichier manquant. Ceci génèrera " "bien sûr des messages d'erreurs à propos des informations manquantes dans " "cette tranche, mais au moins libdar pourra continuer. Peut on continuer " "maintenant ?" #: src/libdar/sar.cpp:702 #, c-format msgid "%S is required for further operation, please provide the file." msgstr "" "%S est requis pour la suite des opérations, merci de fournir ce fichier." #: src/libdar/sar.cpp:704 #, c-format msgid "" "%S is required for further operation, please provide the file if you have it." msgstr "" "%S est requis pour la suite des opérations, merci de fournir ce fichier si " "vous l'avez." 
#: src/libdar/sar.cpp:709 #, c-format msgid "Failed reading slice %S: " msgstr "Echec de lecture de la tranche %S : " #: src/libdar/sar.cpp:752 #, c-format msgid "%S has a bad or corrupted header, please provide the correct file." msgstr "%S a un en-tête corrompu ou malformé, merci de fournir le bon fichier." #: src/libdar/sar.cpp:756 #, c-format msgid "" "LAX MODE: %S has a bad or corrupted header, trying to guess original values " "and continuing if possible" msgstr "" "MODE RELAX : %S a un en-tête corrompu, tentative de détermination des " "valeurs d'origine puis on continue si c'est possible" #: src/libdar/sar.cpp:766 #, c-format msgid "" "%S is not a valid file (wrong magic number), please provide the good file." msgstr "" "%S n'est pas un fichier valide (mauvais numéro magique), merci de fournir le " "bon fichier." #: src/libdar/sar.cpp:770 #, c-format msgid "" "LAX MODE: In spite of its name, %S does not appear to be a dar slice, " "assuming a data corruption took place and continuing" msgstr "" "MODE RELAX : En dépit de son nom, %S ne semble pas être une tranche " "d'archive, on suppose qu'une corruption des données a eu lieu et on continue" #: src/libdar/sar.cpp:774 msgid "" "This is an old archive, it can only be opened starting by the first slice" msgstr "" "Ceci est une ancienne archive, elle doit être ouverte en lisant d'abord la " "première tranche" #: src/libdar/sar.cpp:801 msgid "" "LAX MODE: Due to probable data corruption, dar could not determine the " "correct size of slices in this archive. For recent archive, this information " "is duplicated in each slice, do you want to try opening another slice to get " "this value if present?" msgstr "" "MODE RELAX : A cause d'une corruption probable des données, libdar ne peut " "déterminer la taille exacte des tranches. Pour des archives récentes, cette " "information est dupliquée dans chaque tranche. Voulez-vous essayer de lire " "une autre tranche pour récupérer cette information ?" 
#: src/libdar/sar.cpp:805 msgid "LAX MODE: Please provide the slice number to read: " msgstr "MODE RELAX : Merci de fournir le numéro de la tranche à lire : " #: src/libdar/sar.cpp:813 msgid "LAX MODE: Please provide an strictly positive integer number" msgstr "MODE RELAX : Merci de fournir un nombre entier strictement positif" #: src/libdar/sar.cpp:819 #, c-format msgid "LAX MODE: opening slice %i to read its slice header" msgstr "" "MODE RELAX : ouverture de la tranche %i afin de lire l'en-tête de tranche" #: src/libdar/sar.cpp:821 #, c-format msgid "LAX MODE: closing slice %i, header properly fetched" msgstr "" "MODE RELAX : fermeture de la tranche %i, l'en-tête a pu être lu correctement" #: src/libdar/sar.cpp:827 msgid "" "LAX MODE: In spite of a the absence of a known slice size, continuing anyway" msgstr "" "MODE RELAX : On continue malgré l'absence d'information à propos de la " "taille des tranches" #: src/libdar/sar.cpp:834 msgid "Incoherent slice header: First slice size too small" msgstr "En-tête de tranche incohérent : taille de première tranche trop petite" #: src/libdar/sar.cpp:836 msgid "incoherent slice header: Slice size too small" msgstr "En-tête de tranche incohérent : taille de tranche trop petite" #: src/libdar/sar.cpp:842 #, c-format msgid "Error opening %S : " msgstr "Erreur d'ouverture de %S : " #: src/libdar/sar.cpp:842 msgid " . Retry ?" msgstr " . Essayer à nouveau ?" #: src/libdar/sar.cpp:853 msgid " is a slice from another backup, please provide the correct slice." msgstr " est une tranche d'une autre archive, merci de fournir le bon fichier." #: src/libdar/sar.cpp:858 msgid "" "LAX MODE: internal name of the slice leads dar to consider it is not member " "of the same archive. Assuming data corruption occurred and relying on the " "filename of this slice as proof of its membership to the archive" msgstr "" "MODE RELAX : le nom interne de la tranche laisse à penser qu'elle ne fait " "pas partie de la même archive. 
On suppose qu'il s'agit d'une corruption de " "données et on ignore cette erreur" #: src/libdar/sar.cpp:886 msgid "" "Data corruption met at end of slice, forbidden flag found at this position" msgstr "" "Corruption de données rencontré en fin de tranche, drapeau interdit à cet " "endroit" #: src/libdar/sar.cpp:892 msgid "Data corruption met at end of slice, unknown flag found" msgstr "Corruption de données rencontrée en fin de tranche, drapeau inconnu" #: src/libdar/sar.cpp:912 #, c-format msgid "" "Two different slices (%i and %i) are marked as the last slice of the backup!" msgstr "" "Deux tranches différentes %i et %i sont toutes les deux marquées comme la " "dernière de l'archive !" #: src/libdar/sar.cpp:915 #, c-format msgid "" "LAX MODE: slices %i and %i are both recorded as last slice of the archive, " "keeping the higher number as the real last slice" msgstr "" "MODE RELAX : les tranches %i et %i sont toutes les deux marquées comme la " "dernière de l'archive, on considère que la dernière est celle de numéro le " "plus élevé" #: src/libdar/sar.cpp:938 #, c-format msgid "Slice %S has an unknown flag (neither terminal nor non_terminal file)." msgstr "" "La tranche %S possède un drapeau inconnu (ni terminal ni non-terminal)." #: src/libdar/sar.cpp:948 #, c-format msgid "" "Due to data corruption, it is not possible to know if slice %S is the last " "slice of the archive or not. I need your help to figure out this. At the " "following prompt please answer either one of the following words: \"last\" " "or \"notlast\" according to the nature of this slice (you can also answer " "with \"abort\" to abort the program immediately): " msgstr "" "A cause d'une corruption des données, il n'est pas possible si la tranche %S " "est ou non la dernière de l'archive. Merci de votre aide pour le déterminer. 
" "A l'invite qui suit, merci de répondre par \"dernier\" ou \"non dernier\" " "selon la nature de cette tranche (vous pouvez-aussi répondre par \"stop\" " "pour arrêter le programme immédiatement) : " #: src/libdar/sar.cpp:950 src/libdar/sar.cpp:954 msgid "last" msgstr "dernier" #: src/libdar/sar.cpp:950 msgid "notlast" msgstr "non dernier" #: src/libdar/sar.cpp:950 src/libdar/sar.cpp:952 msgid "abort" msgstr "stop" #: src/libdar/sar.cpp:966 msgid "" "LAX MODE: Slice flag corrupted, but a slice of higher number has been seen, " "thus the header flag was surely not indicating this slice as the last of the " "archive. Continuing" msgstr "" "MODE RELAX : Le drapeau de tranche est corrompu, mais comme une tranche de " "numéro supérieur a déjà été vu, ce drapeau ne pouvait pas indiquer cette " "tranche comme la dernière de l'archive, on continue" #: src/libdar/sar.cpp:1010 #, c-format msgid "failed openning slice %S: %S. Will try to erase it first, if allowed" msgstr "" "Echec d'ouverture de la tranche %S: %S. Nouvelle tentative avec écrasement " "préalable, si autorisé" #: src/libdar/sar.cpp:1076 src/libdar/sar.cpp:1147 src/libdar/sar.cpp:1158 #: src/libdar/trivial_sar.cpp:224 src/libdar/trivial_sar.cpp:242 #, c-format msgid "Failed creating slice %S: " msgstr "Échec de création d'une tranche %S: " #: src/libdar/sar.cpp:1097 msgid "file exists, and DONT_ERASE option is set." msgstr "le fichier existe et l'option contre l'écrasement a été activée." #: src/libdar/sar.cpp:1102 msgid " is about to be overwritten." msgstr " est sur le point d'être écrasé." 
#: src/libdar/sar.cpp:1182 msgid "" "First slice size is too small to even just be able to drop the slice header" msgstr "" "La taille de la première tranche est trop petite pour être en mesure " "contenir ne serait-ce que l'en-tête de tranche" #: src/libdar/sar.cpp:1184 msgid "Slice size is too small to even just be able to drop the slice header" msgstr "" "La taille de tranche est trop petite pour être en mesure de contenir ne " "serait-ce que l'en-tête de tranche" #: src/libdar/sar.cpp:1247 msgid "Finished writing to file " msgstr "Écriture terminée dans le fichier " #: src/libdar/sar.cpp:1247 msgid ", ready to continue ? " msgstr ", prêt à continuer ? " #: src/libdar/sar.cpp:1252 msgid "" "If you really want to abort the archive creation hit CTRL-C, then press " "enter." msgstr "" "Si vous voulez vraiment arrêter la création de l'archive, appuyez sur CTRL+C " "puis appuyez sur la touche entrée" #: src/libdar/sar.cpp:1284 msgid "file not open" msgstr "Fichier non ouvert" #: src/libdar/sar.cpp:1318 msgid "The last file of the set is not present in " msgstr "Le dernier fichier (tranche) de l'archive n'est pas présent dans " #: src/libdar/sar.cpp:1318 msgid " , please provide it." msgstr " , veuillez le fournir." #: src/libdar/sar.cpp:1332 #, c-format msgid "" "No backup file is present in %S for archive %S, please provide the last file " "of the set." msgstr "" "Aucune tranche n'est présent dans %S pour l'archive %S, merci de fournir la " "dernière tranche de l'archive." 
#: src/libdar/gf_mode.cpp:43 msgid "read only" msgstr "lecture seule" #: src/libdar/gf_mode.cpp:46 msgid "write only" msgstr "écriture seule" #: src/libdar/gf_mode.cpp:49 msgid "read and write" msgstr "lecture et écriture" #: src/libdar/int_tools.cpp:66 msgid "a binary digit is either 0 or 1" msgstr "Un chiffre binaire est soit 0 soit 1" #: src/libdar/erreurs.cpp:123 #, c-format msgid "File %S line %d" msgstr "Fichier %S ligne %d" #: src/libdar/erreurs.cpp:123 msgid "it seems to be a bug here" msgstr "Il semble qu'il y ait un bogue ici" #: src/libdar/erreurs.cpp:152 #, c-format msgid "in file %S line %S" msgstr "dans le fichier %S ligne %S" #: src/libdar/erreurs.cpp:171 msgid "# UNEXPECTED EXCEPTION, #" msgstr "# EXCEPTION NON ATTENDUE, #" #: src/libdar/erreurs.cpp:172 src/libdar/erreurs.cpp:185 msgid "# E X I T I N G ! #" msgstr "# A B A N D O N ! #" #: src/libdar/erreurs.cpp:175 src/libdar/erreurs.cpp:188 msgid "" " THANKS TO REPORT THE PREVIOUS OUTPUT TO MAINTAINER\n" " GIVING A DESCRIPTION OF THE CIRCUMSTANCES." msgstr "" " MERCI DE REPORTER L'AFFICHAGE PRÉCÉDENT AU MAINTENEUR\n" " EN DONNANT UNE DESCRIPTION DES CIRCONSTANCES." #: src/libdar/erreurs.cpp:176 msgid "" " IF POSSIBLE TRY TO REPRODUCE THIS ERROR, A\n" " SCENARIO THAT CAN REPRODUCE IT WOULD HELP MUCH\n" " IN SOLVING THIS PROBLEM. THANKS" msgstr "" " SI POSSIBLE ESSAYEZ DE REPRODUIRE L'ERREUR, UN\n" " SCÉNARIO QUI PERMET DE LA REPRODUIRE SERAIT UTILE\n" " POUR CORRIGER CE PROBLÈME. MERCI" #: src/libdar/erreurs.cpp:184 msgid "# NOT CAUGHT EXCEPTION, #" msgstr "# EXCEPTION NON ATTRAPÉE, #" #: src/libdar/erreurs.cpp:189 msgid "" " IF POSSIBLE TRY TO PRODUCE THIS ERROR, A\n" " SCENARIO THAT CAN REPRODUCE IT WOULD HELP MUCH\n" " IN SOLVING THIS PROBLEM. THANKS" msgstr "" " SI POSSIBLE ESSAYEZ DE REPRODUIRE L'ERREUR, UN\n" " SCÉNARIO QUI PERMET DE LA REPRODUIRE SERAIT UTILE\n" " POUR CORRIGER CE PROBLÈME. 
MERCI" #: src/libdar/generic_rsync.cpp:210 msgid "Error met building the rsync hash table: " msgstr "Erreur lors de la construction de la table de hashage rsync: " #: src/libdar/generic_rsync.cpp:487 msgid "Error met while feeding data to librsync: " msgstr "Erreur lors de l'envoi de données à librsync: " #: src/libdar/generic_rsync.cpp:509 msgid "Error releasing librsync job: " msgstr "Erreur lors de la libération d'un travail de librsync: " #: src/libdar/tools.hpp:697 #, c-format msgid "Found negative date (%s) for inode %s ." msgstr "Date négative rencontrée (%s) pour l'inode %s ." #: src/libdar/tools.hpp:701 #, c-format msgid "%S Can we read it as if it was zero (1st January 1970 at 00:00:00 UTC)?" msgstr "" "%S Pouvons-nous la considérer nulle (1er janvier 1970 à 00:00:00 UTC) ?" #: src/libdar/tools.hpp:704 msgid "Considering date as if it was zero (Jan 1970)" msgstr "On considère la date comme nulle (Janvier 1970<)" #: src/libdar/entree_stats.cpp:140 #, c-format msgid "CATALOGUE CONTENTS :" msgstr "CONTENU DU CATALOGUE :" #: src/libdar/entree_stats.cpp:142 #, c-format msgid "total number of inode : %i" msgstr "nombre total d'inode : %i" #: src/libdar/entree_stats.cpp:143 #, c-format msgid "fully saved : %i" msgstr "inode sauvegardée : %i" #: src/libdar/entree_stats.cpp:144 #, c-format msgid "binay delta patch : %i" msgstr "fichiers patchés : %i" #: src/libdar/entree_stats.cpp:145 #, c-format msgid "inode metadata only : %i" msgstr "métadonnée seulement : %i" #: src/libdar/entree_stats.cpp:146 #, c-format msgid "distribution of inode(s)" msgstr "répartition des inodes" #: src/libdar/entree_stats.cpp:147 #, c-format msgid " - directories : %i" msgstr " - répertoires : %i" #: src/libdar/entree_stats.cpp:148 #, c-format msgid " - plain files : %i" msgstr " - fichiers normaux : %i" #: src/libdar/entree_stats.cpp:149 #, c-format msgid " - symbolic links : %i" msgstr " - liens symboliques : %i" #: src/libdar/entree_stats.cpp:150 #, c-format msgid " - named pipes : 
%i" msgstr " - tubes nommés : %i" #: src/libdar/entree_stats.cpp:151 #, c-format msgid " - unix sockets : %i" msgstr " - prises unix : %i" #: src/libdar/entree_stats.cpp:152 #, c-format msgid " - character devices : %i" msgstr " - fichiers spéciaux char : %i" #: src/libdar/entree_stats.cpp:153 #, c-format msgid " - block devices : %i" msgstr " - fichiers spéciaux bloc : %i" #: src/libdar/entree_stats.cpp:154 #, c-format msgid " - Door entries : %i" msgstr " - entrées Portes (Door) : %i" #: src/libdar/entree_stats.cpp:155 #, c-format msgid "hard links information" msgstr "information sur les liens durs" #: src/libdar/entree_stats.cpp:156 #, c-format msgid " - number of inode with hard link : %i" msgstr " - nombre d'inode ayant des liens durs : %i" #: src/libdar/entree_stats.cpp:157 #, c-format msgid " - number of reference to hard linked inodes: %i" msgstr " - nombre de référence à des inodes ayant des liens durs : %i" #: src/libdar/entree_stats.cpp:158 #, c-format msgid "destroyed entries information" msgstr "information sur les entrées supprimées" #: src/libdar/entree_stats.cpp:159 #, c-format msgid " %i file(s) have been record as destroyed since backup of reference" msgstr "" " %i fichier(s) ont été enregistré(s) comme détruit(s) depuis la sauvegarde " "de référence" #: src/libdar/wrapperlib.cpp:468 msgid "compressReset called but compressInit never called before" msgstr "" "\"compressReset appelé alors que \"compressInit\" n'a jamais été invoqué au " "préalable" #: src/libdar/fichier_libcurl.cpp:75 #, c-format msgid "Error met while resetting URL to handle: %s" msgstr "Erreur lors de la réinitialisation de l'URL: %s" #: src/libdar/fichier_libcurl.cpp:84 src/libdar/fichier_libcurl.cpp:619 #: src/libdar/fichier_libcurl.cpp:676 #, c-format msgid "Error met while setting libcurl for reading data file: %s" msgstr "Erreur lors du paramétrage de libcurl pour la lecture du fichier : %s" #: src/libdar/fichier_libcurl.cpp:91 src/libdar/fichier_libcurl.cpp:96 #: 
src/libdar/fichier_libcurl.cpp:646 src/libdar/fichier_libcurl.cpp:683 #, c-format msgid "Error met while setting libcurl for writing data file: %s" msgstr "Erreur lors du paramétrage de libcurl pour l'écriture du fichier : %s" #: src/libdar/fichier_libcurl.cpp:139 src/libdar/fichier_libcurl.cpp:148 #, c-format msgid "%s: %s" msgstr "%s: %s" #: src/libdar/fichier_libcurl.cpp:184 src/libdar/fichier_libcurl.cpp:192 #: src/libdar/fichier_libcurl.cpp:203 #, c-format msgid "Error met while fetching file size: %s" msgstr "Erreur lors lors de la récupération de la taille du fichier : %s" #: src/libdar/fichier_libcurl.cpp:259 msgid "libcurl does not allow skipping in write mode" msgstr "libcurl interdit le changement de position en mode écriture" #: src/libdar/fichier_libcurl.cpp:491 msgid "Error met during network transfer: " msgstr "Erreur lors du transfert réseau : " #: src/libdar/fichier_libcurl.cpp:514 #, c-format msgid "Error met while reading a block of data: %s" msgstr "Erreur lors de la lecture d'un block de données : %s" #: src/libdar/fichier_libcurl.cpp:590 src/libdar/fichier_libcurl.cpp:598 #: src/libdar/fichier_libcurl.cpp:636 src/libdar/fichier_libcurl.cpp:776 #, c-format msgid "Error while seeking in file on remote repository: %s" msgstr "Erreur de positionnement dans un fichier d'un dépot distant : %s" #: src/libdar/fichier_libcurl.cpp:631 msgid "" "Integer too large for libcurl, cannot skip at the requested offset in the " "remote repository" msgstr "" "Entier trop grand pour libcurl, impossible de placer le curseur à la " "position demandée dans le répertoire distant" #: src/libdar/fichier_libcurl.cpp:654 #, c-format msgid "Error while setting write append mode for libcurl: %s" msgstr "Impossible de passer libcurl en mode ajout en écriture : %s" #: src/libdar/fichier_libcurl.cpp:934 src/libdar/fichier_libcurl.cpp:940 #, c-format msgid "%S: %s, aborting" msgstr "%S : %s, abandon" #: src/libdar/fichier_libcurl.cpp:955 #, c-format msgid "%S: %s, retrying in 
%d seconds" msgstr "%S : %s, nouvelle tentative dans %d secondes" #: src/libdar/fichier_libcurl.cpp:962 #, c-format msgid "%S: %s, do we retry network operation?" msgstr "%S : %s, retente-t-on l'opération réseau ?" #: src/libdar/macro_tools.cpp:165 msgid "Locating archive contents..." msgstr "Localisation du contenu de l'archive ..." #: src/libdar/macro_tools.cpp:177 msgid "Reading archive contents..." msgstr "Lecture du contenu de l'archive ..." #: src/libdar/macro_tools.cpp:213 msgid "Missing catalogue in file." msgstr "Pas de catalogue dans le fichier." #: src/libdar/macro_tools.cpp:312 msgid "" "LAX MODE: catalogue computed hash does not match the signed hash of the " "archive, ignoring" msgstr "" "LAX MODE: le hashage du catalogue ne correspond pas au hashage signé de " "l'archive, erreur ignorée" #: src/libdar/macro_tools.cpp:314 msgid "" "Catalogue computed hash does not match the signed hash of the archive, " "archive has been modified since it was signed!" msgstr "" "Le hashage du catalogue ne correspond pas au hashage signé de l'archive, " "l'archive a été modifiée depuis sa création !" #: src/libdar/macro_tools.cpp:343 msgid "Cannot open catalogue: " msgstr "Ouverture du catalogue impossible : " #: src/libdar/macro_tools.cpp:426 msgid "Opening standard input to read the archive..." msgstr "Ouverture de l'entrée standard pour lire l'archive ..." #: src/libdar/macro_tools.cpp:434 #, c-format msgid "Opening named pipe %S as input to read the archive..." msgstr "Ouverture du tube nommé %S comme source pour lire l'archive ..." #: src/libdar/macro_tools.cpp:447 #, c-format msgid "" "Opening a pair of pipes to read the archive, expecting dar_slave at the " "other ends..." msgstr "" "Ouverture d'une paire de tubes pour lire l'archive, dar_slave doit être " "utilisé à l'autre extrémité de ces tubes ..." #: src/libdar/macro_tools.cpp:477 msgid "Opening the archive using the multi-slice abstraction layer..." 
msgstr "Ouverture de l'archive par la couche d'abstraction des tranches..." #: src/libdar/macro_tools.cpp:511 msgid "Reading the archive header..." msgstr "Lecture de l'en-tête de l'archive ..." #: src/libdar/macro_tools.cpp:513 msgid "Reading the archive trailer..." msgstr "Lecture de la fin de l'archive ..." #: src/libdar/macro_tools.cpp:532 #, c-format msgid "" "Error while reading archive's header, this may be because this archive is an " "old encrypted archive or that data corruption took place, Assuming it is an " "old archive, we have to read the header at the beginning of the first " "slice..." msgstr "" "Erreur lors de l'ouverture de l'en-tête de l'archive. Ce peut être dû à une " "ancienne archive chiffrée ou à une corruption de l'archive. On supposons " "qu'il s'agit d'une ancienne archive, pour laquelle pour laquelle l'en-tête " "n'est présent qu'au début de la première tranche ..." #: src/libdar/macro_tools.cpp:543 msgid "" "Found a correct archive header at the beginning of the archive, which does " "not stands to be an old archive, the end of the archive is corrupted and " "thus the catalogue is not readable, aborting. Either retry providing in " "addition an isolated catalogue of that archive to perform the operation, or " "try reading the archive in sequential mode or try in lax mode or, last " "chance, try both lax and sequential read mode at the same time" msgstr "" "Un en-tête d'archive a bien été trouvé en début d'archive, il n'indique pas " "que l'archive est d'un ancien format, la fin de l'archive est donc belle et " "bien corrompue. 
Vous pouvez soit réessayer en fournissant un catalogue isolé " "en complément de cette opération ou tenter de lire l'archive en mode " "séquentiel, voire tenter le mode laxiste comme dernier recours, en utilisant " "le mode laxiste et le mode séquentiel en même temps"
msgstr "" "L'archive %S est chiffrée mais aucune algorithme de chiffrement n'a été " "donné. Ouverture de l'archive impossible." #: src/libdar/macro_tools.cpp:616 msgid "" "WARNING: support for secure memory was not available at compilation time, in " "case of heavy memory load, this may lead the password you are about to " "provide to be wrote to disk (swap space) in clear. You have been warned!" msgstr "" "ATTENTION : le support pour la mémoire sécurisé n'a pas été disponible lors " "de la compilation. En case d'utilisation importante de la mémoire, le mot de " "passe sur le point d'être fourni peut être écrit sur disque en clair " "(fichier d'échange). Vous avez été prévenu !" #: src/libdar/macro_tools.cpp:617 src/libdar/macro_tools.cpp:1359 #, c-format msgid "Archive %S requires a password: " msgstr "L'archive %S nécessite un mot de passe : " #: src/libdar/macro_tools.cpp:626 msgid "No cyphering layer opened, adding cache layer for better performance" msgstr "" "Aucune couche de chiffrement n'a été ouverte. Ajout d'une couche de cache " "pour améliorer les performances" #: src/libdar/macro_tools.cpp:632 msgid "" "Failed opening the cache layer, lack of memory, archive read performances " "will not be optimized" msgstr "" "Echec de l'ouverture de la couche de cache, faute de mémoire disponible, " "l'archive sera lue sans optimisation des performances" #: src/libdar/macro_tools.cpp:637 msgid "No cyphering layer opened" msgstr "Aucune couche de chiffrement ouverte" #: src/libdar/macro_tools.cpp:646 src/libdar/macro_tools.cpp:696 msgid "Opening cyphering layer..." msgstr "Ouverture d'une couche de chiffrement ..." #: src/libdar/macro_tools.cpp:703 msgid "Unknown encryption algorithm" msgstr "Algorithme de chiffrement inconnu" #: src/libdar/macro_tools.cpp:723 src/libdar/macro_tools.cpp:1481 msgid "Creating a new thread to run the previously created layers..." msgstr "" "Création d'une nouvelle file pour exécuter les couches précédemment " "créées ..." 
#: src/libdar/macro_tools.cpp:743 msgid "Opening escape sequence abstraction layer..." msgstr "Ouverture d'une couche de gestion des séquences d'échappement ..." #: src/libdar/macro_tools.cpp:748 msgid "" "LAX MODE: Archive is flagged as having escape sequence (which is normal in " "recent archive versions). However if this is not expected, shall I assume a " "data corruption occurred in this field and that this flag should be ignored? " "(If unsure, refuse)" msgstr "" "MODE RELAX : l'archive est marquée comme ayant des séquences d'échappement " "(ce qui est normal pour une version récente d'archive). Cependant, si ce " "n'est pas attendu, doit-on considérer qu'une corruption des données a eu " "lieu pour ce champ et ignorer cette marque ? (En cas de doute, refuser)" #: src/libdar/macro_tools.cpp:763 msgid "" "Sequential read asked, but this archive is flagged to not have the necessary " "embedded escape sequences for that operation, aborting" msgstr "" "Lecture séquentielle demandée, mais cette archive est marquée comme ne " "possédant pas de séquence d'échappement nécessaire pour cette opération. " "Opération abandonnée" #: src/libdar/macro_tools.cpp:768 msgid "" "LAX MODE: the requested sequential read mode relies on escape sequence which " "seem to be absent from this archive. Assuming data corruption occurred. " "However, if no data corruption occurred and thus no escape sequence are " "present in this archive, do not use sequential reading mode to explore this " "archive else you will just get nothing usable from it" msgstr "" "MODE RELAX : La lecture séqentielle demandée a besoin que l'archive " "contienne de séquences d'échappement ce qui ne semble pas être le cas ici. " "On suppose qu'il s'agit d'une corruption de données. 
Cependant, si tel n'est " "pas le cas, et si aucune séquence d'échappement n'est effectivement présente " "dans cette archive, il ne faut pas utiliser le mode de lecture séquentielle " "pour lire cette archive, aucune information pertinente ne pourrait en être " "extraite" #: src/libdar/macro_tools.cpp:774 msgid "" "LAX MODE: Archive is flagged to not have escape sequence which is not the " "case by default since archive format 8 (release 2.4.x). If corruption " "occurred and an escape sequence is present, this may lead data restoration " "to fail, answering no at this question will let me consider that an escape " "sequence layer has to be added in spite of the archive flags. Do you want to " "continue as suggested by the archive flag, thus without escape sequence " "layer?" msgstr "" "MODE RELAX : L'archive est marquée comme ne possédant pas de séquence " "d'échappement ce qui n'est plus le cas par défaut depuis le format 8 " "(release 2.4.x). Si une corruption a eu lieu sur ce champ et " "qu'effectivement des séquences d'échappement sont présentes, ceci peut " "conduire à un échec de la restauration. Répondre non à la question suivante " "permettra d'insérer une couche de traitement des séquences d'échappement " "lors de la lecture malgré les indications présentes dans l'en-tête de " "l'archive. Voulez-vous continuer comme le suggère l'en-tête de l'archive, " "c'est-à-dire sans couche de gestion des séquences d'échappement ?" #: src/libdar/macro_tools.cpp:805 src/libdar/macro_tools.cpp:1527 msgid "Creating a new thread to run the escape layer..." msgstr "Création d'une nouvelle file pour exécuter la couche d'échappement ..." #: src/libdar/macro_tools.cpp:823 msgid "" "Opening the compression abstraction layer (compression algorithm used is " "none)..." msgstr "" "Ouverture de la couche d'abstraction gérant la compression (aucun algorithme " "de compression utilisé)..." #: src/libdar/macro_tools.cpp:825 msgid "Opening the compression layer..." 
msgstr "Ouverture de la couche d'abstraction gérant la compression ..." #: src/libdar/macro_tools.cpp:849 src/libdar/macro_tools.cpp:1559 msgid "Creating a new thread to run the compression layer..." msgstr "" "Création d'une nouvelle file d'exécution pour la couche de compression..." #: src/libdar/macro_tools.cpp:863 src/libdar/macro_tools.cpp:1569 msgid "All layers have been created successfully" msgstr "Toutes les couches d'abstraction ont été créées avec succès" #: src/libdar/macro_tools.cpp:866 #, c-format msgid "" "Warning, the archive %S has been encrypted. A wrong key is not possible to " "detect, it would cause DAR to report the archive as corrupted" msgstr "" "Attention, l'archive %S a été chiffrée. Une mauvaise clef est impossible à " "détecter et conduira libdar à signaler l'archive comme corrompue" #: src/libdar/macro_tools.cpp:905 msgid "" "LAX MODE: The catalogue (table of contents) usually takes a few percents of " "the archive at its end, which percentage do you want me to scan (answer by " "an *integer* number between 0 and 100)? " msgstr "" "MODE RELAX : le catalogue (table des matières) occupe en général quelques " "pourcentages de l'archive à sa fin. Quel pourcentage voulez-vous qu'il soit " "balayé pour sa recherche (répondre par un *entier* entre 0 et 100) ? " #: src/libdar/macro_tools.cpp:911 #, c-format msgid "LAX MODE: %i is not a valid percent value" msgstr "MODE RELAX : %i n'est pas une valeur valide de pourcentage" #: src/libdar/macro_tools.cpp:915 #, c-format msgid "%S is not a valid number" msgstr "%S n'est pas un nombre valide" #: src/libdar/macro_tools.cpp:921 #, c-format msgid "" "LAX MODE: Beginning search of the catalogue (from the end toward the " "beginning of the archive, on %i %% of its length), this may take a while..." 
msgstr "" "MODE RELAX : Début de la recherche du catalogue (de la fin vers le début de " "l'archive pour %i %% de sa longueur), l'opération peut durer longtemps ..." #: src/libdar/macro_tools.cpp:930 msgid "" "LAX MODE: Cannot skip at the end of the archive! Using current position to " "start the catalogue search" msgstr "" "MODE RELAX : Impossible d'aller à la fin de l'archive ! On utilise la " "position actuelle comme point de départ de la recherche du catalogue" #: src/libdar/macro_tools.cpp:935 msgid "LAX MODE: Failed to read the catalogue (no data to inspect)" msgstr "MODE RELAX : Échec de lecture du catalogue (aucune donnée à inspecter)" #: src/libdar/macro_tools.cpp:938 msgid "" "LAX MODE: Failed to read the catalogue (0 bytes of the archive length asked " "to look for the catalogue)" msgstr "" "MODE RELAX : Échec de lecture du catalogue (0 octets de l'archive demandé " "pour sa recherche)" #: src/libdar/macro_tools.cpp:949 msgid "" "LAX MODE: Escape sequence seems present in this archive. I have thus two " "different methods, either I look for the escape sequence indicating the " "start of the catalogue or I try each position in turn in the hope it will " "not be data that look like a catalogue" msgstr "" "MODE RELAX : Séquences d'échappement présentes dans l'archive. Deux méthodes " "différentes sont disponibles pour localiser le catalogue : soit on recherche " "la séquence d'échappement indiquant le début du catalogue, soit on essaie " "les différentes positions tour à tour dans l'espoir que cette portion de " "l'archive ressemblera à la structure d'un catalogue" #: src/libdar/macro_tools.cpp:952 msgid "LAX MODE: Trying to locate the escape sequence (safer choice) ?" msgstr "" "MODE RELAX : On essaie de localiser la séquence d'échappement (conseillé) ? " #: src/libdar/macro_tools.cpp:957 msgid "" "LAX MODE: Good point! I could find the escape sequence marking the beginning " "of the catalogue, now trying to read it..." 
msgstr "" "MODE RELAX : Bonne nouvelle ! La séquence d'échappement marquant le début du " "catalogue a été trouvée ! On essaie maintenant de le lire ..." #: src/libdar/macro_tools.cpp:966 msgid "" "LAX MODE: Escape sequence could not be found, it may have been corrupted or " "out of the scanned portion of the archive, trying to find the catalogue the " "other way" msgstr "" "MODE RELAX : La séquence d'échappement n'a pas pu être trouvée, elle a soit " "été corrompue, soit elle est hors du périmètre de recherche dans l'archive. " "On essaye de trouver le catalogue par l'autre méthode, patience ..." #: src/libdar/macro_tools.cpp:989 #, c-format msgid "LAX MODE: %i %% remaining" msgstr "MODE RELAX : %i %% restant" #: src/libdar/macro_tools.cpp:1001 #, c-format msgid "" "Could read a catalogue data structure at offset %i, it contains the " "following:" msgstr "" "La structure de donnée du catalogue a pu être lue à %i de l'archive, elle " "contient ceci :" #: src/libdar/macro_tools.cpp:1003 msgid "Do you want to use it for the operation?" msgstr "Voulez-vous l'utiliser pour l'opération ?" #: src/libdar/macro_tools.cpp:1035 msgid "" "LAX MODE: Reached the end of the area to scan, FAILED to find any catalogue" msgstr "" "MODE RELAX : Fin de la zone de recherche atteinte, ECHEC de la localisation " "du catalogue" #: src/libdar/macro_tools.cpp:1045 msgid "LAX MODE: Failed to read the catalogue" msgstr "MODE RELAX : Échec de lecture du catalogue" #: src/libdar/macro_tools.cpp:1130 msgid "" "Creating low layer: Writing archive into a black hole object (equivalent to /" "dev/null)..." msgstr "" "Création de la couche de base : Ecriture de l'archive dans un trou noir " "(équivalent à /dev/null) ..." #: src/libdar/macro_tools.cpp:1139 msgid "Creating low layer: Writing archive into standard output object..." msgstr "" "Création de la couche de base : Ecriture de l'archive vers la sortie " "standard ..." 
#: src/libdar/macro_tools.cpp:1152 msgid "Creating low layer: Writing archive into a plain file object..." msgstr "" "Création de la couche de base : Ecriture de l'archive dans un fichier " "normal ..." #: src/libdar/macro_tools.cpp:1173 msgid "" "Creating low layer: Writing archive into a sar object (Segmentation and " "Reassembly) for slicing..." msgstr "" "Création de la couche de base : Ecriture de l'archive vers un objet sar " "(Segmentation and Reassemblement) pour le découpage en tranches ..." #: src/libdar/macro_tools.cpp:1211 msgid "Adding cache layer over pipe to provide limited skippability..." msgstr "" "Ajout d'une couche de cache au-dessus du tube pour fournir une capacité de " "déplacement limitée" #: src/libdar/macro_tools.cpp:1241 msgid "" "WARNING: support for secure memory was not available at compilation time, in " "case of heavy memory load, this may lead the password/passphrase provided to " "be wrote to disk (swap space) in clear. You have been warned!" msgstr "" "ATTENTION : le support pour la mémoire sécurisé n'a pas été disponible lors " "de la compilation. En cas d'utilisation importante de la mémoire, les mots " "de passe peuvent être écrit sur disque en clair (fichier d'échange). Vous " "avez été prévenus !" #: src/libdar/macro_tools.cpp:1256 msgid "Generating random key for symmetric encryption..." msgstr "" "Génération d'une clef de chiffrement aléatoire pour le chiffrement " "symmétrique ..." #: src/libdar/macro_tools.cpp:1264 msgid "" "Scrambling is a very weak encryption algorithm, this is a non-sens to use " "with asymmetric encryption" msgstr "" "Le chiffrement \"scramble\" est très faible. C'est un non-sens de l'utiliser " "avec le chiffrement asymétrique" #: src/libdar/macro_tools.cpp:1304 #, c-format msgid "" "For your information, this is the iteration %d for which the randomly " "generated key is reported to be weak by libgcrypt, continuing generating " "another random key... 
patience" msgstr "" "Pour information, c'est l'itération n°%d pour laquelle la clef aléatoire " "générée est considérée comme faible par libgcrypt. La recherche d'une " "nouvelle clef aléatoire continue ... patience" #: src/libdar/macro_tools.cpp:1308 #, c-format msgid "" "... A strong randomly generated key could be found after %d iteration(s)" msgstr "" "Une clef forte générée aléatoirement a pu être trouvée après %d itération(s)" #: src/libdar/macro_tools.cpp:1315 msgid "Key generated" msgstr "Clef générée" #: src/libdar/macro_tools.cpp:1360 msgid "Please confirm your password: " msgstr "Merci de confirmer le mot de passe : " #: src/libdar/macro_tools.cpp:1364 msgid "The two passwords are not identical. Aborting" msgstr "Les deux mots de passe ne sont pas identiques. Abandon" #: src/libdar/macro_tools.cpp:1406 msgid "Writing down the archive header..." msgstr "Ecriture de l'en-tête de l'archive ..." #: src/libdar/macro_tools.cpp:1422 msgid "Adding a new layer on top: scrambler object..." msgstr "Ajout d'une nouvelle couche : Chiffrement faible ..." #: src/libdar/macro_tools.cpp:1434 msgid "Adding a new layer on top: Strong encryption object..." msgstr "Ajout d'une nouvelle couche : Chiffrement fort ..." #: src/libdar/macro_tools.cpp:1454 msgid "Adding a new layer on top: Caching layer for better performances..." msgstr "" "Ajout d'une nouvelle couche : Cache pour de meilleures performances ..." #: src/libdar/macro_tools.cpp:1499 msgid "Writing down the initial elastic buffer through the encryption layer..." msgstr "Ecriture du tampon élastique initial via la couche de chiffrement ..." #: src/libdar/macro_tools.cpp:1511 msgid "Adding a new layer on top: Escape layer to allow sequential reading..." msgstr "" "Ajout d'une nouvelle couche : Couche d'échappement pour permettre la lecture " "séquentielle ..." #: src/libdar/macro_tools.cpp:1541 msgid "Adding a new layer on top: compression..." msgstr "Ajout d'une nouvelle couche : Compression ..." 
#: src/libdar/macro_tools.cpp:1583 msgid "Error creating archive layers: " msgstr "Erreur pendant la création des couches pour l'archive : " #: src/libdar/macro_tools.cpp:1692 msgid "Writing down archive contents..." msgstr "Ecriture du contenu de l'archive ..." #: src/libdar/macro_tools.cpp:1716 msgid "Calculating the signature of the catalogue hash..." msgstr "Calcul de la signature du hashage du catalogue ..." #: src/libdar/macro_tools.cpp:1732 msgid "Writing down the signed hash of the catalogue..." msgstr "Ecriture de la signature du catalogue..." #: src/libdar/macro_tools.cpp:1782 msgid "Closing the compression layer..." msgstr "Fermeture de la couche de compression ..." #: src/libdar/macro_tools.cpp:1797 msgid "Closing the escape layer..." msgstr "Fermeture de la couche d'échappement..." #: src/libdar/macro_tools.cpp:1817 msgid "Writing down the first archive terminator..." msgstr "Ecriture du premier terminateur d'archive..." #: src/libdar/macro_tools.cpp:1823 msgid "writing down the final elastic buffer through the encryption layer..." msgstr "Ecriture du tampon élastique terminal via la couche de chiffrement ..." #: src/libdar/macro_tools.cpp:1862 msgid "Closing the encryption layer..." msgstr "Fermeture de la couche de chiffrement ..." #: src/libdar/macro_tools.cpp:1884 msgid "Writing down archive trailer..." msgstr "Ecriture de la fin d'archive ..." #: src/libdar/macro_tools.cpp:1889 msgid "Writing down the second archive terminator..." msgstr "Ecriture du second terminateur d'archive ..." #: src/libdar/macro_tools.cpp:1900 msgid "Closing archive low layer..." msgstr "Fermeture de la couche de base ..." #: src/libdar/macro_tools.cpp:1917 msgid "Archive is closed." msgstr "L'archive est fermée." #: src/libdar/macro_tools.cpp:2084 msgid "" "The format version of the archive is too high for that software version, try " "reading anyway?" msgstr "" "La version du format ce cette archive est trop grand pour cette version " "d'application. 
On essaye quand même lire cette archive ?" #: src/libdar/fichier_local.cpp:152 msgid "Error getting size of file: " msgstr "Erreur pendant l'obtention de la taille du fichier : " #: src/libdar/fichier_local.cpp:262 msgid "Error getting file reading position: " msgstr "Erreur lors de l'obtention de la position du fichier à lire : " #: src/libdar/fichier_local.cpp:294 src/libdar/fichier_local.cpp:296 msgid "Error while reading from file: " msgstr "Erreur lors de la lecture du fichier : " #: src/libdar/fichier_local.cpp:345 src/libdar/fichier_local.cpp:351 msgid "Error while writing to file: " msgstr "Erreur lors de l'enregistrement du fichier : " #: src/libdar/fichier_local.cpp:411 src/libdar/archive_options.cpp:474 #: src/libdar/archive_options.cpp:1729 msgid "Furtive read mode" msgstr "Mode de lecture furtif" #: src/libdar/fichier_local.cpp:429 msgid "" "No space left for inode, you have the opportunity to make some room now. " "When done : can we continue ?" msgstr "" "Plus d'espace disponibles pour les inodes, vous avez la possibilité d'en " "faire maintenant. Quand ce sera fait, pouvons-nous continuer ?" #: src/libdar/fichier_local.cpp:440 msgid "Cannot open file : " msgstr "Impossible d'ouvrir le fichier : " #: src/libdar/fichier_local.cpp:463 #, c-format msgid "Cannot dup() filedescriptor while copying \"fichier_local\" object: %s" msgstr "" "Impossible de dup()liquer un descripteur pendant la copie d'un " "\"fichier_local\" : %s" #: src/libdar/erreurs.hpp:129 msgid "Lack of Memory" msgstr "Manque de mémoire" #: src/libdar/erreurs.hpp:146 msgid "Lack of Secured Memory" msgstr "Manque de mémoire sécurisée" #: src/libdar/erreurs.hpp:204 msgid "" "Cannot handle such a too large integer. Use a full version of libdar " "(compiled to rely on the \"infinint\" integer type) to solve this problem" msgstr "" "Impossible de gérer un entier aussi grand. 
Utiliser la version complète de " "libdar (compilé pour utiliser le type \"infinint\") pour résoudre ce problème" #: src/libdar/erreurs.hpp:385 msgid "Thread cancellation requested, aborting as soon as possible" msgstr "Demande d'arrêt de file d'exécution reçue, arrêt immédiat" #: src/libdar/erreurs.hpp:385 msgid "Thread cancellation requested, aborting as properly as possible" msgstr "" "Demande d'arrêt de file d'exécution reçue, on termine proprement l'opération " "en cours" #: src/libdar/generic_file_overlay_for_gpgme.cpp:55 #, c-format msgid "Error creating data buffer overlay for GPGME: %s" msgstr "Erreur de création d'un tampon pour GPGME: %s " #: src/libdar/generic_file_overlay_for_gpgme.cpp:107 msgid "File offset too large to be stored in off_t type" msgstr "" "Index du fichier trop grand pour être stocké dans une variable de type off_t" #: src/libdar/fichier_libcurl.hpp:100 msgid "user/group ownership not supported for this repository" msgstr "utilisateur/groupe non supporté pour ce dépôt" #: src/libdar/filesystem_hard_link_write.cpp:292 #, c-format msgid "" "Error creating hard link %s : %s\n" " Trying to duplicate the inode" msgstr "" "Erreur lors de la création du lien dur %s : %s\n" " Tentative de duplication de l'inode" #: src/libdar/filesystem_hard_link_write.cpp:306 #, c-format msgid "" "Error creating hard link : %s , the inode to link with [ %s ] has " "disappeared, re-creating it" msgstr "" "Erreur lors de la création du lien dur : %s , l'inode à lier [ %s ] a " "disparue, elle va être recréée" #: src/libdar/filesystem_hard_link_write.cpp:313 #, c-format msgid "" "Error creating hard link : %s , the inode to link with [ %s ] is not " "present, cannot restore this hard link" msgstr "" "Erreur lors de la création du lien dur : %s , l'inode à lier [ %s ] n'est " "pas présente, le lien dur ne peut pas être restauré" #: src/libdar/filesystem_hard_link_write.cpp:385 msgid "Bad CRC, data corruption occurred" msgstr "Mauvais CRC, une corruption des 
données a eu lieu" #: src/libdar/filesystem_hard_link_write.cpp:435 #, c-format msgid "" "error restoring Unix socket %s, path too long to be stored properly, socket " "will be created as %s instead, do you confirm?" msgstr "" "Erreur de création de la prise Unix %s: chemin trop long, la prise sera crée " "comme %s, OK ?" #: src/libdar/filesystem_hard_link_write.cpp:437 msgid "Error creating Unix socket file: " msgstr "Erreur lors de la création de la prise Unix : " #: src/libdar/filesystem_hard_link_write.cpp:457 msgid "Could not create inode: " msgstr "Création d'une inode impossible : " #: src/libdar/filesystem_hard_link_write.cpp:459 msgid "Cannot create inode: " msgstr "Création d'une inode impossible : " #: src/libdar/filesystem_hard_link_write.cpp:459 msgid " Ready to continue ?" msgstr " Prêt à continuer ?" #: src/libdar/entrepot.cpp:69 msgid "root's entrepot must be an absolute path: " msgstr "La racine de l'entrepot doit être une chemin absolu: " #: src/libdar/entrepot.cpp:156 msgid "Error met while creating the hash file: " msgstr "Erreur lors de la cration du fichier de hashage : " #: src/libdar/elastic.cpp:80 msgid "Zero is not a valid size for an elastic buffer" msgstr "Zéro n'est pas une taille valide de tampon élastique" #: src/libdar/elastic.cpp:82 msgid "Size too large for an elastic buffer" msgstr "taille trop grande pour un tampon élastique" #: src/libdar/elastic.cpp:99 src/libdar/elastic.cpp:105 #: src/libdar/elastic.cpp:135 src/libdar/elastic.cpp:141 #: src/libdar/elastic.cpp:157 src/libdar/elastic.cpp:165 #: src/libdar/elastic.cpp:194 src/libdar/elastic.cpp:202 #: src/libdar/elastic.cpp:212 msgid "elastic buffer incoherent structure" msgstr "Structure incohérent du tampon élastique" #: src/libdar/elastic.cpp:131 src/libdar/elastic.cpp:190 msgid "too large elastic buffer or elastic buffer incoherent structure" msgstr "Tampon élastique trop grand ou de structure incohérente" #: src/libdar/elastic.cpp:219 msgid "not enough space provided to dump 
the elastic buffer" msgstr "Pas assez de mémoire pour écrire le tampon élastique" #: src/libdar/fichier_global.cpp:110 src/libdar/tuyau.cpp:368 msgid "" "No space left on device, you have the opportunity to make room now. When " "ready : can we continue ?" msgstr "" "Plus d'espace disque disponible, vous avez la possibilité d'en faire " "maintenant. Quand ce sera fait, pouvons-nous continuer ?" #: src/libdar/mycurl_easyhandle_node.cpp:39 msgid "Error met while creating a libcurl handle" msgstr "Erreur lors de la cration d'un objet libcurl" #: src/libdar/mycurl_easyhandle_node.cpp:51 msgid "Error met while duplicating libcurl handle" msgstr "Erreur lors de la duplication d'un objet libcurl" #: src/libdar/user_interaction5.cpp:167 src/libdar/shell_interaction.cpp:405 msgid "Continue? " msgstr "Continuer ?" #: src/libdar/filesystem_restore.cpp:277 msgid "" "Cannot restore a delta binary patch without a file to patch on filesystem" msgstr "" "Impossible de restaurer un batch binaire (delta patch) sans fichier à " "patcher dans le système de fichier" #: src/libdar/filesystem_restore.cpp:285 msgid "" "Cannot restore a inode metadata only without an existing file on filesystem" msgstr "" "Impossible de restaurer les métadonnées d'une inode sans une inode présente " "sur le système de fichier" #: src/libdar/filesystem_restore.cpp:302 #, c-format msgid "Cannot remove non-existent file from filesystem: %S" msgstr "Impossible de supprimer un fichier qui n'existe pas : %S" #: src/libdar/filesystem_restore.cpp:307 src/libdar/filesystem_restore.cpp:751 msgid "Restoring file's data: " msgstr "Restauration des données du fichier : " #: src/libdar/filesystem_restore.cpp:328 src/libdar/filesystem_restore.cpp:970 msgid "Restoring file's EA: " msgstr "Restauration des Attributs Étendus du fichier : " #: src/libdar/filesystem_restore.cpp:339 src/libdar/filesystem_restore.cpp:522 #, c-format msgid "Restoration of EA for %S aborted: " msgstr "Restauration des EA de %S arrêtée : " #: 
src/libdar/filesystem_restore.cpp:351 src/libdar/filesystem_restore.cpp:1120 msgid "Restoring file's FSA: " msgstr "Restauration des FSA du fichier : " #: src/libdar/filesystem_restore.cpp:366 src/libdar/filesystem_restore.cpp:536 #, c-format msgid "Restoration of FSA for %S aborted: " msgstr "Restauration des FSA de %S arrêtée : " #: src/libdar/filesystem_restore.cpp:424 src/libdar/filesystem_restore.cpp:619 msgid "Restoring linux immutable FSA for " msgstr "Restauration des FSA du fichier " #: src/libdar/filesystem_restore.cpp:429 #, c-format msgid "Restoration of linux immutable FSA for %S aborted: " msgstr "Restauration du FSA immutable pour %S arrêtée : " #: src/libdar/filesystem_restore.cpp:484 msgid "Restoring file's data using a delta patching: " msgstr "Restauration des données du fichier à l'aide de delta patch : " #: src/libdar/filesystem_restore.cpp:495 #, c-format msgid "Cannot restore delta diff for %S as exsiting inode is not a plain file" msgstr "" "Impossible de restaurer le patch pour %S car l'inode existante n'est un " "fichier normal" #: src/libdar/filesystem_restore.cpp:670 #, c-format msgid "%S is about to be removed from filesystem, continue?" msgstr "" "%S est sur le point d'être supprimé du système de fichier, continue t-on ?" #: src/libdar/filesystem_restore.cpp:675 src/libdar/filesystem_restore.cpp:684 #, c-format msgid "Removing file (reason is file recorded as removed in archive): %S" msgstr "" "Suppressin du fichier (fichier marqué comme supprimé dans l'archive) : %S" #: src/libdar/filesystem_restore.cpp:682 #, c-format msgid "" "%S must be removed, but does not match expected type, remove it anyway ?" msgstr "" "%S est sur le point d'être supprimé, mais le type de fichier ne correspond " "pas avec ce qui est attendu, l'efface t-on ?" #: src/libdar/filesystem_restore.cpp:690 #, c-format msgid "" "%S: Overwriting policy (Data) is undefined for that file, do not know " "whether removal is allowed or not!" 
msgstr "" "%S : La politique d'écrasement (Données) reste non définie pour ce fichier, " "impossible donc de savoir si la suppression est autorisée ou non !" #: src/libdar/filesystem_restore.cpp:727 #, c-format msgid "" "Directory %S cannot be restored: overwriting not allowed and a non-directory " "inode of that name already exists, all files in that directory will be " "skipped for restoration:" msgstr "" "Le répertoire %S ne peut pas être restauré : écrasement interdit et une " "inode n'étant pas un répertoire existe déjà sous ce nom. Aucun fichiers de " "ce répertoire ne sera restauré : " #: src/libdar/filesystem_restore.cpp:736 #, c-format msgid "%S is about to be overwritten, OK?" msgstr "%S est sur le point d'être écrasé, OK ?" #: src/libdar/filesystem_restore.cpp:768 msgid "" "Existing file is of a different nature, cannot only restore inode metadata" msgstr "" "Impossible de restaurer les seules métadonnées car le fichier présent est de " "nature différente de celui restauré" #: src/libdar/filesystem_restore.cpp:786 #, c-format msgid "Existing EA for %S could not be read and preserved: " msgstr "" "les Attributs Étendus existants de %S n'ont pas pu être lu ni préservé : " #: src/libdar/filesystem_restore.cpp:806 #, c-format msgid "Existing FSA for %S could not be read and preserved: " msgstr "les FSA existants de %S n'ont pas pu être lu ni préservé : " #: src/libdar/filesystem_restore.cpp:820 #, c-format msgid "Removing existing immutable flag in order to restore data for %S" msgstr "Suppression du drapeau immutable afin de restorer les données de %S" #: src/libdar/filesystem_restore.cpp:846 #, c-format msgid "Existing EA for %S could not be preserved : " msgstr "les AE existants de %S n'ont pas pu être préservés : " #: src/libdar/filesystem_restore.cpp:863 #, c-format msgid "Existing FSA for %S could not be preserved : " msgstr "les FSA existants de %S n'ont pas pu être préservés : " #: src/libdar/filesystem_restore.cpp:878 #, c-format msgid "" "%S is 
about to be deleted (required by overwriting policy), do you agree?" msgstr "" "%S est sur le point d'être supprimé (action requise par la politique " "d'écrasement), OK ?" #: src/libdar/filesystem_restore.cpp:880 #, c-format msgid "Removing file (reason is overwriting policy): %S" msgstr "Suppression du fichier (politique d'écrasement) : %S" #: src/libdar/filesystem_restore.cpp:886 #, c-format msgid "" "%S: Overwriting policy (Data) is undefined for that file, do not know " "whether overwriting is allowed or not!" msgstr "" "%S : La politique d'écrasement (Données) reste non définie pour ce fichier, " "impossible de savoir si l'écrasement est autorisé ou pas !" #: src/libdar/filesystem_restore.cpp:949 #, c-format msgid "EA for %S are about to be overwritten, OK?" msgstr "Des Attributs Étendus de %S sont sur le point d'être écrasés, OK ?" #: src/libdar/filesystem_restore.cpp:964 #, c-format msgid "" "EA for %S have not been overwritten because this file is a hard link " "pointing to an already restored inode" msgstr "" "Les Attributs Étendus pour %S n'ont pas été écrasés à car ce fichier est un " "lien dur pointant sur une inode déjà restaurée" #: src/libdar/filesystem_restore.cpp:984 #, c-format msgid "EA for %S are about to be removed, OK?" msgstr "Les Attributs Étendus de %S sont sur le point d'être supprimés, OK ?" 
#: src/libdar/filesystem_restore.cpp:995 #, c-format msgid "" "EA for %S have not been cleared as requested by the overwriting policy " "because this file is a hard link pointing to an already restored inode" msgstr "" "Les Attributs Étendus pour %S n'ont pas été effacés tel prévu par la " "politique d'écrasement car ce fichier est un lien dur pointant sur une inode " "déjà restaurée" #: src/libdar/filesystem_restore.cpp:1001 msgid "Clearing file's EA (requested by overwriting policy): " msgstr "" "Effacement des attributs étendus (requis par la politique d'écrasement) : " #: src/libdar/filesystem_restore.cpp:1016 #, c-format msgid "EA for %S are about to be merged, OK?" msgstr "Les Attributs Étendus de %S sont sur le point d'être fusionnés, OK ?" #: src/libdar/filesystem_restore.cpp:1047 #, c-format msgid "" "%S: Overwriting policy (EA) is undefined for that file, do not know whether " "overwriting is allowed or not!" msgstr "" "%S : La politique d'écrasement (Attributs Étendus) reste non définie pour ce " "fichier, impossible de savoir si l'écrasement est autorisé ou pas !" #: src/libdar/filesystem_restore.cpp:1099 #: src/libdar/filesystem_restore.cpp:1145 #, c-format msgid "FSA for %S are about to be overwritten, OK?" msgstr "Les FSA de %S sont sur le point d'être écrasés, OK ?" #: src/libdar/filesystem_restore.cpp:1114 #, c-format msgid "" "FSA for %S have not been overwritten because this file is a hard link " "pointing to an already restored inode" msgstr "" "Les FSA de %S n'ont pas été écrasés car ce fichier est un lien dur pointant " "sur une inode déjà restaurée" #: src/libdar/filesystem_restore.cpp:1171 #, c-format msgid "" "%S: Overwriting policy (FSA) is undefined for that file, do not know whether " "overwriting is allowed or not!" msgstr "" "%S : La politique d'écrasement (FSA) reste non définie pour ce fichier, " "impossible de savoir si l'écrasement est autorisé ou pas !" 
#: src/libdar/tlv.cpp:66 msgid "Missing data to initiate a TLV object" msgstr "il manque des données pour initialiser un objet TLV" #: src/libdar/cat_lien.cpp:98 msgid "symbolic link does not point to the same target: " msgstr "le lien symbolique ne pointe pas sur la même cible: " #: src/libdar/hash_fichier.cpp:63 #, c-format msgid "" "Error while initializing hash: Hash algorithm not available in libgcrypt: %s/" "%s" msgstr "" "Erreur lors de l'initialisation d'une fonction de hashage : Algorithme non " "disponible dans libgcrypt : %s/%s" #: src/libdar/hash_fichier.cpp:67 #, c-format msgid "Error while creating hash handle: %s/%s" msgstr "" "Erreur lors de la création d'une référence pour générer une somme de " "contrôle : %s/%s" #: src/libdar/hash_fichier.cpp:69 src/libdar/hash_fichier.cpp:107 #: src/libdar/hash_fichier.cpp:122 msgid "" "Missing hashing algorithms support (which is part of strong encryption " "support, using libgcrypt)" msgstr "" "Aucun support pour les algorithmes de hashage (partie intégrante du support " "pour le chiffrement fort, grâce à libgcrypt)" #: src/libdar/hash_fichier.cpp:155 msgid "Failed writing down the hash: " msgstr "Échec lors de l'écriture de la somme de contrôle : " #: src/libdar/escape_catalogue.cpp:89 msgid "" "incoherent data after escape sequence, cannot read internal data set label" msgstr "" "incohérence à la suite d'une séquence d'échappement : impossible de lire le " "label interne du jeu de données" #: src/libdar/escape_catalogue.cpp:99 msgid "Could not find tape mark for the internal catalogue" msgstr "" "Impossible de trouver la séquence d'échappement pour le catalogue interne" #: src/libdar/escape_catalogue.cpp:489 msgid "" "Escape sequences used for reading lead the archive to place some files out " "of the specified root. 
To overcome this problem, try reading the archive in " "direct mode (not using sequential reading), try repairing the archive using " "Parchive if redundancy data has been created or in last resort try using the " "lax mode" msgstr "" "Les séquences d'échappement utilisées pour lire l'archive conduisent à " "placer des fichiers en dehors de l'aborescence spécifiée. Pour résoudre ce " "problème, essayer de lire l'archive en mode direct, essayez de réparer " "l'archive avec Parchive si des fichiers de redondance ont été crées ou en " "dernier lieu essayez de lire l'archive en mode relax" #: src/libdar/escape_catalogue.cpp:492 msgid "" "LAX MODE: Archive directory structure is corrupted, it would lead to place " "some files out of the specified root directory. Restoring different " "directory contents at the root not out of it, which will put files of " "different directories in the specified root directory" msgstr "" "La structure de répertoires de l'archive est corrompue, elle conduirait à " "placer des fichiers en dehors de sa racine. Restauration du contenu de tels " "répertoires à la racine et non en dehors" #: src/libdar/escape_catalogue.cpp:507 msgid "" "LAX MODE: found unknown catalogue entry, assuming data corruption occurred. " "Skipping to the next entry, this may lead to improper directory structure " "being restored, if the corrupted data was a directory" msgstr "" "MODE RELAX : entrée inconnue trouvée dans le catalogue, on considère qu'une " "corruption de données a eu lieu. Passage à l'entrée suivante, ce qui peut " "conduire à une mauvaise restauration de la structure de répertoires, si " "cette entrée corrompue était un répertoire" #: src/libdar/escape_catalogue.cpp:514 msgid "Corrupted entry following an escape mark in the archive" msgstr "" "Entrée corrompue dans l'archive à la suite d'une séquence d'échappement" #: src/libdar/escape_catalogue.cpp:543 msgid "" "Uncompleted archive! Assuming it has been interrupted during the backup " "process. 
If an error has been reported just above, simply ignore it, this is " "about the file that was saved at the time of the interruption." msgstr "" "Archive incomplète ! On considère qu'elle a été interrompue lors du " "processus de sauvegarde. Si une erreur a été reportée juste avant, on peut " "l'ignorer, il s'agit du fichier qui était en cours de sauvegarde au moment " "de l'interruption." #: src/libdar/escape_catalogue.cpp:578 msgid "Cannot extract from the internal catalogue the list of files to remove" msgstr "" "Impossible d'extraire du catalogue interne la liste des fichiers à effacer" #: src/libdar/escape_catalogue.cpp:639 src/libdar/i_archive.cpp:270 #: src/libdar/i_archive.cpp:304 msgid "" "Archive internal catalogue is not identically signed as the archive itself, " "this might be the sign the archive has been compromised" msgstr "" "Le catalogue interne de l'archive n'a pas été signé de la même façon que " "l'archive elle-même, c'est probablement le signe d'une compromission de " "l'archive" #: src/libdar/escape_catalogue.cpp:655 msgid "" "Archive internal catalogue is properly signed but its content does not match " "the tape marks used so far for sequentially reading. Possible data " "corruption or archive compromission occurred! if data extracted in " "sequential read mode does not match the data extracted in direct access " "mode, consider the sequential data has been been modified after the archive " "has been generated" msgstr "" "Le catalogue interne de l'archive est correctement signé mais ne correspond " "pas aux marques de bandes utilisée pour la lecture séquentielle. Il est " "possible qu'une corruption de donnée ou une compromission de l'archive ait " "eu lieu! Si les données extraites en mode séquentiel ne correspondent pas à " "celle extraites en mode drect, on peut considérer que l'archive a été " "modifié suite depuis sa création." 
#: src/libdar/escape_catalogue.cpp:795 msgid "" "Resetting the sequential reading process of the archive contents while it is " "not finished, will make all data unread so far becoming inaccessible" msgstr "" "Remettre à zéro le processus de lecture séquentielle alors qu'il n'est pas " "terminé, rendra inaccessibles toutes les données non lues jusque là" #: src/libdar/cat_entree.cpp:84 src/libdar/cat_entree.cpp:132 #: src/libdar/cat_entree.cpp:142 msgid "corrupted file" msgstr "fichier corrompu" #: src/libdar/cat_entree.cpp:134 msgid "" "LAX MODE: Unexpected saved status for end of directory entry, assuming data " "corruption occurred, ignoring and continuing" msgstr "" "MODE RELAX : état de sauvegarde inattendu pour une fin de répertoire, " "corruption des données probable et ignorée. On continue" #: src/libdar/cat_entree.cpp:144 msgid "" "LAX MODE: Unexpected saved status for class \"cat_detruit\" object, assuming " "data corruption occurred, ignoring and continuing" msgstr "" "MODE RELAX : état de sauvegarde inattendu pour un objet de la classe " "\"cat_detruit\", corruption de données probable et ignorée. On continue" #: src/libdar/cat_entree.cpp:153 msgid "unknown type of data in catalogue" msgstr "type de donnée inconnu dans le catalogue" #: src/libdar/cat_entree.cpp:156 msgid "" "LAX MODE: found unknown catalogue entry, assuming data corruption occurred, " "cannot read further the catalogue as I do not know the length of this type " "of entry" msgstr "" "MODE RELAX : Entrée inconnue dans le catalogue, corruption de données " "probable qui interdit de lire plus avant le contenu de l'archive car " "la longueur de cet enregistrement n'est pas connue" #: src/libdar/cat_entree.cpp:207 msgid "unknown entry" msgstr "entrée inconnue" #: src/libdar/cat_entree.cpp:208 #, c-format msgid "Entry information CRC failure for %S. Ignore the failure?" msgstr "Erreur de CRC sur l'entrée %S. Ignorer l'erreur ?" 
#: src/libdar/cat_entree.cpp:214 #, c-format msgid "Entry information CRC failure for %S" msgstr "Erreur de CRC sur l'entrée %S" #: src/libdar/cat_entree.cpp:216 msgid "Entry information CRC failure" msgstr "Erreur de CRC sur l'information d'une entrée dans la table de contenu" #: src/libdar/cat_entree.cpp:468 msgid "directory" msgstr "répertoire" #: src/libdar/cat_entree.cpp:472 msgid "hard linked inode" msgstr "inode liée en dur" #: src/libdar/cat_entree.cpp:474 msgid "plain file" msgstr "fichier plein" #: src/libdar/cat_entree.cpp:476 msgid "soft link" msgstr "lien symbolique" #: src/libdar/cat_entree.cpp:478 msgid "char device" msgstr "fichier spécial caractère" #: src/libdar/cat_entree.cpp:480 msgid "block device" msgstr "fichier spécial bloc" #: src/libdar/cat_entree.cpp:482 msgid "named pipe" msgstr "tube nommé" #: src/libdar/cat_entree.cpp:484 msgid "unix socket" msgstr "prise unix" #: src/libdar/cat_entree.cpp:486 msgid "deleted entry" msgstr "entrée supprimée" #: src/libdar/cat_entree.cpp:488 msgid "door inode" msgstr "inode Porte" #: src/libdar/shell_interaction.cpp:175 msgid "" "No terminal found for user interaction. All questions will be assumed a " "negative answer (less destructive choice), which most of the time will abort " "the program." msgstr "" "Aucun terminal n'a été trouvé pour interagir avec l'utilisateur. On " "considérera une réponse négative à toutes les questions posées à " "l'utilisateur (choix le moins destructeur), ce qui la plupart du temps " "arrêtera le programme." 
#: src/libdar/shell_interaction.cpp:239 msgid "Error reading character: " msgstr "Erreur pendant la lecture d'un caractère : " #: src/libdar/shell_interaction.cpp:265 #, c-format msgid "" "[Data ][D][ EA ][FSA][Compr][S]| Permission | User | Group | Size " "| Date | filename" msgstr "" "[Data ][D][ EA ][FSA][compr][S]| Accès |Proprio|Groupe | Taille " "| Date | Nom de fichier" #: src/libdar/shell_interaction.cpp:270 #, c-format msgid "" "Access mode | User | Group | Size | Date |" "[Data ][D][ EA ][FSA][Compr][S]| Filename" msgstr "" "Droit d'accès |Propr.|Groupe | Taille | Date |" "[Infos][D][ EA ][FSA][Compr][S]| Nom de fichier" #: src/libdar/shell_interaction.cpp:315 src/libdar/database5.cpp:78 #, c-format msgid "dar path : %S" msgstr "chemin de dar : %S" #: src/libdar/shell_interaction.cpp:316 src/libdar/database5.cpp:79 #, c-format msgid "dar options : %S" msgstr "options de dar : %S" #: src/libdar/shell_interaction.cpp:317 src/libdar/database5.cpp:80 #, c-format msgid "database version: %S" msgstr "Version B.de D. : %S" #: src/libdar/shell_interaction.cpp:318 src/libdar/database5.cpp:81 #, c-format msgid "compression used: %S" msgstr "compression : %S" #: src/libdar/shell_interaction.cpp:320 src/libdar/database5.cpp:83 #, c-format msgid "archive # | path | basename" msgstr "num archive | chemin | nom d'archive" #: src/libdar/shell_interaction.cpp:328 src/libdar/database5.cpp:97 msgid "" msgstr "" #: src/libdar/shell_interaction.cpp:378 src/libdar/database5.cpp:148 #, c-format msgid " archive # | most recent/total data | most recent/total EA" msgstr " Num. 
archive | derniers fichiers/total | EA plus récent/total" #: src/libdar/shell_interaction.cpp:379 src/libdar/database5.cpp:149 #, c-format msgid "--------------+-------------------------+-----------------------" msgstr "--------------+-------------------------+-----------------------" #: src/libdar/shell_interaction.cpp:442 msgid " [return = YES | Esc = NO]" msgstr " [Entrée = OUI | Echap = NON]" #: src/libdar/shell_interaction.cpp:461 msgid "Error while reading user answer from terminal: " msgstr "Erreur lors de la lecture de la réponse sur le terminal : " #: src/libdar/shell_interaction.cpp:466 msgid "Continuing..." msgstr "Poursuite..." #: src/libdar/shell_interaction.cpp:468 msgid "Escaping..." msgstr "Annulation..." #: src/libdar/shell_interaction.cpp:533 msgid "Secured string can only be read from a terminal" msgstr "Une chaine sécurés ne peut être saisie que depuis un terminal" #: src/libdar/shell_interaction.cpp:558 msgid "provided password is too long for the allocated memory" msgstr "le mot de passe fourni est trop grand pour la mémoire allouée" #: src/libdar/shell_interaction.cpp:594 msgid "Error while changing user terminal properties: " msgstr "Erreur lors du changement des propriétés du terminal : " #: src/libdar/shell_interaction.cpp:636 #, c-format msgid "%S [%c] [ REMOVED ENTRY ] (%S) %S" msgstr "%S [%c] [ ENTREÉ SUPPR. 
] (%S) %S" #: src/libdar/shell_interaction.cpp:666 src/libdar/shell_interaction.cpp:718 msgid " Extended Attribute: [" msgstr " Attribut Étendu : [" #: src/libdar/shell_interaction.cpp:934 src/libdar/database5.cpp:181 msgid "[ Saved ]" msgstr "[ Sauvé ]" #: src/libdar/shell_interaction.cpp:936 src/libdar/shell_interaction.cpp:941 #: src/libdar/database5.cpp:183 src/libdar/database5.cpp:188 msgid "[ ]" msgstr "[ ]" #: src/libdar/shell_interaction.cpp:939 src/libdar/database5.cpp:186 msgid "[ EA ]" msgstr "[Attrib.]" #: src/libdar/shell_interaction.cpp:955 src/libdar/database5.cpp:203 msgid "removed " msgstr "supprimé " #: src/libdar/shell_interaction.cpp:956 src/libdar/database5.cpp:204 msgid "present " msgstr "présent " #: src/libdar/shell_interaction.cpp:957 src/libdar/database5.cpp:205 msgid "saved " msgstr "sauvé " #: src/libdar/shell_interaction.cpp:958 src/libdar/database5.cpp:206 msgid "absent " msgstr "absent " #: src/libdar/shell_interaction.cpp:959 src/libdar/database5.cpp:207 msgid "patch " msgstr "patch " #: src/libdar/shell_interaction.cpp:960 src/libdar/database5.cpp:208 msgid "BROKEN " msgstr "CASSÉ " #: src/libdar/shell_interaction.cpp:961 src/libdar/database5.cpp:209 msgid "inode " msgstr "inode " #: src/libdar/entrepot_local.cpp:169 #, c-format msgid "Cannot remove file %s: %s" msgstr "Suppression du fichier %s impossible: %s" #: src/libdar/i_libdar_xform.cpp:263 msgid "Error transforming the archive :" msgstr "Erreur lors de la transformation de l'archive : " #: src/libdar/list_entry.cpp:50 msgid "[DIRTY]" msgstr "[SALE ]" #: src/libdar/list_entry.cpp:52 src/libdar/list_entry.cpp:71 msgid "[Saved]" msgstr "[Sauvé]" #: src/libdar/list_entry.cpp:54 msgid "[Inode]" msgstr "[Inode]" #: src/libdar/list_entry.cpp:56 src/libdar/list_entry.cpp:73 msgid "[InRef]" msgstr "[EnRef]" #: src/libdar/cat_detruit.cpp:46 msgid "missing data to build" msgstr "manque de données pour construire" #: src/libdar/crit_action.cpp:145 msgid "cannot evaluate an empty chain 
in an overwriting policy" msgstr "ne peut pas évaluer une chaîne vide comme politique d'écrasement" #: src/libdar/cache.cpp:69 msgid "wrong value given as initial_size argument while initializing cache" msgstr "" "Mauvaise valeur donnée à \"initial_size\" lors de l'initialisation du cache" #: src/libdar/ea_filesystem.cpp:189 #, c-format msgid "Error while adding EA %s : %s" msgstr "Erreur d'ajout de l'EA %s : %s" #: src/libdar/ea_filesystem.cpp:226 #, c-format msgid "Error while removing %s : %s" msgstr "Erreur de suppression de %s : %s" #: src/libdar/ea_filesystem.cpp:258 src/libdar/ea_filesystem.cpp:282 #, c-format msgid "Error reading attribute %s of file %s : %s" msgstr "Erreur lors de la lecture de l'attribut %s du fichier %s : %s" #: src/libdar/ea_filesystem.cpp:332 src/libdar/ea_filesystem.cpp:347 #, c-format msgid "Error retrieving EA list for %s : %s" msgstr "Erreur lors de l'inventaire des AE pour %s : %s" #: src/libdar/database_header.cpp:102 msgid "" "The format version of this database is too high for that software version, " "use a more recent software to read or modify this database" msgstr "" "La version du format de cette base de données est trop élevée pour cette " "version de logiciel. 
Utiliser une version du logiciel plus récente pour lire " "ou modifier cette base de donnée" #: src/libdar/database_header.cpp:105 msgid "Unknown header option in database, aborting\n" msgstr "En-tête inconnu dans la base de donnée, abandon\n" #: src/libdar/database_header.cpp:148 msgid "Cannot create database, file exists" msgstr "Impossible de créer la base, un fichier de même nom existe déjà" #: src/libdar/database_header.cpp:191 #, c-format msgid "Error reading database %S : " msgstr "Erreur lors de la lecture de la base %S : " #: src/libdar/user_interaction_callback5.cpp:93 #: src/libdar/user_interaction_callback.cpp:98 msgid "nullptr given as argument of user_interaction_callback()" msgstr "l'argument nullptr a été transmis à user_interaction_callback()" #: src/libdar/user_interaction_callback5.cpp:130 #: src/libdar/user_interaction_callback5.cpp:135 #: src/libdar/user_interaction_callback5.cpp:152 #: src/libdar/user_interaction_callback5.cpp:156 #: src/libdar/user_interaction_callback5.cpp:173 #: src/libdar/user_interaction_callback5.cpp:177 #: src/libdar/user_interaction_callback5.cpp:198 #: src/libdar/user_interaction_callback5.cpp:202 #: src/libdar/user_interaction_callback5.cpp:225 #: src/libdar/user_interaction_callback5.cpp:229 #: src/libdar/user_interaction_callback5.cpp:246 #: src/libdar/user_interaction_callback5.cpp:250 #: src/libdar/user_interaction_callback5.cpp:267 #: src/libdar/user_interaction_callback5.cpp:271 #: src/libdar/user_interaction_callback5.cpp:290 #: src/libdar/user_interaction_callback5.cpp:294 #: src/libdar/user_interaction_callback5.cpp:313 #: src/libdar/user_interaction_callback5.cpp:317 msgid "No exception allowed from libdar callbacks" msgstr "" "Aucune exception n'est autorisée en provenance d'une fonction de rappel de " "libdar" #: src/libdar/real_infinint.cpp:70 src/libdar/limitint.hpp:272 msgid "Reached end of file before all data could be read" msgstr "" "Fin de fichier atteinte alors que toutes les données n'ont pu être 
lues" #: src/libdar/real_infinint.cpp:83 src/libdar/limitint.hpp:285 msgid "Badly formed \"infinint\" or not supported format" msgstr "\"infinint\" malformé ou format non supporté" #: src/libdar/real_infinint.cpp:247 src/libdar/limitint.hpp:423 msgid "" "Subtracting an \"infinint\" greater than the first, \"infinint\" cannot be " "negative" msgstr "" "Soustraction d'un \"infinint\" par un autre plus grand que le premier. Un " "\"infinint\" ne peut pas être négatif" #: src/libdar/real_infinint.cpp:840 src/libdar/limitint.hpp:464 #: src/libdar/limitint.hpp:473 msgid "Division by zero" msgstr "Division par zéro" #: src/libdar/terminateur.cpp:133 msgid "Badly formatted terminator, cannot extract catalogue location: " msgstr "" "Terminateur malformé, la position du catalogue ne peut être déterminée : " #: src/libdar/header_version.cpp:79 msgid "LAX MODE: Failed to read the archive header's format version." msgstr "" "MODE RELAX : Échec de lecture de la version du format de l'en-tête de " "l'archive" #: src/libdar/header_version.cpp:82 #, c-format msgid "" "LAX MODE: Please provide the archive format: You can use the table at %s to " "find the archive format depending on the release version, (for example if " "this archive has been created using dar release 2.3.4 to 2.3.7 answer \"6\" " "without the quotes here): " msgstr "" "MODE RELAX : Merci d'indiquer le format de l'archive : Vous pouvez utiliser " "la table à l'URL %s pour trouver la version de format en fonction de la " "release de dar/libdar (par exemple si l'archive a été crée en utilisant une " "version de dar de 2.3.4 à 2.3.7 indiquer \"6\" sans les guillemets) : " #: src/libdar/header_version.cpp:87 #, c-format msgid "LAX MODE: \"%S\" is not a valid archive format" msgstr "MODE RELAX : \"%S\" n'est pas un format d'archive valide" #: src/libdar/header_version.cpp:93 #, c-format msgid "LAX MODE: Using archive format \"%d\"?" msgstr "MODE RELAX : Suivre le format d'archive \"%d\" ?" 
#: src/libdar/header_version.cpp:122 msgid "" "LAX MODE: Unknown compression algorithm used, assuming data corruption " "occurred. Please help me, answering with one of the following words \"none" "\", \"gzip\", \"bzip2\", \"lzo\" or \"xz\" at the next prompt:" msgstr "" "MODE RELAX : Un algorithme de compression inconnu a été utilisé, on suppose " "qu'une corruption des données a eu lieu. Merci de m'aider à déterminer le " "mode de compression qui a été utilisé en saisissant l'un des mots suivants " "\"aucun\", \"gzip\", \"bzip2\", \"lzo\" ou \"xz\" : " #: src/libdar/header_version.cpp:123 src/libdar/header_version.cpp:498 #: src/libdar/crypto.cpp:42 msgid "none" msgstr "aucun" #: src/libdar/header_version.cpp:125 msgid "gzip" msgstr "gzip" #: src/libdar/header_version.cpp:127 msgid "bzip2" msgstr "bzip2" #: src/libdar/header_version.cpp:129 msgid "lzo" msgstr "lzo" #: src/libdar/header_version.cpp:131 msgid "xz" msgstr "xz" #: src/libdar/header_version.cpp:138 src/libdar/header_version.cpp:146 #: src/libdar/header_version.cpp:157 src/libdar/header_version.cpp:175 msgid "Reached End of File while reading archive header_version data structure" msgstr "" "Fin de fichier atteinte pendant la lecture de la structure de données " "header_version" #: src/libdar/header_version.cpp:208 msgid "Corruption met while reading header_version data structure" msgstr "" "Corruption rencontrée lors de la lecture de la structure de donnée " "header_version" #: src/libdar/header_version.cpp:225 msgid "Missing data for encrypted symmetrical key" msgstr "Données absentes pour le chiffrement asymétrique des données" #: src/libdar/header_version.cpp:242 msgid "" "Error met while reading archive of reference slicing layout, ignoring this " "field and continuing" msgstr "" "Erreur lors de la lecture des informations de découpage de l'archive de " "référence. 
Erreur ignorée, on continue sans cette information" #: src/libdar/header_version.cpp:266 msgid "valid hash algoritm needed for key derivation function" msgstr "" "Un algorithme de hashage valide est requis pour la fonction de dérivation de " "clef" #: src/libdar/header_version.cpp:278 msgid "" "please indicate the hash algoritm to use for key derivation function '1' for " "sha1, '5' for sha512, 'm' for md5, or 'q' to abort: " msgstr "" "Merci d'indiquer l'algorithme de hashage à utiliser pour la fonction de " "dérivation de clef '1' pour sha1, '5' pour sha512, 'm' pour md5 ou 'q' pour " "interrompre: " #: src/libdar/header_version.cpp:296 msgid "please answer with a single character" msgstr "merci de répondre avec un unique caractère" #: src/libdar/header_version.cpp:328 src/libdar/header_version.cpp:330 #: src/libdar/header_version.cpp:352 src/libdar/header_version.cpp:354 msgid "Consistency check failed for archive header" msgstr "Échec de la vérification de cohérence pour l'en-tête de l'archive" #: src/libdar/header_version.cpp:480 msgid "invalid hash algorithm provided for key derivation function" msgstr "Algorithme de hashage invalide pour le fonction de dérivation de clef" #: src/libdar/header_version.cpp:489 src/libdar/header_version.cpp:506 #: src/libdar/op_tools.cpp:41 src/libdar/op_tools.cpp:113 #: src/libdar/op_tools.cpp:193 msgid "yes" msgstr "oui" #: src/libdar/header_version.cpp:489 src/libdar/header_version.cpp:506 msgid "no" msgstr "non" #: src/libdar/header_version.cpp:510 #, c-format msgid "Archive version format : %s" msgstr "Version du format de l'archive : %s" #: src/libdar/header_version.cpp:511 #, c-format msgid "Compression algorithm used : %S" msgstr "Algorithme de compression utilisé : %S" #: src/libdar/header_version.cpp:512 #, c-format msgid "Symmetric key encryption used : %S" msgstr "Clef de chiffrement symmétrique : %S" #: src/libdar/header_version.cpp:513 #, c-format msgid "Asymmetric key encryption used : %S" msgstr "Clef de 
chiffrement asymmétrique : %S" #: src/libdar/header_version.cpp:514 #, c-format msgid "Archive is signed : %S" msgstr "Signature de l'archive : %S" #: src/libdar/header_version.cpp:515 #, c-format msgid "Sequential reading marks : %s" msgstr "Marques pour lecture séquentielle : %s" #: src/libdar/header_version.cpp:515 msgid "present" msgstr "présent" #: src/libdar/header_version.cpp:515 msgid "absent" msgstr "absent" #: src/libdar/header_version.cpp:516 #, c-format msgid "User comment : %S" msgstr "Commentaires de l'utilisateur : %S" #: src/libdar/header_version.cpp:519 #, c-format msgid "KDF iteration count : %S" msgstr "itérations pour dérivation de clef : %S" #: src/libdar/header_version.cpp:520 #, c-format msgid "KDF hash algorithm : %S" msgstr "Algorithme de dérivation de clef : %S" #: src/libdar/header_version.cpp:522 #, c-format msgid "Salt size : %d byte%c" msgstr "Taille du grain de sel : %d octet%c" #: src/libdar/crypto_asym.cpp:86 src/libdar/crypto_asym.cpp:139 #: src/libdar/crypto_asym.cpp:175 src/libdar/crypto_asym.cpp:233 #: src/libdar/crypto_asym.cpp:292 msgid "Unexpected error reported by GPGME: " msgstr "Erreur inattendue retournée par GPGME : " #: src/libdar/crypto_asym.cpp:137 msgid "Key found but users are not all trusted" msgstr "" "Clef trouvée mais tous les utilisateurs n'ont pas un niveau de confiance " "suffisant" #: src/libdar/crypto_asym.cpp:169 msgid "No data to decrypt" msgstr "Aucune donnée à déchiffrer" #: src/libdar/crypto_asym.cpp:171 msgid "Invalid Cipher text" msgstr "Texte de chiffrement invalide" #: src/libdar/crypto_asym.cpp:173 msgid "Failed retreiving passphrase" msgstr "Echec de récupération de la phrase secrète" #: src/libdar/crypto_asym.cpp:188 msgid "Failed creating GPGME context: " msgstr "Échec de création d'un contexte GPGME : " #: src/libdar/crypto_asym.cpp:192 msgid "Failed setting GPGME context with OpenPGP protocol: " msgstr "Echec de paramétrage d'un contexte GPGME avec le protocole OpenPGP : " #: 
src/libdar/crypto_asym.cpp:306 #, c-format msgid "No valid signing key could be find for %S" msgstr "Aucune clef de signature valide n'a pu être trouvée pour %S" #: src/libdar/crypto_asym.cpp:308 #, c-format msgid "No valid encryption key could be find for %S" msgstr "Aucune clef de chiffrement valide n'a pu être trouvée pour %S" #: src/libdar/crypto_asym.cpp:319 msgid "No signatory remain with a valid key, signing is impossible, aborting" msgstr "" "Aucun signataire avec une clef valide ne demeure, la signature de l'archive " "est impossible, arrêt de l'opération" #: src/libdar/crypto_asym.cpp:321 msgid "" "No recipient remain with a valid key, encryption is impossible, aborting" msgstr "" "Aucun destinataire avec une clef valide ne demeure, le chiffrement est " "impossible, arrêt de l'opération" #: src/libdar/crypto_asym.cpp:394 #, c-format msgid "Passphrase required for key %s :" msgstr "Phrase secrète requise pour la clef %s : " #: src/libdar/crypto_asym.cpp:415 msgid "Error, invalid passphrase given, try again:" msgstr "Erreur, phrase secrète invalide, nouvel essai : " #: src/libdar/crypto_asym.cpp:423 msgid "Error, while sending the passphrase to GPGME:" msgstr "Erreur lors de l'envoie de la phrase secrète à GPGME : " #: src/libdar/crypto_asym.cpp:425 msgid "Failed sending the totality of the passphrase to GPGME" msgstr "Echec de l'envoi de la totalité de la phrase secrète à GPGME" #: src/libdar/crypto_asym.cpp:432 msgid "Failed sending CR after the passphrase" msgstr "Impossible d'envoyer un retour-chariot après la phrase secrète" #: src/libdar/i_entrepot_libcurl.cpp:69 #, c-format msgid "protocol %S is not supported by libcurl, aborting" msgstr "le protocole %S n'est pas supporté par libcurl, arrêt de l'opération" #: src/libdar/i_entrepot_libcurl.cpp:85 #, c-format msgid "Error met while setting verbosity on handle: %s" msgstr "Erreur lors l'activation de la verbosité d'un objet : %s" #: src/libdar/i_entrepot_libcurl.cpp:125 #, c-format msgid "Error met 
while preparing directory listing: %s" msgstr "Erreur lors de la préparation d'un répertoire pour lecture : %s" #: src/libdar/i_entrepot_libcurl.cpp:135 #, c-format msgid "Error met while listing FTP/SFTP directory %s" msgstr "Erreur lors de lecture du répertoire FTP/SFTP %s" #: src/libdar/i_entrepot_libcurl.cpp:312 src/libdar/i_entrepot_libcurl.cpp:317 #, c-format msgid "Error met while setting up connection for file %S removal: %s" msgstr "" "Erreur lors de l'établissement de la connexion pour la suppression du " "fichier %S : %s" #: src/libdar/i_entrepot_libcurl.cpp:325 #, c-format msgid "Error met while removing file %S" msgstr "Erreur lors de la suppression du fichier %S" #: src/libdar/i_entrepot_libcurl.cpp:332 #, c-format msgid "Error met while removing file %S: %s" msgstr "Erreur lors de la suppression du fichier %S : %s" #: src/libdar/i_entrepot_libcurl.cpp:391 #, c-format msgid "Failed assigning URL to libcurl: %s" msgstr "Impossible d'assigner une URL à libcurl : %s" #: src/libdar/i_entrepot_libcurl.cpp:421 #, c-format msgid "Error met while setting known_hosts file: %s" msgstr "Erreur lors de la désignation du fichier known_hosts : %s" #: src/libdar/i_entrepot_libcurl.cpp:432 #, c-format msgid "Error met while assigning public key file: %s" msgstr "Erreur lors de la désignation du fichier de clef publique : %s" #: src/libdar/i_entrepot_libcurl.cpp:440 #, c-format msgid "Error met while assigning private key file: %s" msgstr "Erreur lors de la désignation du fichier de clef privée : %s" #: src/libdar/i_entrepot_libcurl.cpp:448 #, c-format msgid "Error met while assigning sftp authentication methods: %s" msgstr "" "Erreur lors de l'assignation de la méthode d'authentification sftp : %s" #: src/libdar/i_entrepot_libcurl.cpp:465 #, c-format msgid "Error met while passing username to libcurl: %s" msgstr "Erreur lors du passage du nom d'utilisateur à libcurl : %s" #: src/libdar/i_entrepot_libcurl.cpp:471 #, c-format msgid "" "Error met while asking libcurl to 
consider ~/.netrc for authentication: %s" msgstr "" "Erreur lors de la demande à libcurl de considérer ~/.netrc pour " "l'authentification : %s" #: src/libdar/i_entrepot_libcurl.cpp:478 #, c-format msgid "Please provide the password for login %S at host %S: " msgstr "Merci de fournir le mot de passe pour le login %S sur l'hôte %S : " #: src/libdar/i_entrepot_libcurl.cpp:493 #, c-format msgid "Error met while setting libcurl authentication: %s" msgstr "" "Erreur lors de la configuration de l'authentification pour libcurl : %s" #: src/libdar/cat_mirage.cpp:141 msgid "Incoherent catalogue structure: hard linked inode's data not found" msgstr "" "structure incohérente du catalogue : données non trouvée pour une inode " "ayant un lien dur" #: src/libdar/cat_mirage.cpp:178 msgid "Incoherent catalogue structure: hard linked data is not an inode" msgstr "" "structure incohérente du catalogue : les données liées en dur ne sont pas " "celles d'une inode" #: src/libdar/cat_mirage.cpp:215 msgid "Incoherent catalogue structure: duplicated hard linked inode's data" msgstr "" "Incohérence dans le catalogue : duplication des données associées à une " "inode liée en dur" #: src/libdar/cat_mirage.cpp:229 msgid "" "Incoherent catalogue structure: unknown status flag for hard linked inode" msgstr "" "Incohérence dans le catalogue : drapeau d'état inconnu pour une inode liée " "en dur" #: src/libdar/datetime.cpp:405 msgid "Unknown time unit" msgstr "Unité de temps inconnue" #: src/libdar/slice_layout.cpp:52 msgid "Missing data while reading slice_layout object" msgstr "Défaut de données lors de la lecture d'un object slice_layout" #: src/libdar/mask.hpp:118 msgid "TRUE" msgstr "VRAI" #: src/libdar/mask.hpp:118 msgid "FALSE" msgstr "FAUX" #: src/libdar/mask.hpp:309 msgid "AND" msgstr "ET" #: src/libdar/mask.hpp:342 msgid "No mask in the list of mask to operate on" msgstr "Aucun masque sur lequel opérer dans la liste de masque" #: src/libdar/mask.hpp:372 msgid "OR" msgstr "OU" #: 
src/libdar/mask.hpp:382 msgid "No mask to operate on in the list of mask" msgstr "Aucun masque sur lequel opérer dans la liste de masque" #: src/libdar/archive_version.cpp:47 msgid "Archive version too high, use a more recent version of libdar" msgstr "" "Version d'archive trop élevé, utiliser une version plus récente de libdar" #: src/libdar/archive_version.cpp:77 msgid "Reached End of File while reading archive version" msgstr "Fin de fichier atteinte pendant la lecture de la version d'archive" #: src/libdar/archive_version.cpp:90 src/libdar/archive_version.cpp:99 msgid "Unexpected value while reading archive version" msgstr "Valeur inattendue pendant la lecture de la version d'archive" #: src/libdar/archive_version.cpp:97 msgid "Reached premature end of file while reading archive version" msgstr "Fin de fichier atteinte lors de la lecture de la version d'archive" #: src/libdar/cat_device.cpp:95 src/libdar/cat_device.cpp:98 msgid "missing data to build a special device" msgstr "il manque des données pour construire un fichier spécial" #: src/libdar/cat_device.cpp:146 #, c-format msgid "devices have not the same major number: %d <--> %d" msgstr "les fichiers spéciaux n'ont pas le même numéro majeur : %d <--> %d" #: src/libdar/cat_device.cpp:148 #, c-format msgid "devices have not the same minor number: %d <--> %d" msgstr "les fichiers spéciaux n'ont pas le même numéro mineur : %d <--> %d" #: src/libdar/tuyau.cpp:143 msgid "Error while creating anonymous pipe: " msgstr "Erreur lors de la création d'un tube anonyme : " #: src/libdar/tuyau.cpp:171 msgid "Pipe's other end is not known, cannot provide a filedescriptor on it" msgstr "" "L'autre extrémité du tube est inconnue, impossible d'en fournir un " "descripteur de fichier" #: src/libdar/tuyau.cpp:185 msgid "" "Pipe's other end is not known, cannot close any filedescriptor pointing on it" msgstr "" "L'autre extrémité du tube est inconnue, impossible de fermer un quelconque " "descripteur de fichier pointant dessus" 
#: src/libdar/tuyau.cpp:312 msgid "Error while reading from pipe: " msgstr "Erreur lors de la lecture depuis un tube : " #: src/libdar/tuyau.cpp:366 src/libdar/tuyau.cpp:371 msgid "Error while writing data to pipe: " msgstr "Erreur lors de l'écriture sur un tube : " #: src/libdar/tuyau.cpp:422 msgid "Error opening pipe: " msgstr "Erreur lors de l'ouverture du tube : " #: src/libdar/tuyau.cpp:511 msgid "File mode is neither read nor write" msgstr "Le mode d'ouverture du fichier n'est ni en lecture ni en écriture" #: src/libdar/cat_inode.cpp:163 msgid "badly structured inode: unknown inode flag" msgstr "inode mal-formée : drapeau d'inode inconnu" #: src/libdar/cat_inode.cpp:174 src/libdar/cat_inode.cpp:177 #: src/libdar/cat_inode.cpp:187 msgid "missing data to build an inode" msgstr "données absentes pour construire une inode" #: src/libdar/cat_inode.cpp:278 msgid "badly structured inode: unknown inode flag for FSA" msgstr "inode mal-formée : drapeau de FSA inconnu" #: src/libdar/cat_inode.cpp:392 msgid "different file type" msgstr "types de fichier différents" #: src/libdar/cat_inode.cpp:397 #, c-format msgid "different owner (uid): %i <--> %i" msgstr "groupes propriétaires différents (uid) : %i <--> %i" #: src/libdar/cat_inode.cpp:403 #, c-format msgid "different owner group (gid): %i <--> %i" msgstr "groupes propriétaires différents (gid) : %i <--> %i" #: src/libdar/cat_inode.cpp:409 #, c-format msgid "different permission: %S <--> %S" msgstr "droits d'accès différents: %S <--> %S" #: src/libdar/cat_inode.cpp:417 src/libdar/cat_file.cpp:1213 #, c-format msgid "difference of last modification date: %S <--> %S" msgstr "différence de date de dernière modification : %S <--> %S" #: src/libdar/cat_inode.cpp:432 msgid "different Extended Attributes" msgstr "Attributs Étendus différents" #: src/libdar/cat_inode.cpp:438 msgid "no Extended Attribute to compare with" msgstr "aucun Attribut Étendu avec lequel comparer" #: src/libdar/cat_inode.cpp:440 
src/libdar/cat_inode.cpp:461 msgid "" "Cannot compare EA: EA support has not been activated at compilation time" msgstr "" "Comparaison d'AE impossible : le support pour les Attributs Étendus n'a pas " "été activé lors de la compilation" #: src/libdar/cat_inode.cpp:454 msgid "inode last change date (ctime) greater, EA might be different" msgstr "" "date de dernier changement de l'inode (ctime) plus récent, les Attributs " "Étendus peuvent être différents" #: src/libdar/cat_inode.cpp:459 msgid "no Extended Attributes to compare with" msgstr "aucun Attribut Étendu avec lequel comparer" #: src/libdar/cat_inode.cpp:488 msgid "different Filesystem Specific Attributes" msgstr "attributs spécifiques au système de fichier différents" #: src/libdar/cat_inode.cpp:494 msgid "No Filesystem Specific Attribute to compare with" msgstr "Aucun attribut spécifique au système de fichier avec lequel comparer" #: src/libdar/cat_inode.cpp:502 msgid "inode last change date (ctime) greater, FSA might be different" msgstr "" "date de dernier changement de l'inode (ctime) plus récent, les FSA peuvent " "être différents" #: src/libdar/cat_inode.cpp:505 msgid "Filesystem Specific Attribute are missing" msgstr "Les attributs spécifiques au système de fichier (FSA) font défaut" #: src/libdar/cat_inode.cpp:792 msgid "CRC error detected while reading EA" msgstr "Erreur de CRC détectée lors de la lecture des AE" #: src/libdar/cat_inode.cpp:936 msgid "" "Error while reading CRC for EA from the archive: No escape mark found for " "that file" msgstr "" "Erreur lors de la lecture du CRC pour des AE à partir de l'archive : Aucune " "séquence d'échappement trouvé pour ce fichier" #: src/libdar/cat_inode.cpp:1163 msgid "CRC error detected while reading FSA" msgstr "Erreur de CRC détectée lors de la lecture des FSA" #: src/libdar/cat_inode.cpp:1287 msgid "" "Error while reading CRC for FSA from the archive: No escape mark found for " "that file" msgstr "" "Erreur lors de la lecture du CRC pour des FSA à 
partir de l'archive : Aucune " "séquence d'échappement trouvé pour ce fichier" #: src/libdar/header.cpp:103 src/libdar/header.cpp:111 #: src/libdar/header.cpp:114 src/libdar/header.cpp:116 msgid "Reached end of file while reading slice header" msgstr "" "Fin de fichier atteinte lors de la lecture d'un en-tête de tranche d'archive" #: src/libdar/header.cpp:130 src/libdar/header.cpp:152 msgid "" "LAX MODE: slice size is not possible to read, (lack of virtual memory?), " "continuing anyway..." msgstr "" "MODE RELAX : Impossible de lire la taille des tranches, (manque de mémoire " "virtuelle ?), on continue malgré tout ..." #: src/libdar/header.cpp:167 msgid "" "LAX MODE: first slice size is not possible to read, (lack of virtual " "memory?), continuing anyway..." msgstr "" "MODE RELAX : la taille de la première tranche est impossible à lire (manque " "de mémoire virtuelle ?), on continue malgré tout ..." #: src/libdar/header.cpp:180 msgid "" "Archive format older than \"08\" (release 2.4.0) cannot be read through a " "single pipe. It only can be read using dar_slave or normal plain file (slice)" msgstr "" "Un format d'archive plus ancien que \"08\" (release 2.4.0) ne peut être lu à " "partir d'un seul tube, seule la lecture avec dar_slave ou à partir de " "fichiers (c'est à dire de tranches) est possible" #: src/libdar/header.cpp:182 msgid "" "LAX MODE: first slice size is not possible to read, continuing anyway..." msgstr "" "MODE RELAX : la taille de la première tranche est impossible à lire, on " "continue malgré tout ... 
" #: src/libdar/header.cpp:197 msgid "Badly formatted SAR header (unknown TLV type in slice header)" msgstr "" "En-tête SAR mal-formé (type de TLV inconnu dans l'en-tête d'une tranche)" #: src/libdar/header.cpp:200 msgid "LAX MODE: Unknown data in slice header, ignoring and continuing" msgstr "" "MODE RELAX : Data inconnue dans un en-tête de tranche, on ignore et on " "continue" #: src/libdar/header.cpp:379 msgid "incomplete data set name found in a slice header" msgstr "Nom incomplet de jeu de données rencontré dans un en-tête de tranche" #: src/libdar/header.cpp:383 #, c-format msgid "" "Unknown entry found in slice header (type = %d), option not supported. The " "archive you are reading may have been generated by a more recent version of " "libdar, ignore this entry and continue anyway?" msgstr "" "Entrée inconnue trouvé dans l'en-tête de tranche (type = %d), option non " "supportée. L'archive lue peut avoir été crée par une version plus récente de " "libdar, est-ce qu'on ignore cette entrée et on continue ?" 
#: src/libdar/cat_file.cpp:470 msgid "cannot provide data from a \"not saved\" file object" msgstr "impossible de fournir des données pour un fichier non sauvegardé" #: src/libdar/cat_file.cpp:481 msgid "data has been cleaned, object is now empty" msgstr "données purgées, l'objet est maintenant vide" #: src/libdar/cat_file.cpp:877 msgid "can't read data CRC: No escape mark found for that file" msgstr "" "Ne peut lire le CRC sur les données : aucune séquence d'échappement trouvée " "pour ce fichier" #: src/libdar/cat_file.cpp:1080 msgid "can't find mark for delta signature" msgstr "Marque pour signature delta introuvable" #: src/libdar/cat_file.cpp:1097 msgid "Error while retrieving delta signature from the archive: " msgstr "Erreur lors de la récupération de signature delta depuis l'archive : " #: src/libdar/cat_file.cpp:1206 #, c-format msgid "not same size: %i <--> %i" msgstr "tailles différentes : %i <--> %i" #: src/libdar/cat_file.cpp:1274 #, c-format msgid "different file data, offset of first difference is: %i" msgstr "" "les donnes ne correspondent pas: position de la première différence au sein " "du fichier: %i" #: src/libdar/cat_file.cpp:1282 msgid "" "Same data but CRC value could not be verified because we did not guessed " "properly its width (sequential read restriction)" msgstr "" "Données identiques mais CRC non vérifié, car sa largeur n'a pas été devinée " "correctement (restriction due à la lecture sequentielle)" #: src/libdar/cat_file.cpp:1284 msgid "Same data but stored CRC does not match the data!?!" msgstr "" "Même donnée, mais le CRC enregistré ne correspond pas à ces données !?!" 
#: src/libdar/cat_file.cpp:1325 msgid "Delta signature do not match" msgstr "Les signatures delta ne correspondent pas" #: src/libdar/cat_file.cpp:1367 #, c-format msgid "Delta signature do not have the same size: %i <--> %i" msgstr "tailles différentes des signatures delta : %i <--> %i" # #: src/libdar/cat_file.cpp:1369 msgid "Delta signature have the same size but do not match" msgstr "" "Les signatures delta ont bien la même taille mais leurs contenus ne " "correspondent pas" #: src/libdar/cat_file.cpp:1412 msgid "CRC difference concerning file's data" msgstr "Difference de CRC pour les données du fichier" #: src/libdar/semaphore.cpp:127 msgid "Error while converting UID/GID to string for backup hook file: " msgstr "" "Erreur lors de la conversion UID/GID en chaîne pour le crochet d'exécution " "lors de la sauvegarde d'un fichier" #: src/libdar/filesystem_backup.cpp:215 src/libdar/filesystem_diff.cpp:193 msgid "Non existent file: " msgstr "Fichier inexistant : " #: src/libdar/filesystem_backup.cpp:217 src/libdar/filesystem_diff.cpp:195 msgid "File must be a directory: " msgstr "Le fichier doit être un répertoire : " #: src/libdar/filesystem_backup.cpp:302 #, c-format msgid "Cannot read directory contents: %s : " msgstr "Impossible de lire le contenu du répertoire : %s : " #: src/libdar/filesystem_backup.cpp:339 msgid "Ignoring file with NODUMP flag set: " msgstr "Fichier sans le drapeau NODUMP ignoré : " #: src/libdar/filesystem_backup.cpp:347 src/libdar/filesystem_backup.cpp:352 msgid "Error reading directory contents: " msgstr "Erreur lors de la lecture du contenu du répertoire : " #: src/libdar/filesystem_backup.cpp:347 src/libdar/filesystem_backup.cpp:352 msgid " . Ignoring file or directory" msgstr " . 
Fichier ou répertoire ignoré" #: src/libdar/data_dir.cpp:74 msgid "Unexpected end of file" msgstr "Fin de fichier inattendue" #: src/libdar/data_dir.cpp:327 msgid "" "This database has been corrupted probably due to a bug in release 2.4.0 to " "2.4.9, and it has not been possible to cleanup this corruption, please " "rebuild the database from archives or extracted \"catalogues\", if the " "database has never been used by one of the previously mentioned released, " "you are welcome to open a bug report and provide as much as possible details " "about the circumstances" msgstr "" "Cette base de donnée a été corrompue probablement à cause d'un bogue présent " "dans les versions 2.4.0 à 2.4.9, et il n'a pas été possible de corriger " "cette corruption. Merci de reconstruire cette base à partir des archives (ou " "catalogues extraits). Si cette base n'a jamais été utilisée avec l'une " "quelconque des versions mentionnées précédemment, merci d'ouvrir un rapport " "de bogue en fournissant le plus de détails concernant les circonstances de " "ce problème." 
#: src/libdar/data_dir.cpp:614 msgid "Unknown record type" msgstr "Type d'enregistrement inconnu" #: src/libdar/archive5.cpp:57 msgid "listing() method must be given" msgstr "la méthode listing() doit être fournie" #: src/libdar/label.cpp:123 msgid "Incomplete label" msgstr "label incomplet" #: src/libdar/crypto_sym.cpp:77 msgid "" "Current implementation of blowfish encryption is not compatible with old " "(weak) implementation, use dar-2.3.x software or later (or other software " "based on libdar-4.4.x or greater) to read this archive" msgstr "" "L'implémentation actuelle du chiffrement blowfish n'est pas compatible avec " "l'ancienne implémentation (boguée), utiliser dar-2.3.x (ou autre logiciel " "basé sur libdar-4.4.x) pour lire cette archive" #: src/libdar/crypto_sym.cpp:80 msgid "cannot use 'none' as hashing algorithm for key derivation function" msgstr "" "'none' n'est pas une algorithme de hashage valide pour la fonction de " "dérivation de clef" #: src/libdar/crypto_sym.cpp:91 src/libdar/crypto_sym.cpp:164 #, c-format msgid "Cyphering algorithm not available in libgcrypt: %s/%s" msgstr "Algorithme de chiffrement non disponible dans libgcrypt : %s/%s" #: src/libdar/crypto_sym.cpp:97 src/libdar/crypto_sym.cpp:631 #, c-format msgid "" "Failed retrieving from libgcrypt the block size used by the cyphering " "algorithm: %s/%s" msgstr "" "Échec lors de la récupération de la taille de bloc utilisée par libgcrypt " "pour l'algorithme de chiffrement : %s/%s" #: src/libdar/crypto_sym.cpp:114 msgid "Too large value give for key derivation interation count" msgstr "Valeur trop grande pour l'intération de dérivation de clef" #: src/libdar/crypto_sym.cpp:125 #, c-format msgid "Error while opening libgcrypt key handle: %s/%s" msgstr "" "Erreur lors de l'ouverture d'une référence de clef pour libgcrypt: %s/%s" #: src/libdar/crypto_sym.cpp:133 #, c-format msgid "Error while assigning key to libgcrypt key handle: %s/%s" msgstr "" "Erreur lors de l'assignation d'une clef à 
une référence de libgcrypt : %s/%s" #: src/libdar/crypto_sym.cpp:150 msgid "Missing strong encryption support (libgcrypt)" msgstr "Absence de support pour le chiffrement fort (libgcrypt)" #: src/libdar/crypto_sym.cpp:169 msgid "Failed retrieving from libgcrypt the maximum key length" msgstr "Échec de la récupération de la longueur maximum possible des clefs" #: src/libdar/crypto_sym.cpp:202 #, c-format msgid "" "Error while opening libgcrypt key handle to check password strength: %s/%s" msgstr "" "Erreur lors de la création d'un manipulateur de clef lors de la vérification " "de la force du mot de passe : %s/%s" #: src/libdar/crypto_sym.cpp:214 #, c-format msgid "" "Error while assigning key to libgcrypt key handle to check password " "strength: %s/%s" msgstr "" "Erreur lors de l'assignation d'une clef à un manipulateur de clef lors de la " "vérification de la force du mot de passe : %s/%s" #: src/libdar/crypto_sym.cpp:315 #, c-format msgid "Error while resetting encryption key for a new block: %s/%s" msgstr "" "Erreur lors de la réinitialisation de la clef de chiffrement pour un nouveau " "bloc : %s/%s" #: src/libdar/crypto_sym.cpp:319 src/libdar/crypto_sym.cpp:343 #, c-format msgid "Error while setting IV for current block: %s/%s" msgstr "" "Erreur lors de la détermination du vecteur initial (IV) pour le bloc " "courant : %s/%s" #: src/libdar/crypto_sym.cpp:322 #, c-format msgid "Error while cyphering data: %s/%s" msgstr "Erreur lors du chiffrement des données : %s/%s" #: src/libdar/crypto_sym.cpp:328 src/libdar/crypto_sym.cpp:352 msgid "blowfish strong encryption support" msgstr "Support pour le chiffrement avec l'algorithme blowfish" #: src/libdar/crypto_sym.cpp:346 #, c-format msgid "Error while decyphering data: %s/%s" msgstr "Erreur lors du déchiffrement des données : %s/%s" #: src/libdar/crypto_sym.cpp:387 #, c-format msgid "Error while generating IV: %s/%s" msgstr "Erreur lors de la génération du vecteur initial (IV) : %s/%s" #: 
src/libdar/crypto_sym.cpp:429 #, c-format msgid "Error! SHA1 not available in libgcrypt: %s/%s" msgstr "Erreur ! SHA1 n'est pas disponible dans libgcrypt : %s/%s" #: src/libdar/crypto_sym.cpp:435 #, c-format msgid "Error while derivating key from password (HMAC open): %s/%s" msgstr "" "Erreur lors de la création de la clef à partir du mot de passe (ouverture " "HMAC) : %s/%s" #: src/libdar/crypto_sym.cpp:441 #, c-format msgid "Error while derivating key from password (HMAC set key): %s/%s" msgstr "" "Erreur lors du calcul de la clef à partir du mot de passe (HMAC set key) : " "%s/%s" #: src/libdar/crypto_sym.cpp:583 #, c-format msgid "Error while creating ESSIV handle: %s/%s" msgstr "Erreur lors de la création d'une référence ESSIV : %s/%s" #: src/libdar/crypto_sym.cpp:591 #, c-format msgid "" "Failed retrieving from libgcrypt the key length to use (essiv key): %s/%s" msgstr "Échec de la récupération de la longueur de clef à utiliser : %s/%s" #: src/libdar/crypto_sym.cpp:612 #, c-format msgid "Error while assigning key to libgcrypt key handle (essiv): %s/%s" msgstr "" "Erreur lors de l'assignation d'une clef à une référence de clef de libgcrypt " "(essiv) : %s/%s" #: src/libdar/crypto_sym.cpp:621 #, c-format msgid "" "Failed retrieving from libgcrypt the block size used by the cyphering " "algorithm (essiv): %s/%s" msgstr "" "Impossible de connaître la taille de bloc utilisée par l'algorithme de " "chiffrement : %s/%s" #: src/libdar/crypto_sym.cpp:676 src/libdar/crypto_sym.cpp:682 #: src/libdar/crypto_sym.cpp:691 src/libdar/crypto_sym.cpp:699 #: src/libdar/crypto_sym.cpp:732 msgid "Library used for blowfish encryption does not respect RFC 3962" msgstr "" "La bibliothèque de chiffrement pour l'algorithme blowfish ne respecte pas la " "RFC 3962" #: src/libdar/trivial_sar.cpp:191 #, c-format msgid "%S already exists, and overwritten is forbidden, aborting" msgstr "%S existe déjà et l'écrasement est interdit, j'abandonne" #: src/libdar/trivial_sar.cpp:193 #, c-format 
msgid "%S is about to be overwritten, continue ?" msgstr "%S est sur le point d'être écrasé, on continue ?" #: src/libdar/trivial_sar.cpp:219 src/libdar/trivial_sar.cpp:237 #, c-format msgid "" "Directory component in %S does not exist or is a dangling symbolic link: " msgstr "" "Le répertoire %S n'existe pas ou est un lien symbolique pointant nulle part" #: src/libdar/trivial_sar.cpp:510 msgid "This archive has slices and is not possible to read from a pipe" msgstr "" "Cette archive est constituée de plusieurs tranches et ne peut pas être lue " "depuis un tube" #: src/libdar/trivial_sar.cpp:547 msgid "" "This archive is not single sliced, more data exists in the next slices but " "cannot be read from the current pipe, aborting" msgstr "" "Cette archive est constituée de plusieurs tranches et ne peut pas être lue " "depuis un tube" #: src/libdar/trivial_sar.cpp:585 msgid "Cannot skip to a valid position in file" msgstr "Impossible de se positionner à un endroit valide dans le fichier" #: src/libdar/compressor.cpp:171 src/libdar/compressor.cpp:199 msgid "" "incompatible compression library version or unsupported feature required " "from compression library" msgstr "" "version incompatible de la bibliothèque de compression ou fonctionnalité " "requise incompatible avec cette bibliothèque" #: src/libdar/compressor.cpp:344 msgid "compressed data is corrupted" msgstr "données compressées corrompues" #: src/libdar/compressor.cpp:484 msgid "compressed data CRC error" msgstr "erreur de CRC sur les données compressées" #: src/libdar/compressor.cpp:571 src/libdar/compressor.cpp:601 #: src/libdar/compressor.cpp:753 src/libdar/compressor.cpp:818 msgid "lzo compression" msgstr "compression lzo" #: src/libdar/compressor.cpp:739 #, c-format msgid "Probable bug in liblzo2: lzo1x_*_compress returned unexpected code %d" msgstr "" "Bogue probable dans liblzo2 : lzo1x_*_compress a retourné un code non " "prévu : %d" #: src/libdar/compressor.cpp:774 msgid "data corruption detected: 
Incoherence in LZO compressed data" msgstr "" "Corruption de données détectée : incohérence dans les données compressées en " "LZO" #: src/libdar/compressor.cpp:778 src/libdar/compressor.cpp:800 #: src/libdar/compressor.cpp:814 msgid "compressed data corruption detected" msgstr "données compressées corrompues" #: src/libdar/compressor.cpp:788 msgid "data corruption detected: Too large block of compressed data" msgstr "" "Corruption de données détectée : Bloc de données compressées trop grand" #: src/libdar/compressor.cpp:790 msgid "" "Too large block of compressed data: Either due to data corruption or current " "system limitation where SSIZE_MAX value implied smaller buffers than required" msgstr "" "Bloc de données compressées trop grand : Soit à cause d'une corruption de " "données soit à cause d'une limitation système où la valeur de SSIZE_MAX " "implique un tampon mémoire plus petit que celui requis" #: src/libdar/tronc.cpp:256 src/libdar/tronc.cpp:298 msgid "Cannot skip to the current position in \"tronc\"" msgstr "Impossible de se déplacer à la position courante dans un \"tronc\"" #: src/libdar/tronc.cpp:310 msgid "Tried to write out of size limited file" msgstr "Tentative d'écriture hors d'un fichier de taille limitée" #: src/libdar/deci.cpp:46 msgid "invalid decimal digit" msgstr "chiffre décimal non valide" #: src/libdar/deci.cpp:132 msgid "an empty string is an invalid argument" msgstr "une chaîne vide n'est pas un argument valide" #: src/libdar/cat_etoile.cpp:44 msgid "Hard links of directories are not supported" msgstr "Les liens durs pour les répertoires ne sont pas supportés" #: src/libdar/secu_string.cpp:102 src/libdar/secu_string.cpp:141 msgid "Error while reading data for a secure memory:" msgstr "" "Erreur lors de la lecture de donnée destinée à une mémoire sécurisée : " #: src/libdar/secu_string.cpp:119 msgid "appending data over secure_memory its end" msgstr "ajout de données après la fin d'un secure_memory" #: src/libdar/secu_string.cpp:133 
msgid "appending data after the end of a secure_memory" msgstr "ajout de données après la fin d'un secure_memory" #: src/libdar/secu_string.cpp:136 msgid "Cannot receive that much data in regard to the allocated memory" msgstr "" "Ne peut recevoir autant de donnée en regard de la place mémoire allouée" #: src/libdar/secu_string.cpp:154 msgid "Cannot reduce the string to a size that is larger than its current size" msgstr "" "Impossible de réduire la taille de la chaîne à une valeur qui est plus grande " "que sa taille actuelle" #: src/libdar/secu_string.cpp:164 msgid "secu_string randomization requested exceeds storage capacity" msgstr "" "La requête de génération d'une chaîne sécurisée aléatoire dépasse sa " "capacité de stockage" #: src/libdar/secu_string.cpp:177 msgid "Out of range index requested for a secu_string" msgstr "Index hors limite demandé pour un secu_string" #: src/libdar/generic_file.cpp:145 msgid "Reading ahead a write only generic_file" msgstr "Lecture par anticipation sur un \"generic_file\" en écriture seule" #: src/libdar/generic_file.cpp:160 msgid "Reading a write only generic_file" msgstr "Lecture sur un \"generic_file\" ouvert en écriture seule" #: src/libdar/generic_file.cpp:170 msgid "Writing to a read only generic_file" msgstr "Écriture sur un \"generic_file\" ouvert en lecture seule" #: src/libdar/generic_file.cpp:340 msgid "Cannot compare files in write only mode" msgstr "Impossible de comparer des fichiers ouverts en écriture seule" #: src/libdar/generic_file.cpp:427 msgid "Cannot sync write on a read-only generic_file" msgstr "" "Ne peut pas synchroniser l'écriture sur un generic_file en lecture seule" #: src/libdar/generic_file.cpp:438 msgid "Cannot flush read a write-only generic_file" msgstr "Ne peut pas purger la lecture d'un generic_file en écriture seule" #: src/libdar/tools.cpp:220 msgid "Not a zero terminated string in file" msgstr "" "La chaîne lue depuis un fichier n'est pas terminée par le caractère zéro" #: 
src/libdar/tools.cpp:258 #, c-format msgid "Cannot get file size: %s" msgstr "Impossible de connaître la taille du fichier : %s" #: src/libdar/tools.cpp:344 msgid "Cannot read \"fcntl\" file's flags : " msgstr "Impossible de lire les drapeaux \"fcntl\" du fichier : " #: src/libdar/tools.cpp:350 msgid "Cannot set \"fcntl\" file's flags : " msgstr "Impossible de modifier les drapeaux \"fcntl\" du fichier : " #: src/libdar/tools.cpp:490 src/libdar/tools.cpp:495 #: src/dar_suite/line_tools.cpp:1139 src/dar_suite/line_tools.cpp:1144 msgid "Invalid number: " msgstr "Numéro invalide : " #: src/libdar/tools.cpp:529 src/libdar/tools.cpp:620 msgid "Error while calling fork() to launch dar: " msgstr "Erreur lors de l'appel à fork() pour lancer dar : " #: src/libdar/tools.cpp:543 src/libdar/tools.cpp:650 msgid "Unexpected error while waiting for dar to terminate: " msgstr "" "Erreur inattendue reçue lors de l'attente de la fin d'exécution de dar : " #: src/libdar/tools.cpp:549 src/libdar/tools.cpp:656 msgid "DAR terminated upon signal reception: " msgstr "Arrêt de DAR suite à la réception du signal : " #: src/libdar/tools.cpp:555 src/libdar/tools.cpp:662 msgid " . Retry to launch dar as previously ?" msgstr " . Doit-on relancer dar comme précédemment ?" #: src/libdar/tools.cpp:560 src/libdar/tools.cpp:567 src/libdar/tools.cpp:667 #: src/libdar/tools.cpp:674 msgid " Continue anyway ?" msgstr " Doit-on continuer malgré tout ?" 
#: src/libdar/tools.cpp:565 src/libdar/tools.cpp:672 msgid "DAR sub-process has terminated with exit code " msgstr "Le sous-processus DAR a terminé avec le code de sortie " #: src/libdar/tools.cpp:770 msgid "nullptr argument given to tools_readlink()" msgstr "Valeur nullptr donnée à tools_readlink()" #: src/libdar/tools.cpp:772 msgid "Empty string given as argument to tools_readlink()" msgstr "Valeur NULL donnée à en argument de tools_readlink()" #: src/libdar/tools.cpp:799 #, c-format msgid "Cannot read file information for %s : %s" msgstr "Impossible d'obtenir d'information sur %s : %s" #: src/libdar/tools.cpp:886 msgid "Cannot set birth time: " msgstr "Impossible de définir la date de naissance: " #: src/libdar/tools.cpp:916 msgid "Cannot set last access and last modification time: " msgstr "" "Impossible de positionner les dates de dernier accès et de dernière " "modification : " #: src/libdar/tools.cpp:994 msgid "Aborting program: child process died unexpectedly" msgstr "Arrêt du programme : le processus fils est mort de façon inattendue" #: src/libdar/tools.cpp:1002 #, c-format msgid "Error trying to run %s: %s" msgstr "Erreur lors de l'execution de %s : %s" #: src/libdar/tools.cpp:1005 msgid "execvp() failed but did not returned error code" msgstr "execvp() a échoué mais n'a pas retourné de code d'erreur" #: src/libdar/tools.cpp:1136 #, c-format msgid "Removing file %s" msgstr "Suppression du fichier %s" #: src/libdar/tools.cpp:1157 #, c-format msgid "Error removing file %s: %S" msgstr "Erreur lors de la suppression du fichier %s : %S" #: src/libdar/tools.cpp:1191 #, c-format msgid "" "Overwriting not allowed while a slice of a previous archive with the same " "basename has been found in the %s directory, Operation aborted" msgstr "" "Écrasement interdit alors qu'une tranche de même nom de base a été trouvée " "dans le répertoire %s. 
Opération avortée" #: src/libdar/tools.cpp:1197 #, c-format msgid "" "At least one slice of an old archive with the same name remains in the " "directory %s. It is advised to remove all the old archive's slices before " "creating an archive of same name. Can I remove these old slices?" msgstr "" "Au moins une tranche d'une ancienne archive ayant le même nom reste présente " "dans le répertoire %s. Il est conseillé de supprimer toutes les anciennes " "tranches avant de créer une archive de même nom. Peut-on supprimer les " "anciennes tranches ?" #: src/libdar/tools.cpp:1217 src/libdar/tools.cpp:1224 #, c-format msgid "Cannot get inode information for %s: %s" msgstr "Impossible d'obtenir des informations sur l'inode %s : %s" #: src/libdar/tools.cpp:1234 msgid "Current Working Directory cannot be a relative path" msgstr "Le répertoire courant ne peut pas être un chemin relatif" #: src/libdar/tools.cpp:1251 msgid "Cannot block signals: " msgstr "Impossible de bloquer les signaux : " #: src/libdar/tools.cpp:1261 msgid "Cannot unblock signals: " msgstr "Impossible de débloquer les signaux : " #: src/libdar/tools.cpp:1292 src/libdar/tools.cpp:1325 #, c-format msgid "Cannot get last modification date: %s" msgstr "Impossible d'obtenir la date de dernière modification : %s" #: src/libdar/tools.cpp:1329 #, c-format msgid "Cannot get size of %S: not a plain file" msgstr "Impossible de connaître la taille de %S: ce n'est pas un fichier plein" #: src/libdar/tools.cpp:1380 #, c-format msgid "Unknown suffix [%c] in string %S" msgstr "Suffixe inconnu [%c] dans la chaîne %S" #: src/libdar/tools.cpp:1407 msgid "Unknown substitution string: %" msgstr "Chaîne de substitution inconnue : %" #: src/libdar/tools.cpp:1414 msgid "" "last char of user command-line to execute is '%', (use '%%' instead to avoid " "this message)" msgstr "" "Le dernier caractère de la ligne de commande utilisateur à exécuter est '%', " "(utiliser '%%' à la place pour éviter ce message)" #: src/libdar/tools.cpp:1470 
msgid "execve() failed. (process table is full ?)" msgstr "execve() a échoué. (table de processus pleine ?)" #: src/libdar/tools.cpp:1472 msgid "system() call failed: " msgstr "system() a échoué : " #: src/libdar/tools.cpp:1474 #, c-format msgid "execution of [ %S ] returned error code: %d" msgstr "l'exécution de [ %S ] a retourné un code d'erreur : %d" #: src/libdar/tools.cpp:1481 msgid "Error during user command line execution: " msgstr "Erreur pendant l'exécution de la ligne de commande utilisateur : " #: src/libdar/tools.cpp:1481 msgid " . Retry command-line ?" msgstr " . Essayer à nouveau la ligne de commande ?" #: src/libdar/tools.cpp:1486 msgid "Ignore previous error on user command line and continue ?" msgstr "" "Ignorer l'erreur précédente concernant la ligne de commande utilisateur et " "continuer ?" #: src/libdar/tools.cpp:1528 msgid "Fatal error on user command line: " msgstr "Erreur fatale sur la ligne de commande utilisateur : " #: src/libdar/tools.cpp:1610 src/libdar/tools.cpp:1616 msgid "Badly formated octal number" msgstr "Nombre octal malformée" #: src/libdar/tools.cpp:1721 msgid "Cannot get effective permission given a file descriptor: " msgstr "" "Impossible d'obtenir les droits d'accès à partir d'un description de " "fichier : " #: src/libdar/tools.cpp:1737 #, c-format msgid "Error while setting file permission: %s" msgstr "Erreur lors de la configuration des droits d'accès : %s" #: src/libdar/tools.cpp:1758 msgid "An empty string is not a valid user name" msgstr "Une chaîne vide n'est pas un nom d'utilisateur valide" #: src/libdar/tools.cpp:1796 src/libdar/tools.cpp:1818 msgid "Unknown user" msgstr "Utilisateur inconnu" #: src/libdar/tools.cpp:1798 src/libdar/tools.cpp:1820 #, c-format msgid "Error found while looking for UID of user %s: %S" msgstr "" "Erreur rencontrée lors de la recherche de l'UID pour l'utilisateur %s: %S" #: src/libdar/tools.cpp:1828 src/libdar/tools.cpp:1922 msgid "" "Cannot convert username to uid in statically linked 
binary, either directly " "provide the UID or run libdar from a dynamically linked executable" msgstr "" "Impossible de convertir un nom d'utilisateur en numéro (UID) dans un " "exécutable lié statiquement. Fournir soit directement l'UID soit utiliser un " "exécutable lié dynamiquement" #: src/libdar/tools.cpp:1853 msgid "An empty string is not a valid group name" msgstr "Une chaîne vide n'est pas un nom de groupe valide" #: src/libdar/tools.cpp:1889 src/libdar/tools.cpp:1912 msgid "Unknown group" msgstr "Groupe inconnu" #: src/libdar/tools.cpp:1891 #, c-format msgid "Error found while looking fo GID of group %s: %S" msgstr "Erreur rencontrée lors de la recherche de GID pour le groupe %s : %S" #: src/libdar/tools.cpp:1914 #, c-format msgid "Error found while looking for GID of group %s: %S" msgstr "Erreur rencontrée lors de la recherche de GID pour le groupe %s : %S" #: src/libdar/tools.cpp:1951 #, c-format msgid "Error while setting file user ownership: %s" msgstr "Erreur lors du changement de propriétaire : %s" #: src/libdar/tools.cpp:2077 src/dar_suite/line_tools.cpp:1497 msgid "Cannot get full path of current working directory: " msgstr "Impossible d'obtenir le chemin complet du répertoire courant : " #: src/libdar/tools.cpp:2110 msgid "Worse" msgstr "Pire " #: src/libdar/tools.cpp:2130 #, c-format msgid "Error code %d to message conversion failed" msgstr "Erreur lors de la conversion du code %d en message" #: src/libdar/tools.cpp:2185 msgid "Invalid wide-char found in string: " msgstr "Caractère large invalide dans la chaîne : " #: src/libdar/tools.cpp:2298 #, c-format msgid "Error unlinking %S: %s" msgstr "Erreur lors de la suppression de %S : %s" #: src/libdar/tools.cpp:2345 msgid "" "Cannot convert negative floating point value to unsigned (positive) integer" msgstr "" "Impossible de convertir un nombre flottant négatif en un entier non signé " "(positif)" #: src/libdar/archive_options.cpp:169 msgid "Cannot get catalogue of reference as it has not been 
provided" msgstr "" "Impossible d'obtenir le catalogue de référence car il n'a pas été fourni" #: src/libdar/archive_options.cpp:187 msgid "Error, catalogue of reference has not been provided" msgstr "Erreur, le catalogue de référence n'a pas été fourni" #: src/libdar/archive_options.cpp:1518 src/libdar/archive_options.cpp:1525 msgid "No mask available" msgstr "Aucun masque disponible" #: src/libdar/get_version.cpp:142 msgid "Initialization problem for liblzo2 library" msgstr "Problème d'initialisation pour la bibliothèque liblzo2" #: src/libdar/get_version.cpp:158 #, c-format msgid "Error while activating libgcrypt's memory guard: %s/%s" msgstr "Erreur lors de l'activation du garde de mémoire de libgcrypt : %s/%s" #: src/libdar/get_version.cpp:165 src/libdar/get_version.cpp:183 #, c-format msgid "Too old version for libgcrypt, minimum required version is %s" msgstr "" "Version trop ancienne pour libgcrypt, la version minimale requise est %s" #: src/libdar/get_version.cpp:174 #, c-format msgid "Error while telling libgcrypt that initialization is finished: %s/%s" msgstr "" "Erreur lors de la communication à libgcrypt que l'initialisation est " "terminée : %s/%s" #: src/libdar/get_version.cpp:179 msgid "libgcrypt not initialized and libdar not allowed to do so" msgstr "" "libgcrypt n'a pas été initialisée et libdar n'est pas autorisé à le faire" #: src/libdar/get_version.cpp:194 #, c-format msgid "GPGME version requirement is not satisfied, requires version > %s" msgstr "" "prérequis insatisfait pour GPGME, une version supérieure à %s est nécessaire" #: src/libdar/get_version.cpp:198 #, c-format msgid "GPGME engine not available: %s" msgstr "le moteur GPGME n'est pas disponible: %s" #: src/libdar/get_version.cpp:208 src/libdar/get_version.cpp:212 #: src/libdar/get_version.cpp:214 #, c-format msgid "libcurl initialization failed: %s" msgstr "Echec d'initialisation de libcurl : %s" #: src/libdar/i_archive.cpp:110 #, c-format msgid "Opening archive %s ..." 
msgstr "Ouverture de l'archive %s ..." #: src/libdar/i_archive.cpp:140 msgid "header only mode asked" msgstr "mode en-tête seul demandé" #: src/libdar/i_archive.cpp:153 #, c-format msgid "" "Opening the archive of reference %s to retreive the isolated catalog ... " msgstr "" "Ouverture de l'archive de référence %s pour récupérer le catalogue isolé ..." #: src/libdar/i_archive.cpp:164 msgid "" "Reading the archive of reference from pipe or standard input is not possible" msgstr "" "La lecture de l'archive de référence à partir d'une tube ou de l'entrée " "standard n'est pas possible" #: src/libdar/i_archive.cpp:166 msgid "" "The basename '+' is reserved for special a purpose that has no meaning in " "this context" msgstr "" "Le nom de base '+' est réservé pour un usage particulier qui n'a aucun sens " "dans ce contexte" #: src/libdar/i_archive.cpp:209 msgid "Error while opening the archive of reference: " msgstr "Erreur lors de l'ouverture de l'archive de référence : " #: src/libdar/i_archive.cpp:224 msgid "Loading isolated catalogue in memory..." msgstr "Chargement en mémoire du catalogue isolé..." #: src/libdar/i_archive.cpp:236 msgid "" "Archive of reference is not signed properly (no the same signatories for the " "archive and the internal catalogue), do we continue?" msgstr "" "L'archive de référence n'est pas signée correctement (difference de " "signataire entre le catalogue interne et l'archive elle-même), est-ce qu'on " "continue ?" #: src/libdar/i_archive.cpp:243 msgid "" "The archive and the isolated catalogue do not correspond to the same data, " "they are thus incompatible between them" msgstr "" "L'archive et le catalogue isolé ne correspondent pas aux mêmes données, ils " "sont donc incompatibles entre eux" #: src/libdar/i_archive.cpp:259 msgid "Loading catalogue into memory..." msgstr "Chargement en mémoire du catalogue..." 
#: src/libdar/i_archive.cpp:284 msgid "" "No data found in that archive, sequentially reading the catalogue found at " "the end of the archive..." msgstr "" "Aucune donnée trouvée dans cette archive, lecture séquentielle du catalogue " "situé à la fin de l 'archive..." #: src/libdar/i_archive.cpp:314 msgid "" "The catalogue will be filled while sequentially reading the archive, " "preparing the data structure..." msgstr "" "Le catalogue sera construit au fil de la lecture séquentielle de l'archive. " "Préparation de la structure de données ..." #: src/libdar/i_archive.cpp:354 #, c-format msgid "" "LAX MODE: The end of the archive is corrupted, cannot get the archive " "contents (the \"catalogue\")" msgstr "" "MODE RELAX : La fin de l'archive est corrompue, impossible d'obtenir le " "contenu de l'archive (le \"catalogue\")" #: src/libdar/i_archive.cpp:355 msgid "" "LAX MODE: Do you want to bypass some sanity checks and try again reading the " "archive contents (this may take some time, this may also fail)?" msgstr "" "MODE RELAX : Voulez-vous ignorer les tests de cohérence et tenter à nouveau " "la lecture du contenu de l'archive (ce qui peut prendre du temps et peut " "aussi échouer) ?" #: src/libdar/i_archive.cpp:370 #, c-format msgid "" "LAX MODE: Could not find a whole catalogue in the archive. If you have an " "isolated catalogue, stop here and use it as backup of the internal " "catalogue, else continue but be advised that all data will not be able to be " "retrieved..." msgstr "" "MODE RELAX : Impossible de trouver un catalogue entier dans l'archive. Si " "vous possédez un catalogue isolé, arrêter à ce stade et utilisez-le en " "secours du catalogue interne. 
Sinon continuez mais soyez prévenus que toutes " "les données ne pourront pas être récupérées" #: src/libdar/i_archive.cpp:371 msgid "" "LAX MODE: Do you want to try finding portions of the original catalogue if " "some remain (this may take even more time and in any case, it will only " "permit to recover some files, at most)?" msgstr "" "MODE RELAX : Voulez-vous rechercher des morceaux du catalogue d'origine s'il " "en reste (ce qui peut prendre encore plus de temps et en tout état de cause, " "ne permettra au plus de récupérer que quelques fichiers) ?" #: src/libdar/i_archive.cpp:549 src/libdar/i_archive.cpp:1947 msgid "Compression_level must be between 1 and 9 included" msgstr "\"Compression_level\" doit être compris entre 1 et 9 inclus" #: src/libdar/i_archive.cpp:551 src/libdar/i_archive.cpp:1949 msgid "" "\"first_file_size\" cannot be different from zero if \"file_size\" is equal " "to zero" msgstr "" "\"first_file_size\" ne peut être différent de zéro si \"file_size\" est égal à " "zéro" #: src/libdar/i_archive.cpp:553 src/libdar/i_archive.cpp:1951 msgid "Crypto block size must be greater than 10 bytes" msgstr "La taille d'un bloc de chiffrement doit être supérieure à 10 octets" #: src/libdar/i_archive.cpp:581 msgid "Both reference archive are nullptr, cannot merge archive from nothing" msgstr "" "Les deux archives de référence sont nullptr, impossible de fusionner à " "partir de rien" #: src/libdar/i_archive.cpp:613 msgid "" "the \"Keep file compressed\" feature is not possible when merging two " "archives using different compression algorithms (This is for a future " "version of dar). 
You can still merge these two archives but without keeping " "file compressed (thus you will probably like to use compression (-z or -y " "options) for the resulting archive" msgstr "" "La fonctionnalité de fusion sans décompression/recompression n'est pas " "disponible avec des archives utilisant différents algorithmes de compression " "(Ce sera pour une prochaine version de dar). Vous pouvez cependant fusionner " "ces deux archives avec décompression/recompression et aurez probablement " "intérêt à compresser (option -z ou -y) l'archive résultante" #: src/libdar/i_archive.cpp:637 msgid "" "Cannot calculate delta signature when merging if keep compressed is asked" msgstr "" "Impossible de calculer les signatures delta lors d'une fusion s'il a été " "demandé de garder les fichiers compressés" #: src/libdar/i_archive.cpp:639 msgid "" "To calculate delta signatures of files saved as sparse files, you need to " "activate sparse file detection mechanism with merging operation" msgstr "" "Afin de calculer les signatures delta des fichiers creux sauvés, vous devez " "activer la redétection des fichiers creux lors de cette opération de fusion" #: src/libdar/i_archive.cpp:874 #, c-format msgid "" "Archive repairing completed. WARNING! it is strongly advised to test the " "resulting archive before removing the damaged one" msgstr "" "Réparation d'archive terminée. ATTENTION ! 
Il est fortement recommandé de " "tester l'archive résultante avant de supprimer celle qui est abîmée" #: src/libdar/i_archive.cpp:901 src/libdar/i_archive.cpp:1309 msgid "This archive is not exploitable, check documentation for more" msgstr "" "Cette archive n'est pas exploitable, vérifier la documentation pour plus " "d'informations" #: src/libdar/i_archive.cpp:915 msgid "" "File ownership will not be restored du to the lack of privilege, you can " "disable this message by asking not to restore file ownership" msgstr "" "Le propriétaire des fichiers ne sera pas restauré faute de privilège pour ce " "faire. Vous pouvez supprimer ce message en demandant à ne pas restaurer ces " "propriétés" #: src/libdar/i_archive.cpp:964 msgid "Error while restoring data: " msgstr "Erreur lors de la restauration des données : " #: src/libdar/i_archive.cpp:992 #, c-format msgid "Catalogue size in archive : %i bytes" msgstr "Taille du catalogue dans l'archive : %i octets" #: src/libdar/i_archive.cpp:994 #, c-format msgid "Catalogue size in archive : N/A" msgstr "Taille du catalogue dans l'archive : NC" #: src/libdar/i_archive.cpp:1000 #, c-format msgid "Archive is composed of %i file(s)" msgstr "Archive composée de %i fichier(s)" #: src/libdar/i_archive.cpp:1004 #, c-format msgid "File size: %i bytes" msgstr "Taille du fichier : %i octets" #: src/libdar/i_archive.cpp:1013 #, c-format msgid "First file size : %i bytes" msgstr "Fichier initial : %i octets" #: src/libdar/i_archive.cpp:1014 #, c-format msgid "File size : %i bytes" msgstr "Taille des fichiers : %i octets" #: src/libdar/i_archive.cpp:1015 #, c-format msgid "Last file size : %i bytes" msgstr "Fichier final : %i octets" #: src/libdar/i_archive.cpp:1016 #, c-format msgid "Archive total size is : %i bytes" msgstr "Taille totale : %i octets" #: src/libdar/i_archive.cpp:1024 #, c-format msgid "Archive size is: %i bytes" msgstr "Taille de l'archive : %i octets" #: src/libdar/i_archive.cpp:1025 #, c-format msgid "Previous archive size 
does not include headers present in each slice" msgstr "" "La taille de l'archive ne tient pas compte des en-têtes de chaque tranche" #: src/libdar/i_archive.cpp:1028 #, c-format msgid "Archive size is unknown (reading from a pipe)" msgstr "Taille d'archive inconnue (lecture depuis un tube)" #: src/libdar/i_archive.cpp:1034 #, c-format msgid "" "The overall archive size includes %i byte(s) wasted due to bad compression " "ratio" msgstr "" "La taille totale de l'archive inclut %i octet(s) gaspillé(s) à cause d'un " "mauvais taux de compression" #: src/libdar/i_archive.cpp:1039 msgid "The global data compression ratio is: " msgstr "Le taux de compression global est : " #: src/libdar/i_archive.cpp:1046 #, c-format msgid "" "\n" "WARNING! This archive only contains the catalogue of another archive, it can " "only be used as reference for differential backup or as rescue in case of " "corruption of the original archive's content. You cannot restore any data " "from this archive alone\n" msgstr "" "\n" "ATTENTION ! Cette archive ne contient que la liste du contenu d'une autre " "archive et ne peut être utilisée qu'en tant que référence pour une sauvegarde " "différentielle ou comme secours en cas de corruption de l'archive d'origine. 
" "Il est impossible de restaurer des données à partir de cette archive seule\n" #: src/libdar/i_archive.cpp:1072 src/libdar/i_archive.cpp:1387 msgid "" "This archive is not exploitable, check the archive class usage in the API " "documentation" msgstr "" "Cette archive n'est pas exploitable, vérifier la documentation de l'API pour " "plus d'informations" #: src/libdar/i_archive.cpp:1157 src/libdar/i_archive.cpp:1637 #: src/libdar/i_archive.cpp:1670 msgid "" "Fetching EA value while listing an archive is not possible in sequential " "read mode" msgstr "" "La récupération d'EA lors de la lecture d'une archive n'est pas possible en " "mode séquentiel" #: src/libdar/i_archive.cpp:1163 msgid "slicing focused output is not available in sequential-read mode" msgstr "" "L'affichage focalisé sur les tranches est impossible en lecture séquentielle" #: src/libdar/i_archive.cpp:1171 #, c-format msgid "" "Using user provided modified slicing (first slice = %i bytes, other slices = " "%i bytes)" msgstr "" "Utilisation du découpage fourni par l'utilisateur (première tranche = %i " "octets, autres tranches = %i octets)" #: src/libdar/i_archive.cpp:1174 msgid "" "No slice layout of the archive of reference for the current isolated " "catalogue is available, cannot provide slicing information, aborting" msgstr "" "Aucune information de découpage en tranche n'est disponible à propos de " "l'archive dont a été isolé le présent catalogue. Impossible de localiser les " "fichiers au sein de l'archive, arrêt des opérations." 
#: src/libdar/i_archive.cpp:1247 #, c-format msgid "Exception caught from archive_listing_callback execution: %s" msgstr "Exception attrapée depuis l'execution de archive_listing_callback : %s" #: src/libdar/i_archive.cpp:1252 msgid "Exception caught from archive_listing_callback execution" msgstr "Exception attrapée depuis l'execution de archive_listing_callback" #: src/libdar/i_archive.cpp:1281 msgid "Error while listing archive contents: " msgstr "Erreur lors de l'affichage du contenu de l'archive : " #: src/libdar/i_archive.cpp:1357 msgid "Error while comparing archive with filesystem: " msgstr "" "Erreur pendant la comparaison de l'archive avec le système de fichiers : " #: src/libdar/i_archive.cpp:1399 msgid "" "WARNING! This is an isolated catalogue, no data or EA is present in this " "archive, only the catalogue structure can be checked" msgstr "" "ATTENTION ! Ceci est un catalogue isolé, aucune donnée ou AE n'est présent " "dans cette archive, seule la structure du catalogue peut être validée" #: src/libdar/i_archive.cpp:1433 msgid "A problem occurred while reading this archive contents: " msgstr "Une erreur est survenue lors de la lecture du contenu de l'archive : " #: src/libdar/i_archive.cpp:1448 msgid "Error while testing archive: " msgstr "Erreur lors du test de l'archive : " #: src/libdar/i_archive.cpp:1497 msgid "" "Isolation with delta signature is not possible on a just created archive (on-" "fly isolation)" msgstr "" "L'isolation avec signature delta est impossible en une seule opération " "(isolation à la volée)" #: src/libdar/i_archive.cpp:1954 msgid "" "nodump flag feature has not been activated at compilation time, it is thus " "not available" msgstr "" "La fonctionnalité --nodump n'a pas été activée lors de la compilation, cette " "option n'est donc pas disponible" #: src/libdar/i_archive.cpp:1981 msgid "" "Cannot sequentially read an archive of reference when delta signature or " "delta patch is requested" msgstr "" "Impossible de lire 
séquentiellement une archive de référence quand des " "signatures delta ou des patchs sont demandés" #: src/libdar/i_archive.cpp:2017 #, c-format msgid "" "WARNING! The archive is located in the directory to backup, this may create " "an endless loop when the archive will try to save itself. You can either add " "-X \"%S.*.%S\" on the command line, or change the location of the archive " "(see -h for help). Do you really want to continue?" msgstr "" "ATTENTION ! L'archive se trouve dans un des répertoires à sauvegarder, cela " "peut créer une boucle sans fin lorsque l'archive essaiera de se sauvegarder " "elle-même. Il est soit possible d'ajouter -X \"%S.*.%S\" sur la ligne de " "commande ou bien il faut changer l'archive de place (voir l'option -h pour " "l'aide). Voulez-vous vraiment continuer ?" #: src/libdar/i_archive.cpp:2203 msgid "Ready to start writing down the archive?" msgstr "Prêt à commencer l'écriture de l'archive ?" #: src/libdar/i_archive.cpp:2249 msgid "Building the catalog object..." msgstr "Construction de l'objet catalogue ..." #: src/libdar/i_archive.cpp:2267 #, c-format msgid "Error while fetching information for %S: " msgstr "Erreur lors de la recherche d'informations pour %S : " #: src/libdar/i_archive.cpp:2320 msgid "Processing files for backup..." msgstr "Traitement des fichiers pour la sauvegarde ..." #: src/libdar/i_archive.cpp:2383 msgid "Processing files for merging..." msgstr "Traitement des fichiers pour la fusion ..." #: src/libdar/i_archive.cpp:2415 msgid "Processing files for fixing..." msgstr "Traitement des fichiers pour la réparation ..." #: src/libdar/i_archive.cpp:2497 msgid "" "Adding reference to files that have been destroyed since reference backup..." msgstr "" "Ajout des fichiers qui ont été détruits depuis la sauvegarde de référence ..." #: src/libdar/i_archive.cpp:2552 msgid "Error while saving data: " msgstr "Erreur lors de la sauvegarde des données : " #: src/libdar/i_archive.cpp:2578 msgid "WARNING! 
Incorrect signature found for archive, continue anyway?" msgstr "" "ATTENTION ! Signature incorrecte trouvée pour l'archive, on continue malgré " "tout ?" #: src/libdar/i_archive.cpp:2621 msgid "" "Cannot get data name of the archive, this archive is not completely " "initialized" msgstr "" "Impossible d'obtenir le nom de données de cette archive ; cette archive " "n'est pas complètement initialisée" #: src/libdar/i_archive.cpp:2646 msgid "" "This archive contains an isolated catalogue, it cannot be used for this " "operation. It can only be used as reference for a incremental/differential " "backup or as backup of the original archive's catalogue" msgstr "" "Cette archive ne contient qu'un catalogue isolé et ne peut pas être utilisée " "pour cette opération. Elle ne peut servir que de référence pour une " "sauvegarde incrémentale/décrémentale ou comme secours du catalogue de " "l'archive d'origine" #: src/libdar/i_archive.cpp:2653 msgid "" "LAX MODE: Archive seems to be only an isolated catalogue (no data in it), " "Can I assume data corruption occurred and consider the archive as being a " "real archive?" msgstr "" "MODE RELAX : L'archive semble n'être qu'un catalogue isolé (aucune donnée " "présente), Doit-on considérer qu'il s'agit plutôt d'une corruption et peut-" "on considérer que l'archive est une vraie archive ?" #: src/libdar/i_archive.cpp:2658 msgid "Error while fetching archive properties: " msgstr "Erreur lors de la recherche des propriétés de l'archive : " #: src/libdar/i_archive.cpp:2683 msgid "Sorry, file size is unknown at this step of the program." msgstr "Désolé, la taille du fichier est inconnue à cette étape du programme" #: src/libdar/i_archive.cpp:2782 #, c-format msgid "" "libgcrypt version < %s. Ligcrypt used has a bug that leads md5 and sha1 hash " "results to be erroneous for files larger than 256 Gio (gibioctet), do you " "really want to spend CPU cycles calculating a useless hash?" msgstr "" "version de libgcrypt < %s. 
Cette version de ligcrypt a un bug qui conduit " "les calculs de hash md5 et sha1 à être faux pour des fichiers plus grands " "que 256 Gio (gibioctet), voulez-vous vraiment perdre du temps à calculer ces " "hash inutiles ?" #: src/libdar/data_tree.cpp:94 msgid "reached End of File before all expected data could be read" msgstr "" "Fin de fichier atteinte alors que toutes les données n'ont pu être lues" #: src/libdar/data_tree.cpp:119 msgid "Unexpected value found in database" msgstr "Valeur inattendue trouvée dans la base" #: src/libdar/data_tree.cpp:928 #, c-format msgid "" "Dates of file's %S are not increasing when database's archive number grows. " "Concerned file is: %S" msgstr "" "Les dates de %S ne sont pas croissante avec le numéro des archives. Fichier " "concerné : %S" #: src/libdar/data_tree.cpp:931 msgid "" "Dates are not increasing for all files when database's archive number grows, " "working with this database may lead to improper file's restored version. " "Please reorder the archive within the database in the way that the older is " "the first archive and so on up to the most recent archive being the last of " "the database" msgstr "" "Les dates ne croissent pas avec le numéro des archives pour tous les " "fichiers, l'utilisation de cette base de donnée peut conduire à la " "restauration d'une mauvaise version d'un tel fichier. S'il s'agit d'une " "erreur d'ordonnancement des archives dans la base, il est recommandé de le " "corriger afin d'avoir les archives les plus anciennes aux numéros les plus " "petits et les plus récentes aux numéros les plus grands " #: src/libdar/data_tree.cpp:934 msgid "Do you want to ignore the same type of error for other files?" msgstr "" "Voulez-vous ignorer le même type d'erreur pour les prochains fichiers ?" 
#: src/libdar/slave_zapette.cpp:66 msgid "Input cannot be read" msgstr "l'entrée ne peut être lue" #: src/libdar/slave_zapette.cpp:68 msgid "Cannot write to output" msgstr "Impossible d'écrire sur la sortie" #: src/libdar/slave_zapette.cpp:70 msgid "Data should be read-only" msgstr "Les données devraient être en lecture seule" #: src/libdar/slave_zapette.cpp:146 msgid "Cannot skip at end of file" msgstr "Ne peut pas atteindre la fin du fichier" #: src/libdar/slave_zapette.cpp:199 msgid "Received unknown special order" msgstr "l'ordre spécial reçu est inconnu" #: src/libdar/delta_sig_block_size.cpp:66 msgid "Invalid divisor used for delta signature block len calculation" msgstr "" "Valeur incorrecte du diviseur pour la détermination de la taille des blocs " "de signature delta" #: src/libdar/delta_sig_block_size.cpp:68 msgid "" "minimum size should be lesser or equal than maximum size when specifying " "delta signature block size formula" msgstr "" "la taille minimale devrait être inférieure ou égale à la taille maximale des " "blocs de signature delta" #: src/libdar/filesystem_specific_attribute.hpp:273 msgid "true" msgstr "vrai" #: src/libdar/filesystem_specific_attribute.hpp:273 msgid "false" msgstr "faux" #: src/libdar/filesystem_specific_attribute.cpp:233 #: src/libdar/filesystem_specific_attribute.cpp:1166 msgid "invalid length for FSA family flag" msgstr "Le champ de famille de FSA a une longueur invalide" #: src/libdar/filesystem_specific_attribute.cpp:239 msgid "invalid length for FSA nature flag" msgstr "le champ de nature de FSA a une longueur invalide" #: src/libdar/filesystem_specific_attribute.cpp:716 msgid "Failed setting (opening) extX family FSA: " msgstr "Echec d'affectation (ouverture) des FSA extX: " #: src/libdar/filesystem_specific_attribute.cpp:729 msgid "Failed reading existing extX family FSA: " msgstr "Echec de lecture des FSA existant de la famille extX : " #: src/libdar/filesystem_specific_attribute.cpp:758 #: 
src/libdar/filesystem_specific_attribute.cpp:773 #: src/libdar/filesystem_specific_attribute.cpp:788 #: src/libdar/filesystem_specific_attribute.cpp:803 #: src/libdar/filesystem_specific_attribute.cpp:824 #: src/libdar/filesystem_specific_attribute.cpp:840 #: src/libdar/filesystem_specific_attribute.cpp:855 #: src/libdar/filesystem_specific_attribute.cpp:870 #: src/libdar/filesystem_specific_attribute.cpp:885 #: src/libdar/filesystem_specific_attribute.cpp:900 #: src/libdar/filesystem_specific_attribute.cpp:915 #: src/libdar/filesystem_specific_attribute.cpp:930 #, c-format msgid "" "Warning: FSA %s/%s support has not been found at compilation time, cannot " "restore it for inode %s" msgstr "" "Attention: le support des FSA %s/%s n'a pas été activé lors de la " "compilation. Impossible de le restaurer pour l'inode %s" #: src/libdar/filesystem_specific_attribute.cpp:975 msgid "Failed set extX family FSA: " msgstr "Echec d'affectation des FSA extX : " #: src/libdar/filesystem_specific_attribute.cpp:1002 #, c-format msgid "" "Not setting FSA extX IMMUTABLE flags for %s due to of lack of capability" msgstr "" "Pas d'affectation du FSA extX IMMUTABLE pour %s à cause du défaut de " "privilège pour le faire" #: src/libdar/filesystem_specific_attribute.cpp:1030 #, c-format msgid "" "Not setting FSA extX SYSTEM RESOURCE flags for %s due to of lack of " "capability" msgstr "" "Pas d'affectation du FSA extX SYSTEM RESOURCE pour %s à cause du défaut de " "privilège pour le faire" #: src/libdar/filesystem_specific_attribute.cpp:1048 #, c-format msgid "" "Warning! %s Filesystem Specific Attribute support have not been activated at " "compilation time and could not be restored for %s" msgstr "" "Attention ! Le support des FSA %s n'a pas été activé lors de la compilation. 
" "Ces attributs ne peuvent être restaurés pour %s" #: src/libdar/filesystem_specific_attribute.cpp:1068 #, c-format msgid "" "Birth Time attribute cannot be restored for %s because no FSA familly able " "to carry that attribute could be activated at compilation time." msgstr "" "L'attribut date de création ne peut pas être restauré pour %s car aucune " "famille de FSA capable de le supporter n'a été activée lors de la compilation." #: src/libdar/filesystem_specific_attribute.cpp:1172 #: src/libdar/filesystem_specific_attribute.cpp:1173 msgid "invalid FSA family flag" msgstr "Drapeau de famille FSA invalide" #: src/libdar/filesystem_specific_attribute.cpp:1207 #: src/libdar/filesystem_specific_attribute.cpp:1209 msgid "invalid FSA nature flag" msgstr "nature de FSA invalide" #: src/libdar/filesystem_specific_attribute.cpp:1233 msgid "Unexepected value for boolean FSA, data corruption may have occurred" msgstr "" "Valeur inattendue pour un FSA boolean, une corruption de données est peut-" "être survenue" #: src/libdar/filesystem_specific_attribute.cpp:1237 msgid "Error while reading FSA: " msgstr "Erreur lors de la lecture des FSA : " #: src/libdar/op_tools.cpp:48 src/libdar/op_tools.cpp:119 #: src/libdar/op_tools.cpp:199 #, c-format msgid "" "Conflict found while selecting the file to retain in the resulting archive:" msgstr "" "Conflit lors de la sélection du fichier à conserver dans l'archive " "résultante : " #: src/libdar/op_tools.cpp:49 #, c-format msgid "User Decision requested for data of file %S" msgstr "Décision de l'utilisateur requise pour les données de %S" #: src/libdar/op_tools.cpp:52 msgid "" "\n" "Your decision about file's data:\n" "[P]reserve\n" "[O]verwrite\n" "mark [S]aved and preserve\n" "mark saved and overwri[T]e\n" "[R]emove\n" "[*] keep undefined\n" "[A]bort\n" " Your Choice? 
" msgstr "" "\n" "Votre décision à propos des données du fichier :\n" "[P] préserver\n" "[O] écraser\n" "[S] marquer comme déjà sauvé et préserver\n" "[T] marquer comme déjà sauvé et écraser\n" "[R] supprimer l'entrée\n" "[*] laisser non défini\n" "[A] arrêter l'opération\n" " Votre choix ?" #: src/libdar/op_tools.cpp:54 src/libdar/op_tools.cpp:125 #: src/libdar/op_tools.cpp:205 msgid "" "Please answer by the character between brackets ('[' and ']') and press " "return" msgstr "" "Merci de répondre avec un des caractères entre crochets et appuyer sur " "\"entrée\"" #: src/libdar/op_tools.cpp:84 src/libdar/op_tools.cpp:163 #: src/libdar/op_tools.cpp:231 #, c-format msgid "" "Warning, are you sure you want to abort (please answer \"%S\" to confirm)? " msgstr "" "Attention, êtes-vous sûr de vouloir arrêter (répondre \"%S\" pour confirmer)?" #: src/libdar/op_tools.cpp:88 src/libdar/op_tools.cpp:167 #: src/libdar/op_tools.cpp:235 msgid "Cancellation no confirmed" msgstr "Arrêt non confirmé" #: src/libdar/op_tools.cpp:91 src/libdar/op_tools.cpp:170 #: src/libdar/op_tools.cpp:238 msgid "Unknown choice: " msgstr "Choix inconnu : " #: src/libdar/op_tools.cpp:120 #, c-format msgid "User Decision requested for EA of file %S" msgstr "Décision de l'utilisateur requise pour les AE du fichier %S" #: src/libdar/op_tools.cpp:123 msgid "" "\n" "Your decision about file's EA:\n" "[p]reserve\n" "[o]verwrite\n" "mark [s]aved and preserve\n" "mark saved and overwri[t]e\n" "[m]erge EA and preserve\n" "merge EA a[n]d overwrite\n" "[r]emove\n" "[*] keep undefined\n" "[a]bort\n" " Your choice? " msgstr "" "\n" "Votre décision pour les AE du fichier :\n" "[p] préserver\n" "[o] écraser\n" "[s] marquer comme déjà sauvé et préserver\n" "[t] marquer comme déjà sauvé et écraser\n" "[m] fusionner les EA et préserver\n" "[n] fusionner les EA et écraser\n" "[r] supprimer tous les EA\n" "[*] laisser l'action non définie\n" "[a] arrêter l'opération\n" " Votre choix ? 
" #: src/libdar/op_tools.cpp:200 #, c-format msgid "User Decision requested for FSA of file %S" msgstr "Décision de l'utilisateur requise pour les FSA du fichier %S" #: src/libdar/op_tools.cpp:203 msgid "" "\n" "Your decision about file's FSA:\n" "[p]reserve\n" "[o]verwrite\n" "mark [s]aved and preserve\n" "mark saved and overwri[t]e\n" "[*] keep undefined\n" "[a]bort\n" " Your choice? " msgstr "" "\n" "Votre décision à propos des FSA du fichier :\n" "[p] préserver\n" "[o] écraser\n" "marquer comme déjà [s]auvé et préserver\n" "[t] marquer comme déjà sauvé et écraser\n" "[*] laisser non défini\n" "[a]rrêter l'opération\n" " Votre choix ?" #: src/libdar/op_tools.cpp:259 src/dar_suite/line_tools.cpp:58 #: src/dar_suite/dar_suite.cpp:279 msgid "YES" msgstr "OUI" #: src/libdar/op_tools.cpp:260 src/dar_suite/line_tools.cpp:58 #: src/dar_suite/dar_suite.cpp:281 msgid "NO" msgstr "NON" #: src/libdar/op_tools.cpp:272 #, c-format msgid "Entry information:\t\"in place\"\t\"to be added\"" msgstr "Informations sur les entrées :\t\"en place\"\t\"à ajouter\"" #: src/libdar/op_tools.cpp:273 #, c-format msgid "Is inode :\t %S \t\t %S" msgstr "Est une inode :\t %S \t\t %S" #: src/libdar/op_tools.cpp:274 #, c-format msgid "Is directory :\t %S \t\t %S" msgstr "Est un répertoire :\t %S \t\t %S" #: src/libdar/op_tools.cpp:275 #, c-format msgid "Is plain file :\t %S \t\t %S" msgstr "Est un fichier :\t %S \t\t %S" #: src/libdar/op_tools.cpp:276 #, c-format msgid "Is hard linked :\t %S \t\t %S" msgstr "Est un lien dur :\t %S \t\t %S" #: src/libdar/op_tools.cpp:277 #, c-format msgid "Entry type :\t %s \t %s" msgstr "Type d'entrée :\t %s \t %s" #: src/libdar/op_tools.cpp:283 msgid "me" msgstr "moi" #: src/libdar/op_tools.cpp:292 #, c-format msgid "Data more recent :\t %S \t\t %S" msgstr "Données plus récentes :\t %S \t\t %S" #: src/libdar/op_tools.cpp:302 #, c-format msgid "Data size :\t %i \t\t %i" msgstr "Taille des données :\t %i \t\t %i" #: src/libdar/op_tools.cpp:303 #, c-format msgid 
"Sparse file :\t %S \t\t %S" msgstr "Fichier à trou :\t %S \t\t %S" #: src/libdar/op_tools.cpp:304 #, c-format msgid "Dirty file :\t %S \t\t %S" msgstr "Inode sale :\t %S \t\t %S" #: src/libdar/op_tools.cpp:306 #, c-format msgid "Data full saved :\t %S \t\t %S" msgstr "Données sauvées :\t %S \t\t %S" #: src/libdar/op_tools.cpp:307 #, c-format msgid "EA full saved :\t %S \t\t %S" msgstr "Attributs Étendus sauvés :\t %S \t\t %S" #: src/libdar/op_tools.cpp:309 #, c-format msgid "EA more recent :\t %S \t\t %S" msgstr "Attr. Etend. plus récents :\t %S \t\t %S" #: src/libdar/op_tools.cpp:310 #, c-format msgid "FSA full saved :\t %S \t\t %S" msgstr "FSA sauvés complètement :\t %S \t\t %S" #: src/libdar/op_tools.cpp:315 #, c-format msgid "FSA familly :\t %S \t\t %S" msgstr "Famille de FSA :\t %S \t\t %S" #: src/libdar/op_tools.cpp:324 #, c-format msgid "EA number :\t %i \t\t %i" msgstr "Nombre d'Attributs Étendus :\t %i \t\t %i" #: src/libdar/op_tools.cpp:327 #, c-format msgid "EA size :\t %i \t\t %i" msgstr "Taille totale des Attr. 
Eten.:\t %i \t\t %i" #: src/libdar/mask.cpp:69 #, c-format msgid "%Sglob expression: %S [%S]" msgstr "%Sexpression glob: %S [%S]" #: src/libdar/mask.cpp:110 #, c-format msgid "%Sregular expression: %S [%S]" msgstr "%Sexpression régulière: %S [%S]" #: src/libdar/mask.cpp:160 #, c-format msgid "" "%Snot(\n" "%S\n" "%S)" msgstr "" "%Snon(\n" "%S\n" "%S)" #: src/libdar/mask.cpp:279 #, c-format msgid "%SIs subdir of: %S [%S]" msgstr "%SSous-répertoire de : %S [%S]" #: src/libdar/mask.cpp:297 #, c-format msgid "%SPath is: %S [%S]" msgstr "%SLe chemin est : %S [%S]" #: src/libdar/mask.cpp:307 #, c-format msgid "%SPath leads to: %S [%S]" msgstr "%SLe chemin mène à : %S [%S]" #: src/libdar/mask.cpp:315 msgid "case sensitive" msgstr "sensible à la casse" #: src/libdar/mask.cpp:315 msgid "case in-sensitive" msgstr "insensible à la casse" #: src/libdar/cat_directory.cpp:142 msgid "" "LAX MODE: Error met building a catalogue entry, skipping this entry and " "continuing. Skipped error is: " msgstr "" "MODE RELAX : Erreur lors de la construction d'une entrée du catalogue, on " "ignore et on passe à la suivante. 
L'erreur rencontrée était : " #: src/libdar/cat_directory.cpp:185 msgid "missing data to build a cat_directory" msgstr "il manque des données pour construire un répertoire" #: src/libdar/cat_directory.cpp:443 #, c-format msgid "Cannot remove nonexistent entry %S from catalogue" msgstr "Ne peut pas supprimer l'entrée %S du catalogue car elle n'y existe pas" #: src/libdar/scrambler.cpp:34 msgid "Key cannot be an empty string" msgstr "Une clef ne peut être une chaîne vide" #: src/libdar/path.cpp:65 src/libdar/path.cpp:87 msgid "Empty string is not a valid path" msgstr "Une chaîne vide n'est pas un chemin valide" #: src/libdar/path.cpp:95 #, c-format msgid "%S is an not a valid path: %S" msgstr "%S n'est pas un chemin valide : %S" #: src/libdar/path.cpp:199 msgid "Cannot add an absolute path" msgstr "Impossible d'ajouter un chemin absolu" #: src/libdar/path.cpp:357 msgid "Empty string as subdirectory does not make a valid path" msgstr "" "Une chaîne vide en tant que sous-répertoire ne donne pas un chemin valide" #: src/libdar/crc.cpp:210 src/libdar/crc.cpp:311 msgid "Invalid size for CRC width" msgstr "Taille invalide pour un CRC" #: src/libdar/i_database.cpp:122 msgid "Badly formatted database" msgstr "Base de données malformée" #: src/libdar/i_database.cpp:171 msgid "Cannot write down a read-only database" msgstr "Impossible d'écrire une base en lecture seule" #: src/libdar/i_database.cpp:231 msgid "Empty string is an invalid archive basename" msgstr "Une chaîne vide est un nom d'archive invalide" #: src/libdar/i_database.cpp:257 src/libdar/i_database.cpp:259 msgid "Incorrect archive range in database" msgstr "Intervalle d'archive incorrect pour la base" #: src/libdar/i_database.cpp:286 src/libdar/i_database.cpp:305 #: src/libdar/i_database.cpp:408 msgid "Non existent archive in database" msgstr "Archive inexistante dans la base" #: src/libdar/i_database.cpp:325 src/libdar/i_database.cpp:327 msgid "Invalid archive number: " msgstr "Numéro d'archive invalide : " #: 
src/libdar/i_database.cpp:434 msgid "Invalid path, path must be relative" msgstr "Chemin invalide, le chemin doit être relatif" #: src/libdar/i_database.cpp:440 src/libdar/i_database.cpp:445 #: src/libdar/i_database.cpp:449 msgid "Non existent file in database" msgstr "Fichier inexistant dans la base" #: src/libdar/i_database.cpp:513 msgid "Checking chronological ordering of files between the archives..." msgstr "" "Vérification de l'ordonnancement chronologique des fichiers entre les " "archives ..." #: src/libdar/i_database.cpp:539 msgid "File recorded as removed at this date in database: " msgstr "" "Fichier enregistré comme supprimé à cette date dans la base de données : " #: src/libdar/i_database.cpp:543 msgid "File not found in database: " msgstr "Fichier non trouvé dans la base" #: src/libdar/i_database.cpp:547 msgid "" "File found in database but impossible to restore (only found \"unchanged\" " "in differential backups, or delta patch without reference to base it on in " "any previous archive of the base): " msgstr "" "Fichier trouvé dans la base mais impossible à restaurer (n'est présent que " "comme \"inchangé\" dans des archives différentielles ou bien comme \"delta " "patch\" mais sans la référence sur laquelle le patch s'applique présente " "dans une archive de la base):" #: src/libdar/i_database.cpp:566 msgid "" "Extended Attribute of file found in database but impossible to restore (only " "found \"unchanged\" in differential backups): " msgstr "" "Attributs Étendus du fichier trouvés dans la base mais impossibles à " "restaurer (ne sont présents que comme \"inchangés\" dans des archives " "différentielles)" #: src/libdar/i_database.cpp:579 #, c-format msgid "%S did not exist before specified date and cannot be restored" msgstr "%S n'existait pas avant la date spécifiée et ne peut être restauré" #: src/libdar/i_database.cpp:608 #, c-format msgid "" "Either archives in database are not properly tidied, or file last " "modification date has been 
artificially set to an more ancient date. This " "may lead improper Extended Attribute restoration for inode %S" msgstr "" "Soit les archives dans la base ne sont pas correctement rangées, soit la " "date de dernière modification du fichier a été artificiellement modifiée à " "une valeur plus ancienne. Ceci peut mener à une restauration incorrecte des " "Attributs Étendus de l'inode %S" #: src/libdar/i_database.cpp:627 #, c-format msgid "Cannot restore file %S : non existent file in database" msgstr "" "Impossible de restaurer le fichier %S : fichier inexistant dans la base" #: src/libdar/i_database.cpp:683 msgid "Error while restoring the following files: " msgstr "Erreur pendant la restauration des fichiers suivants : " #: src/libdar/i_database.cpp:692 msgid "Cannot restore any file, nothing done" msgstr "Aucun fichier ne peut être restauré, aucune action n'a été réalisée" #: src/libdar/i_database.cpp:705 src/libdar/i_database.cpp:713 #, c-format msgid "Invalid archive number: %d" msgstr "Numéro d'archive invalide : %d" #: src/libdar/catalogue.cpp:154 msgid "" "LAX MODE: catalogue label does not match archive label, as if it was an " "extracted catalogue, assuming data corruption occurred and fixing the " "catalogue to be considered an a plain internal catalogue" msgstr "" "MODE RELAX : l'étiquette du catalogue ne correspond pas à celle de " "l'archive, comme s'il s'agissait d'un catalogue isolé, on suppose qu'une " "corruption a eu lieu et que le catalogue est un catalogue normal interne à " "l'archive" #: src/libdar/catalogue.cpp:210 msgid "CRC failed for the catalogue" msgstr "Erreur de CRC lors de la lecture du catalogue" #: src/libdar/catalogue.cpp:212 msgid "" "LAX MODE: CRC failed for catalogue, the archive contents is corrupted. This " "may even lead dar to see files in the archive that never existed, but this " "will most probably lead to other failures in restoring files. Shall we " "proceed anyway?" 
msgstr "" "MODE RELAX : erreur de CRC pour le catalogue, le contenu de l'archive est " "corrompu. Ceci peut même amener dar à voir des fichiers dans l'archive qui " "n'ont jamais existé, mais conduira plus probablement à d'autres échecs lors " "de la restauration des fichiers. Doit-on poursuivre malgré tout ?" #: src/libdar/catalogue.cpp:262 msgid "root does not have a parent directory" msgstr "la racine n'a pas de répertoire père" #: src/libdar/catalogue.cpp:300 msgid "no current directory defined" msgstr "aucun répertoire courant défini" #: src/libdar/catalogue.cpp:304 msgid "root directory has no parent directory" msgstr "la racine n'a pas de répertoire père" #: src/libdar/catalogue.cpp:326 msgid "no current reading directory defined" msgstr "aucun répertoire courant défini pour la lecture" #: src/libdar/catalogue.cpp:399 src/libdar/catalogue.cpp:413 msgid " is not present in the archive" msgstr " n'est pas présent dans l'archive" #: src/libdar/catalogue.cpp:491 msgid "Cannot recurs in a non directory entry" msgstr "" "Impossible de poursuivre la récursion dans une entrée qui n'est pas un " "répertoire" #: src/libdar/catalogue.cpp:494 msgid "" "The entry to recurs in does not exist, cannot add further entry to that " "absent subdirectory" msgstr "" "L'entrée dans laquelle poursuivre la récursion n'existe pas. 
Impossible " "d'ajouter celle-ci à un répertoire absent" #: src/libdar/catalogue.cpp:559 msgid "root has no parent directory" msgstr "la racine n'a pas de répertoire père" #: src/libdar/archive_aux.cpp:134 #, c-format msgid "unknown hash algorithm corresponding to char `%c'" msgstr "le caractère '%c' ne correspond à aucun algorithme de hashage connu" #: src/libdar/integers.cpp:36 #, c-format msgid "%s type length is not %d byte(s) but %d" msgstr "la longueur du type %s n'est pas %d octet(s) mais %d" #: src/libdar/integers.cpp:44 #, c-format msgid "%s type is not a signed type as expected" msgstr "%s n'est pas un type signé tel attendu" #: src/libdar/integers.cpp:46 #, c-format msgid "%s type is not an unsigned type as expected" msgstr "%s n'est pas un type non signé tel attendu" #: src/libdar/integers.cpp:101 #, c-format msgid "" "type %s is neither big nor little endian! Do not know how to handle integer " "in a portable manner on this host, aborting" msgstr "" "le type %s n'est ni grand-boutiste ni petit-boutiste ! Impossible de savoir " "comment gérer ce type d'entier d'une manière portable sur ce système." 
#: src/libdar/integers.cpp:154 msgid "incoherent endian between U_16 and U_32" msgstr "boutisme incohérent entre U_16 et U_32" #: src/libdar/integers.cpp:156 msgid "incoherent endian between U_16 and U_64" msgstr "boutisme incohérent entre U_16 et U_64" #: src/libdar/integers.cpp:158 msgid "incoherent endian between U_16 and U_I" msgstr "boutisme incohérent entre U_16 et U_U" #: src/libdar/storage.cpp:85 msgid "Not enough data to initialize storage field" msgstr "Pas assez de données pour initialiser le champ \"storage\"" #: src/libdar/storage.cpp:110 msgid "Asking for an element out of array" msgstr "Un élément hors des limites est demandé" #: src/libdar/storage.cpp:162 msgid "The iterator is not indexing the object it has been asked to write to" msgstr "" "L'itérateur n'indexe pas l'objet sur lequel il lui a été demandé d'écrire" #: src/libdar/storage.cpp:196 msgid "The iterator is not indexing the object it has been asked to read from" msgstr "" "L'itérateur n'indexe pas l'objet sur lequel il lui a été demandé de lire" #: src/libdar/storage.cpp:515 msgid "The iterator is not indexing the object it has been defined for" msgstr "L'itérateur n'indexe pas l'objet pour lequel il a été défini" #: src/libdar/storage.cpp:767 src/libdar/storage.cpp:832 msgid "Iterator does not point to data" msgstr "L'itérateur ne pointe pas sur des données" #: src/libdar/storage.cpp:826 msgid "Reference storage of the iterator is empty or non existent" msgstr "Le \"storage\" de référence de l'itérateur est vide ou non existant" #: src/libdar/storage.cpp:843 msgid "The iterator position is not inside the storage of reference" msgstr "" "La position de l'itérateur n'est pas contenue dans le \"storage\" de " "référence" #: src/libdar/etage.cpp:118 msgid "Error opening directory in furtive read mode: " msgstr "Erreur lors de l'ouverture d'un répertoire en mode furtif : " #: src/libdar/etage.cpp:122 #, c-format msgid "" "Could not open directory %s in furtive read mode (%s), using normal 
mode" msgstr "" "Échec lors de l'ouverture du répertoire %s en mode furtif (%s), ouverture en " "mode normal" #: src/libdar/etage.cpp:147 msgid "Error opening directory: " msgstr "Erreur lors de l'ouverture du répertoire : " #: src/libdar/etage.cpp:165 #, c-format msgid "" "Filename provided by the operating system seems truncated in directory %s, " "storing filename as is: %s" msgstr "" "Le nom de fichier fourni par le système d'exploitation semble coupé au " "niveau du répertoire %s, enregistrement du nom de fichier tel quel: %s" #: src/libdar/etage.cpp:197 #, c-format msgid "" "Detected Cache Directory Tagging Standard for %s, the contents of that " "directory will not be saved" msgstr "" "Détection d'une marque de répertoire cache pour %s, le contenu de ce " "répertoire ne sera pas sauvegardé" #: src/libdar/zapette_protocol.cpp:69 msgid "Partial request received, aborting\n" msgstr "Requête partielle reçue, opération abandonnée\n" #: src/libdar/zapette_protocol.cpp:142 msgid "Corrupted data read on pipe" msgstr "Les données lues depuis un tube sont corrompues" #: src/libdar/libdar5.cpp:162 msgid "Caught an unknown Egeneric exception: " msgstr "Une exception Egeneric inconnue a été interceptée : " #: src/libdar/libdar5.cpp:167 msgid "Caught a none libdar exception" msgstr "Une exception non \"libdar\" a été interceptée" #: src/libdar/libdar5.cpp:256 msgid "Invald nullptr argument given to 'ptr'" msgstr "valeur nullptr donnée à \"ptr\"" #: src/libdar/libdar5.cpp:303 msgid "Invalid nullptr pointer given to close_archive" msgstr "valeur nullptr donnée à \"close_archive\"" #: src/libdar/libdar5.cpp:326 src/libdar/libdar5.cpp:346 #: src/libdar/libdar5.cpp:365 src/libdar/libdar5.cpp:387 #: src/libdar/libdar5.cpp:407 msgid "Invalid nullptr argument given to 'ptr'" msgstr "valeur nullptr donnée à \"ptr\"" #: src/libdar/zapette.cpp:65 msgid "Cannot read on input" msgstr "Lecture impossible en entrée" #: src/libdar/zapette.cpp:67 msgid "Cannot write on output" msgstr 
"Écriture impossible en sortie" #: src/libdar/zapette.cpp:97 #, c-format msgid "" "Failed driving dar_slave to the end of archive: %S. Trying to open the " "archive from the first bytes" msgstr "" "dar_slave n'est pas arrivé à atteindre la fin de l'archive: %S. Tentantive " "d'ouverture de l'archive depuis les premiers octets" #: src/libdar/zapette.cpp:213 msgid "Uncomplete answer received from peer" msgstr "Réponse incomplète reçue du correspondant" #: src/libdar/zapette.cpp:297 msgid "Communication problem with peer, retry ?" msgstr "Problème de communication, essayer à nouveau ?" #: src/libdar/zapette.cpp:313 src/libdar/zapette.cpp:327 #: src/libdar/zapette.cpp:347 src/libdar/zapette.cpp:352 msgid "Incoherent answer from peer" msgstr "Réponse incohérente du correspondant" #: src/libdar/zapette.cpp:322 msgid "Bad answer from peer, while closing connection" msgstr "" "Réponse incohérente du correspondant pendant la terminaison de la connexion" #: src/libdar/zapette.cpp:332 src/libdar/zapette.cpp:337 #: src/libdar/zapette.cpp:342 msgid "" "Unexpected answer from slave, communication problem or bug may hang the " "operation" msgstr "" "Réponse inattendue de l'esclave, problème un de communication ou un bogue " "risque de suspendre l'opération" #: src/libdar/zapette.cpp:355 msgid "Corrupted data read from pipe" msgstr "Données corrompues lues depuis un tube" #: src/libdar/mask_list.cpp:97 #, c-format msgid "Cannot allocate memory for buffer while reading %S" msgstr "Impossible d'allouer de mémoire pour un tampon lors de la lecture %S" #: src/libdar/mask_list.cpp:122 msgid "Found '" msgstr "Trouvé '" #: src/libdar/mask_list.cpp:146 #, c-format msgid "" "line exceeding the maximum of %d characters in listing file %S, aborting. " "Concerned line starts with: %s" msgstr "" "une ligne excède le maximum de %d caractères lors du listing du fichier %S, " "Abandon de l'opération. 
La ligne concernée commence par : %s" #: src/libdar/mask_list.cpp:180 msgid "" "Mask_list's prefix must be an absolute path or start with \"\" string " "for archive merging" msgstr "" "Le préfixe pour une mask_list doit être un chemin absolu ou commencer par la " "chaîne \"\" pour la fusion d'archives" #: src/libdar/mask_list.cpp:202 #, c-format msgid "" "Error met while reading line\n" "\t%S\n" " from file %S: %S" msgstr "" "Erreur lors de la lecture de la ligne\n" "\t%S\n" " à partir du fichier %S : %S" #: src/libdar/mask_list.cpp:219 #, c-format msgid "Too much line in file %S (integer overflow)" msgstr "Trop de lignes dans le fichier %S (dépassement d'entiers)" #: src/libdar/mask_list.cpp:223 #, c-format msgid "Error met while opening %S: " msgstr "Erreur d'ouverture de %S : " #: src/libdar/filesystem_hard_link_read.cpp:158 #, c-format msgid "Error reading inode of file %s : %s" msgstr "Erreur lors de la lecture de l'inode de %s : %s" #: src/libdar/filesystem_hard_link_read.cpp:163 #, c-format msgid "Failed reading inode information for %s: " msgstr "Echec de lecture des informations d'inode de %s : " #: src/libdar/filesystem_hard_link_read.cpp:171 msgid "Cannot read inode for " msgstr "Impossible de lire l'inode de " #: src/libdar/filesystem_hard_link_read.cpp:182 #: src/libdar/filesystem_hard_link_read.cpp:211 msgid "atime, data access time" msgstr "atime, date d'accès aux données" #: src/libdar/filesystem_hard_link_read.cpp:188 #: src/libdar/filesystem_hard_link_read.cpp:217 msgid "mtime, data modification time" msgstr "mtime, heure de modification des données" #: src/libdar/filesystem_hard_link_read.cpp:194 #: src/libdar/filesystem_hard_link_read.cpp:223 msgid "ctime, inode change time" msgstr "ctime, date de modification des métadonnées d'inode" #: src/libdar/filesystem_hard_link_read.cpp:305 msgid "Unknown file type! file name is: " msgstr "Type de fichier inconnu ! 
Le nom du fichier est : " #: src/libdar/filesystem_hard_link_read.cpp:338 msgid "Error reading EA for " msgstr "Erreur lors de la lecture des AE pour " #: src/libdar/capabilities.cpp:114 #, c-format msgid "Error met while checking for capability %S: %s" msgstr "Erreur lors de la vérifiation de la capacité %S : %s" #: src/libdar/capabilities.cpp:144 src/libdar/capabilities.cpp:151 #, c-format msgid "Error met while setting capability %S: %s" msgstr "Erreur lors de l'assignation de la capacité %S : %s" #: src/libdar/crypto.cpp:44 msgid "scrambling (weak encryption)" msgstr "scrambling (chiffrement faible)" #: src/libdar/crypto.cpp:102 msgid "Unknown crypto algorithm" msgstr "Algorithme de chiffrement inconnu" #: src/libdar/mycurl_protocol.cpp:44 #, c-format msgid "Unknown protocol: %S" msgstr "Protocole inconnu : %S" #: src/libdar/filtre.cpp:258 src/libdar/filtre.cpp:638 #: src/libdar/filtre.cpp:1249 src/libdar/filtre.cpp:1430 #: src/libdar/filtre.cpp:2711 msgid "Inspecting directory " msgstr "Inspection d'un répertoire " #: src/libdar/filtre.cpp:294 #, c-format msgid "" "File %S has changed during backup and is probably not saved in a valid state " "(\"dirty file\"), do you want to consider it for restoration anyway?" msgstr "" "Le fichier %S a changé lors de sa sauvegarde and n'est probablement pas " "sauvé un état valide (\"fichier sale\"), voulez-vous malgré tout le " "considérer pour la restauration ?" #: src/libdar/filtre.cpp:304 msgid "" "File had changed during backup and had been copied another time, restoring " "the next copy of file: " msgstr "" "Le fichier a changé lors de sauvegarde mais a été sauvé plusieurs fois, " "restauration de la copie suivante : " #: src/libdar/filtre.cpp:375 #, c-format msgid "" "The just restored file %S has been marked as dirty (sequential reading can " "only detect the dirty status after restoration), do we remove this just " "restored dirty file?" 
msgstr "" "Le fichier juste restauré %S a été marqué comme sale (en lecture " "séquentielle cet état ne peut être détecté qu'après la restauration du " "fichier), voulez-vous supprimer le fichier sale qui vient juste d'être " "restauré ?" #: src/libdar/filtre.cpp:382 #, c-format msgid "" "The just restored file %S has been marked as dirty (sequential reading can " "only detect the dirty status after restoration), removing the just restored " "dirty file as it is asked to ignore this type of file" msgstr "" "Le fichier juste restauré %S a été marqué comme sale (en lecture " "séquentielle cet état ne peut être détecté qu'après la restauration du " "fichier), suppression du fichier sale qui vient juste d'être restauré, " "puisqu'il a été demandé d'ignorer ce type de fichier" #: src/libdar/filtre.cpp:384 #, c-format msgid "Removing the dirty file %S" msgstr "Suppression du fichier sale %S" #: src/libdar/filtre.cpp:457 msgid " not restored (user choice)" msgstr " non restauré (choix de l'utilisateur)" #: src/libdar/filtre.cpp:461 msgid "No file in this directory will be restored." msgstr "Aucun fichier de ce répertoire ne sera restauré." #: src/libdar/filtre.cpp:479 msgid "Error while restoring " msgstr "Erreur lors de la restauration de " #: src/libdar/filtre.cpp:484 msgid "Warning! No file in that directory will be restored: " msgstr "Attention ! Aucun fichier de ce répertoire ne sera restauré : " #: src/libdar/filtre.cpp:679 msgid "Recording hard link into the archive: " msgstr "Enregistrement de lien dur dans l'archive : " #: src/libdar/filtre.cpp:746 #, c-format msgid "" "SECURITY WARNING! SUSPICIOUS FILE %S: ctime changed since archive of " "reference was done, while no other inode information changed" msgstr "" "AVERTISSEMENT DE SECURITÉ ! 
FICHIER SUSPECT %S : la date ctime a changé " "depuis que l'archive de référence a été faite, alors qu'aucune autre " "propriété de cette inode n'a changé" #: src/libdar/filtre.cpp:1097 msgid "Error while saving " msgstr "Erreur lors de la sauvegarde de " #: src/libdar/filtre.cpp:1112 msgid "NO FILE IN THAT DIRECTORY CAN BE SAVED." msgstr "AUCUN FICHIER DE CE RÉPERTOIRE NE PEUT ÊTRE SAUVÉ." #: src/libdar/filtre.cpp:1117 msgid "Cannot write down the archive: " msgstr "Impossible d'écrire l'archive : " #: src/libdar/filtre.cpp:1134 msgid ", compression ratio " msgstr ", taux de compression " #: src/libdar/filtre.cpp:1140 #, c-format msgid "Finished Inspecting directory %S , saved %S%S" msgstr "Fin de l'inspection du répertoire %S , sauvé %S%S" #: src/libdar/filtre.cpp:1241 msgid "SKIPPED (hard link in sequential read mode): " msgstr "SAUTÉ (lien dur en mode sequentiel) : " #: src/libdar/filtre.cpp:1277 msgid "OK " msgstr "OK " #: src/libdar/filtre.cpp:1288 src/libdar/filtre.cpp:1320 msgid "DIFF " msgstr "DIFF " #: src/libdar/filtre.cpp:1320 msgid ": file not present in filesystem" msgstr " : fichier absent du système de fichiers" #: src/libdar/filtre.cpp:1372 msgid "ERR " msgstr "ERR " #: src/libdar/filtre.cpp:1455 msgid "Data" msgstr "Données" #: src/libdar/filtre.cpp:1464 msgid "Can't read saved data." msgstr "Impossible de lire les données sauvegardées." #: src/libdar/filtre.cpp:1510 msgid "CRC error: data corruption." msgstr "Erreur de parité : données corrompues." #: src/libdar/filtre.cpp:1611 msgid "OK " msgstr "OK " #: src/libdar/filtre.cpp:1646 msgid "ERR " msgstr "ERR " #: src/libdar/filtre.cpp:1768 msgid "" "Decremental mode is useless when merging is not applied to both an archive " "of reference and an auxiliary archive of reference. Ignore decremental mode " "and continue?" msgstr "" "Le mode décrémental est inutile lorsqu'il n'est pas appliqué à la fois à une " "archive de référence et à une archive auxiliaire de référence. 
Ignorer le " "mode décrémental et continuer ?" #: src/libdar/filtre.cpp:1902 msgid "first" msgstr "première" #: src/libdar/filtre.cpp:1905 msgid "second" msgstr "deuxième" #: src/libdar/filtre.cpp:1908 msgid "next" msgstr "suivante" #: src/libdar/filtre.cpp:1911 #, c-format msgid "Merging/filtering files from the %s archive..." msgstr "Fusion/Filtrage des fichiers de l'archive %s ..." #: src/libdar/filtre.cpp:1993 msgid "overwritten" msgstr "écrasé" #: src/libdar/filtre.cpp:1997 src/libdar/filtre.cpp:2041 msgid "dropped from the archive and marked as already saved" msgstr "retiré de l'archive et marqué comme déjà sauvé" #: src/libdar/filtre.cpp:2000 msgid "removed" msgstr "supprimé" #: src/libdar/filtre.cpp:2005 #, c-format msgid "Data of file %S is about to be %S, proceed?" msgstr "Le fichier %S est sur le point d'être %S, OK?" #: src/libdar/filtre.cpp:2034 msgid "replaced" msgstr "remplacé" #: src/libdar/filtre.cpp:2037 msgid "removed from the archive" msgstr "supprimé de l'archive" #: src/libdar/filtre.cpp:2044 msgid "merged with possible overwriting" msgstr "fusionné avec écrasement possible" #: src/libdar/filtre.cpp:2049 #, c-format msgid "EA and FSA of file %S are about to be %S, proceed?" msgstr "Les EA et FSA du le fichier %S sont sur le point d'être %S, d'accord ?" 
#: src/libdar/filtre.cpp:2115 #, c-format msgid "" "EA and FSA of file %S from first archive have been updated with those of " "same named file of the auxiliary archive" msgstr "" "Les EA et FSA du fichier %S de la première archive ont été mis à jour avec " "ceux du fichier de même nom provenant de l'archive auxiliaire" #: src/libdar/filtre.cpp:2125 #, c-format msgid "" "EA of file %S from first archive have been dropped and marked as already " "saved" msgstr "" "Les Attributs Étendus du fichier %S de la première archive ont été retirés " "et marqués comme déjà sauvés" #: src/libdar/filtre.cpp:2131 #, c-format msgid "" "FSA of file %S from first archive have been dropped and marked as already " "saved" msgstr "" "Les FSA du fichier %S de la première archive ont été retirés et marqués " "comme déjà sauvés" #: src/libdar/filtre.cpp:2141 src/libdar/filtre.cpp:2279 #, c-format msgid "EA of file %S from first archive have been removed" msgstr "" "Les attributs étendus du fichier %S de la première archive ont été retirés" #: src/libdar/filtre.cpp:2149 #, c-format msgid "FSA of file %S from first archive have been removed" msgstr "Les FSA du fichier %S de la première archive ont été retirés" #: src/libdar/filtre.cpp:2163 #, c-format msgid "Data of file %S from first archive has been preserved from overwriting" msgstr "" "Les données du fichier %S de la première archive ont été préservés de " "l'écrasement" #: src/libdar/filtre.cpp:2214 #, c-format msgid "" "Data of file %S taken from the first archive of reference has been removed" msgstr "" "Les données du fichier %S pris de la première archive de référence ont été " "retirés" #: src/libdar/filtre.cpp:2217 #, c-format msgid "" "Data of file %S taken from the first archive of reference has been " "overwritten" msgstr "" "Les données du fichier %S pris de la première archive de référence ont été " "écrasées" #: src/libdar/filtre.cpp:2252 #, c-format msgid "EA of file %S has been overwritten" msgstr "Les Attributs Étendus 
du fichier %S ont été écrasés" #: src/libdar/filtre.cpp:2256 src/libdar/filtre.cpp:2272 #, c-format msgid "EA of file %S has been overwritten and marked as already saved" msgstr "" "Les Attributs Étendus du fichier %S ont été écrasés et marqués comme déjà " "sauvés" #: src/libdar/filtre.cpp:2262 src/libdar/filtre.cpp:2267 #, c-format msgid "" "EA of file %S from first archive have been updated with those of the same " "named file of the auxiliary archive" msgstr "" "Les Attributs Étendus du fichier %S de la première archive ont été mis à " "jour avec ceux du fichier de même nom pris de l'archive auxiliaire" #: src/libdar/filtre.cpp:2551 msgid " not merged (user choice)" msgstr " non fusionné (choix de l'utilisateur)" #: src/libdar/filtre.cpp:2555 msgid "No file in this directory will be considered for merging." msgstr "Aucun fichier de ce répertoire ne sera pris en compte pour la fusion." #: src/libdar/filtre.cpp:2575 msgid "Error while considering file " msgstr "Erreur lors de la prise en compte du fichier : " #: src/libdar/filtre.cpp:2579 msgid "Warning! No file in this directory will be considered for merging: " msgstr "" "Attention ! Aucun fichier de ce répertoire ne sera pris en compte pour la " "fusion : " #: src/libdar/filtre.cpp:2616 msgid "" "File selection has been aborted. Now building the resulting archive with the " "already selected files" msgstr "" "La sélection des fichiers a été interrompue. 
Construction de l'archive " "résultante avec les fichiers déjà sélectionnés" #: src/libdar/filtre.cpp:2795 #, c-format msgid "" "Need to activate sparse file detection in order to calculate delta signature " "for sparse file %S" msgstr "" "Il est nécessaire d'activer la détection des fichiers creux afin de calculer " "la signature delta pour le fichier creux %S" #: src/libdar/filtre.cpp:2882 msgid "Adding Hard link to archive: " msgstr "Ajout de lien dur à l'archive : " #: src/libdar/filtre.cpp:2898 #, c-format msgid "error met while creating archive: %S" msgstr "Erreur lors de la création de l'archive : %S" #: src/libdar/filtre.cpp:2966 msgid "failed reading CRC from file: " msgstr "Echec de lecture du CRC depuis le fichier : " #: src/libdar/filtre.cpp:2994 msgid "Failed reading CRC for EA and FSA: " msgstr "Echec de lecture du CRC pour les EA et FSA : " #: src/libdar/filtre.cpp:3005 msgid "Error met while reading next entry: " msgstr "Erreur lors de la lecture de l'entrée suivante : " #: src/libdar/filtre.cpp:3116 #, c-format msgid "Adding only inode metadata to archive: %S" msgstr "Enregistrement uniquement des métadonnées dans l'archive : %S" #: src/libdar/filtre.cpp:3128 msgid "Resaving file without compression: " msgstr "Sauve à nouveau mais sans compression: " #: src/libdar/filtre.cpp:3132 msgid "Delta saving file to archive: " msgstr "Sauvegarde sous forme de patch binaire: " #: src/libdar/filtre.cpp:3136 #, c-format msgid "Adding %S to archive: %S" msgstr "Ajout de %S à l'archive : %S" #: src/libdar/filtre.cpp:3176 src/libdar/filtre.cpp:3188 #, c-format msgid "building delta signature with block size of %d bytes" msgstr "construction de signature delta avec des blocs de %d octets" #: src/libdar/filtre.cpp:3280 #, c-format msgid "" "Failed reading data CRC for %S, file may be damaged and will be marked dirty" msgstr "" "Echec de lecture du CRC de données pour %S, le fichier est probablement " "abîmé et sera marqué comme sale" #: src/libdar/filtre.cpp:3300 #, 
c-format msgid "" "Failed setting storage size to zero for this file with missing data CRC, CRC " "error will be reported for that file while reading the repaired archive" msgstr "" "Impossible d'enregistrer une taille de stockage nulle pour ce fichier sans " "CRC, une erreur de CRC sera signalée pour ce fichier lors de la lecture de " "l'archive réparée" #: src/libdar/filtre.cpp:3337 msgid "Copied data does not match CRC" msgstr "Les données copiées ne correspondent pas au CRC" #: src/libdar/filtre.cpp:3484 msgid " : Failed resaving uncompressed the inode data" msgstr " : Echec de la sauvegarde sans compression des données de l'inode" #: src/libdar/filtre.cpp:3494 msgid "" " : Resaving uncompressed the inode data to gain space is not possible, " "keeping data compressed" msgstr "" " : Resauver sans compression les données de l'inode pour gagner de la place " "n'est pas possible, on conserve les données compressées" #: src/libdar/filtre.cpp:3518 #, c-format msgid "" "File has disappeared while we were reading it, cannot check whether it has " "changed during its backup: %S" msgstr "" "Le fichier a disparu alors que nous étions en train de le lire, impossible " "de vérifier s'il a changé lors de sa sauvegarde : %S" #: src/libdar/filtre.cpp:3553 #, c-format msgid "" "WARNING! File modified while reading it for backup. Performing retry %i of %i" msgstr "ATTENTION ! Fichier modifié pendant sa sauvegarde. Tentative %i sur %i" #: src/libdar/filtre.cpp:3570 msgid "" "WARNING! File modified while reading it for backup. No more retry for that " "file to not exceed the wasted byte limit. File is " msgstr "" "ATTENTION ! Fichier modifié lors de la lecture pour sa sauvegarde. Pas de " "nouvelle tentative pour ce fichier afin de de pas dépasser la taille " "maximale de gaspillage autorisée. Le fichier est " #: src/libdar/filtre.cpp:3577 msgid "" "WARNING! File modified while reading it for backup, but no more retry " "allowed: " msgstr "" "ATTENTION ! 
Fichier modifié pendant sa sauvegarde, plus de nouvelle tentative" #: src/libdar/filtre.cpp:3599 msgid "Dumping delta signature structure for saved file: " msgstr "Enregistrement de la signature delta pour le fichier : " #: src/libdar/filtre.cpp:3755 msgid "Saving Extended Attributes for " msgstr "Sauvegarde des Attributs Étendus de " #: src/libdar/filtre.cpp:3790 #, c-format msgid "" "Computed EA CRC for file %S differs from what was stored in the archive, " "this file's EA may have been corrupted" msgstr "" "Le CRC calculé pour sur EA du fichier %S diffère de celui stocké dans " "l'archive, les EA de ce fichier sont probablement corrompus" #: src/libdar/filtre.cpp:3837 msgid "Error saving Extended Attributes for " msgstr "Erreur lors de la sauvegarde des Attributs Étendus de " #: src/libdar/filtre.cpp:3841 msgid "" "be advised that a CRC error will be reported for the EA of that file while " "sequentially reading the repaired archive" msgstr "" "Nota bene : une erreur de CRC sera signalée pour les EA de ce fichier lors " "d'une lecture séquentielle de l'archive réparée" #: src/libdar/filtre.cpp:3876 msgid "Saving Filesystem Specific Attributes for " msgstr "" "Sauvegarde des attributs spécifiques au système de fichier Étendus pour " #: src/libdar/filtre.cpp:3906 #, c-format msgid "" "Computed FSA CRC for file %S differs from what was stored in the archive, " "this file's EA may have been corrupted" msgstr "" "Le CRC calculé sur les FSA du fichier %S diffère de celui stocké dans " "l'archive, les FSA de ce fichier sont probablement corrompus" #: src/libdar/filtre.cpp:3951 msgid "Error saving Filesystem Specific Attributes for " msgstr "Erreur lors de la sauvegarde des FSA de " #: src/libdar/filtre.cpp:3955 msgid "" "be advised that a CRC error will be reported for the FSA of that file while " "sequentially reading the repaired archive" msgstr "" "Nota bene : une erreur de CRC sera signalée pour les FSA de ce fichier lors " "d'une lecture séquentielle de l'archive 
réparée" #: src/libdar/filtre.cpp:4513 msgid "Copying delta signature structure from the archive of reference: " msgstr "Copie des signature delta depuis l'archive de référence : " #: src/libdar/filtre.cpp:4552 msgid "Calculating delta signature from filesystem: " msgstr "Calcul de signature delta à partir du système de fichiers : " #: src/libdar/filtre.cpp:4632 #, c-format msgid "" "Furtive read mode requires either root permission and FOWNER capability, " "falling back to normal filesystem read" msgstr "" "Le mode de lecture furtive requièrt soit les droits root soit la capacité " "FOWNER, retour à une un mode lecture normal" #: src/libdar/escape.cpp:138 msgid "" "Adding an explicit escape sequence of type seqt_not_a_sequence is forbidden" msgstr "" "L'ajout explicite d'une séquence d'échappement de type seqt_not_a_sequence " "est interdit" #: src/libdar/escape.cpp:898 msgid "Unknown escape sequence type" msgstr "Type de séquence d'échappement inconnue" #: src/libdar/sparse_file.cpp:157 msgid "Incoherent structure in data carrying sparse files: unknown mark" msgstr "" "Incohérence dans la structure de données gérant les fichiers à trou : marque " "inconnue" #: src/libdar/sparse_file.cpp:311 msgid "Cannot skip forward to restore a hole" msgstr "Ne peut peut aller plus loin pour restaurer un trou" #: src/libdar/sparse_file.cpp:325 msgid "Data corruption or unknown sparse_file mark found in file's data" msgstr "" "Corruption de donnée ou mark inconnue au sein des données d'un fichier à trou" #: src/libdar/criterium.cpp:412 msgid "" "Cannot evaluate this crit_and criterium as no criterium has been added to it" msgstr "" "Ne peut pas évaluer cet objet crit_and car aucun autre objet criterium n'y a " "été ajouté" #: src/libdar/criterium.cpp:481 msgid "" "Cannot evaluate this crit_or criterium as no criterium has been added to it" msgstr "" "Ne peut pas évaluer cet objet crit_or car aucun autre objet criterium n'y a " "été ajouté" #: src/dar_suite/dar_slave.cpp:150 
msgid "Cannot read arguments on command line, aborting" msgstr "Impossible de lire les arguments en ligne de commande, abandon" #: src/dar_suite/dar_slave.cpp:160 msgid "Missing argument to -i option" msgstr "Pas d'argument à l'option -i" #: src/dar_suite/dar_slave.cpp:168 msgid "Missing argument to -o option" msgstr "Pas d'argument à l'option -o" #: src/dar_suite/dar_slave.cpp:182 msgid "Missing argument to -E option" msgstr "Pas d'argument à l'option -E" #: src/dar_suite/dar_slave.cpp:192 src/dar_suite/command_line.cpp:1834 #: src/dar_suite/dar_xform.cpp:347 msgid "Missing argument to --min-digits" msgstr "Il manque un argument à --min-digits" #: src/dar_suite/dar_slave.cpp:200 src/dar_suite/dar_xform.cpp:355 #, c-format msgid "Missing parameter to option -%c" msgstr "Pas d'argument à l'option -%c" #: src/dar_suite/dar_slave.cpp:202 src/dar_suite/dar_slave.cpp:204 #: src/dar_suite/dar_manager.cpp:514 src/dar_suite/dar_manager.cpp:516 #: src/dar_suite/dar_xform.cpp:357 #, c-format msgid "Ignoring unknown option -%c" msgstr "Option inconnue -%c ignorée" #: src/dar_suite/dar_slave.cpp:210 msgid "Missing archive basename, see -h option for help" msgstr "Il manque le nom de base de l'archive, voir l'option -h pour l'aide" #: src/dar_suite/dar_slave.cpp:216 src/dar_suite/dar_xform.cpp:371 msgid "Too many argument on command line, see -h option for help" msgstr "" "Trop d'arguments sur la ligne de commande, voir l'option -h pour l'aide" #: src/dar_suite/dar_slave.cpp:256 src/dar_suite/dar_slave.cpp:261 #: src/dar_suite/dar_manager.cpp:906 src/dar_suite/dar_manager.cpp:930 #: src/dar_suite/dar_manager.cpp:933 src/dar_suite/command_line.cpp:2062 #: src/dar_suite/command_line.cpp:2072 src/dar_suite/command_line.cpp:2075 #: src/dar_suite/command_line.cpp:2113 src/dar_suite/command_line.cpp:2139 #: src/dar_suite/command_line.cpp:2144 src/dar_suite/command_line.cpp:2148 #: src/dar_suite/dar_xform.cpp:418 src/dar_suite/dar_xform.cpp:420 #: src/dar_suite/dar_xform.cpp:422 
src/dar_suite/dar_xform.cpp:437 #, c-format msgid "\n" msgstr "\n" #: src/dar_suite/dar_slave.cpp:257 src/dar_suite/command_line.cpp:2076 #: src/dar_suite/dar_xform.cpp:423 #, c-format msgid "Common options:\n" msgstr "Options communes :\n" #: src/dar_suite/dar_slave.cpp:258 #, c-format msgid "" " -i pipe to use instead of std input to read orders from dar\n" msgstr "" " -i tube à utiliser à la place de l'entrée standard pour lire " "les ordres de dar\n" #: src/dar_suite/dar_slave.cpp:259 #, c-format msgid "" " -o pipe to use instead of std output to write data to dar\n" msgstr "" " -o tube à utiliser en place de la sortie standard pour " "envoyer les données à dar\n" #: src/dar_suite/dar_slave.cpp:260 #, c-format msgid "" " -E \t command line to execute between slices of the archive\n" msgstr "" " -E \t ligne de commande à exécuter entre les tranches d'une " "archive\n" #: src/dar_suite/dar_slave.cpp:262 src/dar_suite/dar_manager.cpp:934 #: src/dar_suite/dar_xform.cpp:438 #, c-format msgid "See man page for more options.\n" msgstr "Voir la page de manuel pour plus d'options.\n" #: src/dar_suite/dar_slave.cpp:275 src/dar_suite/dar_manager.cpp:948 #: src/dar_suite/dar_xform.cpp:452 #, c-format msgid " Using libdar %u.%u.%u built with compilation time options:\n" msgstr " Libdar %u.%u.%u utilisée, compilé avec les options :\n" #: src/dar_suite/dar_slave.cpp:277 src/dar_suite/dar_manager.cpp:950 #: src/dar_suite/dar_xform.cpp:454 #, c-format msgid " Using libdar %u.%u built with compilation time options:\n" msgstr " Libdar %u.%u utilisée, compilé avec les options :\n" #: src/dar_suite/dar_slave.cpp:280 src/dar_suite/dar_cp.cpp:145 #: src/dar_suite/dar_manager.cpp:953 src/dar_suite/command_line.cpp:2533 #: src/dar_suite/dar_xform.cpp:457 #, c-format msgid " compiled the %s with %s version %s\n" msgstr " compilé le %s avec %s version %s\n" #: src/dar_suite/dar_slave.cpp:281 src/dar_suite/dar_cp.cpp:146 #: src/dar_suite/dar_manager.cpp:954 
src/dar_suite/command_line.cpp:2534 #: src/dar_suite/dar_xform.cpp:458 #, c-format msgid " %s is part of the Disk ARchive suite (Release %s)\n" msgstr " %s fait partie de la suite Disk ARchive (Release %s)\n" #: src/dar_suite/dar_slave.cpp:282 #, c-format msgid " %s comes with ABSOLUTELY NO WARRANTY;" msgstr " %s est fourni sans AUCUNE GARANTIE ; " #: src/dar_suite/dar_slave.cpp:283 msgid "" " for details\n" " type `dar -W'." msgstr "" "Pour les détails\n" "taper `dar -W'." #: src/dar_suite/dar_slave.cpp:284 src/dar_suite/dar_manager.cpp:956 #: src/dar_suite/command_line.cpp:2536 src/dar_suite/dar_xform.cpp:460 msgid "" " This is free software, and you are welcome\n" " to redistribute it under certain conditions;" msgstr "" " Ceci est un logiciel libre, vous pouvez\n" " le redistribuer sous certaines conditions ;" #: src/dar_suite/dar_slave.cpp:285 msgid "" " type `dar -L | more'\n" " for details.\n" "\n" msgstr "" " taper `dar -L | more'\n" " pour les détails.\n" "\n" #: src/dar_suite/line_tools.hpp:387 #, c-format msgid "Parse error: Unmatched `%c'" msgstr "Erreur de syntaxe : `%c' sans correspondance" #: src/dar_suite/dar_cp.cpp:133 #, c-format msgid "usage : %s \n" msgstr "usage : %s \n" #: src/dar_suite/dar_cp.cpp:147 #, c-format msgid " %s comes with ABSOLUTELY NO WARRANTY; for details type `dar -W'." msgstr "" " %s est fourni SANS AUCUNE GARANTIE; pour les détails\n" " taper `dar -W'." 
#: src/dar_suite/dar_cp.cpp:148 msgid " This is free software, and you are welcome to redistribute it under" msgstr "" " Ceci est un logiciel libre, vous pouvez\n" " le redistribuer sous certaines conditions ;" #: src/dar_suite/dar_cp.cpp:149 msgid "" " certain conditions; type `dar -L | more' for details.\n" "\n" msgstr "" " taper `dar -L | more'\n" " pour les détails.\n" "\n" #: src/dar_suite/dar_cp.cpp:153 msgid "Unexpected exception from libdar" msgstr "Exception non attendue levée par libdar" #: src/dar_suite/dar_cp.cpp:170 #, c-format msgid "Memory allocation failed : %s" msgstr "Échec de l'allocation mémoire : %s" #: src/dar_suite/dar_cp.cpp:186 #, c-format msgid "Cannot open source file : %s" msgstr "Fichier source impossible à ouvrir : %s" #: src/dar_suite/dar_cp.cpp:197 #, c-format msgid "Cannot open destination file : %s" msgstr "Fichier destination impossible à ouvrir : %s" #: src/dar_suite/dar_cp.cpp:222 #, c-format msgid "Starting the copy of %u byte(s)" msgstr "Début de la copie de %u octet(s)" #: src/dar_suite/dar_cp.cpp:232 #, c-format msgid "" "Error reading source file (we are at %.2f %% of data copied), trying to read " "further: %s\n" msgstr "" "Erreur lors de la lecture du fichier source (%.2f %% des données a été " "copiée), on essaie de lire plus loin : %s\n" #: src/dar_suite/dar_cp.cpp:236 #, c-format msgid "" "Skipping done (missing %.0f byte(s)), found correct data to read, continuing " "the copy...\n" msgstr "" "Saut effectué (manque %.0f octet(s)), données correctes trouvées, on " "continue la copie...\n" #: src/dar_suite/dar_cp.cpp:242 msgid "" "Reached End of File, no correct data could be found after the last error\n" msgstr "" "Fin de fichier atteinte, aucune donnée correcte n'a pu être trouvée après la " "dernière erreur\n" #: src/dar_suite/dar_cp.cpp:250 #, c-format msgid "Copy finished. Missing %.0f byte(s) of data\n" msgstr "Copie terminée. 
Il manque %.0f octet(s) de données\n" #: src/dar_suite/dar_cp.cpp:251 #, c-format msgid "Which is %.2f %% of the total amount of data\n" msgstr "Ce qui correspond à %.2f %% de la quantité totale de données\n" #: src/dar_suite/dar_cp.cpp:260 msgid "Cannot seek back one char" msgstr "Ne peut pas lire un caractère en arrière" #: src/dar_suite/dar_cp.cpp:271 src/dar_suite/dar_cp.cpp:356 msgid "Cannot write to destination, aborting" msgstr "" "Impossible d'écrire dans le fichier destination, abandon de l'opération" #: src/dar_suite/dar_cp.cpp:315 msgid "Cannot seek in file" msgstr "Ne peut pas déplacer la lecture dans le fichier" #: src/dar_suite/dar_cp.cpp:359 #, c-format msgid "Non fatal error while writing to destination file, retrying\n" msgstr "" "Erreur non fatale lors de l'écriture des données, on essaie à nouveau\n" #: src/dar_suite/dar_manager.cpp:244 msgid "Decompressing and loading database header to memory..." msgstr "" "Décompression et chargement de l'en-tête de la base de donnée en mémoire..." #: src/dar_suite/dar_manager.cpp:246 msgid "Decompressing and loading database to memory..." msgstr "Décompression et chargement de la base de données en mémoire ..." 
#: src/dar_suite/dar_manager.cpp:258 msgid "Error met while processing operation: " msgstr "Erreur lors du traitement de l'opération : " #: src/dar_suite/dar_manager.cpp:331 msgid "-B option cannot be given inside a batch file" msgstr "" "l'option -B ne peut pas être utilisée à l'intérieur d'un fichier de " "traitement par lot" #: src/dar_suite/dar_manager.cpp:469 msgid "Running batch file from a batch file is not allowed" msgstr "" "Lancer un fichier de traitement par lot à partir d'un autre fichier de " "traitement par lot n'est pas autorisée" #: src/dar_suite/dar_manager.cpp:495 #, c-format msgid "invalid number given to -9 option: %s" msgstr "L'argument fourni à l'option -9 est un nombre invalide : %s" #: src/dar_suite/dar_manager.cpp:541 msgid "-e option is only available when using -r option, aborting" msgstr "" "-e n'est disponible qu'avec l'option -r, abandon de l'opération en cours" #: src/dar_suite/dar_manager.cpp:552 msgid "-w option is only valid with -r option, ignoring it" msgstr "" "l'option -w n'est valide uniquement qu'avec l'option -r et sera ignorée ici" #: src/dar_suite/dar_manager.cpp:559 msgid "-9 option is only valid with -A option, ignoring it" msgstr "" "l'option -9 n'est valide uniquement qu'avec l'option -A et sera ignorée ici" #: src/dar_suite/dar_manager.cpp:570 msgid "No action specified, aborting" msgstr "Aucune action spécifiée, abandon" #: src/dar_suite/dar_manager.cpp:582 src/dar_suite/dar_manager.cpp:586 msgid "Ignoring extra arguments on command line" msgstr "Les arguments supplémentaires sur la ligne de commande sont ignorés" #: src/dar_suite/dar_manager.cpp:592 src/dar_suite/dar_manager.cpp:609 msgid "Missing argument to command line, aborting" msgstr "Pas assez d'arguments sur la ligne de commande, abandon" #: src/dar_suite/dar_manager.cpp:601 msgid "Arguments to -r must be relative path (never begin by '/')" msgstr "" "L'argument donnée à l'option -r doit être un chemin relatif (jamais " "commencer par '/')" #: 
src/dar_suite/dar_manager.cpp:623 msgid "No database specified, aborting" msgstr "Pas de base de donnée spécifiée, abandon" #: src/dar_suite/dar_manager.cpp:629 msgid "Parse error on command line (or included files): " msgstr "Erreur de syntaxe en ligne de commande (ou dans un fichier inclus) : " #: src/dar_suite/dar_manager.cpp:643 msgid "Creating file..." msgstr "Création de fichier ..." #: src/dar_suite/dar_manager.cpp:644 msgid "Formatting file as an empty database..." msgstr "Mise en forme du fichier en base de donnée vide ..." #: src/dar_suite/dar_manager.cpp:648 msgid "Database has been successfully created empty." msgstr "La base de donnée a été initialisée avec succès." #: src/dar_suite/dar_manager.cpp:663 src/dar_suite/dar_manager.cpp:1136 msgid "Reading catalogue of the archive to add..." msgstr "Lecture du catalogue de l'archive à ajouter ..." #: src/dar_suite/dar_manager.cpp:677 src/dar_suite/dar_manager.cpp:1145 msgid "Updating database with catalogue..." msgstr "Ajout des informations issues du catalogue à la base de donnée..." #: src/dar_suite/dar_manager.cpp:684 src/dar_suite/dar_manager.cpp:877 #: src/dar_suite/dar_manager.cpp:1148 src/dar_suite/dar_manager.cpp:1178 #: src/dar_suite/dar_manager.cpp:1272 msgid "Checking date ordering of files between archives..." msgstr "Vérification de l'ordonnancement des dates entre les archives ..." 
#: src/dar_suite/dar_manager.cpp:697 src/dar_suite/dar_manager.cpp:881 #: src/dar_suite/dar_manager.cpp:1274 msgid "" "Some files do not follow chronological order when archive index increases " "withing the database, this can lead dar_manager to restored a wrong version " "of these files" msgstr "" "Certains fichiers ne sont pas rangés par ordre chronologique en suivant des " "indices croissants d'archive au sein de la base de donnée, ceci peut amener " "dar_manager à ne pas restaurer la bonne version d'un de ces fichiers" #: src/dar_suite/dar_manager.cpp:724 src/dar_suite/dar_manager.cpp:1167 msgid "Removing information from the database..." msgstr "Suppression des informations de la base ..." #: src/dar_suite/dar_manager.cpp:744 src/dar_suite/dar_manager.cpp:764 #: src/dar_suite/dar_manager.cpp:778 src/dar_suite/dar_manager.cpp:792 msgid "Changing database header information..." msgstr "Mise à jour des en-têtes de la base de données ..." #: src/dar_suite/dar_manager.cpp:809 msgid "" "Looking in archives for requested files, classifying files archive by " "archive..." msgstr "" "Recherche des archives contenant les versions les plus récentes des fichiers " "et tri des fichiers par archive à utiliser pour la restauration ..." #: src/dar_suite/dar_manager.cpp:856 src/dar_suite/dar_manager.cpp:1197 msgid "Computing statistics..." msgstr "Calcul des statistiques ..." #: src/dar_suite/dar_manager.cpp:866 msgid "" "Negative number or zero not allowed when moving an archive inside a database" msgstr "" "Nombre négatif ou nul non autorisé pour le déplacement d'une archive au sein " "de la base" #: src/dar_suite/dar_manager.cpp:873 msgid "Changing database information..." msgstr "Mise à jour des informations de la base de données ..." 
#: src/dar_suite/dar_manager.cpp:907 #, c-format msgid "Commands:\n" msgstr "Commandes :\n" #: src/dar_suite/dar_manager.cpp:908 #, c-format msgid " -C creates an empty database\n" msgstr " -C création d'une base vide\n" #: src/dar_suite/dar_manager.cpp:909 #, c-format msgid " -B specify the database to use (read or modify)\n" msgstr " -B base de donnée à utiliser\n" #: src/dar_suite/dar_manager.cpp:910 #, c-format msgid " -A add an archive to the database\n" msgstr " -A ajoute une archive à la base\n" #: src/dar_suite/dar_manager.cpp:911 #, c-format msgid "" " -l\t\t gives information about the archive compiled in the database\n" msgstr " -l\t\t affiche les archives utilisées dans la base de donnée\n" #: src/dar_suite/dar_manager.cpp:912 #, c-format msgid " -D delete an archive from the database\n" msgstr " -D suppression d'archives de la base\n" #: src/dar_suite/dar_manager.cpp:913 #, c-format msgid "" " -b \t change the basename to use for the give archive number\n" msgstr "" " -b \t change le nom de base de l'archive dont le numéro est " "donnée\n" #: src/dar_suite/dar_manager.cpp:914 #, c-format msgid "" " -p \t change the path to use for the given archive number\n" msgstr "" " -p \t change le chemin associé à l'archive du numéro donné en " "argument\n" #: src/dar_suite/dar_manager.cpp:915 #, c-format msgid " -o specify a list of option to always pass to dar\n" msgstr " -o liste d'options à transmettre à dar\n" #: src/dar_suite/dar_manager.cpp:916 #, c-format msgid " -d specify the path to dar\n" msgstr " -d chemin de la commande dar\n" #: src/dar_suite/dar_manager.cpp:917 #, c-format msgid " -r \t restores the given files\n" msgstr " -r \t restauration des fichiers spécifiés\n" #: src/dar_suite/dar_manager.cpp:918 #, c-format msgid "" " -w \t only with -r, restores in state just before the given date\n" msgstr "" " -w \t uniquement avec -r, restaure les fichiers dans leur état à " "la date donnée\n" #: src/dar_suite/dar_manager.cpp:919 #, c-format msgid "" " 
\t date format: [[[year/]month]/day-]hour:minute[:second]\n" msgstr "" " \t format de date: [[[année/]mois]/jour-]heure:minute[:" "seconde]\n" #: src/dar_suite/dar_manager.cpp:920 #, c-format msgid "" " -u \t list the most recent files contained in the given " "archive\n" msgstr "" " -u \t liste les fichiers les plus récents contenus dans " "l'archive donnée\n" #: src/dar_suite/dar_manager.cpp:921 #, c-format msgid " -f \t list the archives where the given file is present\n" msgstr "" " -f \t liste les archives où est présent le fichier donnée\n" #: src/dar_suite/dar_manager.cpp:922 #, c-format msgid " -s\t\t shows the number of most recent file by archive\n" msgstr "" " -s\t\t affiche la quantité de fichiers les plus récent par archive\n" #: src/dar_suite/dar_manager.cpp:923 #, c-format msgid " -m \t move an archive within a given database.\n" msgstr " -m \t change la position d'une archive dans une base.\n" #: src/dar_suite/dar_manager.cpp:924 #, c-format msgid " -i\t\t user interactive mode\n" msgstr " -i\t\t mode interactif\n" #: src/dar_suite/dar_manager.cpp:925 #, c-format msgid " -c\t\t check database for dates order\n" msgstr " -c\t\t vérifie la base pour l'ordonnancement des dates\n" #: src/dar_suite/dar_manager.cpp:926 #, c-format msgid "" " -L execute on a given database a batch of action as defined " "by\n" msgstr "" " -L exécute un fichier de traitement par lot sur une base tel " "défini par\n" #: src/dar_suite/dar_manager.cpp:927 #, c-format msgid "\t\t the provided file.\n" msgstr "\t\t le fichier fourni.\n" #: src/dar_suite/dar_manager.cpp:928 src/dar_suite/dar_xform.cpp:424 #, c-format msgid " -h\t\t displays this help information\n" msgstr " -h\t\t affiche cette page d'aide\n" #: src/dar_suite/dar_manager.cpp:929 #, c-format msgid " -V\t\t displays software version\n" msgstr " -V\t\t Informations de version\n" #: src/dar_suite/dar_manager.cpp:931 #, c-format msgid "Options:\n" msgstr "Options :\n" #: src/dar_suite/dar_manager.cpp:932 #, c-format 
msgid " -v\t\t display more information about what is going on\n" msgstr " -v\t\t mode verbeux\n" #: src/dar_suite/dar_manager.cpp:955 src/dar_suite/command_line.cpp:2535 #: src/dar_suite/dar_xform.cpp:459 #, c-format msgid "" " %s comes with ABSOLUTELY NO WARRANTY; for details\n" " type `%s -W'." msgstr "" " %s est fourni SANS AUCUNE GARANTIE ; pour les détails\n" " taper '%s -W'." #: src/dar_suite/dar_manager.cpp:957 src/dar_suite/command_line.cpp:2537 #: src/dar_suite/dar_xform.cpp:461 #, c-format msgid "" " type `%s -L | more'\n" " for details.\n" "\n" msgstr "" " taper '%s -L | more'\n" "pour les détails.\n" "\n" #: src/dar_suite/dar_manager.cpp:1015 msgid "Corrupted database :" msgstr "Base de données corrompue : " #: src/dar_suite/dar_manager.cpp:1067 #, c-format msgid "" "\n" "\n" "\t Dar Manager Database used [%s] : %S\n" msgstr "" "\n" "\n" "\t Base de donnée utilisée [%s] : %S\n" #: src/dar_suite/dar_manager.cpp:1067 msgid "Saved" msgstr "Sauvé" #: src/dar_suite/dar_manager.cpp:1067 msgid "Not Saved" msgstr "Non Sauvé" #: src/dar_suite/dar_manager.cpp:1069 #, c-format msgid "" "\t Pause each %d line of output\n" "\n" msgstr "" "\t Pause toutes les %d lignes d'affichage\n" "\n" #: src/dar_suite/dar_manager.cpp:1071 #, c-format msgid "" "\t No pause in output\n" "\n" msgstr "" "\t Pas de pause lors de l'affichage\n" "\n" #: src/dar_suite/dar_manager.cpp:1072 #, c-format msgid " l : list database contents \t A : Add an archive\n" msgstr " l : contenu de la base \t\tA : Ajout d'une archive\n" #: src/dar_suite/dar_manager.cpp:1073 #, c-format msgid " u : list archive contents \t D : Remove an archive\n" msgstr " u : contenu d'une archive \t\tD : Suppression d'une archive\n" #: src/dar_suite/dar_manager.cpp:1074 #, c-format msgid " f : give file localization \t m : modify archive order\n" msgstr " f : localisation d'un fichier \t\tm : change l'ordre des archives\n" #: src/dar_suite/dar_manager.cpp:1075 #, c-format msgid " p : modify path of archives \t b : 
modify basename of archives\n" msgstr "" " p : modifie le chemin d'un archive \tb : modifie le nom de base d'une " "archive\n" #: src/dar_suite/dar_manager.cpp:1076 #, c-format msgid " d : path to dar \t o : options to dar\n" msgstr " d : chemin de dar \to : option passées à dar\n" #: src/dar_suite/dar_manager.cpp:1077 #, c-format msgid " w : write changes to file \t s : database statistics\n" msgstr "" " w : enregistre les changements \ts : statistiques de la base de données\n" #: src/dar_suite/dar_manager.cpp:1078 #, c-format msgid "" " a : Save as \t n : pause each 'n' line (zero for no pause)\n" msgstr "" " a : Sauver sous\n" " n : pause chaque 'n' ligne d'affichage (zéro pour aucune pause)\n" #: src/dar_suite/dar_manager.cpp:1079 #, c-format msgid "" " c : check date order\n" "\n" msgstr " c : vérification de l'ordre des dates\n" #: src/dar_suite/dar_manager.cpp:1080 #, c-format msgid "" " q : quit\n" "\n" msgstr "" " q : Sortir\n" "\n" #: src/dar_suite/dar_manager.cpp:1081 #, c-format msgid " Choice: " msgstr " Choix : " #: src/dar_suite/dar_manager.cpp:1098 msgid "Archive number: " msgstr "Numéro d'archive : " #: src/dar_suite/dar_manager.cpp:1105 msgid "File to look for: " msgstr "Fichier à chercher : " #: src/dar_suite/dar_manager.cpp:1109 msgid "Archive number to modify: " msgstr "Numéro d'archive à modifier : " #: src/dar_suite/dar_manager.cpp:1113 #, c-format msgid "New basename for archive number %d: " msgstr "Nouveau numéro pour l'archive %d : " #: src/dar_suite/dar_manager.cpp:1118 msgid "Path to dar (empty string to use the default from PATH variable): " msgstr "Chemin de dar (chemin vide pour l'utilisation de la variable PATH) : " #: src/dar_suite/dar_manager.cpp:1123 src/dar_suite/dar_manager.cpp:1129 #: src/dar_suite/dar_manager.cpp:1419 msgid "Compressing and writing back database to file..." msgstr "Compression et enregistrement de la base de donnée dans le fichier..." 
#: src/dar_suite/dar_manager.cpp:1128 msgid "New database name: " msgstr "Nouveau nom pour la base de données : " #: src/dar_suite/dar_manager.cpp:1135 msgid "Archive basename (or extracted catalogue basename) to add: " msgstr "Nom de base de l'archive à ajouter : " #: src/dar_suite/dar_manager.cpp:1162 msgid "Archive number to remove: " msgstr "Numéro d'archive à supprimer : " #: src/dar_suite/dar_manager.cpp:1166 #, c-format msgid "Are you sure to remove archive number %d ?" msgstr "Êtes-vous sûr de vouloir supprimer l'archive numéro %d ?" #: src/dar_suite/dar_manager.cpp:1172 msgid "Archive number to move: " msgstr "Numéro de l'archive à déplacer : " #: src/dar_suite/dar_manager.cpp:1174 msgid "In which position to insert this archive: " msgstr "Position à laquelle insérer l'archive : " #: src/dar_suite/dar_manager.cpp:1183 msgid "Archive number who's path to modify: " msgstr "Numéro de l'archive dont le chemin doit être modifié : " #: src/dar_suite/dar_manager.cpp:1187 #, c-format msgid "New path to give to archive number %d: " msgstr "Nouveau chemin de l'archive %d : " #: src/dar_suite/dar_manager.cpp:1201 msgid "How much line to display at once: " msgstr "Nombre de lignes à afficher à chaque fois : " #: src/dar_suite/dar_manager.cpp:1205 msgid "Checking file's dates ordering..." msgstr "Vérification de l'ordonnancement des dates de fichiers ..." #: src/dar_suite/dar_manager.cpp:1213 src/dar_suite/dar_manager.cpp:1234 msgid "Database not saved, Do you really want to quit ?" msgstr "Base de données non sauvegardée, voulez-vous vraiment sortir ?" #: src/dar_suite/dar_manager.cpp:1214 src/dar_suite/dar_manager.cpp:1235 #, c-format msgid "Continuing the action under process which is to exit... so we exit!" msgstr "On continue l'action en cours qui est de sortir ... donc on sort !" 
#: src/dar_suite/dar_manager.cpp:1223 #, c-format msgid "Unknown choice\n" msgstr "Choix inconnu\n" #: src/dar_suite/dar_manager.cpp:1249 #, c-format msgid "re-enabling all signal handlers and continuing\n" msgstr "" "réactivation des gestionnaires de signaux et poursuite des opérations\n" #: src/dar_suite/dar_manager.cpp:1256 #, c-format msgid "Error performing the requested action: %S" msgstr "Erreur pendant l'exécution de l'action demandée : %S " #: src/dar_suite/dar_manager.cpp:1276 msgid "No problem found" msgstr "Aucun problème rencontré" #: src/dar_suite/dar_manager.cpp:1310 msgid "Opening and reading the batch file..." msgstr "Ouverture et lecture du fichier de traitement par lot..." #: src/dar_suite/dar_manager.cpp:1334 #, c-format msgid "" "\n" "\tExecuting batch file line: %S\n" " " msgstr "" "\n" "\tExécution de la ligne : %S\n" " " #: src/dar_suite/dar_manager.cpp:1358 #, c-format msgid "Syntax error in batch file: %S" msgstr "Erreur de syntaxe dans le fichier de traitement par lot : %S" #: src/dar_suite/dar_manager.cpp:1361 msgid "Syntax error in batch file: -C option not allowed" msgstr "" "Erreur de syntaxe dans le fichier de traitement par lot : option -C non " "autorisée" #: src/dar_suite/dar_manager.cpp:1364 msgid "Syntax error in batch file: -i option not allowed" msgstr "" "Erreur de syntaxe dans le fichier de traitement par lot : option -i non " "autorisée" #: src/dar_suite/dar_manager.cpp:1372 msgid "Aborting batch operation: " msgstr "Abandon du traitement par lot :" #: src/dar_suite/dar_manager.cpp:1383 #, c-format msgid "Enter each argument line by line, press return at the end\n" msgstr "" "Entrer chaque argument ligne par ligne puis appuyer sur entrée à la fin\n" #: src/dar_suite/dar_manager.cpp:1384 #, c-format msgid "To terminate enter an empty line\n" msgstr "Pour terminer entrer une ligne vide\n" #: src/dar_suite/line_tools.cpp:101 src/dar_suite/line_tools.cpp:119 msgid "Index out of range" msgstr "Index hors limites" #: 
src/dar_suite/line_tools.cpp:244 msgid "Syntax error in --retry-on-change argument: " msgstr "" "Erreur de syntaxe dans l'argument fourni à l'option --retry-on-change : " #: src/dar_suite/line_tools.cpp:259 msgid "Unknown TLV record type" msgstr "Type d'enregistrement inconnu" #: src/dar_suite/line_tools.cpp:300 src/dar_suite/line_tools.cpp:330 msgid "Unbalanced parenthesis in expression: " msgstr "Parenthèses non équilibrées correctement dans l'expression suivante : " #: src/dar_suite/line_tools.cpp:398 #, c-format msgid "Unknown macro %%%d in user comment" msgstr "Macro %%%d inconnue dans le cadre de commentaire utilisateur" #: src/dar_suite/line_tools.cpp:590 #, c-format msgid "Invalid number in string: %S" msgstr "Nombre invalide dans la chaîne : %S" #: src/dar_suite/line_tools.cpp:815 msgid "unknown cryptographic algorithm: " msgstr "algorithme de chiffrement inconnu : " #: src/dar_suite/line_tools.cpp:835 #, c-format msgid "" "| Signature Status| Key Status | Finger Print " "| Signature Date |" msgstr "" "|Etat de signature| Etat de la clef | Emprunte de la " "clef | Date de signature" #: src/dar_suite/line_tools.cpp:1186 msgid "date before 1970 is not allowed" msgstr "les dates antérieurs à l'an 1970 ne sont pas permises" #: src/dar_suite/line_tools.cpp:1191 msgid "Incorrect month" msgstr "Mois incorrect" #: src/dar_suite/line_tools.cpp:1196 msgid "Incorrect day of month" msgstr "Quantième inconnu" #: src/dar_suite/line_tools.cpp:1201 msgid "Incorrect hour" msgstr "Heure incorrecte" #: src/dar_suite/line_tools.cpp:1206 msgid "Incorrect minute" msgstr "Minute incorrecte" #: src/dar_suite/line_tools.cpp:1211 msgid "Incorrect second" msgstr "Seconde incorrecte" #: src/dar_suite/line_tools.cpp:1215 msgid "Bad formatted date expression" msgstr "Mauvais format de date" #: src/dar_suite/line_tools.cpp:1330 msgid "Given date must be in the past" msgstr "la date fournie doit être dans le passée" #: src/dar_suite/line_tools.cpp:1345 #, c-format msgid " Libz 
compression (gzip) : %s" msgstr " Compression libz (gzip) : %s" #: src/dar_suite/line_tools.cpp:1346 #, c-format msgid " Libbz2 compression (bzip2) : %s" msgstr " Compression libbz2 (bzip2) : %s" #: src/dar_suite/line_tools.cpp:1347 #, c-format msgid " Liblzo2 compression (lzo) : %s" msgstr " Compression liblzo2 (lzo) : %s" #: src/dar_suite/line_tools.cpp:1348 #, c-format msgid " Liblzma compression (xz) : %s" msgstr " Compression liblzma (xz) : %s" #: src/dar_suite/line_tools.cpp:1349 #, c-format msgid " Strong encryption (libgcrypt): %s" msgstr " Chiffrement fort (libgcrypt) : %s" #: src/dar_suite/line_tools.cpp:1350 #, c-format msgid " Public key ciphers (gpgme) : %s" msgstr " Chiffrement clef pub.(gpgme) : %s" #: src/dar_suite/line_tools.cpp:1351 #, c-format msgid " Extended Attributes support : %s" msgstr " Support Attributs Étendus : %s" #: src/dar_suite/line_tools.cpp:1352 #, c-format msgid " Large files support (> 2GB) : %s" msgstr " Support fichiers géants : %s" #: src/dar_suite/line_tools.cpp:1353 #, c-format msgid " ext2fs NODUMP flag support : %s" msgstr " Support drapeau NODUMP : %s" #: src/dar_suite/line_tools.cpp:1355 #, c-format msgid " Integer size used : unlimited" msgstr " Entiers utilisés : sans limite" #: src/dar_suite/line_tools.cpp:1357 #, c-format msgid " Integer size used : %d bits" msgstr " Entiers utilisés : %d bits" #: src/dar_suite/line_tools.cpp:1358 #, c-format msgid " Thread safe support : %s" msgstr " Support environ. 
multi-tâches: %s" #: src/dar_suite/line_tools.cpp:1359 #, c-format msgid " Furtive read mode support : %s" msgstr " Support de lecture furtive : %s" #: src/dar_suite/line_tools.cpp:1360 #, c-format msgid " Linux ext2/3/4 FSA support : %s" msgstr " Support FSA ext2/3/4 : %s" #: src/dar_suite/line_tools.cpp:1361 #, c-format msgid " Mac OS X HFS+ FSA support : %s" msgstr " Support FSA HFS+ de Mac OS X : %s" #: src/dar_suite/line_tools.cpp:1366 msgid "big" msgstr "gros" #: src/dar_suite/line_tools.cpp:1369 msgid "little" msgstr "petit" #: src/dar_suite/line_tools.cpp:1372 msgid "error!" msgstr "Erreur !" #: src/dar_suite/line_tools.cpp:1377 #, c-format msgid " Detected system/CPU endian : %s" msgstr " Boutisme du système/CPU : %s" #: src/dar_suite/line_tools.cpp:1378 #, c-format msgid " Posix fadvise support : %s" msgstr " Support de Posix fadvise : %s" #: src/dar_suite/line_tools.cpp:1379 #, c-format msgid " Large dir. speed optimi. : %s" msgstr " Optim. grands répertoires : %s" #: src/dar_suite/line_tools.cpp:1384 #, c-format msgid " Timestamp read accuracy : %S" msgstr " Précision de lecture du temps: %S" #: src/dar_suite/line_tools.cpp:1389 #, c-format msgid " Timestamp write accuracy : %S" msgstr " Précision d'écriture du temps: %S" #: src/dar_suite/line_tools.cpp:1390 #, c-format msgid " Restores dates of symlinks : %s" msgstr " Symlinks, dates restorées : %s" #: src/dar_suite/line_tools.cpp:1395 #, c-format msgid " Multiple threads (libthreads): %s %s" msgstr " Plusieurs taches(libthreadar): %s %s" #: src/dar_suite/line_tools.cpp:1396 #, c-format msgid " Delta compression support : %s" msgstr " Compression delta : %s" #: src/dar_suite/line_tools.cpp:1397 #, c-format msgid " Remote repository support : %s" msgstr " Support répertoires distants : %s" #: src/dar_suite/line_tools.cpp:1457 #, c-format msgid "" "Warning, %S seems more to be a slice name than a base name. Do you want to " "replace it by %S ?" 
msgstr "" "Attention, %S semble être un nom de fichier plutôt qu'un nom de base. " "Souhaitez-vous le remplacer par %S ?" #: src/dar_suite/line_tools.cpp:1462 #, c-format msgid "OK, keeping %S as basename" msgstr "OK, on garde %S comme nom de base" #: src/dar_suite/line_tools.cpp:1610 msgid "Error while fetching hostname: " msgstr "Erreur lors de recherche du nom d'hôte (hostname) : " #: src/dar_suite/line_tools.cpp:1677 msgid "unknown name give for delta signature block len function" msgstr "" "Nom de fonction inconnu pour le calcul de la taille de block des signatures " "delta" #: src/dar_suite/line_tools.cpp:1751 src/dar_suite/line_tools.cpp:1759 #, c-format msgid "Error met while retrieving current time: %S" msgstr "Erreur lors de la récupération de l'heure courante : %S" #: src/dar_suite/dar_suite.cpp:146 #, c-format msgid "" "We have linked with an incompatible version of libdar. Expecting version %d." "%d.x but having linked with version %d.%d.%d" msgstr "" "Nous utilisons une version incompatible de libdar. 
La version attendue est " "%d.%d.x alors que la version actuellement utilisée est %d.%d.%d" #: src/dar_suite/dar_suite.cpp:158 src/python/pybind11_libdar.cpp:206 msgid "NOT YET IMPLEMENTED FEATURE has been used: " msgstr "une FONCTIONNALITÉ NON ENCORE IMPLÉMENTÉE a été sollicitée :" #: src/dar_suite/dar_suite.cpp:159 msgid "Please check documentation or upgrade your software if available" msgstr "Merci de vérifier la documentation et/ou de mettre à jour ce logiciel" #: src/dar_suite/dar_suite.cpp:164 src/python/pybind11_libdar.cpp:210 msgid "SEEMS TO BE A HARDWARE PROBLEM: " msgstr "IL SEMBLE QU'IL Y AIT UN PROBLÈME MATÉRIEL :" #: src/dar_suite/dar_suite.cpp:165 msgid "Please check your hardware" msgstr "Merci de vérifier votre matériel" #: src/dar_suite/dar_suite.cpp:170 src/python/pybind11_libdar.cpp:214 msgid "Lack of SECURED memory to achieve the operation, aborting operation" msgstr "Pas assez de mémoire SÉCURISÉE pour terminer l'opération, abandon" #: src/dar_suite/dar_suite.cpp:175 src/dar_suite/dar_suite.cpp:180 #: src/python/pybind11_libdar.cpp:218 src/python/pybind11_libdar.cpp:222 msgid "Lack of memory to achieve the operation, aborting operation" msgstr "Pas assez de mémoire pour terminer l'opération, abandon" #: src/dar_suite/dar_suite.cpp:185 src/dar_suite/dar_suite.cpp:230 #: src/python/pybind11_libdar.cpp:226 msgid "FATAL error, aborting operation: " msgstr "Erreur FATALE, abandon de l'opération : " #: src/dar_suite/dar_suite.cpp:190 src/python/pybind11_libdar.cpp:230 msgid "Aborting program. User refused to continue while asking: " msgstr "Abandon. L'utilisateur a refusé de continuer lors de la question : " #: src/dar_suite/dar_suite.cpp:195 src/python/pybind11_libdar.cpp:234 msgid "Program has been aborted for the following reason: " msgstr "Le programme a été arrêté pour la raison suivante : " #: src/dar_suite/dar_suite.cpp:205 src/python/pybind11_libdar.cpp:242 msgid "Aborting program. 
An error occurred concerning user command execution: " msgstr "" "Abandon. une erreur est survenue lors de l'exécution d'une commande " "utilisateur : " #: src/dar_suite/dar_suite.cpp:210 src/python/pybind11_libdar.cpp:246 msgid "Aborting program. An error occurred while calling libdar: " msgstr "Abandon. Une erreur est survenue lors d'un appel à libdar : " #: src/dar_suite/dar_suite.cpp:215 src/dar_suite/dar_suite.cpp:220 #: src/python/pybind11_libdar.cpp:250 src/python/pybind11_libdar.cpp:254 msgid "Aborting program. " msgstr "Arrêt du programme. " #: src/dar_suite/dar_suite.cpp:225 src/python/pybind11_libdar.cpp:258 msgid "" "Aborting program. The requested operation needs a feature that has been " "disabled at compilation time: " msgstr "" "Abandon. L'opération demandée nécessite une fonctionnalité qui n'a pas été " "activée à la compilation : " #: src/dar_suite/dar_suite.cpp:235 src/python/pybind11_libdar.cpp:266 msgid "FATAL error during network communication, aborting operation: " msgstr "" "Erreur FATALE lors de la communication réseau, abandon de l'opération : " #: src/dar_suite/dar_suite.cpp:241 src/dar_suite/dar_suite.cpp:261 msgid "INTERNAL ERROR, PLEASE REPORT THE PREVIOUS OUTPUT TO MAINTAINER" msgstr "" "ERREUR INTERNE, MERCI DE RAPPORTER L'AFFICHAGE PRÉCÉDENT AU MAINTENEUR DU " "PROGRAMME" #: src/dar_suite/dar_suite.cpp:260 msgid "CAUGHT A NON (LIB)DAR EXCEPTION" msgstr "EXCEPTION NON (LIB)DAR ATTRAPÉE" #: src/dar_suite/dar_suite.cpp:268 msgid "" "SANITY CHECK: AT LEAST ONE THREAD_CANCELLATION OBJECT HAS NOT BEEN DESTROYED " "AND REMAINS IN MEMORY WHILE THE PROGRAM REACHED ITS END" msgstr "" "AUTO-CONTROL : AU MOINS UNE FILE D'EXÉCUTION N'A PAS ÉTÉ DÉTRUITE ET RESTE " "EN MÉMOIRE ALORS QUE LE PROGRAMME TOUCHE À SA FIN" #: src/dar_suite/dar_suite.cpp:284 #, c-format msgid "Long options support : %s\n" msgstr "Options longues supportées : %s\n" #: src/dar_suite/dar_suite.cpp:300 #, c-format msgid "Received signal: %s" msgstr "Signal %s reçu" #: 
src/dar_suite/dar_suite.cpp:302 #, c-format msgid "Received signal: %d" msgstr "Signal reçu : %d" #: src/dar_suite/dar_suite.cpp:308 msgid "Archive fast termination engaged" msgstr "Mise en route de la procédure de terminaison rapide de l'archive" #: src/dar_suite/dar_suite.cpp:312 msgid "Archive delayed termination engaged" msgstr "Mise en route de la procédure de terminaison de l'archive" #: src/dar_suite/dar_suite.cpp:316 msgid "" "Disabling signal handler, the next time this signal is received the program " "will abort immediately" msgstr "" "Désactivation du gestionnaire de signal. La prochaine réception de ce signal " "arrêtera immédiatement le programme" #: src/dar_suite/dar_suite.cpp:320 msgid "" "Cannot cleanly abort the operation, thread-safe support is missing, will " "thus abruptly stop the program, generated archive may be unusable" msgstr "" "Impossible d'arrêter proprement l'opération, l'absence de support pour un " "environnement multi-tâches arrêtera abruptement le programme, en conséquence " "l'archive générée risque fortement d'être inutilisable." 
#: src/dar_suite/dar.cpp:267 msgid "Considering the (first) archive of reference:" msgstr "Traitement de la (première) archive de référence :" #: src/dar_suite/dar.cpp:269 msgid "" "Sequential reading of the archive of reference is not possible when delta " "difference is requested, you need to read the archive of reference in direct " "access mode (default mode)" msgstr "" "La lecture sequentielle d'une archive est impossible quand une comparaison " "des deltas binaires est demandée, l'archive doit être lue en mode direct " "(mode par defaut)" #: src/dar_suite/dar.cpp:292 src/dar_suite/dar.cpp:343 msgid "" "Using sequential reading mode for archive source is not possible for merging " "operation" msgstr "" "le mode de lecture séquentiel n'est pas possible lors de la fusion d'archive" #: src/dar_suite/dar.cpp:320 msgid "Considering the second (alias auxiliary) archive of reference:" msgstr "Prise en compte de l'archive de référence auxilaire :" #: src/dar_suite/dar.cpp:401 src/dar_suite/dar.cpp:504 #: src/dar_suite/dar.cpp:562 src/dar_suite/dar.cpp:642 #: src/dar_suite/dar.cpp:733 msgid "Archive signature is only possible with gnupg encryption" msgstr "La signature d'un archive n'est possible qu'avec le chiffrement gnupg" #: src/dar_suite/dar.cpp:594 msgid "" "Making room in memory (releasing memory used by archive of reference)..." msgstr "" "Nettoyage mémoire (libération de la mémoire utilisée par l'archive de " "référence) ..." #: src/dar_suite/dar.cpp:613 msgid "Now performing on-fly isolation..." msgstr "Réalisation de l'isolation à la volée ..." 
#: src/dar_suite/dar.cpp:871 msgid "All files asked could not be restored" msgstr "Tous les fichiers demandés n'ont pu être restaurés" #: src/dar_suite/dar.cpp:954 msgid "Some file comparisons failed" msgstr "Des comparaisons de fichier ont échoué" #: src/dar_suite/dar.cpp:1029 msgid "" "Some files are corrupted in the archive and it will not be possible to " "restore them" msgstr "" "Certains fichiers sont corrompus dans l'archive et ne pourront pas être " "restaurés" #: src/dar_suite/dar.cpp:1082 msgid "Continue listing archive contents?" msgstr "Continuer avec le contenu de l'archive ?" #: src/dar_suite/dar.cpp:1104 src/dar_suite/dar.cpp:1109 msgid "Final memory cleanup..." msgstr "Finalisation du nettoyage de la mémoire ..." #: src/dar_suite/dar.cpp:1131 #, c-format msgid " %i inode(s) saved\n" msgstr " %i inode(s) sauvées\n" #: src/dar_suite/dar.cpp:1132 #, c-format msgid " including %i hard link(s) treated\n" msgstr " avec %i lien(s) physique(s) enregistré(s)\n" #: src/dar_suite/dar.cpp:1133 #, c-format msgid "" " %i inode(s) changed at the moment of the backup and could not be saved " "properly\n" msgstr "" " %i inode(s) modifiée(s) au moment de sa (leurs) sauvegarde n'a (n'ont) pas " "pu être sauvée correctement\n" #: src/dar_suite/dar.cpp:1134 #, c-format msgid " %i byte(s) have been wasted in the archive to resave changing files" msgstr "" "%i octet(s) ont été gaspillé(s) dans l'archive lors de re-tentatives de " "sauvegarde de fichiers ayant changés" #: src/dar_suite/dar.cpp:1135 #, c-format msgid " %i inode(s) with only metadata changed\n" msgstr " %i inode(s) ayant seulement un changement de métadonnées\n" #: src/dar_suite/dar.cpp:1136 #, c-format msgid " %i inode(s) not saved (no inode/file change)\n" msgstr " %i inode(s) non sauvées (pas de changement)\n" #: src/dar_suite/dar.cpp:1137 #, c-format msgid " %i inode(s) failed to be saved (filesystem error)\n" msgstr "" " %i sauvegarde(s) d'inode(s) échouée(s) (erreur du système de fichiers)\n" #: 
src/dar_suite/dar.cpp:1138 src/dar_suite/dar.cpp:1168 #: src/dar_suite/dar.cpp:1228 #, c-format msgid " %i inode(s) ignored (excluded by filters)\n" msgstr " %i inode(s) ignoré(s) (exclus par les filtres)\n" #: src/dar_suite/dar.cpp:1139 #, c-format msgid " %i inode(s) recorded as deleted from reference backup\n" msgstr "" " %i fichier(s) enregistré(s) comme supprimé(s) depuis l'archive de " "référence\n" #: src/dar_suite/dar.cpp:1141 src/dar_suite/dar.cpp:1172 #: src/dar_suite/dar.cpp:1236 #, c-format msgid " Total number of inode(s) considered: %i\n" msgstr " Nombre total d'inode(s) considérée(s) : %i\n" #: src/dar_suite/dar.cpp:1144 src/dar_suite/dar.cpp:1232 #, c-format msgid " EA saved for %i inode(s)\n" msgstr " Attributs Étendus sauvés pour %i inode(s)\n" #: src/dar_suite/dar.cpp:1146 src/dar_suite/dar.cpp:1234 #, c-format msgid " FSA saved for %i inode(s)\n" msgstr " FSA sauvés pour %i inode(s)\n" #: src/dar_suite/dar.cpp:1164 #, c-format msgid " %i inode(s) restored\n" msgstr " %i inode(s) restauré(s)\n" #: src/dar_suite/dar.cpp:1165 #, c-format msgid " including %i hard link(s)\n" msgstr " avec %i lien(s) physique(s)\n" #: src/dar_suite/dar.cpp:1166 #, c-format msgid " %i inode(s) not restored (not saved in archive)\n" msgstr " %i inode(s) non restaurée(s) (non sauvée(s) dans l'archive)\n" #: src/dar_suite/dar.cpp:1167 #, c-format msgid " %i inode(s) not restored (overwriting policy decision)\n" msgstr "" " %i inode(s) non restaurée(s) (décision de la politique d'écrasement)\n" #: src/dar_suite/dar.cpp:1169 #, c-format msgid " %i inode(s) failed to restore (filesystem error)\n" msgstr " %i inode(s) n'ayant pu être restaurée(s) (erreur système)\n" #: src/dar_suite/dar.cpp:1170 #, c-format msgid " %i inode(s) deleted\n" msgstr " %i inode(s) supprimée(s)\n" #: src/dar_suite/dar.cpp:1175 #, c-format msgid " EA restored for %i inode(s)\n" msgstr " Attributs Étendus restaurés pour %i inode(s)\n" #: src/dar_suite/dar.cpp:1177 #, c-format msgid " FSA restored for 
%i inode(s)\n" msgstr " FSA restaurés pour %i inode(s)\n" #: src/dar_suite/dar.cpp:1189 src/dar_suite/dar.cpp:1206 #, c-format msgid " %i item(s) treated\n" msgstr " %i entrée(s) traitée(s)\n" #: src/dar_suite/dar.cpp:1190 #, c-format msgid " %i item(s) do not match those on filesystem\n" msgstr "" " %i entrée(s) ne correspond(ent) pas à ce qui est sur le système de " "fichiers\n" #: src/dar_suite/dar.cpp:1191 src/dar_suite/dar.cpp:1208 #, c-format msgid " %i item(s) ignored (excluded by filters)\n" msgstr " %i entrée(s) ignorée(s) (exclus par les filtres)\n" #: src/dar_suite/dar.cpp:1193 src/dar_suite/dar.cpp:1210 #, c-format msgid " Total number of items considered: %i\n" msgstr " Nombre total d'entrée(s) considérée(s) : %i\n" #: src/dar_suite/dar.cpp:1207 #, c-format msgid " %i item(s) with error\n" msgstr " %i entrée(s) avec erreur\n" #: src/dar_suite/dar.cpp:1226 #, c-format msgid " %i inode(s) added to archive\n" msgstr " %i inode(s) ajoutée(s) à l'archive\n" #: src/dar_suite/dar.cpp:1227 #, c-format msgid " with %i hard link(s) recorded\n" msgstr " avec %i lien(s) physique(s) enregistré(s)\n" #: src/dar_suite/dar.cpp:1229 #, c-format msgid " %i inode(s) recorded as deleted\n" msgstr " %i inode(s) enregistrée(s) comme supprimée(s)\n" #: src/dar_suite/crit_action_cmd_line.cpp:115 msgid "Missing } in conditional statement: " msgstr "Il manque un } dans la condition suivante : " #: src/dar_suite/crit_action_cmd_line.cpp:119 msgid "Missing [ after } in conditional statement: " msgstr "Il manque un [ après un } dans la condition suivante : " #: src/dar_suite/crit_action_cmd_line.cpp:122 msgid "Missing ] in conditional statement: " msgstr "Il manque un ] dans la condition suivante : " #: src/dar_suite/crit_action_cmd_line.cpp:200 #, c-format msgid "Unknown policy for data '%c' in expression %S" msgstr "La politique '%c' est inconnue pour les données dans l'expression %S" #: src/dar_suite/crit_action_cmd_line.cpp:233 #, c-format msgid "Unknown policy for EA '%c' in 
expression %S" msgstr "La politique '%c' est inconnue pour les AE dans l'expression %S" #: src/dar_suite/crit_action_cmd_line.cpp:243 msgid "Unknown expression in overwriting policy: " msgstr "Expression inconnue dans la politique d'écrasement : " #: src/dar_suite/crit_action_cmd_line.cpp:426 msgid "Unknown atomic operator, or atomic not allowed with an argument: " msgstr "Opérateur unitaire inconnu ou non autorisé avec un argument : " #: src/dar_suite/crit_action_cmd_line.cpp:508 msgid "Unknown character found while parsing conditional string: " msgstr "" "Caractère inconnu trouvé lors de l'analyse de la chaîne conditionnelle " "suivante : " #: src/dar_suite/crit_action_cmd_line.cpp:517 msgid "Unknown expression found while parsing conditional string: " msgstr "" "Expression inconnue trouvée lors de l'analyse de la chaîne conditionnelle " "suivante : " #: src/dar_suite/hide_file.cpp:32 msgid "hide_file cannot be initialized with write-only file" msgstr "" "\"hide_file\" ne peut pas être initialisé avec un fichier en écriture seule" #: src/dar_suite/command_line.cpp:364 msgid "User target found on command line or included file(s):" msgstr "" "Cible(s) utilisateur(s) trouvée(s) en ligne de commande ou dans un fichier " "inclus :" #: src/dar_suite/command_line.cpp:373 msgid "No user target found on command line" msgstr "Aucune cible utilisateur trouvée sur la ligne de commande" #: src/dar_suite/command_line.cpp:391 #, c-format msgid "Given user target(s) could not be found: %S" msgstr "Les cibles utilisateurs suivantes n'ont pas été trouvées: %S" #: src/dar_suite/command_line.cpp:395 #, c-format msgid "Missing -c -x -d -t -l -C -+ option, see `%S -h' for help" msgstr "" "Il manque l'une des options -c -x -d -t -l -C -+, tapez '%S -h' pour l'aide" #: src/dar_suite/command_line.cpp:397 msgid "" "Slicing (-s option), is not compatible with archive on standard output (\"-" "\" as filename)" msgstr "" "Le découpage en tranches (option -s) n'est pas compatible avec la 
production " "de l'archive sur la sortie standard (\"-\" en nom d'archive)" #: src/dar_suite/command_line.cpp:418 msgid "-af option is only available with -c" msgstr "-af n'est disponible qu'avec l'option -c" #: src/dar_suite/command_line.cpp:420 msgid "-A option is not available with -l" msgstr "-A n'est pas disponible avec l'option -l" #: src/dar_suite/command_line.cpp:422 msgid "-T option is only available with -l" msgstr "-T n'est disponible qu'avec l'option -l" #: src/dar_suite/command_line.cpp:424 msgid "with -C option, -A option is mandatory" msgstr "l'option -A est obligatoire quand -C est utilisé" #: src/dar_suite/command_line.cpp:426 msgid "with -+ option, -A option is mandatory" msgstr "l'option -A est obligatoire quand -+ est utilisé" #: src/dar_suite/command_line.cpp:428 msgid "-wa is only useful with -x option" msgstr "-wa n'est utile qu'avec l'option -x" #: src/dar_suite/command_line.cpp:431 msgid "-o is mandatory when using \"-A -\" with \"-c -\" \"-C -\" or \"-+ -\"" msgstr "" "l'option -o est obligatoire quand \"-A -\" est utilisé conjointement avec \"-" "c -\", \"-C -\" ou \"-+ -\"" #: src/dar_suite/command_line.cpp:441 msgid "-z option needs only to be used with -c -C or -+ options" msgstr "-z n'est utile qu'avec les options -c , -C ou -+" #: src/dar_suite/command_line.cpp:443 msgid "-S option requires the use of -s" msgstr "l'option -S nécessite l'utilisation de -s" #: src/dar_suite/command_line.cpp:445 msgid "ignoring -O option, as it is useless in this situation" msgstr "option -O ignorée car inutile dans cette situation" #: src/dar_suite/command_line.cpp:448 msgid "-F is only useful with -A option, for the archive of reference" msgstr "" "L'option -F n'est utile qu'avec l'option -A, car elle concerne l'archive de " "référence" #: src/dar_suite/command_line.cpp:451 msgid "-J is only useful with -A option, for the archive of reference" msgstr "" "L'option -J n'est utile qu'avec l'option -A, car elle concerne l'archive de " "référence" #: 
src/dar_suite/command_line.cpp:454 msgid "-f in only available with -x option, ignoring" msgstr "-f n'est disponible qu'avec l'option -x, -f sera ignoré ici" #: src/dar_suite/command_line.cpp:456 msgid "-m is only useful with -c" msgstr "-m n'est utile qu'avec l'option -c" #: src/dar_suite/command_line.cpp:462 msgid "-H is only useful with -A option when making a backup" msgstr "Lors d'une sauvegarde, l'option -H n'est utile qu'avec l'option -A" #: src/dar_suite/command_line.cpp:468 msgid "-H is only useful with -r option when extracting" msgstr "Lors d'une restauration, -H n'est utile qu'avec l'option -r" #: src/dar_suite/command_line.cpp:472 msgid "-H is only useful with -c, -d or -x" msgstr "-H n'est utile qu'avec -c, -d ou -x" #: src/dar_suite/command_line.cpp:476 msgid "-as is only available with -l, ignoring -as option" msgstr "-as n'est disponible qu'avec l'option -l, -as sera ignoré ici" #: src/dar_suite/command_line.cpp:478 msgid "-e is only useful with -x, -c or -+ options" msgstr "-e n'est utile qu'avec les options -x, -c ou -+" #: src/dar_suite/command_line.cpp:480 msgid "-ac is only useful with -c or -d" msgstr "-ac n'est utile qu'avec -c ou -d" #: src/dar_suite/command_line.cpp:482 msgid "-M is only useful with -c" msgstr "-M n'est utile qu'avec l'option -c" #: src/dar_suite/command_line.cpp:484 msgid "The snapshot backup (-A +) is only available with -c option, ignoring" msgstr "" "L'option de \"photographie\" (-A +) n'est disponible qu'avec l'option -c" #: src/dar_suite/command_line.cpp:486 msgid "" "The Cache Directory Tagging Standard is only useful while performing a " "backup, ignoring it here" msgstr "" "L'utilisation du standard de marquage des répertoires de cache n'est utile " "que lors d'une sauvegarde" #: src/dar_suite/command_line.cpp:489 msgid "-@ is only available with -+ and -c options" msgstr "-@ n'est disponible qu'avec les options -+ et -c" #: src/dar_suite/command_line.cpp:491 msgid "-$ is only available with -+ option and -c 
options" msgstr "-$ n'est disponible qu'avec les options -+ et -c" #: src/dar_suite/command_line.cpp:493 msgid "-~ is only available with -+ and -c options" msgstr "-~ n'est disponible qu'avec les options -+ et -c" #: src/dar_suite/command_line.cpp:495 #, c-format msgid "-%% is only available with -+ option" msgstr "-%% n'est disponible qu'avec l'option -+" #: src/dar_suite/command_line.cpp:498 msgid "" "-$ is only useful with -@ option, for the auxiliary archive of reference" msgstr "" "L'option -$ n'est utile qu'avec l'option -@, pour l'archive de référence " "auxiliaire" #: src/dar_suite/command_line.cpp:500 #, c-format msgid "" "-%% is only useful with -@ option, for the auxiliary archive of reference" msgstr "" "L'option -%% n'est utile qu'avec l'option -@ pour l'archive auxiliaire de " "référence" #: src/dar_suite/command_line.cpp:502 msgid "" "-~ is only useful with -@ option, for the auxiliary archive of reference" msgstr "" "L'option -~ n'est utile qu'avec l'option -@, car elle concerne l'archive " "auxiliaire de référence" #: src/dar_suite/command_line.cpp:505 msgid "-ak is only available while merging (operation -+), ignoring -ak" msgstr "-ak n'est disponible qu'avec l'option -+ et sera ignoré ici" #: src/dar_suite/command_line.cpp:510 msgid "" "Compression option (-z option) is useless and ignored when using -ak option" msgstr "" "La compression (option -z) est inutile et ignorée lorsque -ak est utilisé" #: src/dar_suite/command_line.cpp:517 msgid "" "--sparse-file-min-size only available while saving or merging archives, " "ignoring" msgstr "" "--sparse-file-min-size n'est disponible que lors de sauvegarde ou fusion " "d'archives et sera ignoré ici" #: src/dar_suite/command_line.cpp:520 msgid "" "To use --sparse-file-min-size while merging archive, you need to use -ah " "option too, please check man page for details" msgstr "" "L'utilisation de --sparse-file-min-size lors d'une fusion requiert " "l'utilisation conjointe de l'option -ah, voir la page de manuel pour 
plus de détails" #: src/dar_suite/command_line.cpp:526 msgid "-k option is only useful with -x option" msgstr "l'option -k n'est utile qu'avec l'option -x" #: src/dar_suite/command_line.cpp:529 msgid "-konly and -kignore cannot be used at the same time" msgstr "-konly et kignore ne peuvent être utilisés en même temps" #: src/dar_suite/command_line.cpp:532 msgid "-p and -Q options are mutually exclusives" msgstr "les options -p et -Q sont mutuellement exclusives" #: src/dar_suite/command_line.cpp:535 msgid "-vf is only useful with -c option" msgstr "-vf n'est utile qu'avec l'option -c" #: src/dar_suite/command_line.cpp:540 msgid "-A option is required with -y option" msgstr "l'option -A est nécessaire avec l'option -y" #: src/dar_suite/command_line.cpp:542 msgid "'-A +' is not possible with -y option" msgstr "'-A +' est indisponible avec l'option -y" #: src/dar_suite/command_line.cpp:544 msgid "-af is not possible with -y option" msgstr "-af est indisponible avec l'option -y" #: src/dar_suite/command_line.cpp:546 msgid "-k option is not possible with -y option" msgstr "l'option -k n'est pas compatible avec l'option -y" #: src/dar_suite/command_line.cpp:548 msgid "" "-X, -I, -P, -g, -], -[ and any other file selection relative commands are " "not possible with -y option" msgstr "" "-X, -I -P, -g, -], -[ et autres commandes relatives à la selection de " "fichier sont impossibles avec l'option -y" #: src/dar_suite/command_line.cpp:550 msgid "-D option is useless with -y option" msgstr "-D est inutile avec -y" #: src/dar_suite/command_line.cpp:552 msgid "-r option is useless with -y option" msgstr "-r est inutile avec -y" #: src/dar_suite/command_line.cpp:554 msgid "" "-u, -U, -P, -g, -], -[ and any other EA selection relative commands are not " "possible with -y option" msgstr "" "-u, -U, -P, -g, -], -[ et autres commandes relatives à la selection d'EA " "n'est impossible avec l'option -y" #: src/dar_suite/command_line.cpp:556 msgid "-O option is not possible with -y 
option" msgstr "l'option -O est incompatible avec l'option -y" #: src/dar_suite/command_line.cpp:558 msgid "-H option is useless with -y option" msgstr "-H est inutile avec -y" #: src/dar_suite/command_line.cpp:560 msgid "-as option is useless with -y option" msgstr "l'option -as est inutile avec -y" #: src/dar_suite/command_line.cpp:562 msgid "-ae option is useless with -y option" msgstr "l'option -ae est inutile avec -y" #: src/dar_suite/command_line.cpp:564 msgid "-ad option is useless with -y option" msgstr "l'option -ad est inutile avec -y" #: src/dar_suite/command_line.cpp:566 msgid "-asecu option is useless with -y option" msgstr "l'option -asecu est inutile avec -y" #: src/dar_suite/command_line.cpp:568 msgid "-ai option is useless with -y option" msgstr "l'option -ai est inutile avec -y" #: src/dar_suite/command_line.cpp:570 msgid "--alter=do-not-compare-symlink-mtime option is useless with -y option" msgstr "" "l'option --alter=do-not-compare-symlink-mtime est inutile avec l'option -y" #: src/dar_suite/command_line.cpp:572 msgid "-M option is useless with -y option" msgstr "l'option -M est inutile avec -y" #: src/dar_suite/command_line.cpp:574 msgid "-@ option is useless with -y option" msgstr "l'option -@ est inutile avec -y" #: src/dar_suite/command_line.cpp:576 msgid "-/ option is useless with -y option" msgstr "l'option -/ est inutile avec -y" #: src/dar_suite/command_line.cpp:578 msgid "-< and -> options are useless with -y option" msgstr "les options -< et -> sont inutiles avec l'option -y" #: src/dar_suite/command_line.cpp:580 msgid "-5 option is useless with -y option" msgstr "l'option -5 est inutile avec -y" #: src/dar_suite/command_line.cpp:582 msgid "-8 option is useless with -y option" msgstr "l'option -8 est inutile avec -y" #: src/dar_suite/command_line.cpp:584 msgid "-{ and -} options are useless with -y option" msgstr "les options -{ et -} sont inutiles avec l'option -y" #: src/dar_suite/command_line.cpp:586 msgid "-\\ option is useless 
with -y option" msgstr "l'option -\\ est inutile avec -y" #: src/dar_suite/command_line.cpp:588 msgid "compression (-z option) cannot be changed with -y option" msgstr "la compression (option -z) ne peut être modifiée avec l'option -y" #: src/dar_suite/command_line.cpp:590 msgid "-ak option is useless with -y option" msgstr "l'option -ak est inutile avec -y" #: src/dar_suite/command_line.cpp:592 msgid "-ah option is useless with -y option" msgstr "l'option -ah est inutile avec -y" #: src/dar_suite/command_line.cpp:594 msgid "--sequential-read is useless with -y option" msgstr "l'option --sequential-read est inutile avec -y" #: src/dar_suite/command_line.cpp:596 msgid "--alter=tape-marks is impossible with -y option" msgstr "--alter=tape-marks est impossible avec l'option -y" #: src/dar_suite/command_line.cpp:641 msgid "" "-Y and -Z are only useful with compression (-z option), ignoring any -Y and -" "Z option" msgstr "" "-Y et -Z ne sont utiles qu'avec la compression de données (option -z), -Y et " "-Z seront ignorés ici" #: src/dar_suite/command_line.cpp:643 msgid "-m is only useful with compression (-z option), ignoring -m" msgstr "" "-m n'est utile qu'avec la compression de données (option -z), -m sera " "ignorés ici" #: src/dar_suite/command_line.cpp:693 msgid "" "-= option is valid only while saving files, thus in conjunction with -c " "option, ignoring" msgstr "" "l'option -= n'est valide que lors de la sauvegarde (option -c) et sera " "ignorée, ici" #: src/dar_suite/command_line.cpp:695 msgid "" "-= option will be ignored as it is useless if you do not specify to which " "files or directories this backup hook is to be applied, thanks to -< and -> " "options. See man page for more details." msgstr "" "l'option -= sera ignorée car inutile faute d'indication des fichiers ou " "répertoires auxquelles l'appliquer. 
Voir les options -< et -> dans la page " "de manuel pour plus de détails" #: src/dar_suite/command_line.cpp:701 msgid "" "backup hook feature (-<, -> or -= options) is only available when saving " "files, ignoring" msgstr "" "La fonctionnalité d'action de sauvegarde (options-< -> et -=) n'est " "disponible que lors de la sauvegarde et sera ignorée ici" #: src/dar_suite/command_line.cpp:769 msgid "-/ option is only useful with -+ option, ignoring" msgstr "l'option -/ n'est utile qu'avec -+ et sera ignorée ici" #: src/dar_suite/command_line.cpp:780 msgid "" "-. option is only useful when merging, creating or isolating an archive, " "ignoring" msgstr "" "l'option -. n'est utile que lors de fusion, de création ou d'isolation " "d'archives et sera ignorée ici" #: src/dar_suite/command_line.cpp:785 #, c-format msgid "" "The following user comment will be placed in clear text in the archive: %S" msgstr "" "Les commentaires utilisateur suivants seront placés en clair dans " "l'archive : %S" #: src/dar_suite/command_line.cpp:804 msgid "" "Warning: libdar multi-threading is an experimental and unsupported feature, " "read man page about -G option for more information" msgstr "" "Attention: L'option -G est expérimentale et non supportée. 
Lire la page de " "manuel à propos de cette option pour plus d'information" #: src/dar_suite/command_line.cpp:809 msgid "Parse error: " msgstr "Erreur de syntaxe : " #: src/dar_suite/command_line.cpp:854 msgid " Only one option of -c -d -t -l -C -x or -+ is allowed" msgstr " Une seule option parmi -c -d -t -l -C, -x ou -+ est permise" #: src/dar_suite/command_line.cpp:907 msgid "Only one -A option is allowed" msgstr "Une seule option -A est permise" #: src/dar_suite/command_line.cpp:934 msgid "Error while parsing -A argument as a date: " msgstr "Erreur de syntaxe dans la date donné à l'option -A : " #: src/dar_suite/command_line.cpp:1015 msgid "Choose only one compression algorithm" msgstr "Choisir un seul algorithme de compression" #: src/dar_suite/command_line.cpp:1021 msgid "-w option is useless with -n" msgstr "-w est inutile avec -n" #: src/dar_suite/command_line.cpp:1033 msgid "Unknown argument given to -w: " msgstr "Argument inconnu donné à -w : " #: src/dar_suite/command_line.cpp:1053 src/dar_suite/command_line.cpp:1060 #: src/dar_suite/command_line.cpp:1067 msgid "\"-k\" (or \"-kignore\") and \"-konly\" are not compatible" msgstr "\"-k\" (ou \"-kignore\") et \"-konly\" ne sont pas compatibles" #: src/dar_suite/command_line.cpp:1071 #, c-format msgid "Unknown argument given to -k : %s" msgstr "Argument inconnu donné à -k : %s" #: src/dar_suite/command_line.cpp:1075 msgid "Only one -R option is allowed" msgstr "Une seule option -R est permise" #: src/dar_suite/command_line.cpp:1102 src/dar_suite/dar_xform.cpp:228 msgid "Only one -s option is allowed" msgstr "Une seule option -s est permise" #: src/dar_suite/command_line.cpp:1127 src/dar_suite/command_line.cpp:1145 #: src/dar_suite/dar_xform.cpp:253 src/dar_suite/dar_xform.cpp:271 msgid "Only one -S option is allowed" msgstr "Une seule option -S est permise" #: src/dar_suite/command_line.cpp:1135 msgid "" "Giving to -S option the same value as the one given to -s option is useless" msgstr "Donner à -S la même 
valeur que celle donnée à -s est inutile" #: src/dar_suite/command_line.cpp:1210 msgid "-r is useless with -n" msgstr "L'option -r est inutile avec -n" #: src/dar_suite/command_line.cpp:1291 msgid "Invalid argument given to -T option, expecting [:]" msgstr "" "Argument invalide transmis à l'option -T, [:] est attendu" #: src/dar_suite/command_line.cpp:1307 #, c-format msgid "Invalid hash algorithm provided to -T opton: %s" msgstr "Algorithm de hashage inconnu fourni à l'option -T : %s" #: src/dar_suite/command_line.cpp:1401 #, c-format msgid "" "File inclusion loop detected. The file %s includes itself directly or " "through other files (-B option)" msgstr "" "Boucle dans l'inclusion des fichiers. Le fichier %s s'inclut lui-même " "directement ou via d'autres fichiers (option -B)" #: src/dar_suite/command_line.cpp:1418 #, c-format msgid "Error reading included file (%s): " msgstr "Erreur pendant la l'analyse d'un fichier inclus (%s) : " #: src/dar_suite/command_line.cpp:1423 #, c-format msgid "Error in included file (%s): " msgstr "Erreur dans le fichier inclus %s : " #: src/dar_suite/command_line.cpp:1439 src/dar_suite/command_line.cpp:2841 #: src/dar_suite/command_line.cpp:2923 #, c-format msgid "In included file %S: " msgstr "Dans le fichier inclus %S : " #: src/dar_suite/command_line.cpp:1482 #, c-format msgid "" "%d is the default value for -m, no need to specify it on command line, " "ignoring" msgstr "" "%d est la valeur par défaut pour -m, il n'est pas nécessaire de préciser " "cette valeur" #: src/dar_suite/command_line.cpp:1499 msgid "" "--nodump feature has not been activated at compilation time, it is thus not " "available" msgstr "" "La fonctionnalité --nodump n'a pas été activée lors de la compilation, cette " "option n'est donc pas disponible" #: src/dar_suite/command_line.cpp:1515 msgid "Argument given to -H is not a positive integer number" msgstr "L'argument donné à -H doit être un entier positif" #: src/dar_suite/command_line.cpp:1521 
src/dar_suite/dar_xform.cpp:318 msgid "-a option requires an argument" msgstr "Il manque un argument à l'option -a" #: src/dar_suite/command_line.cpp:1568 msgid "" "-ak option need not be specified more than once, ignoring extra -ak options" msgstr "inutile de spécifier l'option -ak plus d'une fois" #: src/dar_suite/command_line.cpp:1574 msgid "-af must be present before -A option not after!" msgstr "L'option -af doit être placée avant l'option -A, et non pas après" #: src/dar_suite/command_line.cpp:1576 msgid "" "-af option need not be specified more than once, ignoring extra -af options" msgstr "Inutile de spécifier plus d'une fois l'option -af" #: src/dar_suite/command_line.cpp:1614 #, c-format msgid "Unknown argument given to -a : %s" msgstr "Argument inconnu donné à -a : %s" #: src/dar_suite/command_line.cpp:1631 msgid "libthreadar required for multithreaded execution" msgstr "libthreadar est nécessaire pour une execution en parallèle" #: src/dar_suite/command_line.cpp:1659 msgid "Only one -@ option is allowed" msgstr "Une seule option -@ est permise" #: src/dar_suite/command_line.cpp:1737 msgid "Syntax error in overwriting policy: " msgstr "Erreur de syntaxe dans la politique d'écrasement : " #: src/dar_suite/command_line.cpp:1770 #, c-format msgid "" "%d is the default value for --sparse-file-min-size, no need to specify it on " "command line, ignoring" msgstr "" "%d est la valeur par défaut pour --sparse-file-min-size, il n'est pas " "nécessaire de préciser cette valeur" #: src/dar_suite/command_line.cpp:1791 #, c-format msgid "Unknown argument given to -2 : %s" msgstr "Argument inconnu donné à -2 : %s" #: src/dar_suite/command_line.cpp:1828 src/dar_suite/dar_xform.cpp:336 msgid "Missing argument to --hash" msgstr "Il manque un argument à --hash" #: src/dar_suite/command_line.cpp:1830 src/dar_suite/dar_xform.cpp:343 msgid "Unknown parameter given to --hash option: " msgstr "Paramètre inconnu donné à l'option --hash : " #: src/dar_suite/command_line.cpp:1843 
msgid "Error while parsing --min-digits option: " msgstr "Erreur de syntaxe concernant --min-digits : " #: src/dar_suite/command_line.cpp:1849 msgid "Missing argument to --backup-hook-execute" msgstr "Argument manquant pour --backup-hook-execute" #: src/dar_suite/command_line.cpp:1920 msgid "Missing argument to --delta" msgstr "Il manque un argument à --delta" #: src/dar_suite/command_line.cpp:1942 msgid "missing function name argument in string" msgstr "absence de nom de fonction" #: src/dar_suite/command_line.cpp:1948 msgid "missing multiplier argument in string" msgstr "Absence du champ multiplicateur" #: src/dar_suite/command_line.cpp:1966 src/dar_suite/command_line.cpp:1977 msgid "too large value provided for the min block size" msgstr "Valeur trop grande fournie comme taille minimale de blocks" #: src/dar_suite/command_line.cpp:1982 msgid "unexpected extra argument in string" msgstr "Argument supplémentaire inattendu" #: src/dar_suite/command_line.cpp:1988 msgid "Unknown parameter given to --delta option: " msgstr "Paramètre inconnu donné à l'option --delta : " #: src/dar_suite/command_line.cpp:2035 msgid "Unknown parameter given to --modified-data-detection option: " msgstr "Paramètre inconnu donné à l'option --modified-data-detection : " #: src/dar_suite/command_line.cpp:2040 src/dar_suite/command_line.cpp:2042 #, c-format msgid "Unknown option -%c" msgstr "Option inconnue -%c" #: src/dar_suite/command_line.cpp:2059 #, c-format msgid "" "usage: %s [ -c | -x | -d | -t | -l | -C | -+ ] [/] " "[options...]\n" msgstr "" "usage: %s [ -c | -x | -d | -t | -l | -C | -+ ] [/] " "[options...]\n" #: src/dar_suite/command_line.cpp:2063 #, c-format msgid "Commands are:\n" msgstr "Les commandes sont :\n" #: src/dar_suite/command_line.cpp:2064 #, c-format msgid " -c creates an archive\n" msgstr " -c créer une archive\n" #: src/dar_suite/command_line.cpp:2065 #, c-format msgid " -x extracts files from the archive\n" msgstr " -x restauration de fichiers\n" #: 
src/dar_suite/command_line.cpp:2066 #, c-format msgid " -d compares the archive with the existing filesystem\n" msgstr " -d comparaison de l'archive avec les fichiers existants\n" #: src/dar_suite/command_line.cpp:2067 #, c-format msgid " -t tests the archive integrity\n" msgstr " -t test l'intégrité d'une archive\n" #: src/dar_suite/command_line.cpp:2068 #, c-format msgid " -l lists the contents of the archive\n" msgstr " -l affichage du contenu d'une archive\n" #: src/dar_suite/command_line.cpp:2069 #, c-format msgid " -C isolates the catalogue from an archive\n" msgstr " -C isole le catalogue d'une archive\n" #: src/dar_suite/command_line.cpp:2070 #, c-format msgid " -+ merge two archives / create a sub archive\n" msgstr " -+ fusion de deux archives / création de sous-archive\n" #: src/dar_suite/command_line.cpp:2071 #, c-format msgid " -y repair a truncated archive\n" msgstr " -y répare une archive tronquée\n" #: src/dar_suite/command_line.cpp:2073 #, c-format msgid " -h displays this help information\n" msgstr " -h affiche cette page d'aide\n" #: src/dar_suite/command_line.cpp:2074 #, c-format msgid " -V displays version information\n" msgstr " -V affiche les informations de version\n" #: src/dar_suite/command_line.cpp:2077 #, c-format msgid " -v[s|t|d|m|f|a] verbose output\n" msgstr " -v[s|t|d|m|f|a] affichage détaillé\n" #: src/dar_suite/command_line.cpp:2078 #, c-format msgid " -q\t\t suppress final statistics report\n" msgstr " -q\t\t supprime le rapport final de statistiques\n" #: src/dar_suite/command_line.cpp:2079 #, c-format msgid " -vs\t\t display skipped files\n" msgstr " -vs\t\t affiche les fichiers ignorés\n" #: src/dar_suite/command_line.cpp:2080 #, c-format msgid " -R \t filesystem root directory (current dir by default)\n" msgstr "" " -R \t répertoire à considérer comme racine (rep. 
courant par " "défaut)\n" #: src/dar_suite/command_line.cpp:2081 #, c-format msgid "" " -X \t files to exclude from the operation (none by default)\n" msgstr "" " -X \t fichiers à exclure de l'opération (aucun par défaut)\n" #: src/dar_suite/command_line.cpp:2082 #, c-format msgid " -I \t files to include in the operation (all by default)\n" msgstr "" " -I \t fichiers à inclure dans l'opération (tous par défaut)\n" #: src/dar_suite/command_line.cpp:2083 #, c-format msgid " -P \t subdirectory to exclude from the operation\n" msgstr " -P \t sous-répertoires à exclure de l'opération\n" #: src/dar_suite/command_line.cpp:2084 #, c-format msgid " -g \t subdirectory to include in the operation\n" msgstr " -g \t sous-répertoire à inclure dans l'opération\n" #: src/dar_suite/command_line.cpp:2085 #, c-format msgid " -[ filename contains a list of files to include\n" msgstr " -[ fichier contenant une liste de fichiers à inclure\n" #: src/dar_suite/command_line.cpp:2086 #, c-format msgid " -] \t filename contains a list of files to exclude\n" msgstr " -] \t fichier contenant une liste de fichiers à exclure\n" #: src/dar_suite/command_line.cpp:2087 src/dar_suite/dar_xform.cpp:429 #, c-format msgid " -n\t\t don't overwrite files\n" msgstr " -n\t\t ne pas écraser de fichier\n" #: src/dar_suite/command_line.cpp:2088 src/dar_suite/dar_xform.cpp:430 #, c-format msgid " -w\t\t don't warn before overwriting files\n" msgstr " -w\t\t ne pas avertir avant un écrasement de fichier\n" #: src/dar_suite/command_line.cpp:2089 #, c-format msgid " -wa\t\t don't warn before overwriting and removing files\n" msgstr "" " -wa\t\t ne pas avertir ni avant écrasement ni avant suppression de " "fichier\n" #: src/dar_suite/command_line.cpp:2090 src/dar_suite/dar_xform.cpp:431 #, c-format msgid " -b\t\t ring the terminal bell when user action is required\n" msgstr " -b\t\tfait sonner le terminal pour solliciter l'utilisateur\n" #: src/dar_suite/command_line.cpp:2091 #, c-format msgid "" " 
-O[ignore-owner | mtime | inode-type] do not consider user and group\n" msgstr "" " -O[ignore-owner | mtime | inode-type] ne pas prendre en compte la " "propriété des fichiers\n" #: src/dar_suite/command_line.cpp:2092 #, c-format msgid "\t\t ownership\n" msgstr "\t\t propriété\n" #: src/dar_suite/command_line.cpp:2093 #, c-format msgid " -H [N]\t ignore shift in dates of an exact number of hours\n" msgstr "" " -H [N]\t considère identiques des dates qui diffèrent d'un nombre " "entier d'heures\n" #: src/dar_suite/command_line.cpp:2094 #, c-format msgid " -E \t command to execute between slices\n" msgstr " -E \t commande exécutée entre les tranches\n" #: src/dar_suite/command_line.cpp:2095 #, c-format msgid " -F \t same as -E but for the archive of reference\n" msgstr " -F \t comme -E mais pour l'archive de référence\n" #: src/dar_suite/command_line.cpp:2096 #, c-format msgid " -u \t mask to ignore certain EA\n" msgstr " -u \t masque de sélection des Attributs Étendus\n" #: src/dar_suite/command_line.cpp:2097 #, c-format msgid " -U \t mask to allow certain EA\n" msgstr " -U \t masque d'exclusion d'Attributs Étendus\n" #: src/dar_suite/command_line.cpp:2098 #, c-format msgid " -K \t use as key to encrypt/decrypt\n" msgstr " -K \t mot de passe à utiliser pour chiffrer/déchiffrer\n" #: src/dar_suite/command_line.cpp:2099 #, c-format msgid "" " -J \t same as -K but it does concern the archive of reference\n" msgstr " -J \t comme -K mais pour l'archive de référence\n" #: src/dar_suite/command_line.cpp:2100 #, c-format msgid " -# encryption block size\n" msgstr " -# taille des blocs de chiffrement\n" #: src/dar_suite/command_line.cpp:2101 #, c-format msgid " -* same as -# but for archive of reference\n" msgstr " -* comme -# mais pour l'archive de référence\n" #: src/dar_suite/command_line.cpp:2102 #, c-format msgid " -B read options from given file\n" msgstr " -B lit les options à partir du fichier\n" #: src/dar_suite/command_line.cpp:2103 #, c-format msgid " -N\t\t do 
not read ~/.darrc nor /etc/darrc configuration file\n" msgstr " -N\t\t ignore ~/.darrc et /etc/darrc s'ils sont présents\n" #: src/dar_suite/command_line.cpp:2104 #, c-format msgid " -e\t\t dry run, fake execution, nothing is produced\n" msgstr " -e\t\t exécution à vide (rien n'est écrit sur le disque)\n" #: src/dar_suite/command_line.cpp:2105 #, c-format msgid " -Q\t\t suppress the initial warning when not launched from a tty\n" msgstr " -Q\t\t supprime le message initial si non lancé depuis un tty\n" #: src/dar_suite/command_line.cpp:2106 #, c-format msgid " -aa\t\t do not try to preserve atime of file open for reading.\n" msgstr "" " -aa\t\t ne cherche pas à conserver atime des fichiers ouverts en " "lecture.\n" #: src/dar_suite/command_line.cpp:2107 #, c-format msgid " -ac\t\t do not try to preserve ctime (default behavior).\n" msgstr "" " -ac\t\t ne cherche pas à conserver ctime (comportement par défaut).\n" #: src/dar_suite/command_line.cpp:2108 #, c-format msgid " -am\t\t set ordered mode for all filters\n" msgstr " -am\t\t filtres en mode ordonné\n" #: src/dar_suite/command_line.cpp:2109 #, c-format msgid " -an\t\t the masks that follow are now case insensitive\n" msgstr "" " -an\t\t dans les masques suivants majuscules et minuscules sont " "équivalents\n" #: src/dar_suite/command_line.cpp:2110 #, c-format msgid " -acase\t the masks that follow are now case sensitive\n" msgstr "" " -acase\t\t dans les masques qui suivent majuscules et minuscules sont " "différents\n" #: src/dar_suite/command_line.cpp:2111 #, c-format msgid " -ar\t\t set the following masks to be regex expressions\n" msgstr "" " -ar\t\t bascule les masques suivants comme expression régulières\n" #: src/dar_suite/command_line.cpp:2112 #, c-format msgid " -ag\t\t set the following masks to be glob expressions\n" msgstr " -ag\t\t bascule les masques suivants comme expression glob\n" #: src/dar_suite/command_line.cpp:2114 #, c-format msgid "" "Saving/Isolation/merging/repairing options (to use 
with -c, -C, -+ or -y):\n" msgstr "" "Options de sauvegarde/d'isolation/fusion/réparation (à utiliser avec -c, -C, " "-+ ou -y) :\n" #: src/dar_suite/command_line.cpp:2115 #, c-format msgid " -A [path/] archive to take as reference\n" msgstr " -A [chemin/] archive à prendre comme référence\n" #: src/dar_suite/command_line.cpp:2116 #, c-format msgid " -@ [path/] auxiliary archive of reference for merging\n" msgstr "" " -@ [chemin/] archive à prendre comme référence auxiliaire\n" #: src/dar_suite/command_line.cpp:2117 #, c-format msgid " -$ \t encryption key for auxiliary archive\n" msgstr " -$ \t clef de chiffrement de l'archive auxiliaire\n" #: src/dar_suite/command_line.cpp:2118 #, c-format msgid " -~ \t command between slices of the auxiliary archive\n" msgstr "" " -~ \t commande à exécuter entre les tranches d'une archive " "auxiliaire\n" #: src/dar_suite/command_line.cpp:2119 #, c-format msgid " -z [[algo:]level]\t compress data in archive. -z = -z9 = -zgzip:9\n" msgstr " -z [[algo:]niveau]\t compression des données\n" #: src/dar_suite/command_line.cpp:2120 #, c-format msgid "" " Available algo: gzip,bzip2,lzo,xz. Exemples: -zlzo -zxz:5 -z1 -z\n" msgstr "" " Algorithmes disponibles : gzip, bzip2, lzo, xz. Exemples -zlzo:5 -z1\n" #: src/dar_suite/command_line.cpp:2121 src/dar_suite/dar_xform.cpp:426 #, c-format msgid "" " -s split the archive in several files of size \n" msgstr "" " -s coupe l'archive en tranches de taille octets\n" #: src/dar_suite/command_line.cpp:2122 #, c-format msgid " -S first file size (if different from following ones)\n" msgstr "" " -S taille de la première tranche (si elle doit différente " "des autres)\n" #: src/dar_suite/command_line.cpp:2123 #, c-format msgid " -aSI \t slice size suffixes k, M, T, G, etc. are powers of 10\n" msgstr "" " -aSI \t signification des suffixes k, M, T, G, etc. comme puissances de " "10\n" #: src/dar_suite/command_line.cpp:2124 #, c-format msgid " -abinary\t slice size suffixes k, M, T, G, etc. 
are powers of 2\n" msgstr "" " -abinary\t signification des suffixes k, M, T, G, etc. comme puissances " "de 2\n" #: src/dar_suite/command_line.cpp:2125 src/dar_suite/dar_xform.cpp:428 #, c-format msgid " -p\t\t pauses before writing to a new file\n" msgstr " -p\t\t arrêt (pause) après chaque tranche\n" #: src/dar_suite/command_line.cpp:2126 #, c-format msgid " -D\t\t excluded directories are stored as empty directories\n" msgstr "" " -D\t\t les répertoires exclus sont enregistrés comme des répertoires " "vides\n" #: src/dar_suite/command_line.cpp:2127 #, c-format msgid " -Z \t do not compress the matching filenames\n" msgstr " -Z \t ne pas compresser ces fichiers\n" #: src/dar_suite/command_line.cpp:2128 #, c-format msgid " -Y \t do only compress the matching filenames\n" msgstr " -Y \t ne compresser que ces fichiers\n" #: src/dar_suite/command_line.cpp:2129 #, c-format msgid " -m \t do not compress file smaller than \n" msgstr "" " -m \t ne pas compresser les fichiers de taille inférieure à " " octets\n" #: src/dar_suite/command_line.cpp:2130 #, c-format msgid " --nodump\t do not backup, files having the nodump 'd' flag set\n" msgstr "" " --nodump\t ne pas sauver les fichiers ayant le drapeau 'd' positionné\n" #: src/dar_suite/command_line.cpp:2131 #, c-format msgid "" " -@ [path/] Do on-fly catalogue isolation of the resulting " "archive\n" msgstr "" " -@ [chemin/] Réalise une isolation à la volée de l'archive crée\n" #: src/dar_suite/command_line.cpp:2132 #, c-format msgid " -M\t\t stay in the same filesystem while scanning directories\n" msgstr "" " -M\t\t reste dans le même système de fichiers lors du parcours de " "l'arborescence\n" #: src/dar_suite/command_line.cpp:2133 #, c-format msgid " -,\t\t ignore directories that follow the Directory Tagging\n" msgstr " -,\t\t ignore les répertoires marqués comme répertoire cache\n" #: src/dar_suite/command_line.cpp:2134 #, c-format msgid "\t\t Standard\n" msgstr "\t\t Standard\n" #: 
src/dar_suite/command_line.cpp:2135 #, c-format msgid "" " -/ \t which way dar can overwrite files at archive merging or\n" msgstr "" " -/ \t définit de quelle façon dar peut écraser des fichiers lors " "de la fusion\n" #: src/dar_suite/command_line.cpp:2136 #, c-format msgid "\t\t extraction time\n" msgstr "\t\t ou lors de l'extraction d'une archive\n" #: src/dar_suite/command_line.cpp:2137 src/dar_suite/dar_xform.cpp:436 #, c-format msgid " -^ \t permission[:user[:group]] of created slices\n" msgstr "" " -^ \t permission[:utilisateur[:groupe]] pour la création des " "tranches\n" #: src/dar_suite/command_line.cpp:2138 #, c-format msgid "" " -8 sig\t add delta signature to perform binary delta if used as ref." msgstr "" " -8 sig génère signature pour diff binaire si utilisé comme ref." #: src/dar_suite/command_line.cpp:2140 #, c-format msgid "Restoring options (to use with -x) :\n" msgstr "Options de restauration (à utiliser avec -x) :\n" #: src/dar_suite/command_line.cpp:2141 #, c-format msgid " -k\t\t do not remove files destroyed since the reference backup\n" msgstr " -k\t\t non suppression des fichiers enregistrés comme détruits\n" #: src/dar_suite/command_line.cpp:2142 #, c-format msgid " -r\t\t do not restore file older than those on filesystem\n" msgstr "" " -r\t\t ne pas restaurer de fichiers plus anciens que ceux sur le " "système de fichiers\n" #: src/dar_suite/command_line.cpp:2143 #, c-format msgid " -f\t\t do not restore directory structure\n" msgstr " -f\t\t ne pas restaurer l'arborescence de répertoire\n" #: src/dar_suite/command_line.cpp:2145 #, c-format msgid "Reading options (to use with -x, -d, -t, -l, -A)\n" msgstr "Options de lecture (à utiliser avec -x, -d, -t, -l, -A) :\n" #: src/dar_suite/command_line.cpp:2146 #, c-format msgid "" " -i pipe to use instead of std input to read data from " "dar_slave\n" msgstr "" " -i tube à utiliser à la place de l'entrée standard pour lire " "les données de dar_slave\n" #: src/dar_suite/command_line.cpp:2147 
#, c-format msgid "" " -o pipe to use instead of std output to orders dar_slave\n" msgstr "" " -o tube à utiliser au lieu de la sortie standard pour " "ordonner dar_slave\n" #: src/dar_suite/command_line.cpp:2149 #, c-format msgid "Listing options (to use with -l):\n" msgstr "Options de listing (à utiliser avec -l) :\n" #: src/dar_suite/command_line.cpp:2150 #, c-format msgid " -T\t\t tree output format\n" msgstr " -T\t\t affichage sous forme d'arbre\n" #: src/dar_suite/command_line.cpp:2151 #, c-format msgid " -as\t\t only list files saved in the archive\n" msgstr " -as\t\taffichage uniquement des fichiers sauvegardés\n" #: src/dar_suite/command_line.cpp:2152 #, c-format msgid "" "\n" "\n" msgstr "" "\n" "\n" #: src/dar_suite/command_line.cpp:2153 #, c-format msgid "" "Type \"man dar\" for more details and for all other available options.\n" msgstr "" "Taper \"man dar\" pour plus de détails et pour connaître toutes les autres " "options disponibles.\n" #: src/dar_suite/command_line.cpp:2529 #, c-format msgid " Using libdar %u.%u.%u built with compilation time options:" msgstr "Libdar %u.%u.%u utilisée, construite avec les options suivantes :" #: src/dar_suite/command_line.cpp:2530 #, c-format msgid " Using libdar %u.%u built with compilation time options:" msgstr "Libdar %u.%u utilisée, construite avec les options suivantes :" #: src/dar_suite/command_line.cpp:2753 #, c-format msgid "Arguments read from %S :" msgstr "Arguments lus depuis %S :" #: src/dar_suite/command_line.cpp:2875 #, c-format msgid "Failed reading %S: " msgstr "Echec de lecture pour %S : " #: src/dar_suite/command_line.cpp:2956 #, c-format msgid "Warning: Failed reading %S: " msgstr "Attention : échec de lecture pour %S : " #: src/dar_suite/command_line.cpp:3246 #, c-format msgid "" "%s does not name a compression \"[algorithm][:][level]\" , like for examples " "\"gzip\", \"lzo\", \"bzip2\", \"lzo:3\", \"gzip:2\", \"8\" or \"1\". 
Please " "review the man page about -z option" msgstr "" "%s ne désigne pas une compression \"[algo][:][niveau]\", comme par exemple " "\"gzip\", \"lzo\", \"bzip2\", \"lzo:3\", \"gzip:2\", \"8\" ou \"1\". Merci " "de revoir la description de l'option -z dans la page de manuel" #: src/dar_suite/command_line.cpp:3265 msgid "Compression level must be between 1 and 9, included" msgstr "le niveau de compression doit être compris entre 1 et 9 inclus" #: src/dar_suite/command_line.cpp:3294 msgid "unknown FSA family: " msgstr "Famille d'attributs FSA inconnue : " #: src/dar_suite/command_line.cpp:3326 #, c-format msgid "" "User target named \"%s\" is not allowed (reserved word for conditional " "syntax)" msgstr "" "La cible utilisateur \"%s\" n'est pas autorisée (cible réservé pour la " "syntaxe conditionnelle)" #: src/dar_suite/dar_xform.cpp:230 msgid "Missing argument to -s" msgstr "Il manque un argument à -s" #: src/dar_suite/dar_xform.cpp:241 msgid "Invalid size for option -s" msgstr "Taille invalide donnée via l'option -s" #: src/dar_suite/dar_xform.cpp:248 msgid "Missing argument to -S" msgstr "Il manque un argument à l'option -S" #: src/dar_suite/dar_xform.cpp:261 msgid "Giving -S option the same value as the one given to -s is useless" msgstr "" "Donner à l'option -S la même valeur que celle donnée à l'option -s est " "inutile" #: src/dar_suite/dar_xform.cpp:265 msgid "Invalid size for option -S" msgstr "Taille invalide donnée via l'option -S" #: src/dar_suite/dar_xform.cpp:302 msgid "Missing argument to -E" msgstr "Il manque un argument à l'option -E" #: src/dar_suite/dar_xform.cpp:310 msgid "Missing argument to -F" msgstr "Il manque un argument à l'option -F" #: src/dar_suite/dar_xform.cpp:325 msgid "Unknown parameter given to -a option: " msgstr "Paramètre inconnu donné à l'option -a : " #: src/dar_suite/dar_xform.cpp:331 msgid "Missing argument to -^" msgstr "Il manque un argument à -^" #: src/dar_suite/dar_xform.cpp:366 msgid "" "Missing source or destination 
argument on command line, see -h option for " "help" msgstr "Il manque la source ou la destination, voir l'option -h pour l'aide" #: src/dar_suite/dar_xform.cpp:381 msgid "Invalid argument as source archive" msgstr "Argument invalide donné comme archive source" #: src/dar_suite/dar_xform.cpp:388 msgid "Invalid argument as destination archive" msgstr "Argument invalide donné comme archive destination" #: src/dar_suite/dar_xform.cpp:394 msgid "Archive on stdout is not compatible with slicing (-s option)" msgstr "" "La production d'archive sur stdout n'est pas compatible avec plusieurs " "tranches (option -s)" #: src/dar_suite/dar_xform.cpp:419 #, c-format msgid "\t\t the first non options argument is the archive to read\n" msgstr "" "\t\t le premier argument qui n'est pas une option est le nom de l'archive à " "lire\n" #: src/dar_suite/dar_xform.cpp:421 #, c-format msgid "\t\t the second non option argument is the archive to create\n" msgstr "" "\t\t le second argument qui n'est pas une option est le nom de l'archive à " "créer\n" #: src/dar_suite/dar_xform.cpp:425 #, c-format msgid " -V\t\t displays version information\n" msgstr " -V\t\t affiche la version\n" #: src/dar_suite/dar_xform.cpp:427 #, c-format msgid " -S first file size\n" msgstr " -S taille de la première tranche\n" #: src/dar_suite/dar_xform.cpp:432 #, c-format msgid "" " -E \t command to execute between slices of destination archive\n" msgstr "" " -E \t commande à exécuter après chaque tranche de l'archive " "destination\n" #: src/dar_suite/dar_xform.cpp:433 #, c-format msgid " -F \t command to execute between slice of source archive\n" msgstr "" " -F \t commande à exécuter avant chaque tranche de l'archive " "source\n" #: src/dar_suite/dar_xform.cpp:434 #, c-format msgid " -aSI \t slice size suffixes k, M, T, G, etc. are power of 10\n" msgstr "" " -aSI \t signification des suffixes k, M, T, G, etc. 
comme puissances de " "10\n" #: src/dar_suite/dar_xform.cpp:435 #, c-format msgid " -abinary\t slice size suffixes k, M, T, G, etc. are power of 2\n" msgstr "" " -abinary\t signification des suffixes k, M, T, G, etc. comme puissances " "de 2\n" #: src/python/pybind11_libdar.cpp:262 msgid "FATAL error from operating system, aborting operation: " msgstr "" "Erreur FATALE provenant du système d'exploitation, abandon de l'opération : " #: src/check/all_features.cpp:54 msgid "-" msgstr "-" #~ msgid "CRC failed for table of contents (aka \"catalogue\")" #~ msgstr "Erreur de CRC pour le catalogue" #~ msgid "aborting" #~ msgstr "abandon" #~ msgid "" #~ "Using the slice layout of the archive of reference recorded at the time " #~ "this isolated catalogue was done\n" #~ " Note: if this reference has been resliced this isolated catalogue has " #~ "been created, the resulting slicing information given here will be wrong " #~ "and will probably lead to an error. Check documentation to know hos to " #~ "manually specify the slicing to use" #~ msgstr "" #~ "Utilisation des tailles de découpage de l'archive de référence " #~ "enregistrées lors de l'isolation du catalogue\n" #~ "Note : Si l'archive de référence à été réagencée en tranche après " #~ "l'isolation, les informations de découpage utilisée ici sont incorrectes " #~ "et conduiront probablement à une erreur. Regardez la documentation pour " #~ "savoir comment définir manuellement le découpage en tranches" #~ msgid "" #~ "Warning: No slice layout of the archive of reference has been recorded in " #~ "this isolated catalogue. The additional slicing information you provided " #~ "may still lead the operation to fail because the archive has an " #~ "_unsupported_ (too old) format for this feature" #~ msgstr "" #~ "Attention: Aucune information de découpage en tranche pour l'archive de " #~ "référence n'a été enregistré avec ce catalogue isolé. 
Les informations " #~ "que vous avez fournies manuellement peuvent conduire l'opération à " #~ "échouer car cette archive a un format _non supporté_ (trop vieux) pour " #~ "cette fonctionnalité" #~ msgid " Special allocation scheme : %s\n" #~ msgstr " Allocation mémoire optimisée : %s\n" #~ msgid "Cannot get mtime: %s" #~ msgstr "Impossible d'obtenir la date de dernière modification : %s" #~ msgid "[--- REMOVED ENTRY ----]" #~ msgstr "[-- ENTRÉE SUPPRIMÉE --]" #~ msgid "" #~ "Archive number | Data | status || " #~ "EA | status \n" #~ msgstr "" #~ "Numéro d'archive | Données | statut || Attributs " #~ "Étendus | statut\n" #~ msgid "" #~ "---------------+-------------------------+--------+" #~ "+-------------------------+----------\n" #~ msgstr "" #~ "-----------------+------------------------+--------+" #~ "+---------------------------+------------\n" #~ msgid "Cannot remove file " #~ msgstr "Suppression de fichier impossible " #~ msgid "Cannot copy a wrapperlib object (NOT IMPLEMENTED)" #~ msgstr "" #~ "La copie d'un objet wrapperlib n'est pas implémentée, copie impossible" #~ msgid "argument must be a relative path" #~ msgstr "l'argument doit être un chemin relatif" #~ msgid "Archive copy constructor is not implemented" #~ msgstr "" #~ "Le constructeur de recopie pour la classe archive n'est pas implémenté" #~ msgid "Archive assignment operator is not implemented" #~ msgstr "L'opérateur d'assignation la classe archive n'est pas implémenté" #~ msgid "" #~ "CRC of file to apply binary diff to does not match the expected CRC value" #~ msgstr "" #~ "CRC du fichir auquel appliquer la différence binaire ne correspond pas à " #~ "la valeur de CRC attendue" #~ msgid "Cannot add another archive, database is full" #~ msgstr "Base de donnée pleine, impossible d'ajouter une nouvelle archive" #~ msgid "Absolute value too high for an archive number: %d" #~ msgstr "Valeur absolue trop élevée pour un numéro d'archive : %d" #~ msgid "" #~ "File ownership will not be restored as %s 
has not the CHOWN capability " #~ "nor is running as root. to avoid this message use -O option" #~ msgstr "" #~ "l'appartenance du fichier ne sera pas restauré car %s ne possède pas la " #~ "capacité CHOWN ni n'est lancé en tant que root. Pour éviter ce message " #~ "utiliser l'option -O" #~ msgid "" #~ "Furtive read mode has been disabled as %s has not the FOWNER capability " #~ "nor is running as root" #~ msgstr "" #~ "La lecture furtive a été désactivé car %s ne possède pas la capacité " #~ "FOWNER ni n'est lancé en tant que root" #~ msgid "nullptr argument given to \"sauv_path\"" #~ msgstr "Valeur nullptr donnée à \"filename\"" #~ msgid "nullptr argument given to \"filename\"" #~ msgstr "Valeur nullptr donnée à \"filename\"" #~ msgid "nullptr argument given to \"extension\"" #~ msgstr "Valeur nullptr donnée à \"extension\"" #~ msgid "Strong encryption support" #~ msgstr "Support pour le chiffrement fort" #~ msgid " -j\t\t ask user what to do when memory is exhausted\n" #~ msgstr "" #~ " -j\t\t permet à l'utilisateur d'agir quand la mémoire virtuelle fait " #~ "défaut\n" #~ msgid "" #~ "------------------+----------------" #~ "+----------------------------------------+-------------------------+" #~ msgstr "" #~ "-----------------+------------------------+--------+" #~ "+---------------------------+------------" #~ msgid "" #~ "---------------+------+-------+-------+-------------------------------" #~ "+------------------------------+-----------\n" #~ msgstr "" #~ "-----------------+------------------------+--------+" #~ "+---------------------------+------------\n" #~ msgid "" #~ "-----------------------------+------------+-------+-------+-------" #~ "+-------------------------------+------------\n" #~ msgstr "" #~ "-----------------+------------------------+--------+" #~ "+---------------------------+------------\n" #~ msgid "Preparing the archive contents for isolation..." #~ msgstr "Préparation du contenu de l'archive en vue de son isolation ..." 
#~ msgid "Current implementation does not support this (new) crypto algorithm" #~ msgstr "" #~ "L'implémentation actuelle ne support pas ce (nouvel) algorithme de " #~ "chiffrement" #~ msgid "Not supported flag or archive corruption" #~ msgstr "Drapeau non supporté ou archive corrompue" #~ msgid "" #~ "Cannot convert group to gid in statically linked binary, either directly " #~ "provide the GID or run libdar from a dynamically linked executable" #~ msgstr "" #~ "Impossible de convertir un nom de groupe en numéro (GID) dans un " #~ "exécutable lié statiquement. Fournir soit directement le GID soit " #~ "utiliser un exécutable lié dynamiquement" #~ msgid "" #~ "too low value (< 10) given as observation_read_number argument while " #~ "initializing cache" #~ msgstr "" #~ "Valeur trop faible (< 10) donnée à \"observation_read_number\" lors de " #~ "l'initialisation du cache" #~ msgid "" #~ "too low value (< 10) given as observation_write_number argument while " #~ "initializing cache" #~ msgstr "" #~ "Valeur trop faible (< 10) donnée à \"observation_write_number\" lors de " #~ "l'initialisation du cache" #~ msgid "" #~ "too high value (> 50) given as unused_read_ratio argument, while " #~ "initializing cache" #~ msgstr "" #~ "Valeur trop grande (> 50) donnée à \"unused_read_ratio\" lors de " #~ "l'initialisation du cache" #~ msgid "" #~ "unused_write_ratio must be less than max_size_hit_write_ratio, while " #~ "initializing cache" #~ msgstr "" #~ "\"unused_write_ratio\" doit être inférieur ou égal à " #~ "\"max_size_hit_write_ratio\", lors de l'initialisation du cache" #~ msgid "" #~ "max_size must be greater or equal to initial_size, while initializing " #~ "cache" #~ msgstr "" #~ "max_size doit être supérieur ou égal à initial_size pour l'initialisation " #~ "du cache" #~ msgid "incompatible Zlib version" #~ msgstr "version de Zlib incompatible" #~ msgid "" #~ "Could not find archive information at the end of the last slice, assuming " #~ "an old archive and trying 
to read at the beginning of the first slice..." #~ msgstr "" #~ "Impossible de trouver les informations sur l'archive en fin de dernière " #~ "tranche. On suppose qu'il s'agit d'un ancien format d'archive, pour " #~ "lequel cette information se trouve uniquement au début de la première " #~ "tranche ..." #~ msgid "" #~ "LAX MODE: Failed to read the archive header, I will need your help to " #~ "know what is the missing information." #~ msgstr "" #~ "MODE RELAX : Échec de lecture de l'en-tête de l'archive, Votre aide sera " #~ "nécessaire pour connaître l'information manquante." #~ msgid "" #~ "LAX MODE: Archive format revision found is [%s] but the higher version " #~ "this binary can handle is [%s]. Thus, assuming the archive version is " #~ "corrupted and falling back to the higher version this binary can support " #~ "(%s)" #~ msgstr "" #~ "MODE RELAX : La version du format d'archive est [%s] mais la version la " #~ "plus élevée que cet exécutable est capable de gérer est [%s]. On suppose " #~ "donc que l'archive est corrompue et que son format est le plus récent " #~ "qu'il est possible de gérer par cet exécutable (version %s)" #~ msgid "LAX MODE: Archive format revision found is [version %s]" #~ msgstr "MODE RELAX : Le format trouvé pour l'archive est [version %s]" #~ msgid "LAX MODE: is it correct, seen the table at the following URL: %s ?" #~ msgstr "" #~ "MODE RELAX : Est-ce correct à la lecture de la table disponible à l'URL " #~ "suivante : %s ?" 
#~ msgid "Cannot determine the permission to use for hash files: " #~ msgstr "" #~ "Impossible de déterminer la permission à appliquer aux fichier de hash: " #~ msgid "Error opening file " #~ msgstr "Erreur lors de l'ouverture du fichier " #~ msgid "Error opening file %s : %s" #~ msgstr "Erreur lors de l'ouverture du fichier %s : %s" #~ msgid "Position out of range" #~ msgstr "Position hors limites" #~ msgid "Read-write mode not supported for \"trivial_sar\"" #~ msgstr "Mode lecture-écriture non supporté pour \"trivial_sar\"" #~ msgid "Cannot create database %S : %s" #~ msgstr "Impossible de créer la base %S : %s" #~ msgid "system type time_t is too small to store a time/date" #~ msgstr "" #~ "Le type time_t du système est trop petit pour contenir une heure/date" #~ msgid "" #~ "Thread-safe not initialized for libdar, read manual or contact maintainer " #~ "of the application that uses libdar" #~ msgstr "" #~ "L'environnement multi-tâche na pas été initialisé pour libdar, merci de " #~ "lire le manuel d'utilisation de libdar ou de contacter le mainteneur de " #~ "l'application utilisatrice de libdar, concernée par ce problème" #~ msgid "Cannot initialize mutex: " #~ msgstr "Initialisation des \"mutex\" impossible : " #~ msgid "Aborting operations for the EA of %S : error while adding EA %s : %s" #~ msgstr "" #~ "Abandon des opérations pour les AE de %S : erreur pendant l'ajout de l'AE " #~ "%s : %s" #~ msgid "Aborting operations for the EAs of %S : error while removing %s : %s" #~ msgstr "" #~ "Abandon des opérations pour les AE de %S : erreur lors de la suppression " #~ "de %s : %s" #~ msgid "" #~ " -z [bzip2[:level]] compress data in archive using bzip2 algorithm.\n" #~ msgstr " -z [bzip2[:niveau]]\t compression avec l'algorithme bzip2\n" #~ msgid "" #~ "No more (virtual) memory available, you have the opportunity to stop un-" #~ "necessary applications to free up some memory. Can we continue now ?" 
#~ msgstr "" #~ "Plus de mémoire (virtuelle) disponible, vous avez la possibilité " #~ "d'arrêter certaines applications pour faire de la place. On continue " #~ "maintenant ?" #~ msgid "Cannot open file %S : %s" #~ msgstr "Le fichier %S ne peut pas être ouvert : %s" #~ msgid "User target found on command line:" #~ msgstr "Cible utilisateur trouvée en ligne de commande :" #~ msgid "" #~ "Note that -y option is deprecated it will be removed in future release, " #~ "please use -z option instead (read man page for details)" #~ msgstr "" #~ "Noter que l'option -y est obsolète et sera supprimée dans une prochaine " #~ "version, merci d'utiliser l'option -z à la place (voir la page de manuel " #~ "pour les détails)" #~ msgid "Choose either -z or -y not both" #~ msgstr "Choisir soit -z soit -y pas les deux à la fois" #~ msgid "" #~ "-G option is obsolete, use -@ option instead, see man page for details" #~ msgstr "" #~ "l'option -G est obsolète, utilise -@ à la place, voir la page de manuel " #~ "pour plus de détails" #~ msgid "NULL given as argument" #~ msgstr "NULL fourni comme argument" #~ msgid "NULL argument given to \"fs_root\"" #~ msgstr "Valeur NULL donnée à \"fs_root\"" #~ msgid "NULL argument given to \"selection\"" #~ msgstr "Valeur NULL donnée à \"selection\"" #~ msgid "NULL argument given to \"subtree\"" #~ msgstr "Valeur NULL donnée à \"subtree\"" #~ msgid "NULL argument given to \"execute\"" #~ msgstr "Valeur NULL donnée à \"execute\"" #~ msgid "NULL argument given to \"compr_mask\"" #~ msgstr "Valeur NULL donnée à \"compr_mask\"" #~ msgid "NULL argument given to \"min_compr_size\"" #~ msgstr "Valeur NULL donnée à \"min_compr_size\"" #~ msgid "NULL argument given to \"ea_mask\"" #~ msgstr "Valeur NULL donnée à \"ea_mask\"" #~ msgid "Cannot determine location of the end of cyphered data: " #~ msgstr "Impossible de localiser la fin des données chiffrées : " #~ msgid "Argument given to \"major\" is a NULL pointer" #~ msgstr "valeur NULL donnée à \"major\"" #~ 
msgid "Argument given to \"minor\" is a NULL pointer" #~ msgstr "Valeur NULL donnée à \"minor\"" #~ msgid "Argument given to \"medium\" is a NULL pointer" #~ msgstr "Valeur NULL donnée à \"medium\"" #~ msgid "argument given to \"minor\" is a NULL pointer" #~ msgstr "Valeur NULL donnée à \"minor\"" #~ msgid "invalid NULL argument given as mask option" #~ msgstr "argument NULL invalide donné pour l'option mask" #~ msgid "invalid NULL argument given as crit_action option" #~ msgstr "argument NULL invalide donné pour l'option crit_action" #~ msgid "-~ is only available with -+ option and -c options" #~ msgstr "-~ n'est disponible qu'avec les options -+ et -c" #~ msgid "End of file reached while skipping to the begin of a word" #~ msgstr "Fin de fichier atteinte lors du déplacement vers le début d'un mot" #~ msgid "Reached end of file while reading a word" #~ msgstr "Fin de fichier atteinte lors de la lecture d'un mot" #~ msgid "different file data" #~ msgstr "données différentes" #~ msgid "" #~ "Archive format older than \"08\" (release 2.4.0) cannot be read through a " #~ "single pipe, only using dar_slave or normal plain file (slice) method" #~ msgstr "" #~ "Un format d'archive plus ancien que \"08\" (release 2.4.0) ne peut être " #~ "lu à partir d'un seul tube, seule la lecture avec dar_slave ou à partir " #~ "de fichiers (c'est à dire de tranches) est possible" #~ msgid "unary operator" #~ msgstr "opérateur unaire" #~ msgid "Corrupted database, empty entry found" #~ msgstr "Base de données corrompue, entrée vide trouvée" #~ msgid "Reached End of File while reading CRC data" #~ msgstr "Fin de fichier atteinte pendant la lecture d'un CRC" #~ msgid "Error checking for presence of file " #~ msgstr "Erreur lors de la vérification de la présence du fichier " dar-2.6.8/po/LINGUAS0000644000175000017520000000026413606427316010675 00000000000000# Set of available languages. 
fr sv # these have not been updated for a while, which might lead to bug when C formated strings (%s, %u,...) have change or have been updated # de dar-2.6.8/po/remove-potcdate.sin0000644000175000017520000000066013617552277013470 00000000000000# Sed script that remove the POT-Creation-Date line in the header entry # from a POT file. # # The distinction between the first and the following occurrences of the # pattern is achieved by looking at the hold space. /^"POT-Creation-Date: .*"$/{ x # Test if the hold space is empty. s/P/P/ ta # Yes it was empty. First occurrence. Remove the line. g d bb :a # The hold space was nonempty. Following occurrences. Do nothing. x :b } dar-2.6.8/po/boldquot.sed0000644000175000017520000000033113617552277012200 00000000000000s/"\([^"]*\)"/“\1â€/g s/`\([^`']*\)'/‘\1’/g s/ '\([^`']*\)' / ‘\1’ /g s/ '\([^`']*\)'$/ ‘\1’/g s/^'\([^`']*\)' /‘\1’ /g s/“â€/""/g s/“/“/g s/â€/â€/g s/‘/‘/g s/’/’/g dar-2.6.8/po/en@quot.header0000644000175000017520000000226313617552277012445 00000000000000# All this catalog "translates" are quotation characters. # The msgids must be ASCII and therefore cannot contain real quotation # characters, only substitutes like grave accent (0x60), apostrophe (0x27) # and double quote (0x22). These substitutes look strange; see # http://www.cl.cam.ac.uk/~mgk25/ucs/quotes.html # # This catalog translates grave accent (0x60) and apostrophe (0x27) to # left single quotation mark (U+2018) and right single quotation mark (U+2019). # It also translates pairs of apostrophe (0x27) to # left single quotation mark (U+2018) and right single quotation mark (U+2019) # and pairs of quotation mark (0x22) to # left double quotation mark (U+201C) and right double quotation mark (U+201D). # # When output to an UTF-8 terminal, the quotation characters appear perfectly. 
# When output to an ISO-8859-1 terminal, the single quotation marks are # transliterated to apostrophes (by iconv in glibc 2.2 or newer) or to # grave/acute accent (by libiconv), and the double quotation marks are # transliterated to 0x22. # When output to an ASCII terminal, the single quotation marks are # transliterated to apostrophes, and the double quotation marks are # transliterated to 0x22. # dar-2.6.8/po/insert-header.sin0000644000175000017520000000124013617552277013117 00000000000000# Sed script that inserts the file called HEADER before the header entry. # # At each occurrence of a line starting with "msgid ", we execute the following # commands. At the first occurrence, insert the file. At the following # occurrences, do nothing. The distinction between the first and the following # occurrences is achieved by looking at the hold space. /^msgid /{ x # Test if the hold space is empty. s/m/m/ ta # Yes it was empty. First occurrence. Read the file. r HEADER # Output the file's contents by reading the next line. But don't lose the # current line while doing this. g N bb :a # The hold space was nonempty. Following occurrences. Do nothing. x :b } dar-2.6.8/po/stamp-po0000644000175000017520000000001213617552353011324 00000000000000timestamp dar-2.6.8/po/sv.po0000644000175000017520000105707413617551261010653 00000000000000# translation of sv.po to svenska # Copyright (C) 2005, 2006, 2007, 2009 Free Software Foundation, Inc. # This file is distributed under the same license as the PACKAGE package. # # Peter Landgren , 2005, 2006, 2007, 2009, 2011. msgid "" msgstr "" "Project-Id-Version: sv\n" "Report-Msgid-Bugs-To: http://sourceforge.net/tracker/?" 
"group_id=65612MSGID_BUGS_ADDRESS =atid=511612\n" "POT-Creation-Date: 2020-02-08 16:12+0100\n" "PO-Revision-Date: 2012-04-15 11:28+0200\n" "Last-Translator: Peter Landgren \n" "Language-Team: Swedish \n" "Language: sv\n" "MIME-Version: 1.0\n" "Content-Type: text/plain; charset=UTF-8\n" "Content-Transfer-Encoding: 8bit\n" "X-Generator: Lokalize 1.0\n" "Plural-Forms: nplurals=2; plural=(n != 1);\n" #: src/libdar/cat_signature.cpp:72 src/libdar/catalogue.cpp:143 #: src/libdar/catalogue.cpp:162 src/libdar/catalogue.cpp:164 msgid "incoherent catalogue structure" msgstr "osammanhängande katalogstruktur" #: src/libdar/compression.cpp:56 src/libdar/compression.cpp:79 #: src/libdar/compression.cpp:102 msgid "unknown compression" msgstr "okänd komprimering" #: src/libdar/compression.cpp:129 #, c-format msgid "unknown compression algorithm: %S" msgstr "okänd komprimeringsalgoritm: %S" #: src/libdar/filesystem_tools.cpp:164 msgid "Cannot get inode information about file to remove " msgstr "Kan ej fÃ¥r inodsinformation om den fil som skall tas bort " #: src/libdar/filesystem_tools.cpp:177 msgid "Cannot remove directory " msgstr "Kan ej ta bort mapp " #: src/libdar/filesystem_tools.cpp:251 msgid "" "uid value is too high for this system for libdar be able to restore it " "properly" msgstr "" "uid-värde för högt för detta system för 'libdar' att kunna Ã¥terställa den " "säkert" #: src/libdar/filesystem_tools.cpp:255 msgid "" "gid value is too high for this system for libdar be able to restore it " "properly" msgstr "" "gid-värde för högt för detta system för 'libdar' att kunna Ã¥terställa den " "säkert" #: src/libdar/filesystem_tools.cpp:259 src/libdar/filesystem_tools.cpp:263 msgid "Could not restore original file ownership: " msgstr "Kunde inte Ã¥terställa ursprunglig ägare till fil: " #: src/libdar/filesystem_tools.cpp:285 #, c-format msgid "Cannot restore permissions of %s : %s" msgstr "Kan ej Ã¥terställe behörigheter för %s : %s" #: src/libdar/filesystem_tools.cpp:372 #, 
c-format msgid "Failed to open %S while checking for nodump flag: %s" msgstr "Misslyckades att öppna %S medan kontroll av nodump-flagga gjordes: %s" #: src/libdar/filesystem_tools.cpp:386 #, c-format msgid "Cannot get ext2 attributes (and nodump flag value) for %S : %s" msgstr "Kan ej fÃ¥ ext2-attribut (och nodump-flaggvärde) för %S : %s" #: src/libdar/filesystem_tools.cpp:418 #, c-format msgid "Cannot get inode information for %s : %s" msgstr "Kan ej fÃ¥ inodinformation för %s : %s" #: src/libdar/filesystem_tools.cpp:447 #, c-format msgid "" "Replacing %s in the -R option by the directory pointed to by this symbolic " "link: " msgstr "" "Ã…terställer %s i -R alternativet genom den mapp, pekad pÃ¥ frÃ¥n denna " "symboliska länk: " #: src/libdar/filesystem_tools.cpp:450 #, c-format msgid "" "The given path %s must be a directory (or symbolic link to an existing " "directory)" msgstr "" "Den uppgivna sökvägen %s mÃ¥ste vara en mapp (eller en symbolisk länk till en " "befintlig mapp)" #: src/libdar/filesystem_tools.cpp:577 msgid "" "File the patch is about to be applied to is not the expected one, aborting " "the patch operation" msgstr "" #: src/libdar/filesystem_tools.cpp:594 msgid "Patch data does not match its CRC, archive corruption took place" msgstr "" #: src/libdar/filesystem_tools.cpp:625 #, fuzzy msgid "Error met while checking the resulting patched file: " msgstr "Fel vid skapande av hash handle: %s/%s" #: src/libdar/filesystem_tools.cpp:752 #, fuzzy msgid "Copied data does not match expected CRC" msgstr "Kopierade data matchar inte CRC" #: src/libdar/fsa_family.cpp:58 msgid "creation date" msgstr "" #: src/libdar/fsa_family.cpp:60 #, fuzzy msgid "append only" msgstr "enbart läsning" #: src/libdar/fsa_family.cpp:62 #, fuzzy msgid "compressed" msgstr "lzo-komprimering" #: src/libdar/fsa_family.cpp:64 msgid "no dump flag" msgstr "" #: src/libdar/fsa_family.cpp:66 msgid "immutable" msgstr "" #: src/libdar/fsa_family.cpp:68 msgid "journalized" msgstr "" #: 
src/libdar/fsa_family.cpp:70 msgid "secure deletion" msgstr "" #: src/libdar/fsa_family.cpp:72 msgid "no tail merging" msgstr "" #: src/libdar/fsa_family.cpp:74 msgid "undeletable" msgstr "" #: src/libdar/fsa_family.cpp:76 msgid "no atime update" msgstr "" #: src/libdar/fsa_family.cpp:78 #, fuzzy msgid "synchronous directory" msgstr "mapp" #: src/libdar/fsa_family.cpp:80 msgid "synchronous update" msgstr "" #: src/libdar/fsa_family.cpp:82 msgid "top of directory hierarchy" msgstr "" #: src/libdar/statistics.cpp:81 #, fuzzy msgid "Error while initializing \"mutex\" for class \"statistics\": " msgstr "Fel vid initialisering \"mutex\" för klass-statistik: " #: src/libdar/tronconneuse.cpp:61 #, c-format msgid "%d is not a valid block size" msgstr "%d är inte en giltig blockstorlek" #: src/libdar/tronconneuse.cpp:464 src/libdar/crypto_sym.cpp:349 msgid "Data corruption may have occurred, cannot decrypt data" msgstr "data har förstörts, kan en dechiffrera data" #: src/libdar/cat_delta_signature.cpp:332 msgid "data corrupted when attempting to read delta signature block size" msgstr "" #: src/libdar/cat_delta_signature.cpp:354 #, fuzzy msgid "" "Error while reading CRC of delta signature data. Data corruption occurred" msgstr "Fel uppstÃ¥tt vid bearbetande av Ã¥tgärd: " #: src/libdar/cat_delta_signature.cpp:356 #, fuzzy msgid "CRC error met while reading delta signature: data corruption." msgstr "Fel uppstÃ¥tt vid bearbetande av Ã¥tgärd: " #: src/libdar/sar.cpp:165 #, c-format msgid "" "Error met while opening the last slice: %S. Trying to open the archive using " "the first slice..." msgstr "" "Fel vid öppning av sista delsäkerhetskopia: %S. Försöker öppna arkivet genom " "att använda första delsäkerhetskopia..." 
#: src/libdar/sar.cpp:213 msgid "File size too small" msgstr "Filstorlek för liten" #: src/libdar/sar.cpp:217 msgid "First file size too small" msgstr "Första filens storlek för liten" #: src/libdar/sar.cpp:689 msgid "LAX MODE: Caught exception: " msgstr "LAX MODE: fÃ¥ngade avbrott: " #: src/libdar/sar.cpp:690 #, fuzzy, c-format msgid "" "LAX MODE: %S is missing, You have the possibility to create a zero byte " "length file under the name of this slice, to replace this missing file. This " "will of course generate error messages about the information that is missing " "in this slice, but at least libdar will be able to continue. Can we continue " "now?" msgstr "" "LAX MODE: %s saknas. Du har möjlighet att skapa en fil med längden 0 byte " "med namn pÃ¥ denna delsäkerhetskopia för att ersätta den saknade filen. Detta " "kommer naturligtvis att skapa felmeddelanden om den information, som saknas " "i denna delsäkerhetskopia, men Ã¥tminstone 'libdar' kommer att kunna " "fortsätta. Kan vi fortsätta nu?" #: src/libdar/sar.cpp:702 #, fuzzy, c-format msgid "%S is required for further operation, please provide the file." msgstr "%s krävs för ytterligare Ã¥tgärd, vänligen förse med filen." #: src/libdar/sar.cpp:704 #, fuzzy, c-format msgid "" "%S is required for further operation, please provide the file if you have it." msgstr "%s krävs för ytterligare Ã¥tgärd, vänligen förse med filen." #: src/libdar/sar.cpp:709 #, fuzzy, c-format msgid "Failed reading slice %S: " msgstr "Fel vid läsning frÃ¥n fil: " #: src/libdar/sar.cpp:752 #, fuzzy, c-format msgid "%S has a bad or corrupted header, please provide the correct file." msgstr "%s har ett felaktigt huvud, ge den korrekta filen." 
#: src/libdar/sar.cpp:756 #, fuzzy, c-format msgid "" "LAX MODE: %S has a bad or corrupted header, trying to guess original values " "and continuing if possible" msgstr "" "LAX MODE: %s har felaktigt eller förstört huvud, försöker att gissa " "ursprungliga värden och fortsätter om möjligt" #: src/libdar/sar.cpp:766 #, fuzzy, c-format msgid "" "%S is not a valid file (wrong magic number), please provide the good file." msgstr "" "%s är inte en giltig fil (felaktigt magiskt tal), förse med den rätta filen." #: src/libdar/sar.cpp:770 #, fuzzy, c-format msgid "" "LAX MODE: In spite of its name, %S does not appear to be a dar slice, " "assuming a data corruption took place and continuing" msgstr "" "LAX MODE: Trots sitt namn, sÃ¥ verkar inte %s vara en dar-delsäkerhetskopia, " "antar data förstört och fortsätter" #: src/libdar/sar.cpp:774 msgid "" "This is an old archive, it can only be opened starting by the first slice" msgstr "" "Detta är ett gammalt arkiv, det kan bara öppnas genom att börja med den " "första delsäkerhetskopian" #: src/libdar/sar.cpp:801 msgid "" "LAX MODE: Due to probable data corruption, dar could not determine the " "correct size of slices in this archive. For recent archive, this information " "is duplicated in each slice, do you want to try opening another slice to get " "this value if present?" msgstr "" "LAX MODE: PÃ¥ grund av trolig dataförstörelse, kan 'dar' inte avgöra det " "rätta storleken pÃ¥ delsäkerhetskopian i detta arkiv. För senare arkiv är " "denna information dubblerad i varje delsäkerhetskopia, vill du försöka att " "öppna en annan delsäkerhetskopia för att fÃ¥ tag pÃ¥ detta värde om det finns?" 
#: src/libdar/sar.cpp:805 msgid "LAX MODE: Please provide the slice number to read: " msgstr "LAX MODE: Skriv in numret pÃ¥ den delsäkerhetskopia som skall läsas: " #: src/libdar/sar.cpp:813 msgid "LAX MODE: Please provide an strictly positive integer number" msgstr "LAX MODE: Skriv in ett positivt heltal" #: src/libdar/sar.cpp:819 #, c-format msgid "LAX MODE: opening slice %i to read its slice header" msgstr "" "LAX MODE: öppnar delsäkerhetskopia %i för att läsa dess " "delsäkerhetskopiehuvud" #: src/libdar/sar.cpp:821 #, c-format msgid "LAX MODE: closing slice %i, header properly fetched" msgstr "LAX MODE: stänger delsäkerhetskopia %i, huvud hämtat korrekt" #: src/libdar/sar.cpp:827 msgid "" "LAX MODE: In spite of a the absence of a known slice size, continuing anyway" msgstr "" "LAX MODE: Trots frÃ¥nvaron av en känd delsäkerhetskopiestorlek, fortsätter " "hur som helst" #: src/libdar/sar.cpp:834 msgid "Incoherent slice header: First slice size too small" msgstr "" "Felaktig delsäkerhetskopiehuvud: första delsäkerhetskopiestorlek för liten" #: src/libdar/sar.cpp:836 msgid "incoherent slice header: Slice size too small" msgstr "Felaktig delsäkerhetskopiehuvud: delsäkerhetskopiestorlek för liten" #: src/libdar/sar.cpp:842 #, fuzzy, c-format msgid "Error opening %S : " msgstr "Fel vid öppning av %s : " #: src/libdar/sar.cpp:842 msgid " . Retry ?" msgstr " : Försöka igen ?" #: src/libdar/sar.cpp:853 msgid " is a slice from another backup, please provide the correct slice." msgstr " är en del frÃ¥n en annan säkerhetskopia, hämta den rätta delen." #: src/libdar/sar.cpp:858 msgid "" "LAX MODE: internal name of the slice leads dar to consider it is not member " "of the same archive. Assuming data corruption occurred and relying on the " "filename of this slice as proof of its membership to the archive" msgstr "" "LAX MODE: det interna namnet pÃ¥ delsäkerhetskopian leder 'dar' till att " "besluta att den inte är frÃ¥n samma arkiv. 
Anar dataförstörelse och litande " "pÃ¥ filnamnet pÃ¥ denna delsäkerhetskopia som bevis pÃ¥ dess medlemskap i " "arkivet" #: src/libdar/sar.cpp:886 msgid "" "Data corruption met at end of slice, forbidden flag found at this position" msgstr "" "Data förstört vid slutet pÃ¥ delsäkerhetskopian, förbjuden flagga hittad i " "denna position" #: src/libdar/sar.cpp:892 msgid "Data corruption met at end of slice, unknown flag found" msgstr "" "Data förstört vid slutet pÃ¥ delsäkerhetskopian, förbjuden flagga hittad" #: src/libdar/sar.cpp:912 #, c-format msgid "" "Two different slices (%i and %i) are marked as the last slice of the backup!" msgstr "" "TvÃ¥ olika delsäkerhetskopior (%i och %i) är markerade som den sista " "delsäkerhetskopian i denna säkerhetskopia!" #: src/libdar/sar.cpp:915 #, c-format msgid "" "LAX MODE: slices %i and %i are both recorded as last slice of the archive, " "keeping the higher number as the real last slice" msgstr "" "LAX MODE: delsäkerhetskopior %i och %i är bägge noterade som den sista " "delsäkerhetskopian, behÃ¥ller det högre talet som den verkligt sista " "delsäkerhetskopian" #: src/libdar/sar.cpp:938 #, fuzzy, c-format msgid "Slice %S has an unknown flag (neither terminal nor non_terminal file)." msgstr "" "Delsäkerhetskopia %s har en okänd flagga (varken terminal eller icke-" "terminal fil)." #: src/libdar/sar.cpp:948 #, fuzzy, c-format msgid "" "Due to data corruption, it is not possible to know if slice %S is the last " "slice of the archive or not. I need your help to figure out this. At the " "following prompt please answer either one of the following words: \"last\" " "or \"notlast\" according to the nature of this slice (you can also answer " "with \"abort\" to abort the program immediately): " msgstr "" "PÃ¥ grind av dataförstörelse, är det omöjligt att veta om delsäkerhetskopia " "%s är den sista i arkivet eller ej. behöver hjälp för att ta reda pÃ¥ detta. 
" "Vid följande frÃ¥ga svara med ett av följande ord: \"last\" eller \"notlast\" " "enligt egenskapen för denna delsäkerhetskopia. Du kan även svara \"abort\" " "för att avsluta programmet direkt: " #: src/libdar/sar.cpp:950 src/libdar/sar.cpp:954 msgid "last" msgstr "sista" #: src/libdar/sar.cpp:950 msgid "notlast" msgstr "ej sista" #: src/libdar/sar.cpp:950 src/libdar/sar.cpp:952 msgid "abort" msgstr "avbryt" #: src/libdar/sar.cpp:966 msgid "" "LAX MODE: Slice flag corrupted, but a slice of higher number has been seen, " "thus the header flag was surely not indicating this slice as the last of the " "archive. Continuing" msgstr "" "LAX MODE: delsäkerhetskopieflagga förstörd, men en delsäkerhetskopia med " "högre nummer har setts, sÃ¥lunda indikerade denna delsäkerhetskopieflagga " "inte denna delsäkerhetskopia som den sista i arkivet. Fortsätter" #: src/libdar/sar.cpp:1010 #, c-format msgid "failed openning slice %S: %S. Will try to erase it first, if allowed" msgstr "" #: src/libdar/sar.cpp:1076 src/libdar/sar.cpp:1147 src/libdar/sar.cpp:1158 #: src/libdar/trivial_sar.cpp:224 src/libdar/trivial_sar.cpp:242 #, fuzzy, c-format msgid "Failed creating slice %S: " msgstr "Misslyckades med att notera hash: " #: src/libdar/sar.cpp:1097 msgid "file exists, and DONT_ERASE option is set." msgstr "fil finns, och 'DONT_ERASE'-alternativ är inställt." #: src/libdar/sar.cpp:1102 msgid " is about to be overwritten." msgstr " skall till att skrivas över." 
#: src/libdar/sar.cpp:1182 msgid "" "First slice size is too small to even just be able to drop the slice header" msgstr "" "Första delsäkerhetskopiestorlek är för liten för att till och med kunna pÃ¥ " "plats med delsäkerhetskopiehuvudet" #: src/libdar/sar.cpp:1184 msgid "Slice size is too small to even just be able to drop the slice header" msgstr "" "Delsäkerhetskopiestorlek är för liten för att till och med kunna pÃ¥ plats " "med delsäkerhetskopiehuvudet" #: src/libdar/sar.cpp:1247 msgid "Finished writing to file " msgstr "Klar med skrivning till fil " #: src/libdar/sar.cpp:1247 msgid ", ready to continue ? " msgstr ", beredd att fortsätta ? " #: src/libdar/sar.cpp:1252 msgid "" "If you really want to abort the archive creation hit CTRL-C, then press " "enter." msgstr "Om du verkligen vill stoppa dar, tryck CTLR-C och sedan vagnretur " #: src/libdar/sar.cpp:1284 msgid "file not open" msgstr "fil ej öppen" #: src/libdar/sar.cpp:1318 msgid "The last file of the set is not present in " msgstr "Den sista filen i omgÃ¥ngen finns inte i " #: src/libdar/sar.cpp:1318 msgid " , please provide it." msgstr " . hämta den." #: src/libdar/sar.cpp:1332 #, c-format msgid "" "No backup file is present in %S for archive %S, please provide the last file " "of the set." 
msgstr "" "Ingen säkerhetskopiefil finns i %S för arkiv %S, ge de n sista filen i " "mängden" #: src/libdar/gf_mode.cpp:43 msgid "read only" msgstr "enbart läsning" #: src/libdar/gf_mode.cpp:46 msgid "write only" msgstr "enbart skrivning" #: src/libdar/gf_mode.cpp:49 msgid "read and write" msgstr "läs och skriv" #: src/libdar/int_tools.cpp:66 msgid "a binary digit is either 0 or 1" msgstr "en binär siffra är antingen 0 eller 1" #: src/libdar/erreurs.cpp:123 #, c-format msgid "File %S line %d" msgstr "Fil %S rad %d" #: src/libdar/erreurs.cpp:123 msgid "it seems to be a bug here" msgstr "det verkar vara en bugg här" #: src/libdar/erreurs.cpp:152 #, c-format msgid "in file %S line %S" msgstr "i fil %S rad %S" #: src/libdar/erreurs.cpp:171 msgid "# UNEXPECTED EXCEPTION, #" msgstr "# OVÄNTAT AVBROTT, #" #: src/libdar/erreurs.cpp:172 src/libdar/erreurs.cpp:185 msgid "# E X I T I N G ! #" msgstr "# A V B B Y T E R ! #" #: src/libdar/erreurs.cpp:175 src/libdar/erreurs.cpp:188 msgid "" " THANKS TO REPORT THE PREVIOUS OUTPUT TO MAINTAINER\n" " GIVING A DESCRIPTION OF THE CIRCUMSTANCES." msgstr "" " TACKSAM FÖR RAPPORT OM FÖREGÃ…ENDE UTMATNING TILL UNDERHÃ…LLAREN\n" " MED EN BESKRIVNING AV OMSTÄNDIGHETERNA." #: src/libdar/erreurs.cpp:176 msgid "" " IF POSSIBLE TRY TO REPRODUCE THIS ERROR, A\n" " SCENARIO THAT CAN REPRODUCE IT WOULD HELP MUCH\n" " IN SOLVING THIS PROBLEM. THANKS" msgstr "" " OM MÖJLIGT FÖRSÖK ATT Ã…TERSKAPA DETTA FEL,\n" "ETT SCENARIO SOM KAN Ã…TERSKAPA DET ÄR TILL STOR HJÄLP\n" " VID PROBLEMSLÖSNINGEN TACK" #: src/libdar/erreurs.cpp:184 msgid "# NOT CAUGHT EXCEPTION, #" msgstr "# ICKE FÃ…NGAT AVBROTT, #" #: src/libdar/erreurs.cpp:189 msgid "" " IF POSSIBLE TRY TO PRODUCE THIS ERROR, A\n" " SCENARIO THAT CAN REPRODUCE IT WOULD HELP MUCH\n" " IN SOLVING THIS PROBLEM. 
THANKS" msgstr "" " OM MÖJLIGT FÖRSÖK ATT SKAPA DETTA FEL,\n" "ETT SCENARIO SOM KAN Ã…TERSKAPA DET ÄR TILL STOR HJÄLP\n" " VID PROBLEMSLÖSNINGEN TACK" #: src/libdar/generic_rsync.cpp:210 #, fuzzy msgid "Error met building the rsync hash table: " msgstr "Fel vid skapande av hash handle: %s/%s" #: src/libdar/generic_rsync.cpp:487 #, fuzzy msgid "Error met while feeding data to librsync: " msgstr "Fel uppstÃ¥tt vid bearbetande av Ã¥tgärd: " #: src/libdar/generic_rsync.cpp:509 #, fuzzy msgid "Error releasing librsync job: " msgstr "Fel vid läsning av EA för " #: src/libdar/tools.hpp:697 #, c-format msgid "Found negative date (%s) for inode %s ." msgstr "" #: src/libdar/tools.hpp:701 #, c-format msgid "%S Can we read it as if it was zero (1st January 1970 at 00:00:00 UTC)?" msgstr "" #: src/libdar/tools.hpp:704 msgid "Considering date as if it was zero (Jan 1970)" msgstr "" #: src/libdar/entree_stats.cpp:140 #, fuzzy, c-format msgid "CATALOGUE CONTENTS :" msgstr "" "\n" "KATALOGINNEHÃ…LL :\n" "\n" #: src/libdar/entree_stats.cpp:142 #, fuzzy, c-format msgid "total number of inode : %i" msgstr "totalt antal inoder : %i\n" #: src/libdar/entree_stats.cpp:143 #, fuzzy, c-format msgid "fully saved : %i" msgstr "sparad inode : %i\n" #: src/libdar/entree_stats.cpp:144 #, fuzzy, c-format msgid "binay delta patch : %i" msgstr "" "\n" "dar sökväg : %S\n" #: src/libdar/entree_stats.cpp:145 #, c-format msgid "inode metadata only : %i" msgstr "" #: src/libdar/entree_stats.cpp:146 #, fuzzy, c-format msgid "distribution of inode(s)" msgstr "fördelning av inoder\n" #: src/libdar/entree_stats.cpp:147 #, fuzzy, c-format msgid " - directories : %i" msgstr " - mappar : %i\n" #: src/libdar/entree_stats.cpp:148 #, fuzzy, c-format msgid " - plain files : %i" msgstr " - enkla filer : %i\n" #: src/libdar/entree_stats.cpp:149 #, fuzzy, c-format msgid " - symbolic links : %i" msgstr " - symboliska länkar :%i\n" #: src/libdar/entree_stats.cpp:150 #, fuzzy, c-format msgid " - named pipes : %i" 
msgstr " - namngivna rör : %i\n" #: src/libdar/entree_stats.cpp:151 #, fuzzy, c-format msgid " - unix sockets : %i" msgstr " - unix socklar : %i\n" #: src/libdar/entree_stats.cpp:152 #, fuzzy, c-format msgid " - character devices : %i" msgstr " - teckenenheter : %i\n" #: src/libdar/entree_stats.cpp:153 #, fuzzy, c-format msgid " - block devices : %i" msgstr " - blockenheter : %i\n" #: src/libdar/entree_stats.cpp:154 #, fuzzy, c-format msgid " - Door entries : %i" msgstr " - 'Door entries' : %i\n" #: src/libdar/entree_stats.cpp:155 #, fuzzy, c-format msgid "hard links information" msgstr "information om hÃ¥rda länkar\n" #: src/libdar/entree_stats.cpp:156 #, fuzzy, c-format msgid " - number of inode with hard link : %i" msgstr " - antal inoder med hÃ¥rd länk : %i\n" #: src/libdar/entree_stats.cpp:157 #, fuzzy, c-format msgid " - number of reference to hard linked inodes: %i" msgstr " - antal referenser till hÃ¥rdlänkade inoder: %i\n" #: src/libdar/entree_stats.cpp:158 #, fuzzy, c-format msgid "destroyed entries information" msgstr "information om förstörda poster\n" #: src/libdar/entree_stats.cpp:159 #, fuzzy, c-format msgid " %i file(s) have been record as destroyed since backup of reference" msgstr "" " %i filer har noterats som förstörda sedan referenssäkerhetskopian\n" "\n" #: src/libdar/wrapperlib.cpp:468 msgid "compressReset called but compressInit never called before" msgstr "'compressReset' anropad men 'compressInit' anropades inte först" #: src/libdar/fichier_libcurl.cpp:75 #, fuzzy, c-format msgid "Error met while resetting URL to handle: %s" msgstr "Fel vid skapande av hash handle: %s/%s" #: src/libdar/fichier_libcurl.cpp:84 src/libdar/fichier_libcurl.cpp:619 #: src/libdar/fichier_libcurl.cpp:676 #, fuzzy, c-format msgid "Error met while setting libcurl for reading data file: %s" msgstr "Fel uppstÃ¥tt vid bearbetande av Ã¥tgärd: " #: src/libdar/fichier_libcurl.cpp:91 src/libdar/fichier_libcurl.cpp:96 #: src/libdar/fichier_libcurl.cpp:646 
src/libdar/fichier_libcurl.cpp:683 #, fuzzy, c-format msgid "Error met while setting libcurl for writing data file: %s" msgstr "Fel uppstÃ¥tt vid bearbetande av Ã¥tgärd: " #: src/libdar/fichier_libcurl.cpp:139 src/libdar/fichier_libcurl.cpp:148 #, c-format msgid "%s: %s" msgstr "" #: src/libdar/fichier_libcurl.cpp:184 src/libdar/fichier_libcurl.cpp:192 #: src/libdar/fichier_libcurl.cpp:203 #, fuzzy, c-format msgid "Error met while fetching file size: %s" msgstr "Fel vid tilldelning av en fils egenskaper:%s" #: src/libdar/fichier_libcurl.cpp:259 msgid "libcurl does not allow skipping in write mode" msgstr "" #: src/libdar/fichier_libcurl.cpp:491 #, fuzzy msgid "Error met during network transfer: " msgstr "Fel vid räddning av EA-lista för %s : %s" #: src/libdar/fichier_libcurl.cpp:514 #, fuzzy, c-format msgid "Error met while reading a block of data: %s" msgstr "Fel uppstÃ¥tt vid bearbetande av Ã¥tgärd: " #: src/libdar/fichier_libcurl.cpp:590 src/libdar/fichier_libcurl.cpp:598 #: src/libdar/fichier_libcurl.cpp:636 src/libdar/fichier_libcurl.cpp:776 #, fuzzy, c-format msgid "Error while seeking in file on remote repository: %s" msgstr "Fel vid tilldelning av en fils egenskaper:%s" #: src/libdar/fichier_libcurl.cpp:631 msgid "" "Integer too large for libcurl, cannot skip at the requested offset in the " "remote repository" msgstr "" #: src/libdar/fichier_libcurl.cpp:654 #, fuzzy, c-format msgid "Error while setting write append mode for libcurl: %s" msgstr "Fel vid läsning av användarsvar frÃ¥n terminal: " #: src/libdar/fichier_libcurl.cpp:934 src/libdar/fichier_libcurl.cpp:940 #, c-format msgid "%S: %s, aborting" msgstr "" #: src/libdar/fichier_libcurl.cpp:955 #, c-format msgid "%S: %s, retrying in %d seconds" msgstr "" #: src/libdar/fichier_libcurl.cpp:962 #, c-format msgid "%S: %s, do we retry network operation?" msgstr "" #: src/libdar/macro_tools.cpp:165 msgid "Locating archive contents..." msgstr "Letar efter arkivinnehÃ¥ll..." 
#: src/libdar/macro_tools.cpp:177 msgid "Reading archive contents..." msgstr "Läser arkivinnehÃ¥ll..." #: src/libdar/macro_tools.cpp:213 msgid "Missing catalogue in file." msgstr "Katalog saknas i fil." #: src/libdar/macro_tools.cpp:312 msgid "" "LAX MODE: catalogue computed hash does not match the signed hash of the " "archive, ignoring" msgstr "" #: src/libdar/macro_tools.cpp:314 msgid "" "Catalogue computed hash does not match the signed hash of the archive, " "archive has been modified since it was signed!" msgstr "" #: src/libdar/macro_tools.cpp:343 msgid "Cannot open catalogue: " msgstr "Kan ej öppna katalog: " #: src/libdar/macro_tools.cpp:426 msgid "Opening standard input to read the archive..." msgstr "Öppnar standard-input för att läsa arkivet..." #: src/libdar/macro_tools.cpp:434 #, c-format msgid "Opening named pipe %S as input to read the archive..." msgstr "Öppnar namngiven pipe %S som input för att läsa arkivet..." #: src/libdar/macro_tools.cpp:447 #, c-format msgid "" "Opening a pair of pipes to read the archive, expecting dar_slave at the " "other ends..." msgstr "" "Öppnar ett par pipes för att läsa arkivet, förväntar mig dar_slave i andra " "ändan..." #: src/libdar/macro_tools.cpp:477 msgid "Opening the archive using the multi-slice abstraction layer..." msgstr "Öppnar arkivet genom att använda abstraktionsnivÃ¥ med 'multi-slice'..." #: src/libdar/macro_tools.cpp:511 msgid "Reading the archive header..." msgstr "Läser arkivhuvud..." #: src/libdar/macro_tools.cpp:513 #, fuzzy msgid "Reading the archive trailer..." msgstr "Läser arkivhuvud..." #: src/libdar/macro_tools.cpp:532 #, c-format msgid "" "Error while reading archive's header, this may be because this archive is an " "old encrypted archive or that data corruption took place, Assuming it is an " "old archive, we have to read the header at the beginning of the first " "slice..." msgstr "" "Fel vid läsning av arkivhuvud. 
Detta kan bero pÃ¥ att arkivet är ett gammalt " "krypterat arkiv eller att data har förstörts. Under antagande att det är ett " "gammalt arkiv, mÃ¥ste huvudet läsas frÃ¥n början av den första delen..." #: src/libdar/macro_tools.cpp:543 #, fuzzy msgid "" "Found a correct archive header at the beginning of the archive, which does " "not stands to be an old archive, the end of the archive is corrupted and " "thus the catalogue is not readable, aborting. Either retry providing in " "addition an isolated catalogue of that archive to perform the operation, or " "try reading the archive in sequential mode or try in lax mode or, last " "chance, try both lax and sequential read mode at the same time" msgstr "" "Hittade en felaktigt arkivhuvud i början av arkivet, vilket inte bevisar att " "det är ett gammalt arkiv. Slutet pÃ¥ arkivet är sÃ¥ledes förstört. Du mÃ¥ste " "använda sekvensiell läsningsteknik för att ha en chans att använda detta " "förstörda arkiv." #: src/libdar/macro_tools.cpp:545 #, fuzzy msgid "" "Found a correct archive header at the beginning of the archive, which does " "not stands to be an old archive, the end of the archive is thus corrupted. " "Without external catalogue provided and as we do not read the archive in " "sequential mode, there is very little chance to retreive something from this " "corrupted archive. Do we continue anyway ?" msgstr "" "Hittade en felaktigt arkivhuvud i början av arkivet, vilket inte bevisar att " "det är ett gammalt arkiv. Slutet pÃ¥ arkivet är sÃ¥ledes förstört. Du mÃ¥ste " "använda sekvensiell läsningsteknik för att ha en chans att använda detta " "förstörda arkiv." #: src/libdar/macro_tools.cpp:556 #, c-format msgid "Opening construction layer..." msgstr "Öppnar konstruktionsnivÃ¥..." #: src/libdar/macro_tools.cpp:573 msgid "Considering cyphering layer..." msgstr "Funderar pÃ¥ chiffreringsnivÃ¥..." 
#: src/libdar/macro_tools.cpp:582 msgid "" "LAX MODE: Archive seems to be ciphered, but you did not have provided any " "encryption algorithm, assuming data corruption and considering that the " "archive is not ciphered" msgstr "" "LAX MODE: Arkivet verkar vara chiffrerat, men du har ej angett nÃ¥gon " "chifferalgoritm, antar dataförstörelse och betraktar arkivet som ej " "chiffrerat" #: src/libdar/macro_tools.cpp:585 #, c-format msgid "" "The archive %S is encrypted and no encryption cipher has been given, cannot " "open archive." msgstr "" "Arkivet %S är krypterat och ingen krypteringschiffer har getts, kan ej öppna " "arkivet." #: src/libdar/macro_tools.cpp:616 msgid "" "WARNING: support for secure memory was not available at compilation time, in " "case of heavy memory load, this may lead the password you are about to " "provide to be wrote to disk (swap space) in clear. You have been warned!" msgstr "" "VARNING: stöd för säkert minne var ej tillgängligt vid " "kompileringstillfället. Vid stor minnesbelastning kan detta leda till att " "det lösenord du skall till och lämna kan skrivas till disk (växlingsutrymme) " "i klartext. Du har härmed varnats!" #: src/libdar/macro_tools.cpp:617 src/libdar/macro_tools.cpp:1359 #, c-format msgid "Archive %S requires a password: " msgstr "Arkiv %S kräver ett lösenord: " #: src/libdar/macro_tools.cpp:626 #, fuzzy msgid "No cyphering layer opened, adding cache layer for better performance" msgstr "Öppnar \"cache\"-nivÃ¥n för bättre prestanda..." #: src/libdar/macro_tools.cpp:632 msgid "" "Failed opening the cache layer, lack of memory, archive read performances " "will not be optimized" msgstr "" "Misslyckades med att öppna \"cache\"-nivÃ¥n, minnesbrist, arkivläsförmÃ¥ga " "blir ej optimal" #: src/libdar/macro_tools.cpp:637 msgid "No cyphering layer opened" msgstr "Ingen chiffreringslager öppnat..." #: src/libdar/macro_tools.cpp:646 src/libdar/macro_tools.cpp:696 msgid "Opening cyphering layer..." 
msgstr "Öppnar chiffreringslager..." #: src/libdar/macro_tools.cpp:703 msgid "Unknown encryption algorithm" msgstr "Okänd krypteringsalgoritm" #: src/libdar/macro_tools.cpp:723 src/libdar/macro_tools.cpp:1481 msgid "Creating a new thread to run the previously created layers..." msgstr "" #: src/libdar/macro_tools.cpp:743 msgid "Opening escape sequence abstraction layer..." msgstr "Öppnar escape-sekvensabstraktionslagret" #: src/libdar/macro_tools.cpp:748 msgid "" "LAX MODE: Archive is flagged as having escape sequence (which is normal in " "recent archive versions). However if this is not expected, shall I assume a " "data corruption occurred in this field and that this flag should be ignored? " "(If unsure, refuse)" msgstr "" "LAX MODE: Arkiv är flaggat som om det har 'escape sekvens " "markeringar' (vilket är normalt i tidigare arkivversioner). Om detta inte är " "förväntat, skall jag antaga att data förstörts i detta fält och att denna " "flagga skall ignoreras? (Om osäker, vägra)" #: src/libdar/macro_tools.cpp:763 msgid "" "Sequential read asked, but this archive is flagged to not have the necessary " "embedded escape sequences for that operation, aborting" msgstr "" "Sekvensiell läsning begärd, men detta arkiv är flaggat för att inte ha " "nödvändiga escepsekvenser för denna Ã¥tgärd, avbryter" #: src/libdar/macro_tools.cpp:768 msgid "" "LAX MODE: the requested sequential read mode relies on escape sequence which " "seem to be absent from this archive. Assuming data corruption occurred. " "However, if no data corruption occurred and thus no escape sequence are " "present in this archive, do not use sequential reading mode to explore this " "archive else you will just get nothing usable from it" msgstr "" "LAX MODE: en begärda läget sekvensiell läsning bygger pÃ¥ escape-sekvenser " "vilket värkar saknas i detta arkiv. Antar att data förstörts. 
Men om inga " "data förstörts och således ingen escape-sekvens finns i arkivet, använd inte " "läget sekvensiell läsning för att undersöka detta arkiv, annars får du " "inget matnyttigt av det" #: src/libdar/macro_tools.cpp:774 #, fuzzy msgid "" "LAX MODE: Archive is flagged to not have escape sequence which is not the " "case by default since archive format 8 (release 2.4.x). If corruption " "occurred and an escape sequence is present, this may lead data restoration " "to fail, answering no at this question will let me consider that an escape " "sequence layer has to be added in spite of the archive flags. Do you want to " "continue as suggested by the archive flag, thus without escape sequence " "layer?" msgstr "" "LAX MODE: Arkiv är flaggat att inte ha escape-sekvenser. Om data förstörts " "och en escape-sekvens finns, kan detta leda till att återställningen " "misslyckas, att svara \"no\" på denna fråga kan ge mig möjligheten att lägga " "till en escape-sekvens-nivå trots arkivflaggan. Vill du fortsätta som " "föreslagits av arkivflaggan, sålunda utan escape-sekvens-nivå?" #: src/libdar/macro_tools.cpp:805 src/libdar/macro_tools.cpp:1527 msgid "Creating a new thread to run the escape layer..." msgstr "" #: src/libdar/macro_tools.cpp:823 msgid "" "Opening the compression abstraction layer (compression algorithm used is " "none)..." msgstr "" "Öppnar kompressionsabstraktionslagret (ingen kompressionsalgoritm " "används)..." #: src/libdar/macro_tools.cpp:825 msgid "Opening the compression layer..." msgstr "Öppnar kompressionslagret..." #: src/libdar/macro_tools.cpp:849 src/libdar/macro_tools.cpp:1559 #, fuzzy msgid "Creating a new thread to run the compression layer..." msgstr "Öppnar kompressionslagret..." #: src/libdar/macro_tools.cpp:863 src/libdar/macro_tools.cpp:1569 msgid "All layers have been created successfully" msgstr "Alla lager har skapats korrekt" #: src/libdar/macro_tools.cpp:866 #, fuzzy, c-format msgid "" "Warning, the archive %S has been encrypted. 
A wrong key is not possible to " "detect, it would cause DAR to report the archive as corrupted" msgstr "" "Varning, arkivet %S har krypterats. En felaktig nyckel är omöjlig att " "detektera, det skulle leda till att DAR rapporterar att arkivet var " "förstört\n" #: src/libdar/macro_tools.cpp:905 msgid "" "LAX MODE: The catalogue (table of contents) usually takes a few percents of " "the archive at its end, which percentage do you want me to scan (answer by " "an *integer* number between 0 and 100)? " msgstr "" "LAX MODE: katalogen (InnehÃ¥llsförteckning) tar normalt upp nÃ¥gra procent av " "arkivet i dess slut. Hur stor del procentuellt, vill du att jag skall avsöka " "(svara med ett heltal mellan 0 och 100)?" #: src/libdar/macro_tools.cpp:911 #, c-format msgid "LAX MODE: %i is not a valid percent value" msgstr "LAX MODE: %i är ej giltigt procentvärde" #: src/libdar/macro_tools.cpp:915 #, c-format msgid "%S is not a valid number" msgstr "%S är inte ett giltigt tal" #: src/libdar/macro_tools.cpp:921 #, c-format msgid "" "LAX MODE: Beginning search of the catalogue (from the end toward the " "beginning of the archive, on %i %% of its length), this may take a while..." msgstr "" "LAX MODE: Börjar med att avsöka katalogen (frÃ¥n slutet mot början av arkivet " "%i %% av arkivet), detta kan ta ett bra tag..." #: src/libdar/macro_tools.cpp:930 msgid "" "LAX MODE: Cannot skip at the end of the archive! Using current position to " "start the catalogue search" msgstr "" "LAX MODE: Kan ej hoppa över i slutet av arkivet! 
Använder aktuell position " "till att börja katalogsökningen" #: src/libdar/macro_tools.cpp:935 msgid "LAX MODE: Failed to read the catalogue (no data to inspect)" msgstr "" "LAX MODE: Misslyckades med att läsa katalogen (inga data att inspektera)" #: src/libdar/macro_tools.cpp:938 msgid "" "LAX MODE: Failed to read the catalogue (0 bytes of the archive length asked " "to look for the catalogue)" msgstr "" "LAX MODE: Misslyckades med att läsa katalogen (0 byte av arkivlängden " "begärda för att leta efter katalogen)" #: src/libdar/macro_tools.cpp:949 msgid "" "LAX MODE: Escape sequence seems present in this archive. I have thus two " "different methods, either I look for the escape sequence indicating the " "start of the catalogue or I try each position in turn in the hope it will " "not be data that look like a catalogue" msgstr "" "LAX MODE: escape-sekvenser verkar finnas i detta arkiv. Jag har då två olika " "metoder, antingen letar jag efter escape-sekvenser visande på en " "katalogbörjan eller så försöker jag vid varje position i ordning i hopp om " "att det inte är data, som ser ut som en katalog." #: src/libdar/macro_tools.cpp:952 msgid "LAX MODE: Trying to locate the escape sequence (safer choice) ?" msgstr "LAX MODE: Försöker hitta escape-sekvensen (säkrare val) ?" #: src/libdar/macro_tools.cpp:957 msgid "" "LAX MODE: Good point! I could find the escape sequence marking the beginning " "of the catalogue, now trying to read it..." msgstr "" "LAX MODE: Bra idé! Kunde hitta escape-sekvensen markerande katalogbörjan, " "försöker nu läsa den..." #: src/libdar/macro_tools.cpp:966 msgid "" "LAX MODE: Escape sequence could not be found, it may have been corrupted or " "out of the scanned portion of the archive, trying to find the catalogue the " "other way" msgstr "" "LAX MODE: Escape-sekvens kunde inte hittas, den kan ha blivit förstörd eller " "utanför avsökt del av arkivet, försöker hitta katalogen den andra vägen..." 
#: src/libdar/macro_tools.cpp:989 #, c-format msgid "LAX MODE: %i %% remaining" msgstr "LAX MODE: %i %% återstår" #: src/libdar/macro_tools.cpp:1001 #, c-format msgid "" "Could read a catalogue data structure at offset %i, it contains the " "following:" msgstr "" "Kunde läsa en katalogdatastruktur vid offset %i, den innehåller " "följande:" #: src/libdar/macro_tools.cpp:1003 #, fuzzy msgid "Do you want to use it for the operation?" msgstr "Vill du använda det för återhämtning?" #: src/libdar/macro_tools.cpp:1035 msgid "" "LAX MODE: Reached the end of the area to scan, FAILED to find any catalogue" msgstr "" "LAX MODE: Nådde slutet på avsökt område, misslyckades med att hitta någon " "katalog" #: src/libdar/macro_tools.cpp:1045 msgid "LAX MODE: Failed to read the catalogue" msgstr "LAX MODE: Misslyckades med att läsa katalogen" #: src/libdar/macro_tools.cpp:1130 msgid "" "Creating low layer: Writing archive into a black hole object (equivalent to /" "dev/null)..." msgstr "" #: src/libdar/macro_tools.cpp:1139 msgid "Creating low layer: Writing archive into standard output object..." msgstr "" #: src/libdar/macro_tools.cpp:1152 msgid "Creating low layer: Writing archive into a plain file object..." msgstr "" #: src/libdar/macro_tools.cpp:1173 msgid "" "Creating low layer: Writing archive into a sar object (Segmentation and " "Reassembly) for slicing..." msgstr "" #: src/libdar/macro_tools.cpp:1211 msgid "Adding cache layer over pipe to provide limited skippability..." msgstr "" #: src/libdar/macro_tools.cpp:1241 #, fuzzy msgid "" "WARNING: support for secure memory was not available at compilation time, in " "case of heavy memory load, this may lead the password/passphrase provided to " "be wrote to disk (swap space) in clear. You have been warned!" msgstr "" "VARNING: stöd för säkert minne var ej tillgängligt vid " "kompileringstillfället. 
Vid stor minnesbelastning kan detta leda till att " "det lösenord du skall till och lämna kan skrivas till disk (växlingsutrymme) " "i klartext. Du har härmed varnats!" #: src/libdar/macro_tools.cpp:1256 msgid "Generating random key for symmetric encryption..." msgstr "" #: src/libdar/macro_tools.cpp:1264 msgid "" "Scrambling is a very weak encryption algorithm, this is a non-sens to use " "with asymmetric encryption" msgstr "" #: src/libdar/macro_tools.cpp:1304 #, c-format msgid "" "For your information, this is the iteration %d for which the randomly " "generated key is reported to be weak by libgcrypt, continuing generating " "another random key... patience" msgstr "" #: src/libdar/macro_tools.cpp:1308 #, c-format msgid "" "... A strong randomly generated key could be found after %d iteration(s)" msgstr "" #: src/libdar/macro_tools.cpp:1315 msgid "Key generated" msgstr "" #: src/libdar/macro_tools.cpp:1360 msgid "Please confirm your password: " msgstr "Bekräfta lösenordet: " #: src/libdar/macro_tools.cpp:1364 msgid "The two passwords are not identical. Aborting" msgstr "De tvÃ¥ lösenorden är olika. Avbryter" #: src/libdar/macro_tools.cpp:1406 #, fuzzy msgid "Writing down the archive header..." msgstr "Läser arkivhuvud..." #: src/libdar/macro_tools.cpp:1422 msgid "Adding a new layer on top: scrambler object..." msgstr "" #: src/libdar/macro_tools.cpp:1434 msgid "Adding a new layer on top: Strong encryption object..." msgstr "" #: src/libdar/macro_tools.cpp:1454 #, fuzzy msgid "Adding a new layer on top: Caching layer for better performances..." msgstr "Öppnar \"cache\"-nivÃ¥n för bättre prestanda..." #: src/libdar/macro_tools.cpp:1499 msgid "Writing down the initial elastic buffer through the encryption layer..." msgstr "" #: src/libdar/macro_tools.cpp:1511 msgid "Adding a new layer on top: Escape layer to allow sequential reading..." msgstr "" #: src/libdar/macro_tools.cpp:1541 msgid "Adding a new layer on top: compression..." 
msgstr "" #: src/libdar/macro_tools.cpp:1583 #, fuzzy msgid "Error creating archive layers: " msgstr "Fel vid läsning av tecken: " #: src/libdar/macro_tools.cpp:1692 #, fuzzy msgid "Writing down archive contents..." msgstr "Skriver arkivinnehÃ¥ll..." #: src/libdar/macro_tools.cpp:1716 msgid "Calculating the signature of the catalogue hash..." msgstr "" #: src/libdar/macro_tools.cpp:1732 #, fuzzy msgid "Writing down the signed hash of the catalogue..." msgstr "Uppdaterar databas med katalog..." #: src/libdar/macro_tools.cpp:1782 #, fuzzy msgid "Closing the compression layer..." msgstr "Öppnar kompressionslagret..." #: src/libdar/macro_tools.cpp:1797 #, fuzzy msgid "Closing the escape layer..." msgstr "Öppnar kompressionslagret..." #: src/libdar/macro_tools.cpp:1817 #, fuzzy msgid "Writing down the first archive terminator..." msgstr "Skriver arkivinnehÃ¥ll..." #: src/libdar/macro_tools.cpp:1823 msgid "writing down the final elastic buffer through the encryption layer..." msgstr "" #: src/libdar/macro_tools.cpp:1862 #, fuzzy msgid "Closing the encryption layer..." msgstr "Öppnar kompressionslagret..." #: src/libdar/macro_tools.cpp:1884 #, fuzzy msgid "Writing down archive trailer..." msgstr "Skriver arkivinnehÃ¥ll..." #: src/libdar/macro_tools.cpp:1889 #, fuzzy msgid "Writing down the second archive terminator..." msgstr "Skriver arkivinnehÃ¥ll..." #: src/libdar/macro_tools.cpp:1900 #, fuzzy msgid "Closing archive low layer..." msgstr "Funderar pÃ¥ chiffreringsnivÃ¥..." #: src/libdar/macro_tools.cpp:1917 #, fuzzy msgid "Archive is closed." msgstr "Arkivet bestÃ¥r av %i fil(er)\n" #: src/libdar/macro_tools.cpp:2084 msgid "" "The format version of the archive is too high for that software version, try " "reading anyway?" msgstr "" "Arkivets formatversion är för högt för denna programvaruversion, försöka " "läsa ändÃ¥?" 
#: src/libdar/fichier_local.cpp:152 msgid "Error getting size of file: " msgstr "Fel vid hämtning av filstorlek: " #: src/libdar/fichier_local.cpp:262 msgid "Error getting file reading position: " msgstr "Fel vid hämtning av fils läsposition: " #: src/libdar/fichier_local.cpp:294 src/libdar/fichier_local.cpp:296 msgid "Error while reading from file: " msgstr "Fel vid läsning frÃ¥n fil: " #: src/libdar/fichier_local.cpp:345 src/libdar/fichier_local.cpp:351 msgid "Error while writing to file: " msgstr "Fel vid skrivning till fil: " #: src/libdar/fichier_local.cpp:411 src/libdar/archive_options.cpp:474 #: src/libdar/archive_options.cpp:1729 msgid "Furtive read mode" msgstr "Smygläsningsmode" #: src/libdar/fichier_local.cpp:429 msgid "" "No space left for inode, you have the opportunity to make some room now. " "When done : can we continue ?" msgstr "" "Inget utrymme kvar för inod, du har möjlighet att skapa en del plats nu. När " "det är gjort : kan vi fortsätta ?" #: src/libdar/fichier_local.cpp:440 msgid "Cannot open file : " msgstr "Kan ej öppna fil : " #: src/libdar/fichier_local.cpp:463 #, fuzzy, c-format msgid "Cannot dup() filedescriptor while copying \"fichier_local\" object: %s" msgstr "Kan ej dup() filbeskrivning vid kopiering av \"fichier\"-objekt: %s" #: src/libdar/erreurs.hpp:129 msgid "Lack of Memory" msgstr "Minnesbrist" #: src/libdar/erreurs.hpp:146 #, fuzzy msgid "Lack of Secured Memory" msgstr "Minnesbrist" #: src/libdar/erreurs.hpp:204 #, fuzzy msgid "" "Cannot handle such a too large integer. Use a full version of libdar " "(compiled to rely on the \"infinint\" integer type) to solve this problem" msgstr "" "kan ej hantera ett sÃ¥ stort heltal. 
Använd den kompletta versionen av dar-" "svitens program (kompileringsalternativ sätts till att 'using infinint' för " "att lösa detta problem" #: src/libdar/erreurs.hpp:385 msgid "Thread cancellation requested, aborting as soon as possible" msgstr "TrÃ¥d-avbrott begärt, avbryter sÃ¥ snart som möjligt" #: src/libdar/erreurs.hpp:385 msgid "Thread cancellation requested, aborting as properly as possible" msgstr "TrÃ¥d-avbrott begärt, avbryter sÃ¥ snart snyggt möjligt" #: src/libdar/generic_file_overlay_for_gpgme.cpp:55 #, fuzzy, c-format msgid "Error creating data buffer overlay for GPGME: %s" msgstr "Fel vid läsning av databas %S : " #: src/libdar/generic_file_overlay_for_gpgme.cpp:107 msgid "File offset too large to be stored in off_t type" msgstr "" #: src/libdar/fichier_libcurl.hpp:100 msgid "user/group ownership not supported for this repository" msgstr "" #: src/libdar/filesystem_hard_link_write.cpp:292 #, c-format msgid "" "Error creating hard link %s : %s\n" " Trying to duplicate the inode" msgstr "" "Fel vid skapande av hÃ¥rd länk %s : %s\n" " Försöker att duplicera inoden" #: src/libdar/filesystem_hard_link_write.cpp:306 #, c-format msgid "" "Error creating hard link : %s , the inode to link with [ %s ] has " "disappeared, re-creating it" msgstr "" "Fel vid skapande av hÃ¥rd länk %s , inoden till länk med [ %s ] har " "försvunnit, Ã¥terskapar den" #: src/libdar/filesystem_hard_link_write.cpp:313 #, c-format msgid "" "Error creating hard link : %s , the inode to link with [ %s ] is not " "present, cannot restore this hard link" msgstr "" "Fel vid skapande av hÃ¥rd länk %s , inoden till länk med [ %s ] finns inte, " "kan ej Ã¥terställa denna hÃ¥rda länk" #: src/libdar/filesystem_hard_link_write.cpp:385 msgid "Bad CRC, data corruption occurred" msgstr "Fel i CRC, data förstört" #: src/libdar/filesystem_hard_link_write.cpp:435 #, c-format msgid "" "error restoring Unix socket %s, path too long to be stored properly, socket " "will be created as %s instead, do 
you confirm?" msgstr "" #: src/libdar/filesystem_hard_link_write.cpp:437 msgid "Error creating Unix socket file: " msgstr "Fel vid skapande av UNIX sockelfil: " #: src/libdar/filesystem_hard_link_write.cpp:457 msgid "Could not create inode: " msgstr "Kunde ej skapa inod: " #: src/libdar/filesystem_hard_link_write.cpp:459 msgid "Cannot create inode: " msgstr "Kan ej skapa inod: " #: src/libdar/filesystem_hard_link_write.cpp:459 msgid " Ready to continue ?" msgstr " Klar att fortsätta ?" #: src/libdar/entrepot.cpp:69 #, fuzzy msgid "root's entrepot must be an absolute path: " msgstr "argumentet mÃ¥ste vara en relativ sökväg" #: src/libdar/entrepot.cpp:156 #, fuzzy msgid "Error met while creating the hash file: " msgstr "Fel vid skapande av hash handle: %s/%s" #: src/libdar/elastic.cpp:80 msgid "Zero is not a valid size for an elastic buffer" msgstr "Noll är inte en gÃ¥ngbar storlek för en elastisk buffert" #: src/libdar/elastic.cpp:82 msgid "Size too large for an elastic buffer" msgstr "För stor storlek för en elastisk buffert" #: src/libdar/elastic.cpp:99 src/libdar/elastic.cpp:105 #: src/libdar/elastic.cpp:135 src/libdar/elastic.cpp:141 #: src/libdar/elastic.cpp:157 src/libdar/elastic.cpp:165 #: src/libdar/elastic.cpp:194 src/libdar/elastic.cpp:202 #: src/libdar/elastic.cpp:212 msgid "elastic buffer incoherent structure" msgstr "osammanhängande struktur i elastisk buffert" #: src/libdar/elastic.cpp:131 src/libdar/elastic.cpp:190 msgid "too large elastic buffer or elastic buffer incoherent structure" msgstr "" "för stor elastisk buffert eller osammanhängande struktur i elastisk buffert" #: src/libdar/elastic.cpp:219 msgid "not enough space provided to dump the elastic buffer" msgstr "inte försedd med tillräckligt med plats att dumpa elastisk buffert" #: src/libdar/fichier_global.cpp:110 src/libdar/tuyau.cpp:368 msgid "" "No space left on device, you have the opportunity to make room now. When " "ready : can we continue ?" 
msgstr "" "Inget utrymme kvar pÃ¥ enhet, du har möjlighet att skapa plats nu. När du är " "klar : kan vi fortsätta ?" #: src/libdar/mycurl_easyhandle_node.cpp:39 #, fuzzy msgid "Error met while creating a libcurl handle" msgstr "Fel vid skapande av hash handle: %s/%s" #: src/libdar/mycurl_easyhandle_node.cpp:51 #, fuzzy msgid "Error met while duplicating libcurl handle" msgstr "Fel vid skapande av hash handle: %s/%s" #: src/libdar/user_interaction5.cpp:167 src/libdar/shell_interaction.cpp:405 msgid "Continue? " msgstr "Fortsätta? " #: src/libdar/filesystem_restore.cpp:277 msgid "" "Cannot restore a delta binary patch without a file to patch on filesystem" msgstr "" #: src/libdar/filesystem_restore.cpp:285 #, fuzzy msgid "" "Cannot restore a inode metadata only without an existing file on filesystem" msgstr "Kan ej ta bort icke befintlig fil frÃ¥n filsystemet: " #: src/libdar/filesystem_restore.cpp:302 #, fuzzy, c-format msgid "Cannot remove non-existent file from filesystem: %S" msgstr "Kan ej ta bort icke befintlig fil frÃ¥n filsystemet: " #: src/libdar/filesystem_restore.cpp:307 src/libdar/filesystem_restore.cpp:751 msgid "Restoring file's data: " msgstr "Ã…terställer fils data: " #: src/libdar/filesystem_restore.cpp:328 src/libdar/filesystem_restore.cpp:970 msgid "Restoring file's EA: " msgstr "Ã…terställer fils EA: " #: src/libdar/filesystem_restore.cpp:339 src/libdar/filesystem_restore.cpp:522 #, fuzzy, c-format msgid "Restoration of EA for %S aborted: " msgstr "Ã…terställer EA för " #: src/libdar/filesystem_restore.cpp:351 src/libdar/filesystem_restore.cpp:1120 #, fuzzy msgid "Restoring file's FSA: " msgstr "Ã…terställer fils EA: " #: src/libdar/filesystem_restore.cpp:366 src/libdar/filesystem_restore.cpp:536 #, c-format msgid "Restoration of FSA for %S aborted: " msgstr "" #: src/libdar/filesystem_restore.cpp:424 src/libdar/filesystem_restore.cpp:619 #, fuzzy msgid "Restoring linux immutable FSA for " msgstr "Ã…terställer fils EA: " #: 
src/libdar/filesystem_restore.cpp:429 #, fuzzy, c-format msgid "Restoration of linux immutable FSA for %S aborted: " msgstr "Ã…terställer EA för " #: src/libdar/filesystem_restore.cpp:484 #, fuzzy msgid "Restoring file's data using a delta patching: " msgstr "Ã…terställer fils data: " #: src/libdar/filesystem_restore.cpp:495 #, c-format msgid "Cannot restore delta diff for %S as exsiting inode is not a plain file" msgstr "" #: src/libdar/filesystem_restore.cpp:670 #, c-format msgid "%S is about to be removed from filesystem, continue?" msgstr "%S skall till att tas bort frÃ¥n filsystemet, fortsätta ? " #: src/libdar/filesystem_restore.cpp:675 src/libdar/filesystem_restore.cpp:684 #, c-format msgid "Removing file (reason is file recorded as removed in archive): %S" msgstr "Tar bort fil (orsaken är att fil är noterad som bortagen i arkiv): %S" #: src/libdar/filesystem_restore.cpp:682 #, c-format msgid "" "%S must be removed, but does not match expected type, remove it anyway ?" msgstr "%S mÃ¥ste tas bort, men matchar inte förvänta typ, ta bort ändÃ¥ ?" #: src/libdar/filesystem_restore.cpp:690 #, c-format msgid "" "%S: Overwriting policy (Data) is undefined for that file, do not know " "whether removal is allowed or not!" msgstr "" "%S: Överskrivningspolicy (Data) är odefinierat för denna fil, vet ej " "huruvida bortagande är tillÃ¥tet eller ej!" #: src/libdar/filesystem_restore.cpp:727 #, c-format msgid "" "Directory %S cannot be restored: overwriting not allowed and a non-directory " "inode of that name already exists, all files in that directory will be " "skipped for restoration:" msgstr "" "Mappen %S kan ej Ã¥terställas: överskrivning ej tillÃ¥ten och en icke-mapp-" "inod med det namnet finns redan, alla filer i den mappen kommer att hoppas " "över vid Ã¥terhämtning:" #: src/libdar/filesystem_restore.cpp:736 #, c-format msgid "%S is about to be overwritten, OK?" msgstr "%S skall till att överskrivas, OK?" 
#: src/libdar/filesystem_restore.cpp:768 msgid "" "Existing file is of a different nature, cannot only restore inode metadata" msgstr "" #: src/libdar/filesystem_restore.cpp:786 #, c-format msgid "Existing EA for %S could not be read and preserved: " msgstr "Befintlig EA för %S kunde inte läsas och bevaras: " #: src/libdar/filesystem_restore.cpp:806 #, fuzzy, c-format msgid "Existing FSA for %S could not be read and preserved: " msgstr "Befintlig EA för %S kunde inte läsas och bevaras: " #: src/libdar/filesystem_restore.cpp:820 #, c-format msgid "Removing existing immutable flag in order to restore data for %S" msgstr "" #: src/libdar/filesystem_restore.cpp:846 #, c-format msgid "Existing EA for %S could not be preserved : " msgstr "Befintlig EA för %S kunde inte bevaras : " #: src/libdar/filesystem_restore.cpp:863 #, fuzzy, c-format msgid "Existing FSA for %S could not be preserved : " msgstr "Befintlig EA för %S kunde inte bevaras : " #: src/libdar/filesystem_restore.cpp:878 #, c-format msgid "" "%S is about to be deleted (required by overwriting policy), do you agree?" msgstr "" "%S skall till at tas bort (krav frÃ¥n överskrivningspolicy), är du överens?" #: src/libdar/filesystem_restore.cpp:880 #, c-format msgid "Removing file (reason is overwriting policy): %S" msgstr "Tar bort fil (orsak är överskrivningspolicy): %S" #: src/libdar/filesystem_restore.cpp:886 #, c-format msgid "" "%S: Overwriting policy (Data) is undefined for that file, do not know " "whether overwriting is allowed or not!" msgstr "" "%S: Överskrivningspolicy är obestämd för denna fil, vet ej huruvida " "överskrivning tillÃ¥ts eller ej!" #: src/libdar/filesystem_restore.cpp:949 #, c-format msgid "EA for %S are about to be overwritten, OK?" msgstr "NÃ¥gra EA för %S skall till att skrivas över, OK?" 
#: src/libdar/filesystem_restore.cpp:964 #, c-format msgid "" "EA for %S have not been overwritten because this file is a hard link " "pointing to an already restored inode" msgstr "" "EA för %S har ej skrivits över eftersom denna fil är en hÃ¥rd länk pekande pÃ¥ " "en redan Ã¥terskapad inode." #: src/libdar/filesystem_restore.cpp:984 #, c-format msgid "EA for %S are about to be removed, OK?" msgstr "EA för %S skall till att tas bort, OK?" #: src/libdar/filesystem_restore.cpp:995 #, c-format msgid "" "EA for %S have not been cleared as requested by the overwriting policy " "because this file is a hard link pointing to an already restored inode" msgstr "" "EA for %S har ej nollställt enligt begäran frÃ¥n överskrivnngspolicyn " "eftersom denna fil är en hÃ¥rd länk pekande pÃ¥ en redan Ã¥terställd inod" #: src/libdar/filesystem_restore.cpp:1001 msgid "Clearing file's EA (requested by overwriting policy): " msgstr "Nollställning av fils EA (begärd av överskrivninsgpolicy): " #: src/libdar/filesystem_restore.cpp:1016 #, c-format msgid "EA for %S are about to be merged, OK?" msgstr "EA för %S skall till att slÃ¥s ihop, OK?" #: src/libdar/filesystem_restore.cpp:1047 #, c-format msgid "" "%S: Overwriting policy (EA) is undefined for that file, do not know whether " "overwriting is allowed or not!" msgstr "" "%S: Överskrivningspolicyn (EA) är obestämd för denna fil, vet ej huruvida " "överskrivning är tillÃ¥ten eller ej!" #: src/libdar/filesystem_restore.cpp:1099 #: src/libdar/filesystem_restore.cpp:1145 #, fuzzy, c-format msgid "FSA for %S are about to be overwritten, OK?" msgstr "NÃ¥gra EA för %S skall till att skrivas över, OK?" #: src/libdar/filesystem_restore.cpp:1114 #, fuzzy, c-format msgid "" "FSA for %S have not been overwritten because this file is a hard link " "pointing to an already restored inode" msgstr "" "EA för %S har ej skrivits över eftersom denna fil är en hÃ¥rd länk pekande pÃ¥ " "en redan Ã¥terskapad inode." 
#: src/libdar/filesystem_restore.cpp:1171 #, fuzzy, c-format msgid "" "%S: Overwriting policy (FSA) is undefined for that file, do not know whether " "overwriting is allowed or not!" msgstr "" "%S: Överskrivningspolicyn (EA) är obestämd för denna fil, vet ej huruvida " "överskrivning är tillÃ¥ten eller ej!" #: src/libdar/tlv.cpp:66 #, fuzzy msgid "Missing data to initiate a TLV object" msgstr "data saknas till att bygga en mapp" #: src/libdar/cat_lien.cpp:98 msgid "symbolic link does not point to the same target: " msgstr "symbolisk länk pekar inte pÃ¥ samma mÃ¥l" #: src/libdar/hash_fichier.cpp:63 #, c-format msgid "" "Error while initializing hash: Hash algorithm not available in libgcrypt: %s/" "%s" msgstr "" "Fel vid initiering av hash: Hash algoritm ej tillgänglig i libgcrypt: %s/%s" #: src/libdar/hash_fichier.cpp:67 #, c-format msgid "Error while creating hash handle: %s/%s" msgstr "Fel vid skapande av hash handle: %s/%s" #: src/libdar/hash_fichier.cpp:69 src/libdar/hash_fichier.cpp:107 #: src/libdar/hash_fichier.cpp:122 msgid "" "Missing hashing algorithms support (which is part of strong encryption " "support, using libgcrypt)" msgstr "" "Saknar stöd för 'hashing'-algoritm (vilket är en del av stödet för stark " "kryptering genom 'libgcrypt'" #: src/libdar/hash_fichier.cpp:155 msgid "Failed writing down the hash: " msgstr "Misslyckades med att notera hash: " #: src/libdar/escape_catalogue.cpp:89 msgid "" "incoherent data after escape sequence, cannot read internal data set label" msgstr "" "ej översenstämmande data efter escape-sekvens, kan ej läsa intern " "datamängsetikett" #: src/libdar/escape_catalogue.cpp:99 #, fuzzy msgid "Could not find tape mark for the internal catalogue" msgstr "kunde inte hitta den interna datamängdsetikettens escape-sekvens " #: src/libdar/escape_catalogue.cpp:489 msgid "" "Escape sequences used for reading lead the archive to place some files out " "of the specified root. 
To overcome this problem, try reading the archive in " "direct mode (not using sequential reading), try repairing the archive using " "Parchive if redundancy data has been created or in last resort try using the " "lax mode" msgstr "" "Escape-sekvens använd för läsning leder till att arkivet placerar vissa " "filer utanför den specificerade roten. För att klara detta problem, försök " "att läsa arkivet i direkt mode (använd ej sekvensiell läsning), försök " "reaparera arkivet med hlälp av Parchive om redundanta data har skapats eller " "som sista utväg försök använda lax mode" #: src/libdar/escape_catalogue.cpp:492 msgid "" "LAX MODE: Archive directory structure is corrupted, it would lead to place " "some files out of the specified root directory. Restoring different " "directory contents at the root not out of it, which will put files of " "different directories in the specified root directory" msgstr "" "LAX MODE: mappstrukturen i arkivet är förstört, det skulle leda till vissa " "filer placeras utanför den angivna rotmappen. Ã…terställning av andra mappars " "innehÃ¥ll vid roten, ej utanför den, vilket kommer att lägga filer frÃ¥n andra " "mappar i den angivna rotmappen." #: src/libdar/escape_catalogue.cpp:507 msgid "" "LAX MODE: found unknown catalogue entry, assuming data corruption occurred. " "Skipping to the next entry, this may lead to improper directory structure " "being restored, if the corrupted data was a directory" msgstr "" "LAX MODE: hittat okänd katalogpost antar data förstört. Hoppar över till " "nästa post, detta kan leda till att felaktig mappstruktur Ã¥terställs om " "förstört data var en mapp" #: src/libdar/escape_catalogue.cpp:514 msgid "Corrupted entry following an escape mark in the archive" msgstr "Förstörd post följande ett escape-märke i arkivet" #: src/libdar/escape_catalogue.cpp:543 msgid "" "Uncompleted archive! Assuming it has been interrupted during the backup " "process. 
If an error has been reported just above, simply ignore it, this is " "about the file that was saved at the time of the interruption." msgstr "" "Ej komplett arkiv! Antar det har blivit avbrutet vid säkerhetskopieringen. " "Om ett fel har precis rapporterats ovan, strunta i det helt enkelt, detta är " "om den fil, som sparades vid tidpunkten för avbrottet." #: src/libdar/escape_catalogue.cpp:578 msgid "Cannot extract from the internal catalogue the list of files to remove" msgstr "" "Kan ej extrahera frÃ¥n den interna katalogen lista med filer, som skall tas " "bort " #: src/libdar/escape_catalogue.cpp:639 src/libdar/i_archive.cpp:270 #: src/libdar/i_archive.cpp:304 msgid "" "Archive internal catalogue is not identically signed as the archive itself, " "this might be the sign the archive has been compromised" msgstr "" #: src/libdar/escape_catalogue.cpp:655 msgid "" "Archive internal catalogue is properly signed but its content does not match " "the tape marks used so far for sequentially reading. Possible data " "corruption or archive compromission occurred! 
if data extracted in " "sequential read mode does not match the data extracted in direct access " "mode, consider the sequential data has been been modified after the archive " "has been generated" msgstr "" #: src/libdar/escape_catalogue.cpp:795 msgid "" "Resetting the sequential reading process of the archive contents while it is " "not finished, will make all data unread so far becoming inaccessible" msgstr "" "Nollställning av den sekvensiella läsprocessen av arkivinnehÃ¥llet medan den " "inte är avslutad, kommer att göra alla data oläsbara och sÃ¥ lÃ¥ngt oÃ¥tkomliga" #: src/libdar/cat_entree.cpp:84 src/libdar/cat_entree.cpp:132 #: src/libdar/cat_entree.cpp:142 msgid "corrupted file" msgstr "förstörd fil" #: src/libdar/cat_entree.cpp:134 msgid "" "LAX MODE: Unexpected saved status for end of directory entry, assuming data " "corruption occurred, ignoring and continuing" msgstr "" "LAX MODE: Oväntat sparat status för mappslutpost, antar dataförstörelse " "skett, ignorerar och fortsätter" #: src/libdar/cat_entree.cpp:144 #, fuzzy msgid "" "LAX MODE: Unexpected saved status for class \"cat_detruit\" object, assuming " "data corruption occurred, ignoring and continuing" msgstr "" "LAX MODE: Oväntat sparat status för klassen \"detruit\" objekt, antar " "dataförstörelse skett, ignorerar och fortsätter" #: src/libdar/cat_entree.cpp:153 msgid "unknown type of data in catalogue" msgstr "okänd datatyp i katalog" #: src/libdar/cat_entree.cpp:156 msgid "" "LAX MODE: found unknown catalogue entry, assuming data corruption occurred, " "cannot read further the catalogue as I do not know the length of this type " "of entry" msgstr "" "LAX MODE: hittade obekant kataloguppgift, antar att data förstörts, kan ej " "läsa vidare i katalogen dÃ¥ jag ej vet längden pÃ¥ denna typ av uppgift" #: src/libdar/cat_entree.cpp:207 msgid "unknown entry" msgstr "Okänd inmatning" #: src/libdar/cat_entree.cpp:208 #, c-format msgid "Entry information CRC failure for %S. Ignore the failure?" 
msgstr "Inmatad information CRC-misslyckande för %S. Ignorera detta?" #: src/libdar/cat_entree.cpp:214 #, c-format msgid "Entry information CRC failure for %S" msgstr "Inmatad information CRC-misslyckande för %S" #: src/libdar/cat_entree.cpp:216 msgid "Entry information CRC failure" msgstr "Inmatad information CRC-misslyckande" #: src/libdar/cat_entree.cpp:468 msgid "directory" msgstr "mapp" #: src/libdar/cat_entree.cpp:472 msgid "hard linked inode" msgstr "hårdlänkad inode" #: src/libdar/cat_entree.cpp:474 msgid "plain file" msgstr "enkel fil" #: src/libdar/cat_entree.cpp:476 msgid "soft link" msgstr "mjuk länk" #: src/libdar/cat_entree.cpp:478 msgid "char device" msgstr "teckenenhet" #: src/libdar/cat_entree.cpp:480 msgid "block device" msgstr "blockenhet" #: src/libdar/cat_entree.cpp:482 msgid "named pipe" msgstr "namnad pipe" #: src/libdar/cat_entree.cpp:484 msgid "unix socket" msgstr "unix sockel" #: src/libdar/cat_entree.cpp:486 msgid "deleted entry" msgstr "borttagen post" #: src/libdar/cat_entree.cpp:488 msgid "door inode" msgstr "'door' inod" #: src/libdar/shell_interaction.cpp:175 msgid "" "No terminal found for user interaction. All questions will be assumed a " "negative answer (less destructive choice), which most of the time will abort " "the program." msgstr "" "Ingen terminal hittad för användarinteraktion. Alla frågor kommer att anses " "negativt besvarade (minst skadliga val), vilket i de flesta fall kommer att " "avbryta programmet." 
#: src/libdar/shell_interaction.cpp:239 msgid "Error reading character: " msgstr "Fel vid läsning av tecken: " #: src/libdar/shell_interaction.cpp:265 #, fuzzy, c-format msgid "" "[Data ][D][ EA ][FSA][Compr][S]| Permission | User | Group | Size " "| Date | filename" msgstr "" "[data ][ EA ][kompr][S] | behörighet | anvd | grupp | storl | " "datum | filnamn\n" #: src/libdar/shell_interaction.cpp:270 #, fuzzy, c-format msgid "" "Access mode | User | Group | Size | Date |" "[Data ][D][ EA ][FSA][Compr][S]| Filename" msgstr "" "access mod | anvd | grupp | storl | datum | " "[data ][ EA ][kompr][S] | filnamn\n" #: src/libdar/shell_interaction.cpp:315 src/libdar/database5.cpp:78 #, fuzzy, c-format msgid "dar path : %S" msgstr "" "\n" "dar sökväg : %S\n" #: src/libdar/shell_interaction.cpp:316 src/libdar/database5.cpp:79 #, fuzzy, c-format msgid "dar options : %S" msgstr "" "dar-alternativ : %S\n" "\n" #: src/libdar/shell_interaction.cpp:317 src/libdar/database5.cpp:80 #, fuzzy, c-format msgid "database version: %S" msgstr "Nytt databasnamn: " #: src/libdar/shell_interaction.cpp:318 src/libdar/database5.cpp:81 #, fuzzy, c-format msgid "compression used: %S" msgstr "lzo-komprimering" #: src/libdar/shell_interaction.cpp:320 src/libdar/database5.cpp:83 #, fuzzy, c-format msgid "archive # | path | basename" msgstr "arkiv # | sökväg | basnamn\n" #: src/libdar/shell_interaction.cpp:328 src/libdar/database5.cpp:97 msgid "" msgstr "" #: src/libdar/shell_interaction.cpp:378 src/libdar/database5.cpp:148 #, fuzzy, c-format msgid " archive # | most recent/total data | most recent/total EA" msgstr " arkiv # | senaste/total data | senaste/total EA\n" #: src/libdar/shell_interaction.cpp:379 src/libdar/database5.cpp:149 #, fuzzy, c-format msgid "--------------+-------------------------+-----------------------" msgstr "--------------+-------------------------+-----------------------\n" #: src/libdar/shell_interaction.cpp:442 msgid " [return = YES | Esc = NO]" msgstr " [vagnretur = JA | 
Esc = NEJ]" #: src/libdar/shell_interaction.cpp:461 msgid "Error while reading user answer from terminal: " msgstr "Fel vid läsning av användarsvar frÃ¥n terminal: " #: src/libdar/shell_interaction.cpp:466 msgid "Continuing..." msgstr "Fortsätter..." #: src/libdar/shell_interaction.cpp:468 msgid "Escaping..." msgstr "Avbryter..." #: src/libdar/shell_interaction.cpp:533 msgid "Secured string can only be read from a terminal" msgstr "" #: src/libdar/shell_interaction.cpp:558 msgid "provided password is too long for the allocated memory" msgstr "lämnat lösenord är för lÃ¥ngt för det tilldelade minnet" #: src/libdar/shell_interaction.cpp:594 msgid "Error while changing user terminal properties: " msgstr "Fel vid ändring av användarterminalens egenskaper: " #: src/libdar/shell_interaction.cpp:636 #, fuzzy, c-format msgid "%S [%c] [ REMOVED ENTRY ] (%S) %S" msgstr "%S [%c] [ FLYTTAD ENTRY ] (%S) %S\n" #: src/libdar/shell_interaction.cpp:666 src/libdar/shell_interaction.cpp:718 msgid " Extended Attribute: [" msgstr " Extended Attribut: [" #: src/libdar/shell_interaction.cpp:934 src/libdar/database5.cpp:181 msgid "[ Saved ]" msgstr "[ Sparad ]" #: src/libdar/shell_interaction.cpp:936 src/libdar/shell_interaction.cpp:941 #: src/libdar/database5.cpp:183 src/libdar/database5.cpp:188 msgid "[ ]" msgstr "[ ]" #: src/libdar/shell_interaction.cpp:939 src/libdar/database5.cpp:186 msgid "[ EA ]" msgstr "[ EA ]" #: src/libdar/shell_interaction.cpp:955 src/libdar/database5.cpp:203 msgid "removed " msgstr "borttagen" #: src/libdar/shell_interaction.cpp:956 src/libdar/database5.cpp:204 msgid "present " msgstr "närvarande " #: src/libdar/shell_interaction.cpp:957 src/libdar/database5.cpp:205 msgid "saved " msgstr "sparad " #: src/libdar/shell_interaction.cpp:958 src/libdar/database5.cpp:206 msgid "absent " msgstr "frÃ¥nvarande " #: src/libdar/shell_interaction.cpp:959 src/libdar/database5.cpp:207 msgid "patch " msgstr "" #: src/libdar/shell_interaction.cpp:960 
src/libdar/database5.cpp:208 msgid "BROKEN " msgstr "" #: src/libdar/shell_interaction.cpp:961 src/libdar/database5.cpp:209 msgid "inode " msgstr "" #: src/libdar/entrepot_local.cpp:169 #, fuzzy, c-format msgid "Cannot remove file %s: %s" msgstr "Kan ej ta bort fil " #: src/libdar/i_libdar_xform.cpp:263 msgid "Error transforming the archive :" msgstr "Fel vid omvandling av arkivet :" #: src/libdar/list_entry.cpp:50 msgid "[DIRTY]" msgstr "[SMUTS]" #: src/libdar/list_entry.cpp:52 src/libdar/list_entry.cpp:71 msgid "[Saved]" msgstr "[Sparad]" #: src/libdar/list_entry.cpp:54 msgid "[Inode]" msgstr "" #: src/libdar/list_entry.cpp:56 src/libdar/list_entry.cpp:73 msgid "[InRef]" msgstr "[InRef]" #: src/libdar/cat_detruit.cpp:46 msgid "missing data to build" msgstr "data saknas för att bygga" #: src/libdar/crit_action.cpp:145 msgid "cannot evaluate an empty chain in an overwriting policy" msgstr "kan ej utvärdera en tom kedja i en överskrivningspolicy" #: src/libdar/cache.cpp:69 msgid "wrong value given as initial_size argument while initializing cache" msgstr "" "fel värde givet till argumentet initial_size medan cachen initialiseras" #: src/libdar/ea_filesystem.cpp:189 #, fuzzy, c-format msgid "Error while adding EA %s : %s" msgstr "Fel vi skapande av IV: %s/%s" #: src/libdar/ea_filesystem.cpp:226 #, fuzzy, c-format msgid "Error while removing %s : %s" msgstr "Fel vid filöppning %s : %s" #: src/libdar/ea_filesystem.cpp:258 src/libdar/ea_filesystem.cpp:282 #, c-format msgid "Error reading attribute %s of file %s : %s" msgstr "Fel vid läsning av attribut %s hos fil %s : %s" #: src/libdar/ea_filesystem.cpp:332 src/libdar/ea_filesystem.cpp:347 #, c-format msgid "Error retrieving EA list for %s : %s" msgstr "Fel vid räddning av EA-lista för %s : %s" #: src/libdar/database_header.cpp:102 msgid "" "The format version of this database is too high for that software version, " "use a more recent software to read or modify this database" msgstr "" "Formatversionen för denna 
databas är för hög för denna programvaruversion, " "använd en senare programvara för att läsa eller ändra denna databas" #: src/libdar/database_header.cpp:105 msgid "Unknown header option in database, aborting\n" msgstr "Okänt alternativ i databashuvud, avbryter\n" #: src/libdar/database_header.cpp:148 msgid "Cannot create database, file exists" msgstr "Kan ej skapa databas, fil finns" #: src/libdar/database_header.cpp:191 #, c-format msgid "Error reading database %S : " msgstr "Fel vid läsning av databas %S : " #: src/libdar/user_interaction_callback5.cpp:93 #: src/libdar/user_interaction_callback.cpp:98 #, fuzzy msgid "nullptr given as argument of user_interaction_callback()" msgstr "Tomt givet som argument för 'user_interaction_callback'" #: src/libdar/user_interaction_callback5.cpp:130 #: src/libdar/user_interaction_callback5.cpp:135 #: src/libdar/user_interaction_callback5.cpp:152 #: src/libdar/user_interaction_callback5.cpp:156 #: src/libdar/user_interaction_callback5.cpp:173 #: src/libdar/user_interaction_callback5.cpp:177 #: src/libdar/user_interaction_callback5.cpp:198 #: src/libdar/user_interaction_callback5.cpp:202 #: src/libdar/user_interaction_callback5.cpp:225 #: src/libdar/user_interaction_callback5.cpp:229 #: src/libdar/user_interaction_callback5.cpp:246 #: src/libdar/user_interaction_callback5.cpp:250 #: src/libdar/user_interaction_callback5.cpp:267 #: src/libdar/user_interaction_callback5.cpp:271 #: src/libdar/user_interaction_callback5.cpp:290 #: src/libdar/user_interaction_callback5.cpp:294 #: src/libdar/user_interaction_callback5.cpp:313 #: src/libdar/user_interaction_callback5.cpp:317 msgid "No exception allowed from libdar callbacks" msgstr "Inget avbrott tillÃ¥tet frÃ¥n libdar 'callbacks'" #: src/libdar/real_infinint.cpp:70 src/libdar/limitint.hpp:272 msgid "Reached end of file before all data could be read" msgstr "Kommit till filslut innan alla data kunnat läsas" #: src/libdar/real_infinint.cpp:83 src/libdar/limitint.hpp:285 msgid "Badly 
formed \"infinint\" or not supported format" msgstr "Felaktigt format 'infinint' eller ej understött format" #: src/libdar/real_infinint.cpp:247 src/libdar/limitint.hpp:423 msgid "" "Subtracting an \"infinint\" greater than the first, \"infinint\" cannot be " "negative" msgstr "" "Subtraktion av en 'infinint' större än det första, 'infinint' kan ej vara " "negativt" #: src/libdar/real_infinint.cpp:840 src/libdar/limitint.hpp:464 #: src/libdar/limitint.hpp:473 msgid "Division by zero" msgstr "Division med noll" #: src/libdar/terminateur.cpp:133 msgid "Badly formatted terminator, cannot extract catalogue location: " msgstr "Fel utformat avslutning, kan ej extrahera katalogplats: " #: src/libdar/header_version.cpp:79 #, fuzzy msgid "LAX MODE: Failed to read the archive header's format version." msgstr "LAX MODE: Misslyckades med att läsa katalogen" #: src/libdar/header_version.cpp:82 #, c-format msgid "" "LAX MODE: Please provide the archive format: You can use the table at %s to " "find the archive format depending on the release version, (for example if " "this archive has been created using dar release 2.3.4 to 2.3.7 answer \"6\" " "without the quotes here): " msgstr "" "LAX MODE: Upplys om arkivformatet. Du kan använda tabellen vid %s att finna " "arkivformatet beroende pÃ¥ version., till exempel om detta arkiv har skapats " "med \"dar\" version 2.3.4 till 2.3.7, svara \"6\", (utan anföringstecken) " "här:\" " #: src/libdar/header_version.cpp:87 #, c-format msgid "LAX MODE: \"%S\" is not a valid archive format" msgstr "LAX MODE: \"%S\" är ej ett giltigt arkivformat" #: src/libdar/header_version.cpp:93 #, c-format msgid "LAX MODE: Using archive format \"%d\"?" msgstr "LAX MODE: Använder arkivformat \"%d\"?" #: src/libdar/header_version.cpp:122 #, fuzzy msgid "" "LAX MODE: Unknown compression algorithm used, assuming data corruption " "occurred. 
Please help me, answering with one of the following words \"none" "\", \"gzip\", \"bzip2\", \"lzo\" or \"xz\" at the next prompt:" msgstr "" "LAX MODE: Okänd komprimeringsalgoritm använd, antar data förstörda. Hjälp " "mig genom att svara med ett av orden \"none\", \"gzip\", \"bzip2\" or \"lzo" "\" pÃ¥ frÃ¥gan:" #: src/libdar/header_version.cpp:123 src/libdar/header_version.cpp:498 #: src/libdar/crypto.cpp:42 msgid "none" msgstr "inget" #: src/libdar/header_version.cpp:125 msgid "gzip" msgstr "gzip" #: src/libdar/header_version.cpp:127 msgid "bzip2" msgstr "bzip2" #: src/libdar/header_version.cpp:129 msgid "lzo" msgstr "lzo" #: src/libdar/header_version.cpp:131 msgid "xz" msgstr "" #: src/libdar/header_version.cpp:138 src/libdar/header_version.cpp:146 #: src/libdar/header_version.cpp:157 src/libdar/header_version.cpp:175 #, fuzzy msgid "Reached End of File while reading archive header_version data structure" msgstr "NÃ¥tt filslut vid läsning av arkivversion" #: src/libdar/header_version.cpp:208 msgid "Corruption met while reading header_version data structure" msgstr "" #: src/libdar/header_version.cpp:225 msgid "Missing data for encrypted symmetrical key" msgstr "" #: src/libdar/header_version.cpp:242 msgid "" "Error met while reading archive of reference slicing layout, ignoring this " "field and continuing" msgstr "" #: src/libdar/header_version.cpp:266 msgid "valid hash algoritm needed for key derivation function" msgstr "" #: src/libdar/header_version.cpp:278 msgid "" "please indicate the hash algoritm to use for key derivation function '1' for " "sha1, '5' for sha512, 'm' for md5, or 'q' to abort: " msgstr "" #: src/libdar/header_version.cpp:296 msgid "please answer with a single character" msgstr "" #: src/libdar/header_version.cpp:328 src/libdar/header_version.cpp:330 #: src/libdar/header_version.cpp:352 src/libdar/header_version.cpp:354 msgid "Consistency check failed for archive header" msgstr "Sundhetskontroll misslyckades för arkivhuvud" #: 
src/libdar/header_version.cpp:480 msgid "invalid hash algorithm provided for key derivation function" msgstr "" #: src/libdar/header_version.cpp:489 src/libdar/header_version.cpp:506 #: src/libdar/op_tools.cpp:41 src/libdar/op_tools.cpp:113 #: src/libdar/op_tools.cpp:193 msgid "yes" msgstr "ja" #: src/libdar/header_version.cpp:489 src/libdar/header_version.cpp:506 msgid "no" msgstr "nej" #: src/libdar/header_version.cpp:510 #, fuzzy, c-format msgid "Archive version format : %s" msgstr "Arkivets versionsformat : %s\n" #: src/libdar/header_version.cpp:511 #, fuzzy, c-format msgid "Compression algorithm used : %S" msgstr "Utnyttjad kompressionsalgoritm : %S\n" #: src/libdar/header_version.cpp:512 #, fuzzy, c-format msgid "Symmetric key encryption used : %S" msgstr " Stark kryptering : %s\n" #: src/libdar/header_version.cpp:513 #, fuzzy, c-format msgid "Asymmetric key encryption used : %S" msgstr " Stark kryptering : %s\n" #: src/libdar/header_version.cpp:514 #, fuzzy, c-format msgid "Archive is signed : %S" msgstr "Arkivets versionsformat : %s\n" #: src/libdar/header_version.cpp:515 #, fuzzy, c-format msgid "Sequential reading marks : %s" msgstr "Sekvensiell läsnings markeringar : %s\n" #: src/libdar/header_version.cpp:515 msgid "present" msgstr "befintlig" #: src/libdar/header_version.cpp:515 msgid "absent" msgstr "frÃ¥nvarande" #: src/libdar/header_version.cpp:516 #, fuzzy, c-format msgid "User comment : %S" msgstr "" "Användar kommentar : %S\n" "\n" #: src/libdar/header_version.cpp:519 #, fuzzy, c-format msgid "KDF iteration count : %S" msgstr "" "Användar kommentar : %S\n" "\n" #: src/libdar/header_version.cpp:520 #, fuzzy, c-format msgid "KDF hash algorithm : %S" msgstr "Utnyttjad kompressionsalgoritm : %S\n" #: src/libdar/header_version.cpp:522 #, fuzzy, c-format msgid "Salt size : %d byte%c" msgstr "" "Användar kommentar : %S\n" "\n" #: src/libdar/crypto_asym.cpp:86 src/libdar/crypto_asym.cpp:139 #: src/libdar/crypto_asym.cpp:175 src/libdar/crypto_asym.cpp:233 
#: src/libdar/crypto_asym.cpp:292 msgid "Unexpected error reported by GPGME: " msgstr "" #: src/libdar/crypto_asym.cpp:137 msgid "Key found but users are not all trusted" msgstr "" #: src/libdar/crypto_asym.cpp:169 msgid "No data to decrypt" msgstr "" #: src/libdar/crypto_asym.cpp:171 #, fuzzy msgid "Invalid Cipher text" msgstr "Ogiltigt arkivnummer: " #: src/libdar/crypto_asym.cpp:173 msgid "Failed retreiving passphrase" msgstr "" #: src/libdar/crypto_asym.cpp:188 #, fuzzy msgid "Failed creating GPGME context: " msgstr "Misslyckades med att notera hash: " #: src/libdar/crypto_asym.cpp:192 msgid "Failed setting GPGME context with OpenPGP protocol: " msgstr "" #: src/libdar/crypto_asym.cpp:306 #, c-format msgid "No valid signing key could be find for %S" msgstr "" #: src/libdar/crypto_asym.cpp:308 #, c-format msgid "No valid encryption key could be find for %S" msgstr "" #: src/libdar/crypto_asym.cpp:319 msgid "No signatory remain with a valid key, signing is impossible, aborting" msgstr "" #: src/libdar/crypto_asym.cpp:321 msgid "" "No recipient remain with a valid key, encryption is impossible, aborting" msgstr "" #: src/libdar/crypto_asym.cpp:394 #, c-format msgid "Passphrase required for key %s :" msgstr "" #: src/libdar/crypto_asym.cpp:415 msgid "Error, invalid passphrase given, try again:" msgstr "" #: src/libdar/crypto_asym.cpp:423 #, fuzzy msgid "Error, while sending the passphrase to GPGME:" msgstr "Fel vid öppnande av referensarkivet: " #: src/libdar/crypto_asym.cpp:425 msgid "Failed sending the totality of the passphrase to GPGME" msgstr "" #: src/libdar/crypto_asym.cpp:432 msgid "Failed sending CR after the passphrase" msgstr "" #: src/libdar/i_entrepot_libcurl.cpp:69 #, c-format msgid "protocol %S is not supported by libcurl, aborting" msgstr "" #: src/libdar/i_entrepot_libcurl.cpp:85 #, fuzzy, c-format msgid "Error met while setting verbosity on handle: %s" msgstr "Fel vid skapande av hash handle: %s/%s" #: src/libdar/i_entrepot_libcurl.cpp:125 #, 
fuzzy, c-format msgid "Error met while preparing directory listing: %s" msgstr "Fel uppstÃ¥tt vid bearbetande av Ã¥tgärd: " #: src/libdar/i_entrepot_libcurl.cpp:135 #, fuzzy, c-format msgid "Error met while listing FTP/SFTP directory %s" msgstr "Fel uppstÃ¥tt vid bearbetande av Ã¥tgärd: " #: src/libdar/i_entrepot_libcurl.cpp:312 src/libdar/i_entrepot_libcurl.cpp:317 #, fuzzy, c-format msgid "Error met while setting up connection for file %S removal: %s" msgstr "" "Fel pÃ¥träffat vid läsning av rad\n" "\t%S\n" " frÃ¥n fil %S: %S" #: src/libdar/i_entrepot_libcurl.cpp:325 #, fuzzy, c-format msgid "Error met while removing file %S" msgstr "Fel vid filöppning %s : %s" #: src/libdar/i_entrepot_libcurl.cpp:332 #, fuzzy, c-format msgid "Error met while removing file %S: %s" msgstr "Fel vid filöppning %s : %s" #: src/libdar/i_entrepot_libcurl.cpp:391 #, fuzzy, c-format msgid "Failed assigning URL to libcurl: %s" msgstr "Fel vid tilldelning av nyckel till libgcrypts nyckelhanterare: %s/%s" #: src/libdar/i_entrepot_libcurl.cpp:421 #, fuzzy, c-format msgid "Error met while setting known_hosts file: %s" msgstr "Fel vid skapande av hash handle: %s/%s" #: src/libdar/i_entrepot_libcurl.cpp:432 #, fuzzy, c-format msgid "Error met while assigning public key file: %s" msgstr "Fel vid tilldelning av nyckel till libgcrypts nyckelhanterare: %s/%s" #: src/libdar/i_entrepot_libcurl.cpp:440 #, fuzzy, c-format msgid "Error met while assigning private key file: %s" msgstr "Fel vid skapande av hash handle: %s/%s" #: src/libdar/i_entrepot_libcurl.cpp:448 #, fuzzy, c-format msgid "Error met while assigning sftp authentication methods: %s" msgstr "Fel uppstÃ¥tt vid bearbetande av Ã¥tgärd: " #: src/libdar/i_entrepot_libcurl.cpp:465 #, fuzzy, c-format msgid "Error met while passing username to libcurl: %s" msgstr "Fel uppstÃ¥tt vid bearbetande av Ã¥tgärd: " #: src/libdar/i_entrepot_libcurl.cpp:471 #, c-format msgid "" "Error met while asking libcurl to consider ~/.netrc for authentication: %s" 
msgstr "" #: src/libdar/i_entrepot_libcurl.cpp:478 #, c-format msgid "Please provide the password for login %S at host %S: " msgstr "" #: src/libdar/i_entrepot_libcurl.cpp:493 #, fuzzy, c-format msgid "Error met while setting libcurl authentication: %s" msgstr "Fel vid tilldelning av en fils egenskaper:%s" #: src/libdar/cat_mirage.cpp:141 msgid "Incoherent catalogue structure: hard linked inode's data not found" msgstr "Osammanhängande katalogstruktur: hÃ¥rdlänkade inoders data ej hittade" #: src/libdar/cat_mirage.cpp:178 msgid "Incoherent catalogue structure: hard linked data is not an inode" msgstr "Osammanhängande katalogstruktur: hÃ¥rdlänkade data är ej en inode" #: src/libdar/cat_mirage.cpp:215 msgid "Incoherent catalogue structure: duplicated hard linked inode's data" msgstr "Ej enhetlig katalogstruktur: duplicerad hÃ¥rdlänkade inodes data" #: src/libdar/cat_mirage.cpp:229 msgid "" "Incoherent catalogue structure: unknown status flag for hard linked inode" msgstr "" "Ej enhetlig katalogstruktur: obekant statusflagga för hÃ¥rdlänkad inode." 
#: src/libdar/datetime.cpp:405 #, fuzzy msgid "Unknown time unit" msgstr "Okänt val: " #: src/libdar/slice_layout.cpp:52 msgid "Missing data while reading slice_layout object" msgstr "" #: src/libdar/mask.hpp:118 msgid "TRUE" msgstr "" #: src/libdar/mask.hpp:118 msgid "FALSE" msgstr "" #: src/libdar/mask.hpp:309 msgid "AND" msgstr "" #: src/libdar/mask.hpp:342 msgid "No mask in the list of mask to operate on" msgstr "Ingen mask i masklistan att jobba med" #: src/libdar/mask.hpp:372 msgid "OR" msgstr "" #: src/libdar/mask.hpp:382 #, fuzzy msgid "No mask to operate on in the list of mask" msgstr "Ingen mask i masklistan att jobba med" #: src/libdar/archive_version.cpp:47 msgid "Archive version too high, use a more recent version of libdar" msgstr "Arkivversion är för hög, använd en senare version av 'libdar'" #: src/libdar/archive_version.cpp:77 msgid "Reached End of File while reading archive version" msgstr "NÃ¥tt filslut vid läsning av arkivversion" #: src/libdar/archive_version.cpp:90 src/libdar/archive_version.cpp:99 msgid "Unexpected value while reading archive version" msgstr "Oväntat värde vid läsning av arkivversion" #: src/libdar/archive_version.cpp:97 msgid "Reached premature end of file while reading archive version" msgstr "NÃ¥dde filslut under läsning av arkivversion" #: src/libdar/cat_device.cpp:95 src/libdar/cat_device.cpp:98 msgid "missing data to build a special device" msgstr "data saknas till att skapa en specialenhet" #: src/libdar/cat_device.cpp:146 #, c-format msgid "devices have not the same major number: %d <--> %d" msgstr "enheter har inte samma 'major'-tal: %d <--> %d" #: src/libdar/cat_device.cpp:148 #, fuzzy, c-format msgid "devices have not the same minor number: %d <--> %d" msgstr "enheter har inte samma 'minor'-tal: %d <--> %d" #: src/libdar/tuyau.cpp:143 msgid "Error while creating anonymous pipe: " msgstr "Fel vid skapande av anonym pip: " #: src/libdar/tuyau.cpp:171 msgid "Pipe's other end is not known, cannot provide a 
filedescriptor on it" msgstr "Rörs andra ända okänt, kan ej erhÃ¥lla en filbeskrivning för det" #: src/libdar/tuyau.cpp:185 msgid "" "Pipe's other end is not known, cannot close any filedescriptor pointing on it" msgstr "" "Rörs andra ända okänt, kan ej stänga nÃ¥gon filbeskrivning pekande pÃ¥ det" #: src/libdar/tuyau.cpp:312 msgid "Error while reading from pipe: " msgstr "Fel vid läsning frÃ¥n ett rör: " #: src/libdar/tuyau.cpp:366 src/libdar/tuyau.cpp:371 msgid "Error while writing data to pipe: " msgstr "Fel vid skrivning av data till ett rör: " #: src/libdar/tuyau.cpp:422 msgid "Error opening pipe: " msgstr "Fel vid öppnade av rör: " #: src/libdar/tuyau.cpp:511 msgid "File mode is neither read nor write" msgstr "FiltillstÃ¥nd är varken läsning eller skrivning" #: src/libdar/cat_inode.cpp:163 msgid "badly structured inode: unknown inode flag" msgstr "dÃ¥ligt strukturerad inod: okänd inodflagga" #: src/libdar/cat_inode.cpp:174 src/libdar/cat_inode.cpp:177 #: src/libdar/cat_inode.cpp:187 msgid "missing data to build an inode" msgstr "data saknas för att bygga en inod" #: src/libdar/cat_inode.cpp:278 #, fuzzy msgid "badly structured inode: unknown inode flag for FSA" msgstr "dÃ¥ligt strukturerad inod: okänd inodflagga" #: src/libdar/cat_inode.cpp:392 msgid "different file type" msgstr "annan filtyp" #: src/libdar/cat_inode.cpp:397 #, c-format msgid "different owner (uid): %i <--> %i" msgstr "annan ägare (uid): %i <--> %i" #: src/libdar/cat_inode.cpp:403 #, c-format msgid "different owner group (gid): %i <--> %i" msgstr "annan ägargrupp (gid): %i <--> %i" #: src/libdar/cat_inode.cpp:409 #, c-format msgid "different permission: %S <--> %S" msgstr "andra behörigheter: %S <--> %S" #: src/libdar/cat_inode.cpp:417 src/libdar/cat_file.cpp:1213 #, c-format msgid "difference of last modification date: %S <--> %S" msgstr "skillnad mot senaste ändringsdatum: %S <--> %S" #: src/libdar/cat_inode.cpp:432 msgid "different Extended Attributes" msgstr "andra Utvidgade Attribut" #: 
src/libdar/cat_inode.cpp:438 msgid "no Extended Attribute to compare with" msgstr "inga Utvidgade Attribut att jämföra med" #: src/libdar/cat_inode.cpp:440 src/libdar/cat_inode.cpp:461 msgid "" "Cannot compare EA: EA support has not been activated at compilation time" msgstr "" "Kan ej jämföra EA: EA-stöd har inte aktiverats vid kompileringstillfället" #: src/libdar/cat_inode.cpp:454 msgid "inode last change date (ctime) greater, EA might be different" msgstr "inods senaste ändringsdatum (ctime) är större, EA kan vara annat" #: src/libdar/cat_inode.cpp:459 msgid "no Extended Attributes to compare with" msgstr "inga Utvidgade Attribut att jämföra med" #: src/libdar/cat_inode.cpp:488 #, fuzzy msgid "different Filesystem Specific Attributes" msgstr "andra Utvidgade Attribut" #: src/libdar/cat_inode.cpp:494 #, fuzzy msgid "No Filesystem Specific Attribute to compare with" msgstr "inga Utvidgade Attribut att jämföra med" #: src/libdar/cat_inode.cpp:502 #, fuzzy msgid "inode last change date (ctime) greater, FSA might be different" msgstr "inods senaste ändringsdatum (ctime) är större, EA kan vara annat" #: src/libdar/cat_inode.cpp:505 msgid "Filesystem Specific Attribute are missing" msgstr "" #: src/libdar/cat_inode.cpp:792 msgid "CRC error detected while reading EA" msgstr "CRC-fel upptäckt vid läsning av EA" #: src/libdar/cat_inode.cpp:936 msgid "" "Error while reading CRC for EA from the archive: No escape mark found for " "that file" msgstr "" "Fel vid läsning CRC för EA från arkiv: Inget escape-märke funnet för denna " "fil" #: src/libdar/cat_inode.cpp:1163 #, fuzzy msgid "CRC error detected while reading FSA" msgstr "CRC-fel upptäckt vid läsning av EA" #: src/libdar/cat_inode.cpp:1287 #, fuzzy msgid "" "Error while reading CRC for FSA from the archive: No escape mark found for " "that file" msgstr "" "Fel vid läsning CRC för EA från arkiv: Inget escape-märke funnet för denna " "fil" #: src/libdar/header.cpp:103 src/libdar/header.cpp:111 #: 
src/libdar/header.cpp:114 src/libdar/header.cpp:116 msgid "Reached end of file while reading slice header" msgstr "NÃ¥tt filslut vid läsning av delsäkerhetskopiehuvud" #: src/libdar/header.cpp:130 src/libdar/header.cpp:152 msgid "" "LAX MODE: slice size is not possible to read, (lack of virtual memory?), " "continuing anyway..." msgstr "" "Lax MODE: delsäkerhetskopiestorlek ej möjlig att läsa, (brist pÃ¥ virtuellt " "minne?); fortsätter hur som helst..." #: src/libdar/header.cpp:167 msgid "" "LAX MODE: first slice size is not possible to read, (lack of virtual " "memory?), continuing anyway..." msgstr "" "LAX MODE: första delsäkerhetskopiestorlek är ej möjlig att läsa, (brist pÃ¥ " "virtuellt minne?), fortsätter hur som helst..." #: src/libdar/header.cpp:180 msgid "" "Archive format older than \"08\" (release 2.4.0) cannot be read through a " "single pipe. It only can be read using dar_slave or normal plain file (slice)" msgstr "" "Arkivformat äldre än \"08\" (utgÃ¥va 2.4.0) kan ej läsas genom ett ensamt " "rör. Det kana bara läsas genom dar_slave eller normal planfilsmetod" #: src/libdar/header.cpp:182 msgid "" "LAX MODE: first slice size is not possible to read, continuing anyway..." msgstr "" "LAX MODE: första delsäkerhetskopiestorlek är ej möjlig att läsa, fortsätter " "hur som helst..." #: src/libdar/header.cpp:197 msgid "Badly formatted SAR header (unknown TLV type in slice header)" msgstr "" "Felaktigt formaterad SAR-huvud (okänd TLV-typ i delsäkerhetskopiehuvud)" #: src/libdar/header.cpp:200 msgid "LAX MODE: Unknown data in slice header, ignoring and continuing" msgstr "" "LAX MODE: Okänt data i delsäkerhetskopiehuvud, ignorerar och fortsätter" #: src/libdar/header.cpp:379 msgid "incomplete data set name found in a slice header" msgstr "ej komplett datamängdsnamn hittat i ett delsäkerhetskopiehuvud" #: src/libdar/header.cpp:383 #, c-format msgid "" "Unknown entry found in slice header (type = %d), option not supported. 
The " "archive you are reading may have been generated by a more recent version of " "libdar, ignore this entry and continue anyway?" msgstr "" "Okänd post hittad i delsäkerhetskopiehuvud (typ= = %d), alternativ ej stött. " "Arkivet du läser kan ha skapats av en senare version av 'libdar', ignorera " "denna post och fortsätta hur som helst?" #: src/libdar/cat_file.cpp:470 msgid "cannot provide data from a \"not saved\" file object" msgstr "kan inte förse data frÃ¥n en \"not saved\"-filobjekt" #: src/libdar/cat_file.cpp:481 msgid "data has been cleaned, object is now empty" msgstr "data har rensats, objekt är nu tomt" #: src/libdar/cat_file.cpp:877 msgid "can't read data CRC: No escape mark found for that file" msgstr "kan ej läsa CRC: inget escape-märke hittat för denna fil" #: src/libdar/cat_file.cpp:1080 msgid "can't find mark for delta signature" msgstr "" #: src/libdar/cat_file.cpp:1097 #, fuzzy msgid "Error while retrieving delta signature from the archive: " msgstr "Fel vid läsning av användarsvar frÃ¥n terminal: " #: src/libdar/cat_file.cpp:1206 #, c-format msgid "not same size: %i <--> %i" msgstr "inte samma storlek: %i <--> %i" #: src/libdar/cat_file.cpp:1274 #, c-format msgid "different file data, offset of first difference is: %i" msgstr "" #: src/libdar/cat_file.cpp:1282 msgid "" "Same data but CRC value could not be verified because we did not guessed " "properly its width (sequential read restriction)" msgstr "" "Samma data men CRC-värdet kunde inte verifieras, ty vi gissade inte korrekt " "dess bredd (sekvensiell läsbegränsning)" #: src/libdar/cat_file.cpp:1284 msgid "Same data but stored CRC does not match the data!?!" msgstr "Samma data men lagrat CRC matchar inte data!?!" 
#: src/libdar/cat_file.cpp:1325 msgid "Delta signature do not match" msgstr "" #: src/libdar/cat_file.cpp:1367 #, fuzzy, c-format msgid "Delta signature do not have the same size: %i <--> %i" msgstr "inte samma storlek: %i <--> %i" #: src/libdar/cat_file.cpp:1369 msgid "Delta signature have the same size but do not match" msgstr "" #: src/libdar/cat_file.cpp:1412 #, fuzzy msgid "CRC difference concerning file's data" msgstr "olika fildata" #: src/libdar/semaphore.cpp:127 msgid "Error while converting UID/GID to string for backup hook file: " msgstr "Fel vid omvandling av UID/GID till sträng för \"backup hook file\": " #: src/libdar/filesystem_backup.cpp:215 src/libdar/filesystem_diff.cpp:193 msgid "Non existent file: " msgstr "Icke-existerande fil: " #: src/libdar/filesystem_backup.cpp:217 src/libdar/filesystem_diff.cpp:195 msgid "File must be a directory: " msgstr "Fil mÃ¥ste vara en mapp: " #: src/libdar/filesystem_backup.cpp:302 #, c-format msgid "Cannot read directory contents: %s : " msgstr "Kan ej läsa mappinnehÃ¥llet: %s : " #: src/libdar/filesystem_backup.cpp:339 msgid "Ignoring file with NODUMP flag set: " msgstr "Ignorerar fil med NODUMP-flagga satt: " #: src/libdar/filesystem_backup.cpp:347 src/libdar/filesystem_backup.cpp:352 msgid "Error reading directory contents: " msgstr "Fel vid läsning av mappinnehÃ¥ll: " #: src/libdar/filesystem_backup.cpp:347 src/libdar/filesystem_backup.cpp:352 msgid " . Ignoring file or directory" msgstr " . 
Ignorerar fil eller mapp" #: src/libdar/data_dir.cpp:74 msgid "Unexpected end of file" msgstr "Oväntat filslut" #: src/libdar/data_dir.cpp:327 msgid "" "This database has been corrupted probably due to a bug in release 2.4.0 to " "2.4.9, and it has not been possible to cleanup this corruption, please " "rebuild the database from archives or extracted \"catalogues\", if the " "database has never been used by one of the previously mentioned released, " "you are welcome to open a bug report and provide as much as possible details " "about the circumstances" msgstr "" #: src/libdar/data_dir.cpp:614 msgid "Unknown record type" msgstr "Okänd posttyp" #: src/libdar/archive5.cpp:57 msgid "listing() method must be given" msgstr "list()-metod mÃ¥ste anges" #: src/libdar/label.cpp:123 msgid "Incomplete label" msgstr "Ej fullständig etikett" #: src/libdar/crypto_sym.cpp:77 #, fuzzy msgid "" "Current implementation of blowfish encryption is not compatible with old " "(weak) implementation, use dar-2.3.x software or later (or other software " "based on libdar-4.4.x or greater) to read this archive" msgstr "" "Aktuell implementering av blowfish-kryptering är ej kompatibel med den gamla " "(svaga) implementeringen. Använd dar-2.3.x programvara (eller annan " "programvara grundad pÃ¥ libdar-4.4.x) för att läsa detta arkiv." 
#: src/libdar/crypto_sym.cpp:80 msgid "cannot use 'none' as hashing algorithm for key derivation function" msgstr "" #: src/libdar/crypto_sym.cpp:91 src/libdar/crypto_sym.cpp:164 #, c-format msgid "Cyphering algorithm not available in libgcrypt: %s/%s" msgstr "Cyphering-algoritm inte tillgänglig i libgcrypt: %s/%s" #: src/libdar/crypto_sym.cpp:97 src/libdar/crypto_sym.cpp:631 #, c-format msgid "" "Failed retrieving from libgcrypt the block size used by the cyphering " "algorithm: %s/%s" msgstr "" "Misslyckades med att hämta frÃ¥n libgcrypt den blockstorlek använd vid " "cyphering algoritmen: %s/%s" #: src/libdar/crypto_sym.cpp:114 msgid "Too large value give for key derivation interation count" msgstr "" #: src/libdar/crypto_sym.cpp:125 #, c-format msgid "Error while opening libgcrypt key handle: %s/%s" msgstr "Fel vid öppnande av 'libgcrypt key handle': %s/%s" #: src/libdar/crypto_sym.cpp:133 #, c-format msgid "Error while assigning key to libgcrypt key handle: %s/%s" msgstr "Fel vid tilldelning av nyckel till libgcrypts nyckelhanterare: %s/%s" #: src/libdar/crypto_sym.cpp:150 msgid "Missing strong encryption support (libgcrypt)" msgstr "Saknar stöd för stark kryptering (libgcrypt)" #: src/libdar/crypto_sym.cpp:169 #, fuzzy msgid "Failed retrieving from libgcrypt the maximum key length" msgstr "" "Misslyckades med att hämta frÃ¥n libgcrypt den använda nyckellängden: %s/%s" #: src/libdar/crypto_sym.cpp:202 #, fuzzy, c-format msgid "" "Error while opening libgcrypt key handle to check password strength: %s/%s" msgstr "Fel vid öppnande av 'libgcrypt key handle': %s/%s" #: src/libdar/crypto_sym.cpp:214 #, fuzzy, c-format msgid "" "Error while assigning key to libgcrypt key handle to check password " "strength: %s/%s" msgstr "Fel vid tilldelning av nyckel till libgcrypt key handle (essiv): %s/%s" #: src/libdar/crypto_sym.cpp:315 #, c-format msgid "Error while resetting encryption key for a new block: %s/%s" msgstr "Fel vid Ã¥terställning av krypteringsnyckel för ett 
nytt block: %s/%s" #: src/libdar/crypto_sym.cpp:319 src/libdar/crypto_sym.cpp:343 #, c-format msgid "Error while setting IV for current block: %s/%s" msgstr "Fel vid inställning av IV för aktuellt block: %s/%s" #: src/libdar/crypto_sym.cpp:322 #, c-format msgid "Error while cyphering data: %s/%s" msgstr "Fel vid kodning av data: %s/%s" #: src/libdar/crypto_sym.cpp:328 src/libdar/crypto_sym.cpp:352 msgid "blowfish strong encryption support" msgstr "stöd för 'blowfish' stark kryptering" #: src/libdar/crypto_sym.cpp:346 #, c-format msgid "Error while decyphering data: %s/%s" msgstr "Fel vid dechiffrering av data: %s/%s" #: src/libdar/crypto_sym.cpp:387 #, c-format msgid "Error while generating IV: %s/%s" msgstr "Fel vi skapande av IV: %s/%s" #: src/libdar/crypto_sym.cpp:429 #, c-format msgid "Error! SHA1 not available in libgcrypt: %s/%s" msgstr "Fel SHA1 ej tillgängligt i 'libgcrypt': %s/%s" #: src/libdar/crypto_sym.cpp:435 #, c-format msgid "Error while derivating key from password (HMAC open): %s/%s" msgstr "Fel vid härledning av nyckel frÃ¥n lösenord (HMAC öppnat): %s/%s" #: src/libdar/crypto_sym.cpp:441 #, c-format msgid "Error while derivating key from password (HMAC set key): %s/%s" msgstr "Fel vid härledning av key frÃ¥n lösenord (HMAC set key): %s/%s" #: src/libdar/crypto_sym.cpp:583 #, c-format msgid "Error while creating ESSIV handle: %s/%s" msgstr "Fel vid skapande av ESSIV handle: %s/%s" #: src/libdar/crypto_sym.cpp:591 #, fuzzy, c-format msgid "" "Failed retrieving from libgcrypt the key length to use (essiv key): %s/%s" msgstr "" "Misslyckades med att hämta frÃ¥n libgcrypt den använda nyckellängden: %s/%s" #: src/libdar/crypto_sym.cpp:612 #, c-format msgid "Error while assigning key to libgcrypt key handle (essiv): %s/%s" msgstr "Fel vid tilldelning av nyckel till libgcrypt key handle (essiv): %s/%s" #: src/libdar/crypto_sym.cpp:621 #, fuzzy, c-format msgid "" "Failed retrieving from libgcrypt the block size used by the cyphering " "algorithm (essiv): 
%s/%s" msgstr "" "Misslyckades med att hämta frÃ¥n libgcrypt den blockstorlek använd vid " "cyphering algoritmen: %s/%s" #: src/libdar/crypto_sym.cpp:676 src/libdar/crypto_sym.cpp:682 #: src/libdar/crypto_sym.cpp:691 src/libdar/crypto_sym.cpp:699 #: src/libdar/crypto_sym.cpp:732 msgid "Library used for blowfish encryption does not respect RFC 3962" msgstr "Bibliotek, som används för blowfish-kryptering respekterar ej RFC 3962" #: src/libdar/trivial_sar.cpp:191 #, c-format msgid "%S already exists, and overwritten is forbidden, aborting" msgstr "%S finns redan och överskrivning är ej tillÃ¥ten, avbryter" #: src/libdar/trivial_sar.cpp:193 #, c-format msgid "%S is about to be overwritten, continue ?" msgstr "%S skall till att skrivas över, fortsätta ?" #: src/libdar/trivial_sar.cpp:219 src/libdar/trivial_sar.cpp:237 #, c-format msgid "" "Directory component in %S does not exist or is a dangling symbolic link: " msgstr "" #: src/libdar/trivial_sar.cpp:510 msgid "This archive has slices and is not possible to read from a pipe" msgstr "Detta arkiv har delar och har ej möjlighet att bli lästa frÃ¥n ett rör" #: src/libdar/trivial_sar.cpp:547 msgid "" "This archive is not single sliced, more data exists in the next slices but " "cannot be read from the current pipe, aborting" msgstr "" "Detta arkiv har delar och men det är ej möjlighet att läsa följand del frÃ¥n " "ett rör, avbryter" #: src/libdar/trivial_sar.cpp:585 #, fuzzy msgid "Cannot skip to a valid position in file" msgstr "Kan ej hoppa till aktuell position i \"tronc\"" #: src/libdar/compressor.cpp:171 src/libdar/compressor.cpp:199 msgid "" "incompatible compression library version or unsupported feature required " "from compression library" msgstr "" #: src/libdar/compressor.cpp:344 msgid "compressed data is corrupted" msgstr "komprimerade data är förstörda" #: src/libdar/compressor.cpp:484 msgid "compressed data CRC error" msgstr "CRC-fel hos komprimerade data" #: src/libdar/compressor.cpp:571 
src/libdar/compressor.cpp:601 #: src/libdar/compressor.cpp:753 src/libdar/compressor.cpp:818 msgid "lzo compression" msgstr "lzo-komprimering" #: src/libdar/compressor.cpp:739 #, c-format msgid "Probable bug in liblzo2: lzo1x_*_compress returned unexpected code %d" msgstr "Möjligt fel i liblzo2: lzo1x_*_compress lämnade oväntad kod %d" #: src/libdar/compressor.cpp:774 msgid "data corruption detected: Incoherence in LZO compressed data" msgstr "förstörda data hittade: oegentligheter i LZO-komprimerade data" #: src/libdar/compressor.cpp:778 src/libdar/compressor.cpp:800 #: src/libdar/compressor.cpp:814 msgid "compressed data corruption detected" msgstr "förstörda komprimerade data hittade" #: src/libdar/compressor.cpp:788 msgid "data corruption detected: Too large block of compressed data" msgstr "förstörda data hittade: För stort block med komprimerade data" #: src/libdar/compressor.cpp:790 msgid "" "Too large block of compressed data: Either due to data corruption or current " "system limitation where SSIZE_MAX value implied smaller buffers than required" msgstr "" "För stort block med komprimerade data: antingen pÃ¥ grund av förstörda data " "eller begränsningar i aktuellt system där SSIZE_MAX-värdet indikerar mindre " "buffertar än begärda" #: src/libdar/tronc.cpp:256 src/libdar/tronc.cpp:298 msgid "Cannot skip to the current position in \"tronc\"" msgstr "Kan ej hoppa till aktuell position i \"tronc\"" #: src/libdar/tronc.cpp:310 msgid "Tried to write out of size limited file" msgstr "Försökte skriva 'out of size' begränsad fil" #: src/libdar/deci.cpp:46 msgid "invalid decimal digit" msgstr "ogiltig decimalsiffra" #: src/libdar/deci.cpp:132 msgid "an empty string is an invalid argument" msgstr "en tom sträng är ett ogiltigt argument" #: src/libdar/cat_etoile.cpp:44 msgid "Hard links of directories are not supported" msgstr "HÃ¥rda länkar för mappar stöds ej" #: src/libdar/secu_string.cpp:102 src/libdar/secu_string.cpp:141 msgid "Error while reading data for a 
secure memory:" msgstr "Fel vid läsning av data frÃ¥n säkert minne:" #: src/libdar/secu_string.cpp:119 #, fuzzy msgid "appending data over secure_memory its end" msgstr "Fel vid läsning av data frÃ¥n säkert minne:" #: src/libdar/secu_string.cpp:133 #, fuzzy msgid "appending data after the end of a secure_memory" msgstr "Fel vid läsning av data frÃ¥n säkert minne:" #: src/libdar/secu_string.cpp:136 msgid "Cannot receive that much data in regard to the allocated memory" msgstr "kan ej ta emot sÃ¥ mycket data med hänsyn till allokerat minne" #: src/libdar/secu_string.cpp:154 msgid "Cannot reduce the string to a size that is larger than its current size" msgstr "" "Kan ej reducera en sträng till en storlek, som är större än dess aktuella " "storlek" #: src/libdar/secu_string.cpp:164 msgid "secu_string randomization requested exceeds storage capacity" msgstr "" #: src/libdar/secu_string.cpp:177 msgid "Out of range index requested for a secu_string" msgstr "" #: src/libdar/generic_file.cpp:145 #, fuzzy msgid "Reading ahead a write only generic_file" msgstr "Läsning av en 'write only generic_file'" #: src/libdar/generic_file.cpp:160 msgid "Reading a write only generic_file" msgstr "Läsning av en 'write only generic_file'" #: src/libdar/generic_file.cpp:170 msgid "Writing to a read only generic_file" msgstr "Skrivning till en 'read only generic-file'" #: src/libdar/generic_file.cpp:340 msgid "Cannot compare files in write only mode" msgstr "Kan ej jämföra filer i skrivmod" #: src/libdar/generic_file.cpp:427 msgid "Cannot sync write on a read-only generic_file" msgstr "Kan ej synka skrivning med en skrivskyddad allmän fil." #: src/libdar/generic_file.cpp:438 #, fuzzy msgid "Cannot flush read a write-only generic_file" msgstr "Kan ej synka skrivning med en skrivskyddad allmän fil." 
#: src/libdar/tools.cpp:220 msgid "Not a zero terminated string in file" msgstr "Inte en nollavslutad sträng i fil" #: src/libdar/tools.cpp:258 #, c-format msgid "Cannot get file size: %s" msgstr "Kan inte hämta filstorlek: %s" #: src/libdar/tools.cpp:344 #, fuzzy msgid "Cannot read \"fcntl\" file's flags : " msgstr "Kan ej läsa fcntl-fils flaggor : " #: src/libdar/tools.cpp:350 #, fuzzy msgid "Cannot set \"fcntl\" file's flags : " msgstr "Kan sätta fcntl-fils flaggor : " #: src/libdar/tools.cpp:490 src/libdar/tools.cpp:495 #: src/dar_suite/line_tools.cpp:1139 src/dar_suite/line_tools.cpp:1144 #, fuzzy msgid "Invalid number: " msgstr "Ogiltigt arkivnummer: " #: src/libdar/tools.cpp:529 src/libdar/tools.cpp:620 msgid "Error while calling fork() to launch dar: " msgstr "Fel vid anrop av fork() för att starta dar: " #: src/libdar/tools.cpp:543 src/libdar/tools.cpp:650 msgid "Unexpected error while waiting for dar to terminate: " msgstr "Oväntat fel vid väntan pÃ¥ att dar avslutas: " #: src/libdar/tools.cpp:549 src/libdar/tools.cpp:656 msgid "DAR terminated upon signal reception: " msgstr "DAR avslutat vid signalmottagande: " #: src/libdar/tools.cpp:555 src/libdar/tools.cpp:662 msgid " . Retry to launch dar as previously ?" msgstr " . Försök att starta dar pÃ¥ samma sätt som förut ?" #: src/libdar/tools.cpp:560 src/libdar/tools.cpp:567 src/libdar/tools.cpp:667 #: src/libdar/tools.cpp:674 msgid " Continue anyway ?" msgstr " Fortsätta likväl ?" 
#: src/libdar/tools.cpp:565 src/libdar/tools.cpp:672 msgid "DAR sub-process has terminated with exit code " msgstr "DAR underprocess har avslutats med exitkod " #: src/libdar/tools.cpp:770 #, fuzzy msgid "nullptr argument given to tools_readlink()" msgstr "Tomt argument givet till 'tools_readlink'" #: src/libdar/tools.cpp:772 #, fuzzy msgid "Empty string given as argument to tools_readlink()" msgstr "Tom sträng givet som argument till 'tools_readlink'" #: src/libdar/tools.cpp:799 #, c-format msgid "Cannot read file information for %s : %s" msgstr "Kan ej läsa filinformation för %s : %s" #: src/libdar/tools.cpp:886 #, fuzzy msgid "Cannot set birth time: " msgstr "Kan inte hämta mtime: %s" #: src/libdar/tools.cpp:916 msgid "Cannot set last access and last modification time: " msgstr "Kan ej sätta senaste tillgÃ¥ngtid och senaste modifieringstid: " #: src/libdar/tools.cpp:994 msgid "Aborting program: child process died unexpectedly" msgstr "" #: src/libdar/tools.cpp:1002 #, fuzzy, c-format msgid "Error trying to run %s: %s" msgstr "Fel vid räddning av EA-lista för %s : %s" #: src/libdar/tools.cpp:1005 #, fuzzy msgid "execvp() failed but did not returned error code" msgstr "execvp misslyckades, men gav ingen felkod" #: src/libdar/tools.cpp:1136 #, c-format msgid "Removing file %s" msgstr "Tar bort fil %s" #: src/libdar/tools.cpp:1157 #, fuzzy, c-format msgid "Error removing file %s: %S" msgstr "Fel vid filöppning %s : %s" #: src/libdar/tools.cpp:1191 #, c-format msgid "" "Overwriting not allowed while a slice of a previous archive with the same " "basename has been found in the %s directory, Operation aborted" msgstr "" "Överskrivning ej tillÃ¥ten dÃ¥ en del av ett tidigare arkiv med samma basnamn " "har hittats i %s-mappen. Ã…tgärden avbruten" #: src/libdar/tools.cpp:1197 #, fuzzy, c-format msgid "" "At least one slice of an old archive with the same name remains in the " "directory %s. 
It is advised to remove all the old archive's slices before " "creating an archive of same name. Can I remove these old slices?" msgstr "" "Ã…tminstone en del av ett gammalt arkiv med samma basnamn Ã¥terstÃ¥r i mappen " "%s . Om du inte tar bort denna först, kommer du att fÃ¥ svÃ¥righeter att " "identifiera den sista delen av det arkiv som du ar pÃ¥ väg att skapa, ty den " "kan bli gömd bland delarna i detta gamla arkiv. Skall vi ta bort det gamla " "arkivets delar först ?" #: src/libdar/tools.cpp:1217 src/libdar/tools.cpp:1224 #, fuzzy, c-format msgid "Cannot get inode information for %s: %s" msgstr "Kan ej fÃ¥ inodinformation för %s : %s" #: src/libdar/tools.cpp:1234 msgid "Current Working Directory cannot be a relative path" msgstr "Aktuell arbetsmapp kan ej vara en relativ sökväg" #: src/libdar/tools.cpp:1251 msgid "Cannot block signals: " msgstr "Kan ej blockera signal: " #: src/libdar/tools.cpp:1261 msgid "Cannot unblock signals: " msgstr "Kan ej frisläppa signal: " #: src/libdar/tools.cpp:1292 src/libdar/tools.cpp:1325 #, fuzzy, c-format msgid "Cannot get last modification date: %s" msgstr "Kan ej sätta senaste tillgÃ¥ngtid och senaste modifieringstid: " #: src/libdar/tools.cpp:1329 #, c-format msgid "Cannot get size of %S: not a plain file" msgstr "" #: src/libdar/tools.cpp:1380 #, c-format msgid "Unknown suffix [%c] in string %S" msgstr "Okänt suffix [%c] i sträng %S" #: src/libdar/tools.cpp:1407 msgid "Unknown substitution string: %" msgstr "Okänd utbytessträng :%" #: src/libdar/tools.cpp:1414 #, fuzzy msgid "" "last char of user command-line to execute is '%', (use '%%' instead to avoid " "this message)" msgstr "" "sista tecknet i användarkommandoraden är '%', (använd '%%' i stället sÃ¥ " "undviks detta meddelande). Ignorera det och fortsätta ?" #: src/libdar/tools.cpp:1470 msgid "execve() failed. (process table is full ?)" msgstr "execve() misslyckades. 
(full processtabell ?)" #: src/libdar/tools.cpp:1472 msgid "system() call failed: " msgstr "system()-anrop misslyckades: " #: src/libdar/tools.cpp:1474 #, c-format msgid "execution of [ %S ] returned error code: %d" msgstr "utförande av [ %S ] resulterade i felkod: %d" #: src/libdar/tools.cpp:1481 msgid "Error during user command line execution: " msgstr "Fel vid utförande av användarkommandoradsexekvering: " #: src/libdar/tools.cpp:1481 msgid " . Retry command-line ?" msgstr " . Försöka med kommandoraden igen ?" #: src/libdar/tools.cpp:1486 msgid "Ignore previous error on user command line and continue ?" msgstr "Ignorera föregÃ¥ende fel frÃ¥n användarkommandorad och fortsätta ?" #: src/libdar/tools.cpp:1528 msgid "Fatal error on user command line: " msgstr "Ödesdigert fel i användarkommandorad: " #: src/libdar/tools.cpp:1610 src/libdar/tools.cpp:1616 #, fuzzy msgid "Badly formated octal number" msgstr "DÃ¥ligt formaterad databas" #: src/libdar/tools.cpp:1721 #, fuzzy msgid "Cannot get effective permission given a file descriptor: " msgstr "Kan ej Ã¥terställe behörigheter för %s : %s" #: src/libdar/tools.cpp:1737 #, c-format msgid "Error while setting file permission: %s" msgstr "Fel vid tilldelning av en fils egenskaper:%s" #: src/libdar/tools.cpp:1758 #, fuzzy msgid "An empty string is not a valid user name" msgstr "En tom sträng är ej en giltig sökväg" #: src/libdar/tools.cpp:1796 src/libdar/tools.cpp:1818 #, fuzzy msgid "Unknown user" msgstr "Okänd användare: %S" #: src/libdar/tools.cpp:1798 src/libdar/tools.cpp:1820 #, fuzzy, c-format msgid "Error found while looking for UID of user %s: %S" msgstr "Fel vid anrop av fork() för att starta dar: " #: src/libdar/tools.cpp:1828 src/libdar/tools.cpp:1922 msgid "" "Cannot convert username to uid in statically linked binary, either directly " "provide the UID or run libdar from a dynamically linked executable" msgstr "" #: src/libdar/tools.cpp:1853 #, fuzzy msgid "An empty string is not a valid group name" msgstr "En 
tom sträng är ej en giltig sökväg" #: src/libdar/tools.cpp:1889 src/libdar/tools.cpp:1912 #, fuzzy msgid "Unknown group" msgstr "Okänd användare: %S" #: src/libdar/tools.cpp:1891 #, c-format msgid "Error found while looking fo GID of group %s: %S" msgstr "" #: src/libdar/tools.cpp:1914 #, fuzzy, c-format msgid "Error found while looking for GID of group %s: %S" msgstr "Fel vid anrop av fork() för att starta dar: " #: src/libdar/tools.cpp:1951 #, c-format msgid "Error while setting file user ownership: %s" msgstr "Fel vid inställning av ägare till fil: %s" #: src/libdar/tools.cpp:2077 src/dar_suite/line_tools.cpp:1497 msgid "Cannot get full path of current working directory: " msgstr "Kan ej fÃ¥ komplett sökväg till gällande arbetsmapp: " #: src/libdar/tools.cpp:2110 #, fuzzy msgid "Worse" msgstr "[Värre]" #: src/libdar/tools.cpp:2130 #, c-format msgid "Error code %d to message conversion failed" msgstr "" #: src/libdar/tools.cpp:2185 #, fuzzy msgid "Invalid wide-char found in string: " msgstr "Ogiltigt tal i sträng: %S" #: src/libdar/tools.cpp:2298 #, fuzzy, c-format msgid "Error unlinking %S: %s" msgstr "Fel vid öppning av %s : " #: src/libdar/tools.cpp:2345 msgid "" "Cannot convert negative floating point value to unsigned (positive) integer" msgstr "" #: src/libdar/archive_options.cpp:169 msgid "Cannot get catalogue of reference as it has not been provided" msgstr "Kan ej fÃ¥ referenskatalog dÃ¥ den ej har lämnats" #: src/libdar/archive_options.cpp:187 msgid "Error, catalogue of reference has not been provided" msgstr "Fel, referenskatalog har ej lämnats" #: src/libdar/archive_options.cpp:1518 src/libdar/archive_options.cpp:1525 msgid "No mask available" msgstr "" #: src/libdar/get_version.cpp:142 msgid "Initialization problem for liblzo2 library" msgstr "Initieringsproblem för liblzo2-biblioteket" #: src/libdar/get_version.cpp:158 #, c-format msgid "Error while activating libgcrypt's memory guard: %s/%s" msgstr "Fel vid aktivering av 'libgcrypts' minnesskydd: 
%s/%s" #: src/libdar/get_version.cpp:165 src/libdar/get_version.cpp:183 #, fuzzy, c-format msgid "Too old version for libgcrypt, minimum required version is %s" msgstr "För gammal version av libgcrypt, som minst krävs version %s\n" #: src/libdar/get_version.cpp:174 #, c-format msgid "Error while telling libgcrypt that initialization is finished: %s/%s" msgstr "" "Fel vid meddelande till 'libgcrypt' att initialiseringen är avslutad: %s/%s" #: src/libdar/get_version.cpp:179 msgid "libgcrypt not initialized and libdar not allowed to do so" msgstr "'libgcrypt' är ej initialiserad och 'libdar' ej tillÃ¥ten att göra sÃ¥" #: src/libdar/get_version.cpp:194 #, c-format msgid "GPGME version requirement is not satisfied, requires version > %s" msgstr "" #: src/libdar/get_version.cpp:198 #, c-format msgid "GPGME engine not available: %s" msgstr "" #: src/libdar/get_version.cpp:208 src/libdar/get_version.cpp:212 #: src/libdar/get_version.cpp:214 #, fuzzy, c-format msgid "libcurl initialization failed: %s" msgstr "Minnestilldelning misslyckades : %s" #: src/libdar/i_archive.cpp:110 #, fuzzy, c-format msgid "Opening archive %s ..." msgstr "Läser arkivinnehÃ¥ll..." #: src/libdar/i_archive.cpp:140 msgid "header only mode asked" msgstr "" #: src/libdar/i_archive.cpp:153 #, c-format msgid "" "Opening the archive of reference %s to retreive the isolated catalog ... 
" msgstr "" #: src/libdar/i_archive.cpp:164 msgid "" "Reading the archive of reference from pipe or standard input is not possible" msgstr "" "Läsning av referensarkivet frÃ¥n ett rör eller standard input är ej möjligt" #: src/libdar/i_archive.cpp:166 msgid "" "The basename '+' is reserved for special a purpose that has no meaning in " "this context" msgstr "" "Basnamnet '+' är reserverat för speciella ändamÃ¥l, som inte har nÃ¥gon mening " "i detta sammanhang" #: src/libdar/i_archive.cpp:209 msgid "Error while opening the archive of reference: " msgstr "Fel vid öppnande av referensarkivet: " #: src/libdar/i_archive.cpp:224 msgid "Loading isolated catalogue in memory..." msgstr "" #: src/libdar/i_archive.cpp:236 msgid "" "Archive of reference is not signed properly (no the same signatories for the " "archive and the internal catalogue), do we continue?" msgstr "" #: src/libdar/i_archive.cpp:243 msgid "" "The archive and the isolated catalogue do not correspond to the same data, " "they are thus incompatible between them" msgstr "" "Arkivert och den isolerade katalogen mostavar inte samma data, de är sÃ¥lunda " "inkompatibla med varann" #: src/libdar/i_archive.cpp:259 #, fuzzy msgid "Loading catalogue into memory..." msgstr "Katalog saknas i fil." #: src/libdar/i_archive.cpp:284 msgid "" "No data found in that archive, sequentially reading the catalogue found at " "the end of the archive..." msgstr "" #: src/libdar/i_archive.cpp:314 msgid "" "The catalogue will be filled while sequentially reading the archive, " "preparing the data structure..." 
msgstr "" #: src/libdar/i_archive.cpp:354 #, c-format msgid "" "LAX MODE: The end of the archive is corrupted, cannot get the archive " "contents (the \"catalogue\")" msgstr "" "LAX MODE: Slutet pÃ¥ arkivet är förstört, kan ej hämta arkivinnehÃ¥llet " "(\"katalogen\")" #: src/libdar/i_archive.cpp:355 msgid "" "LAX MODE: Do you want to bypass some sanity checks and try again reading the " "archive contents (this may take some time, this may also fail)?" msgstr "" "LAX MODE: vill du hoppa över nÃ¥gra vettighetskontroller och försöka igen att " "läsa arkivinnehÃ¥llet (detta kan lite tid, detta kan även misslyckas)?" #: src/libdar/i_archive.cpp:370 #, c-format msgid "" "LAX MODE: Could not find a whole catalogue in the archive. If you have an " "isolated catalogue, stop here and use it as backup of the internal " "catalogue, else continue but be advised that all data will not be able to be " "retrieved..." msgstr "" "LAX MODE: Kunde inte hitta hela katalogen i arkivet. Om du har en isolerad " "katalog, stanna här och använd den som säkerhetskopia för den interna " "katalogen, i annat fall fortsätt, men kom ihÃ¥g att alla data inte kan " "Ã¥terställas..." #: src/libdar/i_archive.cpp:371 msgid "" "LAX MODE: Do you want to try finding portions of the original catalogue if " "some remain (this may take even more time and in any case, it will only " "permit to recover some files, at most)?" msgstr "" "LAX MODE: vill du försöka finna delar av originalkatalogen om nÃ¥got Ã¥terstÃ¥r " "(detta kan ta mycket tid och in vilket fall som helst kommer bara nÃ¥gra " "filer, som mest, att Ã¥terställas.)?" 
#: src/libdar/i_archive.cpp:549 src/libdar/i_archive.cpp:1947 msgid "Compression_level must be between 1 and 9 included" msgstr "KompressionsnivÃ¥n mÃ¥ste ligga mellan 1 och 9, inklusive" #: src/libdar/i_archive.cpp:551 src/libdar/i_archive.cpp:1949 msgid "" "\"first_file_size\" cannot be different from zero if \"file_size\" is equal " "to zero" msgstr "" "\"första_filstorleken\" kan inte vara skild frÃ¥n noll om \"filstorlek\" är " "lika med noll" #: src/libdar/i_archive.cpp:553 src/libdar/i_archive.cpp:1951 msgid "Crypto block size must be greater than 10 bytes" msgstr "Krypteringsblockstorlek mÃ¥ste vara större än 10 byte" #: src/libdar/i_archive.cpp:581 #, fuzzy msgid "Both reference archive are nullptr, cannot merge archive from nothing" msgstr "Bägge referensarkiv är NULL, kan ej sammanfoga frÃ¥n ingenting" #: src/libdar/i_archive.cpp:613 msgid "" "the \"Keep file compressed\" feature is not possible when merging two " "archives using different compression algorithms (This is for a future " "version of dar). You can still merge these two archives but without keeping " "file compressed (thus you will probably like to use compression (-z or -y " "options) for the resulting archive" msgstr "" "Egenskapen \"BehÃ¥lla fil komprimerad\" är inte möjlig när man slÃ¥r ihop tvÃ¥ " "arkiv med olika komprimeringsalgoritmer. (Detta är för en kommande version " "av dar). Du kan fortfarande slÃ¥ ihop dessa tvÃ¥ arkiv men utan att behÃ¥lla " "filer komprimerade (med ändÃ¥ vill du kanske använda komprimering (-z eller -" "y-alternativ) för det resulterande arkivet" #: src/libdar/i_archive.cpp:637 msgid "" "Cannot calculate delta signature when merging if keep compressed is asked" msgstr "" #: src/libdar/i_archive.cpp:639 msgid "" "To calculate delta signatures of files saved as sparse files, you need to " "activate sparse file detection mechanism with merging operation" msgstr "" #: src/libdar/i_archive.cpp:874 #, c-format msgid "" "Archive repairing completed. WARNING! 
it is strongly advised to test the " "resulting archive before removing the damaged one" msgstr "" #: src/libdar/i_archive.cpp:901 src/libdar/i_archive.cpp:1309 msgid "This archive is not exploitable, check documentation for more" msgstr "" "Detta arkiv kan inte utnyttjas, kontrollera dokumentationen för mer " "information" #: src/libdar/i_archive.cpp:915 msgid "" "File ownership will not be restored du to the lack of privilege, you can " "disable this message by asking not to restore file ownership" msgstr "" #: src/libdar/i_archive.cpp:964 msgid "Error while restoring data: " msgstr "Fel vid Ã¥terställande av data. " #: src/libdar/i_archive.cpp:992 #, fuzzy, c-format msgid "Catalogue size in archive : %i bytes" msgstr "Katalogstorlek i arkiv : %i byte\n" #: src/libdar/i_archive.cpp:994 #, fuzzy, c-format msgid "Catalogue size in archive : N/A" msgstr "Katalogstorlek i arkiv : %i byte\n" #: src/libdar/i_archive.cpp:1000 #, fuzzy, c-format msgid "Archive is composed of %i file(s)" msgstr "Arkivet bestÃ¥r av %i fil(er)\n" #: src/libdar/i_archive.cpp:1004 #, fuzzy, c-format msgid "File size: %i bytes" msgstr "Filstorlek: %i byte\n" #: src/libdar/i_archive.cpp:1013 #, fuzzy, c-format msgid "First file size : %i bytes" msgstr "Första filens storlek : %i byte\n" #: src/libdar/i_archive.cpp:1014 #, fuzzy, c-format msgid "File size : %i bytes" msgstr "Filstorlek : %i byte\n" #: src/libdar/i_archive.cpp:1015 #, fuzzy, c-format msgid "Last file size : %i bytes" msgstr "Sista filens storlek : %i byte\n" #: src/libdar/i_archive.cpp:1016 #, fuzzy, c-format msgid "Archive total size is : %i bytes" msgstr "Arkivets totala storlek är : %i byte\n" #: src/libdar/i_archive.cpp:1024 #, fuzzy, c-format msgid "Archive size is: %i bytes" msgstr "Arkivstorlek är: %i byte\n" #: src/libdar/i_archive.cpp:1025 #, fuzzy, c-format msgid "Previous archive size does not include headers present in each slice" msgstr "" "FöregÃ¥ende arkivstorlek inbegriper inte det huvud, som finns i varje del\n" 
#: src/libdar/i_archive.cpp:1028 #, fuzzy, c-format msgid "Archive size is unknown (reading from a pipe)" msgstr "Detta arkiv har delar och har ej möjlighet att bli lästa frÃ¥n ett rör" #: src/libdar/i_archive.cpp:1034 #, c-format msgid "" "The overall archive size includes %i byte(s) wasted due to bad compression " "ratio" msgstr "" #: src/libdar/i_archive.cpp:1039 msgid "The global data compression ratio is: " msgstr "" #: src/libdar/i_archive.cpp:1046 #, fuzzy, c-format msgid "" "\n" "WARNING! This archive only contains the catalogue of another archive, it can " "only be used as reference for differential backup or as rescue in case of " "corruption of the original archive's content. You cannot restore any data " "from this archive alone\n" msgstr "" "\n" "VARNING! Detta arkiv innehÃ¥ller bara innehÃ¥llet av ett annat arkiv, det kan " "endast användas som referens vid differentiell säkerhetskopiering eller som " "räddning vid förstört innehÃ¥ll i originalarkivet. Du kan inte Ã¥terställa " "nÃ¥gra data frÃ¥n detta arkiv ensamt\n" #: src/libdar/i_archive.cpp:1072 src/libdar/i_archive.cpp:1387 msgid "" "This archive is not exploitable, check the archive class usage in the API " "documentation" msgstr "" "Detta arkiv kan inte utnyttjas, kontrollera, hur man använder arkivklassen, " "i API-dokumentationen" #: src/libdar/i_archive.cpp:1157 src/libdar/i_archive.cpp:1637 #: src/libdar/i_archive.cpp:1670 msgid "" "Fetching EA value while listing an archive is not possible in sequential " "read mode" msgstr "" #: src/libdar/i_archive.cpp:1163 msgid "slicing focused output is not available in sequential-read mode" msgstr "" #: src/libdar/i_archive.cpp:1171 #, c-format msgid "" "Using user provided modified slicing (first slice = %i bytes, other slices = " "%i bytes)" msgstr "" #: src/libdar/i_archive.cpp:1174 msgid "" "No slice layout of the archive of reference for the current isolated " "catalogue is available, cannot provide slicing information, aborting" msgstr "" #: 
src/libdar/i_archive.cpp:1247 #, c-format msgid "Exception caught from archive_listing_callback execution: %s" msgstr "" #: src/libdar/i_archive.cpp:1252 msgid "Exception caught from archive_listing_callback execution" msgstr "" #: src/libdar/i_archive.cpp:1281 msgid "Error while listing archive contents: " msgstr "Fel vid listning av arkivinnehÃ¥ll: " #: src/libdar/i_archive.cpp:1357 msgid "Error while comparing archive with filesystem: " msgstr "Fel vid jämförelse mellan arkiv och filsystem: " #: src/libdar/i_archive.cpp:1399 msgid "" "WARNING! This is an isolated catalogue, no data or EA is present in this " "archive, only the catalogue structure can be checked" msgstr "" "VARNING! Detta är en isolerad katalog, inga data eller EA finns i detta " "arkiv, endast katalogstrukturen kan kontrolleras" #: src/libdar/i_archive.cpp:1433 msgid "A problem occurred while reading this archive contents: " msgstr "Ett problem inträffade vid läsning av innehÃ¥let i detta arkiv: " #: src/libdar/i_archive.cpp:1448 msgid "Error while testing archive: " msgstr "Fel vid arkivtest: " #: src/libdar/i_archive.cpp:1497 msgid "" "Isolation with delta signature is not possible on a just created archive (on-" "fly isolation)" msgstr "" #: src/libdar/i_archive.cpp:1954 msgid "" "nodump flag feature has not been activated at compilation time, it is thus " "not available" msgstr "" "--nodump-funktionen har ej aktiverats vid kompileringstillfället, sÃ¥lunda är " "den ej tillgängligt" #: src/libdar/i_archive.cpp:1981 msgid "" "Cannot sequentially read an archive of reference when delta signature or " "delta patch is requested" msgstr "" #: src/libdar/i_archive.cpp:2017 #, c-format msgid "" "WARNING! The archive is located in the directory to backup, this may create " "an endless loop when the archive will try to save itself. You can either add " "-X \"%S.*.%S\" on the command line, or change the location of the archive " "(see -h for help). Do you really want to continue?" msgstr "" "Varning! 
Arkivet är placerat i backupmappen, detta kan skapa en ändlös " "slinga när arkivet försöker spara sig själv. Du kan antingen lägga till -X " "\"%S.*.%S\" till kommandoraden eller ändra platsen för arkivet. (se -h för " "hjälp) Vill du verkligen fortsätta?" #: src/libdar/i_archive.cpp:2203 msgid "Ready to start writing down the archive?" msgstr "Klart att börja skriva till arkivet?" #: src/libdar/i_archive.cpp:2249 msgid "Building the catalog object..." msgstr "" #: src/libdar/i_archive.cpp:2267 #, c-format msgid "Error while fetching information for %S: " msgstr "Fel vid hämtning av information för %S: " #: src/libdar/i_archive.cpp:2320 msgid "Processing files for backup..." msgstr "" #: src/libdar/i_archive.cpp:2383 #, fuzzy msgid "Processing files for merging..." msgstr "Kontrollerar filers datumordning..." #: src/libdar/i_archive.cpp:2415 #, fuzzy msgid "Processing files for fixing..." msgstr "Kontrollerar filers datumordning..." #: src/libdar/i_archive.cpp:2497 msgid "" "Adding reference to files that have been destroyed since reference backup..." msgstr "" "Lägger till hänvisning till filer, som har förstörts sedan " "referensbackuppen..." #: src/libdar/i_archive.cpp:2552 msgid "Error while saving data: " msgstr "Fel vid datasparande: " #: src/libdar/i_archive.cpp:2578 msgid "WARNING! Incorrect signature found for archive, continue anyway?" msgstr "" #: src/libdar/i_archive.cpp:2621 msgid "" "Cannot get data name of the archive, this archive is not completely " "initialized" msgstr "" "Kan ej hämta datanamn för arkivet, detta arkiv är ej helt initialiserat" #: src/libdar/i_archive.cpp:2646 msgid "" "This archive contains an isolated catalogue, it cannot be used for this " "operation. It can only be used as reference for a incremental/differential " "backup or as backup of the original archive's catalogue" msgstr "" "Detta arkiv innehÃ¥ller en isolerad katalog, det kan ej användas för denna " "Ã¥tgärd. 
Det kan bara användas som referens vid inkrementell/differentiell " "säkerhetskopiering eller som säkerhetskopiering av det ursprungliga arkivets " "katalog" #: src/libdar/i_archive.cpp:2653 msgid "" "LAX MODE: Archive seems to be only an isolated catalogue (no data in it), " "Can I assume data corruption occurred and consider the archive as being a " "real archive?" msgstr "" "LAX MODE: Arkivet verkar utgöras av bara en isolerad katalog (inga data i " "det). Kan det antagas att data förstörts och kan det arkivet betraktas som " "ett riktigt arkiv?" #: src/libdar/i_archive.cpp:2658 msgid "Error while fetching archive properties: " msgstr "Fel vid hämtning av arkivegenskaper: " #: src/libdar/i_archive.cpp:2683 #, fuzzy msgid "Sorry, file size is unknown at this step of the program." msgstr "Tyvärr, filstorleken är okänd vid detta stadium i programmet.\n" #: src/libdar/i_archive.cpp:2782 #, c-format msgid "" "libgcrypt version < %s. Ligcrypt used has a bug that leads md5 and sha1 hash " "results to be erroneous for files larger than 256 Gio (gibioctet), do you " "really want to spend CPU cycles calculating a useless hash?" msgstr "" #: src/libdar/data_tree.cpp:94 msgid "reached End of File before all expected data could be read" msgstr "nÃ¥dde filslut innan alla väntade data kunde läsas" #: src/libdar/data_tree.cpp:119 msgid "Unexpected value found in database" msgstr "Oväntat värde hitta i databas" #: src/libdar/data_tree.cpp:928 #, c-format msgid "" "Dates of file's %S are not increasing when database's archive number grows. " "Concerned file is: %S" msgstr "" "Fils datum %S ökar ej när databasens arkivnummer växer. Berörd fil är: %S" #: src/libdar/data_tree.cpp:931 msgid "" "Dates are not increasing for all files when database's archive number grows, " "working with this database may lead to improper file's restored version. 
" "Please reorder the archive within the database in the way that the older is " "the first archive and so on up to the most recent archive being the last of " "the database" msgstr "" "datum är ej i stigande ordning för alla filer, när databasens arkivnummer " "ökar, arbete med denna databas kan leda till felaktig version för Ã¥terställd " "fil. Ordna om arkivet inom databasen sÃ¥ att det äldsta är det första arkivet " "och det senaste är det sista i databasen" #: src/libdar/data_tree.cpp:934 msgid "Do you want to ignore the same type of error for other files?" msgstr "Vill du ignorera samma feltyp för andra filer?" #: src/libdar/slave_zapette.cpp:66 msgid "Input cannot be read" msgstr "'Input' kan ej läsas" #: src/libdar/slave_zapette.cpp:68 msgid "Cannot write to output" msgstr "Kan ej skriva till 'output'" #: src/libdar/slave_zapette.cpp:70 msgid "Data should be read-only" msgstr "Data bör vara endast läsning" #: src/libdar/slave_zapette.cpp:146 msgid "Cannot skip at end of file" msgstr "Kan ej hoppa över vid filslut" #: src/libdar/slave_zapette.cpp:199 msgid "Received unknown special order" msgstr "Mottaget okänd specialorder" #: src/libdar/delta_sig_block_size.cpp:66 msgid "Invalid divisor used for delta signature block len calculation" msgstr "" #: src/libdar/delta_sig_block_size.cpp:68 msgid "" "minimum size should be lesser or equal than maximum size when specifying " "delta signature block size formula" msgstr "" #: src/libdar/filesystem_specific_attribute.hpp:273 msgid "true" msgstr "" #: src/libdar/filesystem_specific_attribute.hpp:273 msgid "false" msgstr "" #: src/libdar/filesystem_specific_attribute.cpp:233 #: src/libdar/filesystem_specific_attribute.cpp:1166 msgid "invalid length for FSA family flag" msgstr "" #: src/libdar/filesystem_specific_attribute.cpp:239 msgid "invalid length for FSA nature flag" msgstr "" #: src/libdar/filesystem_specific_attribute.cpp:716 msgid "Failed setting (opening) extX family FSA: " msgstr "" #: 
src/libdar/filesystem_specific_attribute.cpp:729 #, fuzzy msgid "Failed reading existing extX family FSA: " msgstr "Fel vid läsning frÃ¥n fil: " #: src/libdar/filesystem_specific_attribute.cpp:758 #: src/libdar/filesystem_specific_attribute.cpp:773 #: src/libdar/filesystem_specific_attribute.cpp:788 #: src/libdar/filesystem_specific_attribute.cpp:803 #: src/libdar/filesystem_specific_attribute.cpp:824 #: src/libdar/filesystem_specific_attribute.cpp:840 #: src/libdar/filesystem_specific_attribute.cpp:855 #: src/libdar/filesystem_specific_attribute.cpp:870 #: src/libdar/filesystem_specific_attribute.cpp:885 #: src/libdar/filesystem_specific_attribute.cpp:900 #: src/libdar/filesystem_specific_attribute.cpp:915 #: src/libdar/filesystem_specific_attribute.cpp:930 #, c-format msgid "" "Warning: FSA %s/%s support has not been found at compilation time, cannot " "restore it for inode %s" msgstr "" #: src/libdar/filesystem_specific_attribute.cpp:975 msgid "Failed set extX family FSA: " msgstr "" #: src/libdar/filesystem_specific_attribute.cpp:1002 #, c-format msgid "" "Not setting FSA extX IMMUTABLE flags for %s due to of lack of capability" msgstr "" #: src/libdar/filesystem_specific_attribute.cpp:1030 #, c-format msgid "" "Not setting FSA extX SYSTEM RESOURCE flags for %s due to of lack of " "capability" msgstr "" #: src/libdar/filesystem_specific_attribute.cpp:1048 #, c-format msgid "" "Warning! %s Filesystem Specific Attribute support have not been activated at " "compilation time and could not be restored for %s" msgstr "" #: src/libdar/filesystem_specific_attribute.cpp:1068 #, c-format msgid "" "Birth Time attribute cannot be restored for %s because no FSA familly able " "to carry that attribute could be activated at compilation time." 
msgstr "" #: src/libdar/filesystem_specific_attribute.cpp:1172 #: src/libdar/filesystem_specific_attribute.cpp:1173 msgid "invalid FSA family flag" msgstr "" #: src/libdar/filesystem_specific_attribute.cpp:1207 #: src/libdar/filesystem_specific_attribute.cpp:1209 msgid "invalid FSA nature flag" msgstr "" #: src/libdar/filesystem_specific_attribute.cpp:1233 msgid "Unexepected value for boolean FSA, data corruption may have occurred" msgstr "" #: src/libdar/filesystem_specific_attribute.cpp:1237 #, fuzzy msgid "Error while reading FSA: " msgstr "Fel vid läsning frÃ¥n fil: " #: src/libdar/op_tools.cpp:48 src/libdar/op_tools.cpp:119 #: src/libdar/op_tools.cpp:199 #, fuzzy, c-format msgid "" "Conflict found while selecting the file to retain in the resulting archive:" msgstr "" "Konflikt funnen vid väljande av fil att behÃ¥lla i det resulterande arkivet:" #: src/libdar/op_tools.cpp:49 #, c-format msgid "User Decision requested for data of file %S" msgstr "Användarbeslut begärt för data hos fil %S" #: src/libdar/op_tools.cpp:52 msgid "" "\n" "Your decision about file's data:\n" "[P]reserve\n" "[O]verwrite\n" "mark [S]aved and preserve\n" "mark saved and overwri[T]e\n" "[R]emove\n" "[*] keep undefined\n" "[A]bort\n" " Your Choice? " msgstr "" "\n" "Ditt beslut om fildata:\n" "[P] bevara\n" "[O] skriva över\n" "[S] märk som sparad och bevara\n" "[T] märk som sparad och överskriv\n" "[R] tag bort\n" "[*] behÃ¥ll odefinierad\n" "[A] avbryt\n" " Ditt val? " #: src/libdar/op_tools.cpp:54 src/libdar/op_tools.cpp:125 #: src/libdar/op_tools.cpp:205 msgid "" "Please answer by the character between brackets ('[' and ']') and press " "return" msgstr "" "Svara med de tecken mellan hakparenteser ('[' och ']') och tryck vagnretur" #: src/libdar/op_tools.cpp:84 src/libdar/op_tools.cpp:163 #: src/libdar/op_tools.cpp:231 #, c-format msgid "" "Warning, are you sure you want to abort (please answer \"%S\" to confirm)? 
" msgstr "" "Varning, är du säker du vill avbryta (svara med \"%S\" för att bekräfta)?" #: src/libdar/op_tools.cpp:88 src/libdar/op_tools.cpp:167 #: src/libdar/op_tools.cpp:235 msgid "Cancellation no confirmed" msgstr "Avbrott ej bekräftat" #: src/libdar/op_tools.cpp:91 src/libdar/op_tools.cpp:170 #: src/libdar/op_tools.cpp:238 msgid "Unknown choice: " msgstr "Okänt val: " #: src/libdar/op_tools.cpp:120 #, c-format msgid "User Decision requested for EA of file %S" msgstr "Användarbeslut begärt för EA hos fil %S" #: src/libdar/op_tools.cpp:123 msgid "" "\n" "Your decision about file's EA:\n" "[p]reserve\n" "[o]verwrite\n" "mark [s]aved and preserve\n" "mark saved and overwri[t]e\n" "[m]erge EA and preserve\n" "merge EA a[n]d overwrite\n" "[r]emove\n" "[*] keep undefined\n" "[a]bort\n" " Your choice? " msgstr "" "\n" "ditt beslut om filens EA:\n" "[p] bevara\n" "[o] skrivöver\n" "[s] märk som sparad och bevara\n" "[t] märk som sparad och överskriv\n" "[m] slÃ¥ ihop EA och bevara\n" "[n] slÃ¥ ihop EA och överskriv\n" "[r] tag bort\n" "[*] behÃ¥ll oförändrad\n" "[a] avbryt\n" " Ditt val? " #: src/libdar/op_tools.cpp:200 #, fuzzy, c-format msgid "User Decision requested for FSA of file %S" msgstr "Användarbeslut begärt för EA hos fil %S" #: src/libdar/op_tools.cpp:203 #, fuzzy msgid "" "\n" "Your decision about file's FSA:\n" "[p]reserve\n" "[o]verwrite\n" "mark [s]aved and preserve\n" "mark saved and overwri[t]e\n" "[*] keep undefined\n" "[a]bort\n" " Your choice? " msgstr "" "\n" "Ditt beslut om fildata:\n" "[P] bevara\n" "[O] skriva över\n" "[S] märk som sparad och bevara\n" "[T] märk som sparad och överskriv\n" "[R] tag bort\n" "[*] behÃ¥ll odefinierad\n" "[A] avbryt\n" " Ditt val? 
" #: src/libdar/op_tools.cpp:259 src/dar_suite/line_tools.cpp:58 #: src/dar_suite/dar_suite.cpp:279 msgid "YES" msgstr "JA" #: src/libdar/op_tools.cpp:260 src/dar_suite/line_tools.cpp:58 #: src/dar_suite/dar_suite.cpp:281 msgid "NO" msgstr "NEJ" #: src/libdar/op_tools.cpp:272 #, c-format msgid "Entry information:\t\"in place\"\t\"to be added\"" msgstr "Notis information:\t\"i plats\"\t\"att läggas till" #: src/libdar/op_tools.cpp:273 #, c-format msgid "Is inode :\t %S \t\t %S" msgstr "Är inode :\t %S \t\t %S" #: src/libdar/op_tools.cpp:274 #, c-format msgid "Is directory :\t %S \t\t %S" msgstr "Är mappar :\t %S \t\t %S" #: src/libdar/op_tools.cpp:275 #, c-format msgid "Is plain file :\t %S \t\t %S" msgstr "Är enkel fil :\t %S \t\t %S" #: src/libdar/op_tools.cpp:276 #, c-format msgid "Is hard linked :\t %S \t\t %S" msgstr "Är hÃ¥rdlänkad :\t %S \t\t %S" #: src/libdar/op_tools.cpp:277 #, c-format msgid "Entry type :\t %s \t %s" msgstr "Notistyp :\t %s \t %s" #: src/libdar/op_tools.cpp:283 msgid "me" msgstr "jag" #: src/libdar/op_tools.cpp:292 #, fuzzy, c-format msgid "Data more recent :\t %S \t\t %S" msgstr "Senare data :\t %S \t %S" #: src/libdar/op_tools.cpp:302 #, fuzzy, c-format msgid "Data size :\t %i \t\t %i" msgstr "Datastorlek :\t %i \t %i" #: src/libdar/op_tools.cpp:303 #, c-format msgid "Sparse file :\t %S \t\t %S" msgstr "Gles fil :\t %S \t\t %S " #: src/libdar/op_tools.cpp:304 #, c-format msgid "Dirty file :\t %S \t\t %S" msgstr "Smutsig fil :\t %S \t\t %S" #: src/libdar/op_tools.cpp:306 #, c-format msgid "Data full saved :\t %S \t\t %S" msgstr "Data helt sparat :\t %S \t\t %S" #: src/libdar/op_tools.cpp:307 #, c-format msgid "EA full saved :\t %S \t\t %S" msgstr "EA helt sparat :\t %S \t\t %S" #: src/libdar/op_tools.cpp:309 #, c-format msgid "EA more recent :\t %S \t\t %S" msgstr "EA senare :\t %S \t\t %S" #: src/libdar/op_tools.cpp:310 #, fuzzy, c-format msgid "FSA full saved :\t %S \t\t %S" msgstr "EA helt sparat :\t %S \t\t %S" #: 
src/libdar/op_tools.cpp:315 #, fuzzy, c-format msgid "FSA familly :\t %S \t\t %S" msgstr "Gles fil :\t %S \t\t %S " #: src/libdar/op_tools.cpp:324 #, c-format msgid "EA number :\t %i \t\t %i" msgstr "EA tal :\t %i \t\t %i" #: src/libdar/op_tools.cpp:327 #, fuzzy, c-format msgid "EA size :\t %i \t\t %i" msgstr "EA storlek :\t %i \t %i" #: src/libdar/mask.cpp:69 #, c-format msgid "%Sglob expression: %S [%S]" msgstr "" #: src/libdar/mask.cpp:110 #, c-format msgid "%Sregular expression: %S [%S]" msgstr "" #: src/libdar/mask.cpp:160 #, c-format msgid "" "%Snot(\n" "%S\n" "%S)" msgstr "" #: src/libdar/mask.cpp:279 #, c-format msgid "%SIs subdir of: %S [%S]" msgstr "" #: src/libdar/mask.cpp:297 #, c-format msgid "%SPath is: %S [%S]" msgstr "" #: src/libdar/mask.cpp:307 #, c-format msgid "%SPath leads to: %S [%S]" msgstr "" #: src/libdar/mask.cpp:315 msgid "case sensitive" msgstr "" #: src/libdar/mask.cpp:315 msgid "case in-sensitive" msgstr "" #: src/libdar/cat_directory.cpp:142 msgid "" "LAX MODE: Error met building a catalogue entry, skipping this entry and " "continuing. Skipped error is: " msgstr "" "LAX MODE: fel vid skapnde av mapp, hoppar över detta data och fortsätter. 
" "Överhoppat fel var: " #: src/libdar/cat_directory.cpp:185 #, fuzzy msgid "missing data to build a cat_directory" msgstr "data saknas till att bygga en mapp" #: src/libdar/cat_directory.cpp:443 #, c-format msgid "Cannot remove nonexistent entry %S from catalogue" msgstr "Kan ej ta bort obefintlig post %S från katalog" #: src/libdar/scrambler.cpp:34 msgid "Key cannot be an empty string" msgstr "Nyckel kan ej vara en tom sträng" #: src/libdar/path.cpp:65 src/libdar/path.cpp:87 msgid "Empty string is not a valid path" msgstr "En tom sträng är ej en giltig sökväg" #: src/libdar/path.cpp:95 #, c-format msgid "%S is an not a valid path: %S" msgstr "%S är ej en giltig sökväg: %S" #: src/libdar/path.cpp:199 #, fuzzy msgid "Cannot add an absolute path" msgstr "Kan ej lägga till en etikett till en tom stack" #: src/libdar/path.cpp:357 msgid "Empty string as subdirectory does not make a valid path" msgstr "En tom sträng som undermapp utgör inte en giltig sökväg" #: src/libdar/crc.cpp:210 src/libdar/crc.cpp:311 msgid "Invalid size for CRC width" msgstr "Ogiltig storlek för CRC-bredd" #: src/libdar/i_database.cpp:122 msgid "Badly formatted database" msgstr "Dåligt formaterad databas" #: src/libdar/i_database.cpp:171 #, fuzzy msgid "Cannot write down a read-only database" msgstr "Kan ej synka skrivning med en skrivskyddad allmän fil." 
#: src/libdar/i_database.cpp:231 msgid "Empty string is an invalid archive basename" msgstr "Tom sträng är ett ogiltigt arkivbasnamn" #: src/libdar/i_database.cpp:257 src/libdar/i_database.cpp:259 msgid "Incorrect archive range in database" msgstr "Felaktigt arkivområde i databasen" #: src/libdar/i_database.cpp:286 src/libdar/i_database.cpp:305 #: src/libdar/i_database.cpp:408 msgid "Non existent archive in database" msgstr "Icke existerande arkiv i databasen" #: src/libdar/i_database.cpp:325 src/libdar/i_database.cpp:327 msgid "Invalid archive number: " msgstr "Ogiltigt arkivnummer: " #: src/libdar/i_database.cpp:434 msgid "Invalid path, path must be relative" msgstr "Ogiltig sökväg, sökväg måste vara relativ" #: src/libdar/i_database.cpp:440 src/libdar/i_database.cpp:445 #: src/libdar/i_database.cpp:449 msgid "Non existent file in database" msgstr "Icke existerande fil i databas" #: src/libdar/i_database.cpp:513 msgid "Checking chronological ordering of files between the archives..." msgstr "Kontrollerar datumordning hos filer mellan arkiv..." 
#: src/libdar/i_database.cpp:539 msgid "File recorded as removed at this date in database: " msgstr "Fil noterad som borttagen vid detta datum i databasen: " #: src/libdar/i_database.cpp:543 msgid "File not found in database: " msgstr "Fil ej funnen i databas: " #: src/libdar/i_database.cpp:547 #, fuzzy msgid "" "File found in database but impossible to restore (only found \"unchanged\" " "in differential backups, or delta patch without reference to base it on in " "any previous archive of the base): " msgstr "" "Fil hittad i databasen, men omöjlig att Ã¥terställe (bara hittad som " "\"oförändrad\" i differentiella säkerhetskopior)" #: src/libdar/i_database.cpp:566 #, fuzzy msgid "" "Extended Attribute of file found in database but impossible to restore (only " "found \"unchanged\" in differential backups): " msgstr "" "Extended Attribute för fil hittad i databasen, men omöjlig att Ã¥terställe " "(bara hittad som \"oförändrad\" i differentiella säkerhetskopior)" #: src/libdar/i_database.cpp:579 #, c-format msgid "%S did not exist before specified date and cannot be restored" msgstr "%S fanns ej före specificerat datum och kan ej Ã¥terställas" #: src/libdar/i_database.cpp:608 #, fuzzy, c-format msgid "" "Either archives in database are not properly tidied, or file last " "modification date has been artificially set to an more ancient date. This " "may lead improper Extended Attribute restoration for inode %S" msgstr "" "Antingen är arkiven i databasen inte ordentligt vÃ¥rdade eller sÃ¥ har filens " "senaste ändringsdatum satt pÃ¥ konstgjord väg till ett äldre datum. 
Detta kan " "leda till att återställning av Extended Attribute blir felaktig" #: src/libdar/i_database.cpp:627 #, c-format msgid "Cannot restore file %S : non existent file in database" msgstr "Kan ej återställa fil %S : icke existerande fil i databasen" #: src/libdar/i_database.cpp:683 msgid "Error while restoring the following files: " msgstr "Fel vid återställande av följande filer: " #: src/libdar/i_database.cpp:692 msgid "Cannot restore any file, nothing done" msgstr "Kan ej återställa någon fil, inget utfört" #: src/libdar/i_database.cpp:705 src/libdar/i_database.cpp:713 #, fuzzy, c-format msgid "Invalid archive number: %d" msgstr "Ogiltigt arkivnummer: " #: src/libdar/catalogue.cpp:154 msgid "" "LAX MODE: catalogue label does not match archive label, as if it was an " "extracted catalogue, assuming data corruption occurred and fixing the " "catalogue to be considered an a plain internal catalogue" msgstr "" "LAX MODE: katalogetikett matchar inte arkivetikett, som om det vore en " "extraherad katalog, antar dataförstörelse skett och fixar till katalogen som " "en enkel intern katalog" #: src/libdar/catalogue.cpp:210 #, fuzzy msgid "CRC failed for the catalogue" msgstr "LAX MODE: Misslyckades med att läsa katalogen" #: src/libdar/catalogue.cpp:212 msgid "" "LAX MODE: CRC failed for catalogue, the archive contents is corrupted. This " "may even lead dar to see files in the archive that never existed, but this " "will most probably lead to other failures in restoring files. Shall we " "proceed anyway?" msgstr "" "LAX MODE: CRC misslyckades, arkivinnehållet är förstört. Detta kan leda till " "att dar ser filer i arkivet, vilka aldrig funnits men detta kommer sannolikt " "leda till andra fel vid återställande av filer. Skall vi fortsätta ändå?" 
#: src/libdar/catalogue.cpp:262 msgid "root does not have a parent directory" msgstr "rot har inte nÃ¥gon föräldramapp" #: src/libdar/catalogue.cpp:300 msgid "no current directory defined" msgstr "ingen aktuell mapp definierad" #: src/libdar/catalogue.cpp:304 msgid "root directory has no parent directory" msgstr "rotmappen har ingen föräldramapp" #: src/libdar/catalogue.cpp:326 msgid "no current reading directory defined" msgstr "ingen aktuell läsmapp definierad" #: src/libdar/catalogue.cpp:399 src/libdar/catalogue.cpp:413 msgid " is not present in the archive" msgstr " är inte närvarande i arkivet" #: src/libdar/catalogue.cpp:491 msgid "Cannot recurs in a non directory entry" msgstr "Kan ej gÃ¥ rekursivt i en icke-mapp" #: src/libdar/catalogue.cpp:494 msgid "" "The entry to recurs in does not exist, cannot add further entry to that " "absent subdirectory" msgstr "" "Posten at gÃ¥ rekursivt in i saknas, kqan ej lägga till ytterligare poster " "till den obefintliga undermappen." #: src/libdar/catalogue.cpp:559 msgid "root has no parent directory" msgstr "rot har ingen föräldramapp" #: src/libdar/archive_aux.cpp:134 #, c-format msgid "unknown hash algorithm corresponding to char `%c'" msgstr "" #: src/libdar/integers.cpp:36 #, c-format msgid "%s type length is not %d byte(s) but %d" msgstr "%s typlängd är ej %d byte(s) men %d" #: src/libdar/integers.cpp:44 #, c-format msgid "%s type is not a signed type as expected" msgstr "%s type är ej ett tal med tecken som förväntat" #: src/libdar/integers.cpp:46 #, c-format msgid "%s type is not an unsigned type as expected" msgstr "%s typ är ej tal utan tecken som förväntat" #: src/libdar/integers.cpp:101 #, c-format msgid "" "type %s is neither big nor little endian! Do not know how to handle integer " "in a portable manner on this host, aborting" msgstr "" "typ %s är varken big eller little endian! 
FörstÃ¥r ej hantera heltal pÃ¥ ett " "flyttbart sätt pÃ¥ denna värd, avbryter" #: src/libdar/integers.cpp:154 msgid "incoherent endian between U_16 and U_32" msgstr "osammanhängande endian mellan U_16 och U_32" #: src/libdar/integers.cpp:156 msgid "incoherent endian between U_16 and U_64" msgstr "osammanhängande endian mellan U_16 och U_64" #: src/libdar/integers.cpp:158 msgid "incoherent endian between U_16 and U_I" msgstr "osammanhängande endian mellan U_16 och U_I" #: src/libdar/storage.cpp:85 msgid "Not enough data to initialize storage field" msgstr "Inte tillräckligt med data för att initiera lagringsfält" #: src/libdar/storage.cpp:110 msgid "Asking for an element out of array" msgstr "Begäran om ett element utanför en tabell" #: src/libdar/storage.cpp:162 msgid "The iterator is not indexing the object it has been asked to write to" msgstr "" "'Iteratorn' indexerar inte det objekt den har blivit ombedd att skriva till" #: src/libdar/storage.cpp:196 msgid "The iterator is not indexing the object it has been asked to read from" msgstr "" "'Iteratorn' indexerar inte det objekt den har blivit ombedd att läsa frÃ¥n" #: src/libdar/storage.cpp:515 msgid "The iterator is not indexing the object it has been defined for" msgstr "'Iteratorn' indexerar inte det objekt som den har definierats för" #: src/libdar/storage.cpp:767 src/libdar/storage.cpp:832 msgid "Iterator does not point to data" msgstr "'Iteratorn' pekar inte pÃ¥ data" #: src/libdar/storage.cpp:826 msgid "Reference storage of the iterator is empty or non existent" msgstr "Referenslager hos iteratorn är tomt eller finns ej" #: src/libdar/storage.cpp:843 msgid "The iterator position is not inside the storage of reference" msgstr "Iteratorns position är ej innanför referensens lagringsutrymme" #: src/libdar/etage.cpp:118 msgid "Error opening directory in furtive read mode: " msgstr "Fel vid öppning i smygläsningsmode: " #: src/libdar/etage.cpp:122 #, c-format msgid "" "Could not open directory %s in furtive 
read mode (%s), using normal mode" msgstr "Kunde inte öppna mapp %s i smygläsningsmode (%s), använder normal mode" #: src/libdar/etage.cpp:147 msgid "Error opening directory: " msgstr "Fel vid öppnande av mapp: " #: src/libdar/etage.cpp:165 #, c-format msgid "" "Filename provided by the operating system seems truncated in directory %s, " "storing filename as is: %s" msgstr "" #: src/libdar/etage.cpp:197 #, c-format msgid "" "Detected Cache Directory Tagging Standard for %s, the contents of that " "directory will not be saved" msgstr "" "Upptäckt \"Cache Directory Tagging Standard\" för %s, innehÃ¥llet i den " "mappen kommer aj att sparas" #: src/libdar/zapette_protocol.cpp:69 msgid "Partial request received, aborting\n" msgstr "DelförfrÃ¥gan mottagen, avbryter\n" #: src/libdar/zapette_protocol.cpp:142 msgid "Corrupted data read on pipe" msgstr "Felkaktiga data lästa frÃ¥n rör" #: src/libdar/libdar5.cpp:162 msgid "Caught an unknown Egeneric exception: " msgstr "FÃ¥ngade ett okänt 'Egeneric exception:'" #: src/libdar/libdar5.cpp:167 msgid "Caught a none libdar exception" msgstr "FÃ¥ngade att icke-libdaravbrott" #: src/libdar/libdar5.cpp:256 #, fuzzy msgid "Invald nullptr argument given to 'ptr'" msgstr "Ogiltigt NULL-argument givet till 'ptr'" #: src/libdar/libdar5.cpp:303 #, fuzzy msgid "Invalid nullptr pointer given to close_archive" msgstr "Ogiltig NULL-pekare givet till 'close_archive'" #: src/libdar/libdar5.cpp:326 src/libdar/libdar5.cpp:346 #: src/libdar/libdar5.cpp:365 src/libdar/libdar5.cpp:387 #: src/libdar/libdar5.cpp:407 #, fuzzy msgid "Invalid nullptr argument given to 'ptr'" msgstr "Ogiltigt NULL-argument givet till 'ptr'" #: src/libdar/zapette.cpp:65 msgid "Cannot read on input" msgstr "Kan ej läsa pÃ¥ 'input'" #: src/libdar/zapette.cpp:67 msgid "Cannot write on output" msgstr "Kan ej skriva pÃ¥ 'output'" #: src/libdar/zapette.cpp:97 #, fuzzy, c-format msgid "" "Failed driving dar_slave to the end of archive: %S. 
Trying to open the " "archive from the first bytes" msgstr "" "Fel vid öppning av sista delsäkerhetskopia: %S. Försöker öppna arkivet genom " "att använda första delsäkerhetskopia..." #: src/libdar/zapette.cpp:213 msgid "Uncomplete answer received from peer" msgstr "Ofullständigt svar frÃ¥n 'peer'" #: src/libdar/zapette.cpp:297 msgid "Communication problem with peer, retry ?" msgstr "Kommunikationsproblem med 'peer', försöka igen ?" #: src/libdar/zapette.cpp:313 src/libdar/zapette.cpp:327 #: src/libdar/zapette.cpp:347 src/libdar/zapette.cpp:352 msgid "Incoherent answer from peer" msgstr "Osammanhängande svar frÃ¥n 'peer'" #: src/libdar/zapette.cpp:322 msgid "Bad answer from peer, while closing connection" msgstr "Fel svar frÃ¥n 'peer' vid stängning av förbindelse" #: src/libdar/zapette.cpp:332 src/libdar/zapette.cpp:337 #: src/libdar/zapette.cpp:342 msgid "" "Unexpected answer from slave, communication problem or bug may hang the " "operation" msgstr "" "Oväntat svar frÃ¥n slav, kommunikationsproblem eller bug kan hänga Ã¥tgärden" #: src/libdar/zapette.cpp:355 msgid "Corrupted data read from pipe" msgstr "Felaktiga data lästa frÃ¥n rör" #: src/libdar/mask_list.cpp:97 #, c-format msgid "Cannot allocate memory for buffer while reading %S" msgstr "Kan ej tilldela buffertminne under läsning av %S" #: src/libdar/mask_list.cpp:122 msgid "Found '" msgstr "Fann '" #: src/libdar/mask_list.cpp:146 #, c-format msgid "" "line exceeding the maximum of %d characters in listing file %S, aborting. 
" "Concerned line starts with: %s" msgstr "" #: src/libdar/mask_list.cpp:180 msgid "" "Mask_list's prefix must be an absolute path or start with \"\" string " "for archive merging" msgstr "" "Mask_lists prefix mÃ¥ste vara en absolut sökväg eller börja med \"\"-" "sträng vid arkivhopslagning" #: src/libdar/mask_list.cpp:202 #, c-format msgid "" "Error met while reading line\n" "\t%S\n" " from file %S: %S" msgstr "" "Fel pÃ¥träffat vid läsning av rad\n" "\t%S\n" " frÃ¥n fil %S: %S" #: src/libdar/mask_list.cpp:219 #, c-format msgid "Too much line in file %S (integer overflow)" msgstr "För mycket rad i fil %S (heltalsspill)" #: src/libdar/mask_list.cpp:223 #, fuzzy, c-format msgid "Error met while opening %S: " msgstr "Fel vid öppning av %s : " #: src/libdar/filesystem_hard_link_read.cpp:158 #, c-format msgid "Error reading inode of file %s : %s" msgstr "Fel vid läsning av inod för fil %s : %s" #: src/libdar/filesystem_hard_link_read.cpp:163 #, fuzzy, c-format msgid "Failed reading inode information for %s: " msgstr "Fel vid inhämtning av innodinformation för %s : %s" #: src/libdar/filesystem_hard_link_read.cpp:171 msgid "Cannot read inode for " msgstr "Kan ej läsa inod för " #: src/libdar/filesystem_hard_link_read.cpp:182 #: src/libdar/filesystem_hard_link_read.cpp:211 msgid "atime, data access time" msgstr "" #: src/libdar/filesystem_hard_link_read.cpp:188 #: src/libdar/filesystem_hard_link_read.cpp:217 #, fuzzy msgid "mtime, data modification time" msgstr "Kan ej sätta senaste tillgÃ¥ngtid och senaste modifieringstid: " #: src/libdar/filesystem_hard_link_read.cpp:194 #: src/libdar/filesystem_hard_link_read.cpp:223 msgid "ctime, inode change time" msgstr "" #: src/libdar/filesystem_hard_link_read.cpp:305 msgid "Unknown file type! file name is: " msgstr "Okänd filtyp! 
filnamnet är: " #: src/libdar/filesystem_hard_link_read.cpp:338 msgid "Error reading EA for " msgstr "Fel vid läsning av EA för " #: src/libdar/capabilities.cpp:114 #, fuzzy, c-format msgid "Error met while checking for capability %S: %s" msgstr "" "Fel pÃ¥träffat vid läsning av rad\n" "\t%S\n" " frÃ¥n fil %S: %S" #: src/libdar/capabilities.cpp:144 src/libdar/capabilities.cpp:151 #, fuzzy, c-format msgid "Error met while setting capability %S: %s" msgstr "" "Fel pÃ¥träffat vid läsning av rad\n" "\t%S\n" " frÃ¥n fil %S: %S" #: src/libdar/crypto.cpp:44 #, fuzzy msgid "scrambling (weak encryption)" msgstr "Scrambling eller stark kryptering används : %s\n" #: src/libdar/crypto.cpp:102 msgid "Unknown crypto algorithm" msgstr "Okänd krypteringsalgoritm" #: src/libdar/mycurl_protocol.cpp:44 #, fuzzy, c-format msgid "Unknown protocol: %S" msgstr "Okänt val: " #: src/libdar/filtre.cpp:258 src/libdar/filtre.cpp:638 #: src/libdar/filtre.cpp:1249 src/libdar/filtre.cpp:1430 #: src/libdar/filtre.cpp:2711 #, fuzzy msgid "Inspecting directory " msgstr "Fel vid öppnade av mapp " #: src/libdar/filtre.cpp:294 #, c-format msgid "" "File %S has changed during backup and is probably not saved in a valid state " "(\"dirty file\"), do you want to consider it for restoration anyway?" msgstr "" "Filen %S har ändrats under säkerhetskopieringen och är sannolikt inte sparad " "i ett giltigt tillstÃ¥nd (\"smutsig fil\"), vill du ta med den i " "Ã¥terställningen i alla fall?" #: src/libdar/filtre.cpp:304 msgid "" "File had changed during backup and had been copied another time, restoring " "the next copy of file: " msgstr "" "Fil har ändrats under säkerhetskopiering och har kopierats en annan gÃ¥ng, " "Ã¥terställer nästa kopia av filen: " #: src/libdar/filtre.cpp:375 #, c-format msgid "" "The just restored file %S has been marked as dirty (sequential reading can " "only detect the dirty status after restoration), do we remove this just " "restored dirty file?" 
msgstr "" "Den precis Ã¥terställda filen %S har markerats som smutsig (sekvensiell " "läsning kan bara detektera smutsigt status efter Ã¥terställning), skall vi ta " "bort den precis Ã¥terställd filen?" #: src/libdar/filtre.cpp:382 #, c-format msgid "" "The just restored file %S has been marked as dirty (sequential reading can " "only detect the dirty status after restoration), removing the just restored " "dirty file as it is asked to ignore this type of file" msgstr "" "Den precis Ã¥terställda filen %S har markerats som smutsig (sekvensiell " "läsning kan bara detektera smutsigt status efter Ã¥terställning), genom " "borttagande av den just Ã¥terställda smutsiga filen som det är frÃ¥gat om att " "ignorera denna filtyp" #: src/libdar/filtre.cpp:384 #, c-format msgid "Removing the dirty file %S" msgstr "Tar bort den smutsiga filen %S" #: src/libdar/filtre.cpp:457 msgid " not restored (user choice)" msgstr " ej Ã¥terställd (användares val)" #: src/libdar/filtre.cpp:461 msgid "No file in this directory will be restored." msgstr "Ingen fil i denna mapp kommer att Ã¥terställas." #: src/libdar/filtre.cpp:479 msgid "Error while restoring " msgstr "Fel vid Ã¥terställning " #: src/libdar/filtre.cpp:484 msgid "Warning! No file in that directory will be restored: " msgstr "Varning! Ingen fil i den mappen kommer att Ã¥terställas: " #: src/libdar/filtre.cpp:679 msgid "Recording hard link into the archive: " msgstr "Sparar hÃ¥rd länk till arkiv: " #: src/libdar/filtre.cpp:746 #, fuzzy, c-format msgid "" "SECURITY WARNING! SUSPICIOUS FILE %S: ctime changed since archive of " "reference was done, while no other inode information changed" msgstr "" "SÄKERHETSVARNING %S: ctime har ändrats sedan arkivering av referenser " "gjorts, medan ingen inod eller data har ändrats" #: src/libdar/filtre.cpp:1097 msgid "Error while saving " msgstr "Fel vid sparande " #: src/libdar/filtre.cpp:1112 msgid "NO FILE IN THAT DIRECTORY CAN BE SAVED." msgstr "INGEN FIL I DEN MAPPEN KAN SPARAS." 
#: src/libdar/filtre.cpp:1117 msgid "Cannot write down the archive: " msgstr "Kan ej skriva till arkivet:" #: src/libdar/filtre.cpp:1134 #, fuzzy msgid ", compression ratio " msgstr "lzo-komprimering" #: src/libdar/filtre.cpp:1140 #, c-format msgid "Finished Inspecting directory %S , saved %S%S" msgstr "" #: src/libdar/filtre.cpp:1241 msgid "SKIPPED (hard link in sequential read mode): " msgstr "ÖVERHOPPAD (hÃ¥rd länk i sekvensiell läsmode): " #: src/libdar/filtre.cpp:1277 msgid "OK " msgstr "OK " #: src/libdar/filtre.cpp:1288 src/libdar/filtre.cpp:1320 msgid "DIFF " msgstr "DIFF " #: src/libdar/filtre.cpp:1320 msgid ": file not present in filesystem" msgstr ": fil finns inte filsystem" #: src/libdar/filtre.cpp:1372 msgid "ERR " msgstr "ERR " #: src/libdar/filtre.cpp:1455 #, fuzzy msgid "Data" msgstr "[Data]" #: src/libdar/filtre.cpp:1464 msgid "Can't read saved data." msgstr "Kan ej läsas sparade data." #: src/libdar/filtre.cpp:1510 msgid "CRC error: data corruption." msgstr "CRC-fel: data förstört." #: src/libdar/filtre.cpp:1611 msgid "OK " msgstr "OK " #: src/libdar/filtre.cpp:1646 msgid "ERR " msgstr "ERR " #: src/libdar/filtre.cpp:1768 msgid "" "Decremental mode is useless when merging is not applied to both an archive " "of reference and an auxiliary archive of reference. Ignore decremental mode " "and continue?" msgstr "" "Mod med stegvis minskning är oanvändbar när hopslagning inte är tillämpbar " "pÃ¥ bÃ¥de arkivreferens och en extra arkivreferens. Ignorera denna mod och " "fortsätta?" #: src/libdar/filtre.cpp:1902 msgid "first" msgstr "första" #: src/libdar/filtre.cpp:1905 msgid "second" msgstr "andra" #: src/libdar/filtre.cpp:1908 msgid "next" msgstr "nästa" #: src/libdar/filtre.cpp:1911 #, fuzzy, c-format msgid "Merging/filtering files from the %s archive..." 
msgstr "SlÃ¥r ihop/filtrerar filer frÃ¥n %s-arkiv" #: src/libdar/filtre.cpp:1993 msgid "overwritten" msgstr "överskriven" #: src/libdar/filtre.cpp:1997 src/libdar/filtre.cpp:2041 msgid "dropped from the archive and marked as already saved" msgstr "släppt frÃ¥n arkivet och markerad som redan sparad" #: src/libdar/filtre.cpp:2000 msgid "removed" msgstr "borttagen" #: src/libdar/filtre.cpp:2005 #, c-format msgid "Data of file %S is about to be %S, proceed?" msgstr "Data hos filen %S skall till att bli %S, fortsätta?" #: src/libdar/filtre.cpp:2034 msgid "replaced" msgstr "ersatt" #: src/libdar/filtre.cpp:2037 msgid "removed from the archive" msgstr "borttagen frÃ¥n arkivet" #: src/libdar/filtre.cpp:2044 msgid "merged with possible overwriting" msgstr "ihopslagning med möjlig överskrivning" #: src/libdar/filtre.cpp:2049 #, fuzzy, c-format msgid "EA and FSA of file %S are about to be %S, proceed?" msgstr "EA för fil %S skall till att bli %S, fortsätta?" #: src/libdar/filtre.cpp:2115 #, fuzzy, c-format msgid "" "EA and FSA of file %S from first archive have been updated with those of " "same named file of the auxiliary archive" msgstr "" "EA för fil %S frÃ¥n första arkivet har uppdaterats med dessa av samma filnamn " "frÃ¥n det extra arkivet" #: src/libdar/filtre.cpp:2125 #, c-format msgid "" "EA of file %S from first archive have been dropped and marked as already " "saved" msgstr "" "EA för fil %S frÃ¥n första arkivet har släppts och markerats som redan sparade" #: src/libdar/filtre.cpp:2131 #, fuzzy, c-format msgid "" "FSA of file %S from first archive have been dropped and marked as already " "saved" msgstr "" "EA för fil %S frÃ¥n första arkivet har släppts och markerats som redan sparade" #: src/libdar/filtre.cpp:2141 src/libdar/filtre.cpp:2279 #, c-format msgid "EA of file %S from first archive have been removed" msgstr "EA för fil %S frÃ¥n första arkivet har tagits bort" #: src/libdar/filtre.cpp:2149 #, fuzzy, c-format msgid "FSA of file %S from first archive have 
been removed" msgstr "EA för fil %S frÃ¥n första arkivet har tagits bort" #: src/libdar/filtre.cpp:2163 #, c-format msgid "Data of file %S from first archive has been preserved from overwriting" msgstr "Data frÃ¥n fil %S frÃ¥n första arkivet har bevarats frÃ¥n överskrivning" #: src/libdar/filtre.cpp:2214 #, c-format msgid "" "Data of file %S taken from the first archive of reference has been removed" msgstr "Data frÃ¥n fil %S taget frÃ¥n det första referensarkivet har tagits bort" #: src/libdar/filtre.cpp:2217 #, c-format msgid "" "Data of file %S taken from the first archive of reference has been " "overwritten" msgstr "" "Data frÃ¥n fil %S taget frÃ¥n det första referensarkivet har skrivits över" #: src/libdar/filtre.cpp:2252 #, c-format msgid "EA of file %S has been overwritten" msgstr "EA för fil %S har skrivits över" #: src/libdar/filtre.cpp:2256 src/libdar/filtre.cpp:2272 #, c-format msgid "EA of file %S has been overwritten and marked as already saved" msgstr "EA för fil %S har skrivits över och markerats som redan sparat" #: src/libdar/filtre.cpp:2262 src/libdar/filtre.cpp:2267 #, c-format msgid "" "EA of file %S from first archive have been updated with those of the same " "named file of the auxiliary archive" msgstr "" "EA för fil %S frÃ¥n första arkivet har uppdaterats med dessa med samma " "filnamn frÃ¥n det extra arkivet" #: src/libdar/filtre.cpp:2551 msgid " not merged (user choice)" msgstr " ej ihopslagen (användares val)" #: src/libdar/filtre.cpp:2555 msgid "No file in this directory will be considered for merging." msgstr "Ingen fil i denna mapp kommer att tas med vid ihopslagning." #: src/libdar/filtre.cpp:2575 msgid "Error while considering file " msgstr "Fel vid bestämning av fil: " #: src/libdar/filtre.cpp:2579 msgid "Warning! No file in this directory will be considered for merging: " msgstr "Varning! Ingen fil i den mappen kommer att slÃ¥s ihop: " #: src/libdar/filtre.cpp:2616 msgid "" "File selection has been aborted. 
Now building the resulting archive with the " "already selected files" msgstr "" "Filval har avbrutits. Skapar nu det resulterande arkivet med de redan valda " "filerna" #: src/libdar/filtre.cpp:2795 #, c-format msgid "" "Need to activate sparse file detection in order to calculate delta signature " "for sparse file %S" msgstr "" #: src/libdar/filtre.cpp:2882 msgid "Adding Hard link to archive: " msgstr "Lägger till hÃ¥rd länk till arkiv: " #: src/libdar/filtre.cpp:2898 #, fuzzy, c-format msgid "error met while creating archive: %S" msgstr "Fel vid arkivtest: " #: src/libdar/filtre.cpp:2966 #, fuzzy msgid "failed reading CRC from file: " msgstr "Fel vid läsning frÃ¥n fil: " #: src/libdar/filtre.cpp:2994 #, fuzzy msgid "Failed reading CRC for EA and FSA: " msgstr "Fel vid läsning frÃ¥n fil: " #: src/libdar/filtre.cpp:3005 #, fuzzy msgid "Error met while reading next entry: " msgstr "Fel uppstÃ¥tt vid bearbetande av Ã¥tgärd: " #: src/libdar/filtre.cpp:3116 #, fuzzy, c-format msgid "Adding only inode metadata to archive: %S" msgstr "Lägger till fil till arkiv: " #: src/libdar/filtre.cpp:3128 msgid "Resaving file without compression: " msgstr "" #: src/libdar/filtre.cpp:3132 #, fuzzy msgid "Delta saving file to archive: " msgstr "Lägger till fil till arkiv: " #: src/libdar/filtre.cpp:3136 #, fuzzy, c-format msgid "Adding %S to archive: %S" msgstr "Lägger till fil till arkiv: " #: src/libdar/filtre.cpp:3176 src/libdar/filtre.cpp:3188 #, c-format msgid "building delta signature with block size of %d bytes" msgstr "" #: src/libdar/filtre.cpp:3280 #, c-format msgid "" "Failed reading data CRC for %S, file may be damaged and will be marked dirty" msgstr "" #: src/libdar/filtre.cpp:3300 #, c-format msgid "" "Failed setting storage size to zero for this file with missing data CRC, CRC " "error will be reported for that file while reading the repaired archive" msgstr "" #: src/libdar/filtre.cpp:3337 msgid "Copied data does not match CRC" msgstr "Kopierade data matchar inte 
CRC" #: src/libdar/filtre.cpp:3484 msgid " : Failed resaving uncompressed the inode data" msgstr "" #: src/libdar/filtre.cpp:3494 msgid "" " : Resaving uncompressed the inode data to gain space is not possible, " "keeping data compressed" msgstr "" #: src/libdar/filtre.cpp:3518 #, c-format msgid "" "File has disappeared while we were reading it, cannot check whether it has " "changed during its backup: %S" msgstr "" "Fil försvann medan den lästes, kan ej kontrollera huruvida den har ändrats " "under dess säkerhetskopiering: %S" #: src/libdar/filtre.cpp:3553 #, c-format msgid "" "WARNING! File modified while reading it for backup. Performing retry %i of %i" msgstr "" "VARNING! Fil ändrad under läsning för säkerhetskopiering. Genomför nytt " "försök %i av %i" #: src/libdar/filtre.cpp:3570 msgid "" "WARNING! File modified while reading it for backup. No more retry for that " "file to not exceed the wasted byte limit. File is " msgstr "" "VARNING! Fil ändrad vid läsning för säkerhetskopiering. Inga fler försök för " "den filen för att inte överskrida bortkastad byt-gräns. Fil är " #: src/libdar/filtre.cpp:3577 msgid "" "WARNING! File modified while reading it for backup, but no more retry " "allowed: " msgstr "" "VARNING! 
Fil ändrad under läsning för säkerhetskopiering, men inga fler " "försök tillÃ¥tna: " #: src/libdar/filtre.cpp:3599 msgid "Dumping delta signature structure for saved file: " msgstr "" #: src/libdar/filtre.cpp:3755 msgid "Saving Extended Attributes for " msgstr "Sparar Extended Attribut för " #: src/libdar/filtre.cpp:3790 #, c-format msgid "" "Computed EA CRC for file %S differs from what was stored in the archive, " "this file's EA may have been corrupted" msgstr "" #: src/libdar/filtre.cpp:3837 msgid "Error saving Extended Attributes for " msgstr "Fel vid sparande av Extended Attribut för " #: src/libdar/filtre.cpp:3841 msgid "" "be advised that a CRC error will be reported for the EA of that file while " "sequentially reading the repaired archive" msgstr "" #: src/libdar/filtre.cpp:3876 #, fuzzy msgid "Saving Filesystem Specific Attributes for " msgstr "Sparar Extended Attribut för " #: src/libdar/filtre.cpp:3906 #, c-format msgid "" "Computed FSA CRC for file %S differs from what was stored in the archive, " "this file's EA may have been corrupted" msgstr "" #: src/libdar/filtre.cpp:3951 #, fuzzy msgid "Error saving Filesystem Specific Attributes for " msgstr "Fel vid sparande av Extended Attribut för " #: src/libdar/filtre.cpp:3955 msgid "" "be advised that a CRC error will be reported for the FSA of that file while " "sequentially reading the repaired archive" msgstr "" #: src/libdar/filtre.cpp:4513 #, fuzzy msgid "Copying delta signature structure from the archive of reference: " msgstr "Beaktar det (första) referensarkivet:" #: src/libdar/filtre.cpp:4552 msgid "Calculating delta signature from filesystem: " msgstr "" #: src/libdar/filtre.cpp:4632 #, c-format msgid "" "Furtive read mode requires either root permission and FOWNER capability, " "falling back to normal filesystem read" msgstr "" #: src/libdar/escape.cpp:138 msgid "" "Adding an explicit escape sequence of type seqt_not_a_sequence is forbidden" msgstr "" "Lägga till en tydlig escape-sekvens av 
typen 'seqt_not_a_sequence' är " "förbjudet" #: src/libdar/escape.cpp:898 msgid "Unknown escape sequence type" msgstr "Okänd escape-sekvenstyp" #: src/libdar/sparse_file.cpp:157 msgid "Incoherent structure in data carrying sparse files: unknown mark" msgstr "Icke översenstämmande struktur i data med glesa filer. okänt märke" #: src/libdar/sparse_file.cpp:311 msgid "Cannot skip forward to restore a hole" msgstr "Kan ej hoppa över framÃ¥t för att Ã¥terställa ett hÃ¥l" #: src/libdar/sparse_file.cpp:325 msgid "Data corruption or unknown sparse_file mark found in file's data" msgstr "Förstörda data eller ett okänt märke för gles fil hittat i filens data" #: src/libdar/criterium.cpp:412 msgid "" "Cannot evaluate this crit_and criterium as no criterium has been added to it" msgstr "" "Kan ej utvärdera crit_and criterium dÃ¥ inget kriterium har lagts till det" #: src/libdar/criterium.cpp:481 msgid "" "Cannot evaluate this crit_or criterium as no criterium has been added to it" msgstr "" "Kan ej utvärdera crit_or criterium dÃ¥ inget kriterium har lagts till det" #: src/dar_suite/dar_slave.cpp:150 msgid "Cannot read arguments on command line, aborting" msgstr "Kan ej läsa argument pÃ¥ kommandorad, avbryter" #: src/dar_suite/dar_slave.cpp:160 msgid "Missing argument to -i option" msgstr "Saknar argument till -i alternativ" #: src/dar_suite/dar_slave.cpp:168 msgid "Missing argument to -o option" msgstr "Saknar argument till -o alternativ" #: src/dar_suite/dar_slave.cpp:182 msgid "Missing argument to -E option" msgstr "Saknar argument till -E alternativ" #: src/dar_suite/dar_slave.cpp:192 src/dar_suite/command_line.cpp:1834 #: src/dar_suite/dar_xform.cpp:347 msgid "Missing argument to --min-digits" msgstr "Saknar argument till --min-digits" #: src/dar_suite/dar_slave.cpp:200 src/dar_suite/dar_xform.cpp:355 #, c-format msgid "Missing parameter to option -%c" msgstr "Saknar parameter till alternativ -%c" #: src/dar_suite/dar_slave.cpp:202 src/dar_suite/dar_slave.cpp:204 #: 
src/dar_suite/dar_manager.cpp:514 src/dar_suite/dar_manager.cpp:516 #: src/dar_suite/dar_xform.cpp:357 #, c-format msgid "Ignoring unknown option -%c" msgstr "Ignorerar okänt alternativ -%c" #: src/dar_suite/dar_slave.cpp:210 msgid "Missing archive basename, see -h option for help" msgstr "Saknar arkivbasnamn, se -h alternativ för hjälp" #: src/dar_suite/dar_slave.cpp:216 src/dar_suite/dar_xform.cpp:371 msgid "Too many argument on command line, see -h option for help" msgstr "För många argument på kommandorad, se -h alternativ för hjälp" #: src/dar_suite/dar_slave.cpp:256 src/dar_suite/dar_slave.cpp:261 #: src/dar_suite/dar_manager.cpp:906 src/dar_suite/dar_manager.cpp:930 #: src/dar_suite/dar_manager.cpp:933 src/dar_suite/command_line.cpp:2062 #: src/dar_suite/command_line.cpp:2072 src/dar_suite/command_line.cpp:2075 #: src/dar_suite/command_line.cpp:2113 src/dar_suite/command_line.cpp:2139 #: src/dar_suite/command_line.cpp:2144 src/dar_suite/command_line.cpp:2148 #: src/dar_suite/dar_xform.cpp:418 src/dar_suite/dar_xform.cpp:420 #: src/dar_suite/dar_xform.cpp:422 src/dar_suite/dar_xform.cpp:437 #, c-format msgid "\n" msgstr "\n" #: src/dar_suite/dar_slave.cpp:257 src/dar_suite/command_line.cpp:2076 #: src/dar_suite/dar_xform.cpp:423 #, c-format msgid "Common options:\n" msgstr "Allmänna alternativ:\n" #: src/dar_suite/dar_slave.cpp:258 #, c-format msgid "" " -i pipe to use instead of std input to read orders from dar\n" msgstr "" " -i rör som används i stället för 'std input' till att läsa " "beordringar från dar\n" #: src/dar_suite/dar_slave.cpp:259 #, c-format msgid "" " -o pipe to use instead of std output to write data to dar\n" msgstr "" " -o rör som används i stället för 'std output' till att " "skriva data till dar\n" #: src/dar_suite/dar_slave.cpp:260 #, c-format msgid "" " -E \t command line to execute between slices of the archive\n" msgstr "" " -E \t kommandorad, som skall utföras mellan delar i ett arkiv\n" #: src/dar_suite/dar_slave.cpp:262 
src/dar_suite/dar_manager.cpp:934 #: src/dar_suite/dar_xform.cpp:438 #, fuzzy, c-format msgid "See man page for more options.\n" msgstr "Se 'man page' för mer detaljer.\n" #: src/dar_suite/dar_slave.cpp:275 src/dar_suite/dar_manager.cpp:948 #: src/dar_suite/dar_xform.cpp:452 #, c-format msgid " Using libdar %u.%u.%u built with compilation time options:\n" msgstr "" " Använder libdar %u.%u.%u byggd med alternativ vid kompileringstillfället:\n" #: src/dar_suite/dar_slave.cpp:277 src/dar_suite/dar_manager.cpp:950 #: src/dar_suite/dar_xform.cpp:454 #, c-format msgid " Using libdar %u.%u built with compilation time options:\n" msgstr "" " Använder libdar %u.%u byggd med alternativ vid kompileringstillfället:\n" #: src/dar_suite/dar_slave.cpp:280 src/dar_suite/dar_cp.cpp:145 #: src/dar_suite/dar_manager.cpp:953 src/dar_suite/command_line.cpp:2533 #: src/dar_suite/dar_xform.cpp:457 #, c-format msgid " compiled the %s with %s version %s\n" msgstr " kompilerad den %s med %s version %s\n" #: src/dar_suite/dar_slave.cpp:281 src/dar_suite/dar_cp.cpp:146 #: src/dar_suite/dar_manager.cpp:954 src/dar_suite/command_line.cpp:2534 #: src/dar_suite/dar_xform.cpp:458 #, c-format msgid " %s is part of the Disk ARchive suite (Release %s)\n" msgstr " %s är en del av Disk ARchieve sviten (UtgÃ¥va %s)\n" #: src/dar_suite/dar_slave.cpp:282 #, c-format msgid " %s comes with ABSOLUTELY NO WARRANTY;" msgstr " %s kommer med ABSOLUT INGA GARANTIER;" #: src/dar_suite/dar_slave.cpp:283 msgid "" " for details\n" " type `dar -W'." msgstr "" " för detaljer\n" " skriv 'dar -W'." 
#: src/dar_suite/dar_slave.cpp:284 src/dar_suite/dar_manager.cpp:956 #: src/dar_suite/command_line.cpp:2536 src/dar_suite/dar_xform.cpp:460 msgid "" " This is free software, and you are welcome\n" " to redistribute it under certain conditions;" msgstr "" " Detta är fri programvara, och du är välkommen\n" " att Ã¥terdistribuera den under vissa villkor;" #: src/dar_suite/dar_slave.cpp:285 msgid "" " type `dar -L | more'\n" " for details.\n" "\n" msgstr "" " skriv 'dar -L | more'\n" " för detaljer.\n" "\n" #: src/dar_suite/line_tools.hpp:387 #, fuzzy, c-format msgid "Parse error: Unmatched `%c'" msgstr "Analysfel: " #: src/dar_suite/dar_cp.cpp:133 #, c-format msgid "usage : %s \n" msgstr "användning ; %s \n" #: src/dar_suite/dar_cp.cpp:147 #, c-format msgid " %s comes with ABSOLUTELY NO WARRANTY; for details type `dar -W'." msgstr " %s medför ABSOLUT INGA GARANTIER; för detaljer skriv 'dar -W'." #: src/dar_suite/dar_cp.cpp:148 msgid " This is free software, and you are welcome to redistribute it under" msgstr "" " Detta är fri programvara, och du är välkommen att Ã¥terdistribuera den under" #: src/dar_suite/dar_cp.cpp:149 msgid "" " certain conditions; type `dar -L | more' for details.\n" "\n" msgstr " vissa villkor; skriv 'dar -L | more' för detaljer.\n" #: src/dar_suite/dar_cp.cpp:153 msgid "Unexpected exception from libdar" msgstr "Oväntat avbrott frÃ¥n libdar" #: src/dar_suite/dar_cp.cpp:170 #, c-format msgid "Memory allocation failed : %s" msgstr "Minnestilldelning misslyckades : %s" #: src/dar_suite/dar_cp.cpp:186 #, c-format msgid "Cannot open source file : %s" msgstr "Kan ej öppna källfil : %s" #: src/dar_suite/dar_cp.cpp:197 #, c-format msgid "Cannot open destination file : %s" msgstr "Kan ej öppna mÃ¥lfil : %s" #: src/dar_suite/dar_cp.cpp:222 #, fuzzy, c-format msgid "Starting the copy of %u byte(s)" msgstr "Börjar kopiering av %d byte" #: src/dar_suite/dar_cp.cpp:232 #, c-format msgid "" "Error reading source file (we are at %.2f %% of data copied), trying 
to read " "further: %s\n" msgstr "" "Fel vid läsning av källfil (vi är vid %.2f %% av kopierade data), försöker " "att läsa vidare: %s\n" #: src/dar_suite/dar_cp.cpp:236 #, c-format msgid "" "Skipping done (missing %.0f byte(s)), found correct data to read, continuing " "the copy...\n" msgstr "" "Överhoppning gjord (missade %.0f byte), fann riktiga data för läsning, " "fortsätter kopieringen...\n" #: src/dar_suite/dar_cp.cpp:242 msgid "" "Reached End of File, no correct data could be found after the last error\n" msgstr "" "Nått filslut, inga korrekta data kunde återfinnas efter senaste felet\n" #: src/dar_suite/dar_cp.cpp:250 #, c-format msgid "Copy finished. Missing %.0f byte(s) of data\n" msgstr "Kopiering avslutad. Saknar %.0f byte data\n" #: src/dar_suite/dar_cp.cpp:251 #, c-format msgid "Which is %.2f %% of the total amount of data\n" msgstr "Vilket är %.2f %% av den totala mängden data\n" #: src/dar_suite/dar_cp.cpp:260 msgid "Cannot seek back one char" msgstr "Kan ej leta bakåt ett tecken" #: src/dar_suite/dar_cp.cpp:271 src/dar_suite/dar_cp.cpp:356 msgid "Cannot write to destination, aborting" msgstr "Kan ej skriva till mål, avbryter" #: src/dar_suite/dar_cp.cpp:315 msgid "Cannot seek in file" msgstr "Kan ej leta i fil" #: src/dar_suite/dar_cp.cpp:359 #, c-format msgid "Non fatal error while writing to destination file, retrying\n" msgstr "Icke fatalt fel vid skrivning till målfil, gör om\n" #: src/dar_suite/dar_manager.cpp:244 msgid "Decompressing and loading database header to memory..." msgstr "Dekomprimerar och laddar databashuvud in i minnet..." #: src/dar_suite/dar_manager.cpp:246 msgid "Decompressing and loading database to memory..." msgstr "Dekomprimerar och laddar databasen in i minnet..." 
#: src/dar_suite/dar_manager.cpp:258 msgid "Error met while processing operation: " msgstr "Fel uppstÃ¥tt vid bearbetande av Ã¥tgärd: " #: src/dar_suite/dar_manager.cpp:331 msgid "-B option cannot be given inside a batch file" msgstr "-B alternativ kan ej ges inuti en batch-fil" #: src/dar_suite/dar_manager.cpp:469 msgid "Running batch file from a batch file is not allowed" msgstr "Att köra en batch-fil frÃ¥n en batch-fil är ej tillÃ¥tet" #: src/dar_suite/dar_manager.cpp:495 #, fuzzy, c-format msgid "invalid number given to -9 option: %s" msgstr "Ogiltigt tal givet till -; alternativ: %s" #: src/dar_suite/dar_manager.cpp:541 msgid "-e option is only available when using -r option, aborting" msgstr "-e alternativ är bara tillgängligt när -r alternativ används, avbryter" #: src/dar_suite/dar_manager.cpp:552 #, fuzzy msgid "-w option is only valid with -r option, ignoring it" msgstr "-; alternativ är användbart bara med -A alternativ, ignoreras" #: src/dar_suite/dar_manager.cpp:559 #, fuzzy msgid "-9 option is only valid with -A option, ignoring it" msgstr "-; alternativ är användbart bara med -A alternativ, ignoreras" #: src/dar_suite/dar_manager.cpp:570 msgid "No action specified, aborting" msgstr "Ingen Ã¥tgärd specificerad, avbryter" #: src/dar_suite/dar_manager.cpp:582 src/dar_suite/dar_manager.cpp:586 msgid "Ignoring extra arguments on command line" msgstr "Ignorerer extra argument pÃ¥ kommandorad" #: src/dar_suite/dar_manager.cpp:592 src/dar_suite/dar_manager.cpp:609 msgid "Missing argument to command line, aborting" msgstr "Saknar argument pÃ¥ kommandorad, avbryter" #: src/dar_suite/dar_manager.cpp:601 msgid "Arguments to -r must be relative path (never begin by '/')" msgstr "" "Argument till -r mÃ¥ste vara en relativ sökväg (fÃ¥r aldrig börja med '/')" #: src/dar_suite/dar_manager.cpp:623 msgid "No database specified, aborting" msgstr "Ingen databas specificerad, avbryter" #: src/dar_suite/dar_manager.cpp:629 msgid "Parse error on command line (or included 
files): " msgstr "Analysfel i kommandoraden (eller i inkluderade filer): " #: src/dar_suite/dar_manager.cpp:643 msgid "Creating file..." msgstr "Skapar fil..." #: src/dar_suite/dar_manager.cpp:644 msgid "Formatting file as an empty database..." msgstr "Formaterar fil som en tom databas..." #: src/dar_suite/dar_manager.cpp:648 msgid "Database has been successfully created empty." msgstr "Databasen har med lyckat resultat skapats tom." #: src/dar_suite/dar_manager.cpp:663 src/dar_suite/dar_manager.cpp:1136 msgid "Reading catalogue of the archive to add..." msgstr "Läsning av arkivkatalogen för att lägga till..." #: src/dar_suite/dar_manager.cpp:677 src/dar_suite/dar_manager.cpp:1145 msgid "Updating database with catalogue..." msgstr "Uppdaterar databas med katalog..." #: src/dar_suite/dar_manager.cpp:684 src/dar_suite/dar_manager.cpp:877 #: src/dar_suite/dar_manager.cpp:1148 src/dar_suite/dar_manager.cpp:1178 #: src/dar_suite/dar_manager.cpp:1272 #, fuzzy msgid "Checking date ordering of files between archives..." msgstr "Kontrollerar datumordning hos filer mellan arkiv..." #: src/dar_suite/dar_manager.cpp:697 src/dar_suite/dar_manager.cpp:881 #: src/dar_suite/dar_manager.cpp:1274 msgid "" "Some files do not follow chronological order when archive index increases " "withing the database, this can lead dar_manager to restored a wrong version " "of these files" msgstr "" "Vissa filer följer inte kronologisk ordning när arkivindex ökas inom " "databasen. Detta kan leda till att dar_manager Ã¥terhämtar fel version av " "dessa filer." #: src/dar_suite/dar_manager.cpp:724 src/dar_suite/dar_manager.cpp:1167 #, fuzzy msgid "Removing information from the database..." msgstr "Tar bort information frÃ¥n arkivet..." #: src/dar_suite/dar_manager.cpp:744 src/dar_suite/dar_manager.cpp:764 #: src/dar_suite/dar_manager.cpp:778 src/dar_suite/dar_manager.cpp:792 msgid "Changing database header information..." msgstr "Ändrar information i databashuvudet..." 
#: src/dar_suite/dar_manager.cpp:809 msgid "" "Looking in archives for requested files, classifying files archive by " "archive..." msgstr "" "Letar i arkiven för begärda filer, klassificerar filer arkiv för arkiv..." #: src/dar_suite/dar_manager.cpp:856 src/dar_suite/dar_manager.cpp:1197 msgid "Computing statistics..." msgstr "Beräknar statistik..." #: src/dar_suite/dar_manager.cpp:866 msgid "" "Negative number or zero not allowed when moving an archive inside a database" msgstr "" "Negativt tal eller noll är ej tillÃ¥tet vid flyttning av ett arkiv inom en " "databas" #: src/dar_suite/dar_manager.cpp:873 msgid "Changing database information..." msgstr "Ändrar databasinformation..." #: src/dar_suite/dar_manager.cpp:907 #, c-format msgid "Commands:\n" msgstr "Kommandon:\n" #: src/dar_suite/dar_manager.cpp:908 #, c-format msgid " -C creates an empty database\n" msgstr " -C skapar en tom databas\n" #: src/dar_suite/dar_manager.cpp:909 #, c-format msgid " -B specify the database to use (read or modify)\n" msgstr "" " -B specificerar använd databas (läsning eller modifiering)\n" #: src/dar_suite/dar_manager.cpp:910 #, c-format msgid " -A add an archive to the database\n" msgstr " -A lägg till ett arkiv till databasen\n" #: src/dar_suite/dar_manager.cpp:911 #, c-format msgid "" " -l\t\t gives information about the archive compiled in the database\n" msgstr " -l\t\t lämna information om det arkiv 'kompilerat' i databasen\n" #: src/dar_suite/dar_manager.cpp:912 #, c-format msgid " -D delete an archive from the database\n" msgstr " -D tag bort ett arkiv frÃ¥n databasen\n" #: src/dar_suite/dar_manager.cpp:913 #, c-format msgid "" " -b \t change the basename to use for the give archive number\n" msgstr "" " -b \t byt basnamnet att användas för det givna arkivnummret\n" #: src/dar_suite/dar_manager.cpp:914 #, c-format msgid "" " -p \t change the path to use for the given archive number\n" msgstr " -p \t byt sökvägen att användas för det givna arkivnummret\n" #: 
src/dar_suite/dar_manager.cpp:915 #, c-format msgid " -o specify a list of option to always pass to dar\n" msgstr "" " -o specificera en lista med alternativ som alltid skall " "lämnas till dar\n" #: src/dar_suite/dar_manager.cpp:916 #, c-format msgid " -d specify the path to dar\n" msgstr " -d specificera sökvägen till dar\n" #: src/dar_suite/dar_manager.cpp:917 #, c-format msgid " -r \t restores the given files\n" msgstr " -r \t Ã¥terställer de givna filerna\n" #: src/dar_suite/dar_manager.cpp:918 #, c-format msgid "" " -w \t only with -r, restores in state just before the given date\n" msgstr "" " -w \t endast med -r, Ã¥terställer status alldeles före det givna " "datumet\n" #: src/dar_suite/dar_manager.cpp:919 #, c-format msgid "" " \t date format: [[[year/]month]/day-]hour:minute[:second]\n" msgstr "" #: src/dar_suite/dar_manager.cpp:920 #, c-format msgid "" " -u \t list the most recent files contained in the given " "archive\n" msgstr "" " -u \t lista det senaste filerna innehÃ¥llna i det givna arkivet\n" #: src/dar_suite/dar_manager.cpp:921 #, c-format msgid " -f \t list the archives where the given file is present\n" msgstr " -f \t lista de arkiv där den givna filen finns\n" #: src/dar_suite/dar_manager.cpp:922 #, c-format msgid " -s\t\t shows the number of most recent file by archive\n" msgstr " -s\t\t visa antalet senaste filer mot arkiv\n" #: src/dar_suite/dar_manager.cpp:923 #, c-format msgid " -m \t move an archive within a given database.\n" msgstr " -m \t flytta ett arkiv inom en given databas.\n" #: src/dar_suite/dar_manager.cpp:924 #, c-format msgid " -i\t\t user interactive mode\n" msgstr " -i\t\t interaktiv användarmod\n" #: src/dar_suite/dar_manager.cpp:925 #, c-format msgid " -c\t\t check database for dates order\n" msgstr " -c\t\t kontrollera databasen med avseende pÃ¥ datumordning\n" #: src/dar_suite/dar_manager.cpp:926 #, c-format msgid "" " -L execute on a given database a batch of action as defined " "by\n" msgstr "" " -L utför pÃ¥ en 
given databas en serie Ã¥tgärder definierad " "av\n" #: src/dar_suite/dar_manager.cpp:927 #, c-format msgid "\t\t the provided file.\n" msgstr "\t\t den lämnade filen.\n" #: src/dar_suite/dar_manager.cpp:928 src/dar_suite/dar_xform.cpp:424 #, c-format msgid " -h\t\t displays this help information\n" msgstr " -h\t\t visa denna hjälpinformation\n" #: src/dar_suite/dar_manager.cpp:929 #, c-format msgid " -V\t\t displays software version\n" msgstr " -V\t\t visar programvaruversion\n" #: src/dar_suite/dar_manager.cpp:931 #, c-format msgid "Options:\n" msgstr "Alternativ:\n" #: src/dar_suite/dar_manager.cpp:932 #, c-format msgid " -v\t\t display more information about what is going on\n" msgstr " -v\t\t visar mer information om vad som pÃ¥gÃ¥r\n" #: src/dar_suite/dar_manager.cpp:955 src/dar_suite/command_line.cpp:2535 #: src/dar_suite/dar_xform.cpp:459 #, c-format msgid "" " %s comes with ABSOLUTELY NO WARRANTY; for details\n" " type `%s -W'." msgstr "" " %s kommer med ABSOLUT INGEN GARANTI, för detaljer\n" " skriv '%s -W'." 
#: src/dar_suite/dar_manager.cpp:957 src/dar_suite/command_line.cpp:2537 #: src/dar_suite/dar_xform.cpp:461 #, c-format msgid "" " type `%s -L | more'\n" " for details.\n" "\n" msgstr "" " skriv '%s -L | more'\n" " för detaljer.\n" "\n" #: src/dar_suite/dar_manager.cpp:1015 msgid "Corrupted database :" msgstr "Förstörd databas :" #: src/dar_suite/dar_manager.cpp:1067 #, c-format msgid "" "\n" "\n" "\t Dar Manager Database used [%s] : %S\n" msgstr "" "\n" "\n" "\t Dar Manager Database använde [%s] : %S\n" #: src/dar_suite/dar_manager.cpp:1067 msgid "Saved" msgstr "Sparad" #: src/dar_suite/dar_manager.cpp:1067 msgid "Not Saved" msgstr "Ej sparad" #: src/dar_suite/dar_manager.cpp:1069 #, c-format msgid "" "\t Pause each %d line of output\n" "\n" msgstr "" "\t Gör en paus för var %d -te rad i utdata\n" "\n" #: src/dar_suite/dar_manager.cpp:1071 #, c-format msgid "" "\t No pause in output\n" "\n" msgstr "" "\t Ingen paus i utdata\n" "\n" #: src/dar_suite/dar_manager.cpp:1072 #, c-format msgid " l : list database contents \t A : Add an archive\n" msgstr " l : listar databasinnehÃ¥ll \t A : lägg till ett arkiv\n" #: src/dar_suite/dar_manager.cpp:1073 #, c-format msgid " u : list archive contents \t D : Remove an archive\n" msgstr " u : listar databasinnehÃ¥ll \t D : Tar bort ett arkiv\n" #: src/dar_suite/dar_manager.cpp:1074 #, c-format msgid " f : give file localization \t m : modify archive order\n" msgstr " f : ange filplats \t m : modifiera arkiveringsordning\n" #: src/dar_suite/dar_manager.cpp:1075 #, c-format msgid " p : modify path of archives \t b : modify basename of archives\n" msgstr "" " p : modifiera sökväg för arkiven \t b : modifiera basnamn för arkiven\n" #: src/dar_suite/dar_manager.cpp:1076 #, c-format msgid " d : path to dar \t o : options to dar\n" msgstr " d : sökväg till dar \t o : alternativ hos dar\n" #: src/dar_suite/dar_manager.cpp:1077 #, c-format msgid " w : write changes to file \t s : database statistics\n" msgstr " w : skriv ändringar till 
fil \t s : databasstatistik\n" #: src/dar_suite/dar_manager.cpp:1078 #, c-format msgid "" " a : Save as \t n : pause each 'n' line (zero for no pause)\n" msgstr "" " a : Spara som \t n : gör paus var n-te rad (noll ger ingen " "paus)\n" #: src/dar_suite/dar_manager.cpp:1079 #, c-format msgid "" " c : check date order\n" "\n" msgstr "" " c : kontrollera datumordning\n" "\n" #: src/dar_suite/dar_manager.cpp:1080 #, c-format msgid "" " q : quit\n" "\n" msgstr "" " q : avsluta\n" "\n" #: src/dar_suite/dar_manager.cpp:1081 #, c-format msgid " Choice: " msgstr " Val: " #: src/dar_suite/dar_manager.cpp:1098 msgid "Archive number: " msgstr "Arkivnummer: " #: src/dar_suite/dar_manager.cpp:1105 msgid "File to look for: " msgstr "Fil att leta efter: " #: src/dar_suite/dar_manager.cpp:1109 msgid "Archive number to modify: " msgstr "Arkivnummer, som skall ändras: " #: src/dar_suite/dar_manager.cpp:1113 #, c-format msgid "New basename for archive number %d: " msgstr "Nytt basnamn för arkiv nummer %d: " #: src/dar_suite/dar_manager.cpp:1118 msgid "Path to dar (empty string to use the default from PATH variable): " msgstr "Sökväg till dar (vid tom sträng används standard frÃ¥n PATH-variabel): " #: src/dar_suite/dar_manager.cpp:1123 src/dar_suite/dar_manager.cpp:1129 #: src/dar_suite/dar_manager.cpp:1419 msgid "Compressing and writing back database to file..." msgstr "Komprimerar och skriver tillbaka databas till fil..." #: src/dar_suite/dar_manager.cpp:1128 msgid "New database name: " msgstr "Nytt databasnamn: " #: src/dar_suite/dar_manager.cpp:1135 msgid "Archive basename (or extracted catalogue basename) to add: " msgstr "Arkivbasnamn (eller extraherat katalogbasnamn) att lägga till: " #: src/dar_suite/dar_manager.cpp:1162 msgid "Archive number to remove: " msgstr "Arkivnummer, som skall tas bort: " #: src/dar_suite/dar_manager.cpp:1166 #, c-format msgid "Are you sure to remove archive number %d ?" 
msgstr "Är du säker pÃ¥ att du vill ta bort arkiv nummer %d" #: src/dar_suite/dar_manager.cpp:1172 msgid "Archive number to move: " msgstr "Arkivnummer, som skall flyttas: " #: src/dar_suite/dar_manager.cpp:1174 msgid "In which position to insert this archive: " msgstr "PÃ¥ vilkenplats skall detta arkiv läggas in: " #: src/dar_suite/dar_manager.cpp:1183 msgid "Archive number who's path to modify: " msgstr "Arkivnummer, vars sökväg skall ändras: " #: src/dar_suite/dar_manager.cpp:1187 #, c-format msgid "New path to give to archive number %d: " msgstr "Ny sökväg att ge till arkif nummer %d: " #: src/dar_suite/dar_manager.cpp:1201 msgid "How much line to display at once: " msgstr "Hur mycket rad, som skall visas genast: " #: src/dar_suite/dar_manager.cpp:1205 msgid "Checking file's dates ordering..." msgstr "Kontrollerar filers datumordning..." #: src/dar_suite/dar_manager.cpp:1213 src/dar_suite/dar_manager.cpp:1234 msgid "Database not saved, Do you really want to quit ?" msgstr "Databas ej sparad. Vill du verkligen avsluta?" #: src/dar_suite/dar_manager.cpp:1214 src/dar_suite/dar_manager.cpp:1235 #, c-format msgid "Continuing the action under process which is to exit... so we exit!" msgstr "Fortsätter aktuell process,vilket är att avsluta... sÃ¥ vi avslutar!" #: src/dar_suite/dar_manager.cpp:1223 #, c-format msgid "Unknown choice\n" msgstr "Okänt val\n" #: src/dar_suite/dar_manager.cpp:1249 #, c-format msgid "re-enabling all signal handlers and continuing\n" msgstr "Ã¥terställer alla signalhanterare och fortsätter\n" #: src/dar_suite/dar_manager.cpp:1256 #, c-format msgid "Error performing the requested action: %S" msgstr "Fel vid genomförande av den begärda Ã¥tgärden: %S" #: src/dar_suite/dar_manager.cpp:1276 msgid "No problem found" msgstr "Inga problem hittade" #: src/dar_suite/dar_manager.cpp:1310 msgid "Opening and reading the batch file..." msgstr "Öppnar och läser batch-fil..." 
#: src/dar_suite/dar_manager.cpp:1334 #, c-format msgid "" "\n" "\tExecuting batch file line: %S\n" " " msgstr "" "\n" "\tUtför batch-fil rad: %S\n" " " #: src/dar_suite/dar_manager.cpp:1358 #, c-format msgid "Syntax error in batch file: %S" msgstr "Syntaxfel i batch-fil: %S" #: src/dar_suite/dar_manager.cpp:1361 msgid "Syntax error in batch file: -C option not allowed" msgstr "Syntaxfel i batch-fil: -C alternativ ej tillÃ¥tet" #: src/dar_suite/dar_manager.cpp:1364 msgid "Syntax error in batch file: -i option not allowed" msgstr "Syntaxfel i batch-fil: -i alternativ ej tillÃ¥tet" #: src/dar_suite/dar_manager.cpp:1372 msgid "Aborting batch operation: " msgstr "Avbryter batch-Ã¥tgärd: " #: src/dar_suite/dar_manager.cpp:1383 #, c-format msgid "Enter each argument line by line, press return at the end\n" msgstr "Mata in varje argument rad för rad, tryck vagnretur vid slutet\n" #: src/dar_suite/dar_manager.cpp:1384 #, c-format msgid "To terminate enter an empty line\n" msgstr "Avsluta genom att mata in en tom rad\n" #: src/dar_suite/line_tools.cpp:101 src/dar_suite/line_tools.cpp:119 msgid "Index out of range" msgstr "Index utanför giltigt omrÃ¥de" #: src/dar_suite/line_tools.cpp:244 msgid "Syntax error in --retry-on-change argument: " msgstr "Syntaxfel i --retry-on-change argument: " #: src/dar_suite/line_tools.cpp:259 msgid "Unknown TLV record type" msgstr "Okänd TLV-posttyp" #: src/dar_suite/line_tools.cpp:300 src/dar_suite/line_tools.cpp:330 msgid "Unbalanced parenthesis in expression: " msgstr "Obalanserade parenteser i uttryck: " #: src/dar_suite/line_tools.cpp:398 #, c-format msgid "Unknown macro %%%d in user comment" msgstr "Okänt makro %%%d i användarkommentar" #: src/dar_suite/line_tools.cpp:590 #, c-format msgid "Invalid number in string: %S" msgstr "Ogiltigt tal i sträng: %S" #: src/dar_suite/line_tools.cpp:815 msgid "unknown cryptographic algorithm: " msgstr "okänd krypteringsalgoritm" #: src/dar_suite/line_tools.cpp:835 #, c-format msgid "" "| Signature 
Status| Key Status | Finger Print " "| Signature Date |" msgstr "" #: src/dar_suite/line_tools.cpp:1186 msgid "date before 1970 is not allowed" msgstr "datum före 1970 är ej tillÃ¥tet" #: src/dar_suite/line_tools.cpp:1191 msgid "Incorrect month" msgstr "Felaktig mÃ¥nad" #: src/dar_suite/line_tools.cpp:1196 msgid "Incorrect day of month" msgstr "Felaktig dag i mÃ¥nad" #: src/dar_suite/line_tools.cpp:1201 #, fuzzy msgid "Incorrect hour" msgstr "Felaktig timme pÃ¥ dag" #: src/dar_suite/line_tools.cpp:1206 msgid "Incorrect minute" msgstr "Felaktig minut" #: src/dar_suite/line_tools.cpp:1211 msgid "Incorrect second" msgstr "Felaktig sekund" #: src/dar_suite/line_tools.cpp:1215 msgid "Bad formatted date expression" msgstr "DÃ¥ligt formaterad datumuttryck" #: src/dar_suite/line_tools.cpp:1330 msgid "Given date must be in the past" msgstr "Angivet datum mÃ¥ste vara i det förgÃ¥gna" #: src/dar_suite/line_tools.cpp:1345 #, fuzzy, c-format msgid " Libz compression (gzip) : %s" msgstr " Libz-komprimering (gzip) : %s\n" #: src/dar_suite/line_tools.cpp:1346 #, fuzzy, c-format msgid " Libbz2 compression (bzip2) : %s" msgstr " Libbz2-komprimering (bzip2): %s\n" #: src/dar_suite/line_tools.cpp:1347 #, fuzzy, c-format msgid " Liblzo2 compression (lzo) : %s" msgstr " Liblzo2-komprimering (lzo) : %s\n" #: src/dar_suite/line_tools.cpp:1348 #, fuzzy, c-format msgid " Liblzma compression (xz) : %s" msgstr " Libz-komprimering (gzip) : %s\n" #: src/dar_suite/line_tools.cpp:1349 #, fuzzy, c-format msgid " Strong encryption (libgcrypt): %s" msgstr " Stark kryptering : %s\n" #: src/dar_suite/line_tools.cpp:1350 #, fuzzy, c-format msgid " Public key ciphers (gpgme) : %s" msgstr " Libz-komprimering (gzip) : %s\n" #: src/dar_suite/line_tools.cpp:1351 #, fuzzy, c-format msgid " Extended Attributes support : %s" msgstr " Stöd för Extended Attribut : %s\n" #: src/dar_suite/line_tools.cpp:1352 #, fuzzy, c-format msgid " Large files support (> 2GB) : %s" msgstr " Stöd för stora filer(> 2GB): %s\n" #: 
src/dar_suite/line_tools.cpp:1353 #, fuzzy, c-format msgid " ext2fs NODUMP flag support : %s" msgstr " stöd för ext2fs NODUMP-flagga : %s\n" #: src/dar_suite/line_tools.cpp:1355 #, fuzzy, c-format msgid " Integer size used : unlimited" msgstr " Utnyttjad heltalsstorlek : obegränsad\n" #: src/dar_suite/line_tools.cpp:1357 #, fuzzy, c-format msgid " Integer size used : %d bits" msgstr " Utnyttjad heltalsstorlek : %d bitar\n" #: src/dar_suite/line_tools.cpp:1358 #, fuzzy, c-format msgid " Thread safe support : %s" msgstr " Stöd för säker trÃ¥dning : %s\n" #: src/dar_suite/line_tools.cpp:1359 #, fuzzy, c-format msgid " Furtive read mode support : %s" msgstr " Smygläsningsmode stödd : %s\n" #: src/dar_suite/line_tools.cpp:1360 #, fuzzy, c-format msgid " Linux ext2/3/4 FSA support : %s" msgstr " stöd för ext2fs NODUMP-flagga : %s\n" #: src/dar_suite/line_tools.cpp:1361 #, fuzzy, c-format msgid " Mac OS X HFS+ FSA support : %s" msgstr " Stöd för säker trÃ¥dning : %s\n" #: src/dar_suite/line_tools.cpp:1366 msgid "big" msgstr "stor" #: src/dar_suite/line_tools.cpp:1369 msgid "little" msgstr "liten" #: src/dar_suite/line_tools.cpp:1372 msgid "error!" msgstr "fel!" #: src/dar_suite/line_tools.cpp:1377 #, fuzzy, c-format msgid " Detected system/CPU endian : %s" msgstr " Upptäckte system/CPU endian : %s" #: src/dar_suite/line_tools.cpp:1378 #, fuzzy, c-format msgid " Posix fadvise support : %s" msgstr " Stöd för säker trÃ¥dning : %s\n" #: src/dar_suite/line_tools.cpp:1379 #, fuzzy, c-format msgid " Large dir. speed optimi. 
: %s" msgstr " Stöd för stora filer(> 2GB): %s" #: src/dar_suite/line_tools.cpp:1384 #, fuzzy, c-format msgid " Timestamp read accuracy : %S" msgstr " Stöd för säker trÃ¥dning : %s\n" #: src/dar_suite/line_tools.cpp:1389 #, fuzzy, c-format msgid " Timestamp write accuracy : %S" msgstr " Stöd för säker trÃ¥dning : %s\n" #: src/dar_suite/line_tools.cpp:1390 #, c-format msgid " Restores dates of symlinks : %s" msgstr "" #: src/dar_suite/line_tools.cpp:1395 #, c-format msgid " Multiple threads (libthreads): %s %s" msgstr "" #: src/dar_suite/line_tools.cpp:1396 #, fuzzy, c-format msgid " Delta compression support : %s" msgstr " Libz-komprimering (gzip) : %s\n" #: src/dar_suite/line_tools.cpp:1397 #, fuzzy, c-format msgid " Remote repository support : %s" msgstr " Smygläsningsmode stödd : %s\n" #: src/dar_suite/line_tools.cpp:1457 #, c-format msgid "" "Warning, %S seems more to be a slice name than a base name. Do you want to " "replace it by %S ?" msgstr "" "Varning, %S verkar mer vara namn pÃ¥ en del och inte ett basnamn. Vill du " "byta det mot %S ?" #: src/dar_suite/line_tools.cpp:1462 #, c-format msgid "OK, keeping %S as basename" msgstr "OK, behÃ¥ller %S som basnamn" #: src/dar_suite/line_tools.cpp:1610 #, fuzzy msgid "Error while fetching hostname: " msgstr "Fel vid listning av arkivinnehÃ¥ll: " #: src/dar_suite/line_tools.cpp:1677 msgid "unknown name give for delta signature block len function" msgstr "" #: src/dar_suite/line_tools.cpp:1751 src/dar_suite/line_tools.cpp:1759 #, fuzzy, c-format msgid "Error met while retrieving current time: %S" msgstr "Fel uppstÃ¥tt vid bearbetande av Ã¥tgärd: " #: src/dar_suite/dar_suite.cpp:146 #, c-format msgid "" "We have linked with an incompatible version of libdar. Expecting version %d." "%d.x but having linked with version %d.%d.%d" msgstr "" "Vi har länkats med en inkompatibel version av libdar. Förväntad version %d." 
"%d.x men blev länkad med version %d.%d.%d" #: src/dar_suite/dar_suite.cpp:158 src/python/pybind11_libdar.cpp:206 msgid "NOT YET IMPLEMENTED FEATURE has been used: " msgstr "EN INTE IMPLEMENTERAD FUNKTION har använts: " #: src/dar_suite/dar_suite.cpp:159 msgid "Please check documentation or upgrade your software if available" msgstr "" "Kontrollera dokumentationen eller uppgradera din programvara om tillgänglig" #: src/dar_suite/dar_suite.cpp:164 src/python/pybind11_libdar.cpp:210 msgid "SEEMS TO BE A HARDWARE PROBLEM: " msgstr "VERKAR VARA ETT HÃ…RDVARUPROBLEM: " #: src/dar_suite/dar_suite.cpp:165 msgid "Please check your hardware" msgstr "Kontrollera din hÃ¥rdvara" #: src/dar_suite/dar_suite.cpp:170 src/python/pybind11_libdar.cpp:214 msgid "Lack of SECURED memory to achieve the operation, aborting operation" msgstr "Brist pÃ¥ SÄKERT minne för att utföra Ã¥tgärden, avbryter densamma" #: src/dar_suite/dar_suite.cpp:175 src/dar_suite/dar_suite.cpp:180 #: src/python/pybind11_libdar.cpp:218 src/python/pybind11_libdar.cpp:222 msgid "Lack of memory to achieve the operation, aborting operation" msgstr "Minnesbrist för att utföra Ã¥tgärden, avbryter densamma" #: src/dar_suite/dar_suite.cpp:185 src/dar_suite/dar_suite.cpp:230 #: src/python/pybind11_libdar.cpp:226 #, fuzzy msgid "FATAL error, aborting operation: " msgstr "FATALT fel, avbryter Ã¥tgärd" #: src/dar_suite/dar_suite.cpp:190 src/python/pybind11_libdar.cpp:230 msgid "Aborting program. User refused to continue while asking: " msgstr "Avbryter program. Användare vägrade fortsätta vid förfrÃ¥gan: " #: src/dar_suite/dar_suite.cpp:195 src/python/pybind11_libdar.cpp:234 msgid "Program has been aborted for the following reason: " msgstr "Progarmmet har avslutats av följande orsak: " #: src/dar_suite/dar_suite.cpp:205 src/python/pybind11_libdar.cpp:242 msgid "Aborting program. An error occurred concerning user command execution: " msgstr "" "Avbryter program. 
Ett fel inträffade angÃ¥ende utförande av användarkommando: " #: src/dar_suite/dar_suite.cpp:210 src/python/pybind11_libdar.cpp:246 msgid "Aborting program. An error occurred while calling libdar: " msgstr "Avbryter program. Ett fel inträffade vid anrop av libdar: " #: src/dar_suite/dar_suite.cpp:215 src/dar_suite/dar_suite.cpp:220 #: src/python/pybind11_libdar.cpp:250 src/python/pybind11_libdar.cpp:254 msgid "Aborting program. " msgstr "Avbryter program. " #: src/dar_suite/dar_suite.cpp:225 src/python/pybind11_libdar.cpp:258 msgid "" "Aborting program. The requested operation needs a feature that has been " "disabled at compilation time: " msgstr "" "Avbryter program. Den begärda Ã¥tgärden behöver funktioner, som har " "avaktiverats vid kompileringstillfället: " #: src/dar_suite/dar_suite.cpp:235 src/python/pybind11_libdar.cpp:266 #, fuzzy msgid "FATAL error during network communication, aborting operation: " msgstr "FATALT fel, avbryter Ã¥tgärd" #: src/dar_suite/dar_suite.cpp:241 src/dar_suite/dar_suite.cpp:261 msgid "INTERNAL ERROR, PLEASE REPORT THE PREVIOUS OUTPUT TO MAINTAINER" msgstr "INTERNT FEL, RAPPORTERA FÖREGÃ…ENDE UTDATA TILL PROGRAMUNDERHÃ…LLAREN" #: src/dar_suite/dar_suite.cpp:260 msgid "CAUGHT A NON (LIB)DAR EXCEPTION" msgstr "FÃ…NGADE ETT ICKE- (LIB)DAR-AVBROTT" #: src/dar_suite/dar_suite.cpp:268 msgid "" "SANITY CHECK: AT LEAST ONE THREAD_CANCELLATION OBJECT HAS NOT BEEN DESTROYED " "AND REMAINS IN MEMORY WHILE THE PROGRAM REACHED ITS END" msgstr "" "SUNDHETSKONTROLL: Ã…TMINSTONE ETT TRÃ…DAT AVBROTTSOBJEKT HAR INTE FÖRSTÖRTS " "OCH FINNS KVAR I MINNE MEDAN PROGRAMMET NÃ…DDE SITT SLUT" #: src/dar_suite/dar_suite.cpp:284 #, fuzzy, c-format msgid "Long options support : %s\n" msgstr "Stöd för lÃ¥nga alternativ : %s\n" #: src/dar_suite/dar_suite.cpp:300 #, c-format msgid "Received signal: %s" msgstr "Mottagen signal: %s" #: src/dar_suite/dar_suite.cpp:302 #, c-format msgid "Received signal: %d" msgstr "Mottagen signal: %d" #: 
src/dar_suite/dar_suite.cpp:308 msgid "Archive fast termination engaged" msgstr "Snabb avslutning pÃ¥ arkiv inkopplat" #: src/dar_suite/dar_suite.cpp:312 msgid "Archive delayed termination engaged" msgstr "Fördröjd avslutning pÃ¥ arkiv inkopplat" #: src/dar_suite/dar_suite.cpp:316 #, fuzzy msgid "" "Disabling signal handler, the next time this signal is received the program " "will abort immediately" msgstr "" "Kopplar bort signalhanterare, nästa gÃ¥ng denna signal mottages kommer " "programmet att avslutas omedelbart" #: src/dar_suite/dar_suite.cpp:320 msgid "" "Cannot cleanly abort the operation, thread-safe support is missing, will " "thus abruptly stop the program, generated archive may be unusable" msgstr "" "Kan inte avbryta uppdraget rent, 'thread-safe'-stöd saknas, kommer att " "stoppa programmet abrupt, skapat arkiv kan vara oanvändbart" #: src/dar_suite/dar.cpp:267 msgid "Considering the (first) archive of reference:" msgstr "Beaktar det (första) referensarkivet:" #: src/dar_suite/dar.cpp:269 msgid "" "Sequential reading of the archive of reference is not possible when delta " "difference is requested, you need to read the archive of reference in direct " "access mode (default mode)" msgstr "" #: src/dar_suite/dar.cpp:292 src/dar_suite/dar.cpp:343 msgid "" "Using sequential reading mode for archive source is not possible for merging " "operation" msgstr "" "Att använda inställningen sekvensiell läsning för källarkiv är ej möjligt " "vid hopslagningsÃ¥tgärd" #: src/dar_suite/dar.cpp:320 #, fuzzy msgid "Considering the second (alias auxiliary) archive of reference:" msgstr "Beaktar det (andra alias hjälp) referensarkivet:" #: src/dar_suite/dar.cpp:401 src/dar_suite/dar.cpp:504 #: src/dar_suite/dar.cpp:562 src/dar_suite/dar.cpp:642 #: src/dar_suite/dar.cpp:733 #, fuzzy msgid "Archive signature is only possible with gnupg encryption" msgstr "Arkiv pÃ¥ 'std output' är inte kompatibelt med delning (-s alternativ)" #: src/dar_suite/dar.cpp:594 msgid "" "Making 
room in memory (releasing memory used by archive of reference)..." msgstr "" #: src/dar_suite/dar.cpp:613 msgid "Now performing on-fly isolation..." msgstr "Nu genomförs 'i flykten'-isolation..." #: src/dar_suite/dar.cpp:871 msgid "All files asked could not be restored" msgstr "Alla efterfrÃ¥gade filer kunde ej Ã¥terställas" #: src/dar_suite/dar.cpp:954 msgid "Some file comparisons failed" msgstr "Vissa filjämförelser misslyckades" #: src/dar_suite/dar.cpp:1029 msgid "" "Some files are corrupted in the archive and it will not be possible to " "restore them" msgstr "" "NÃ¥gra filer i arkivet är förstörda och det är ej möjligt att Ã¥terställa dem" #: src/dar_suite/dar.cpp:1082 msgid "Continue listing archive contents?" msgstr "Fortsätta lista arkivinnehÃ¥ll?" #: src/dar_suite/dar.cpp:1104 src/dar_suite/dar.cpp:1109 msgid "Final memory cleanup..." msgstr "" #: src/dar_suite/dar.cpp:1131 #, c-format msgid " %i inode(s) saved\n" msgstr " %i inod(er) sparade\n" #: src/dar_suite/dar.cpp:1132 #, c-format msgid " including %i hard link(s) treated\n" msgstr " inkluderande %i hÃ¥rd(a) länk(ar) behandlade\n" #: src/dar_suite/dar.cpp:1133 #, c-format msgid "" " %i inode(s) changed at the moment of the backup and could not be saved " "properly\n" msgstr "" " %i inode(er) ändrade vid säkerhetskopieringstillfället och kunde inte " "sparas\n" #: src/dar_suite/dar.cpp:1134 #, c-format msgid " %i byte(s) have been wasted in the archive to resave changing files" msgstr " %i byte(s) har förbrukats i arkivet för att Ã¥terspara ändrande filer" #: src/dar_suite/dar.cpp:1135 #, fuzzy, c-format msgid " %i inode(s) with only metadata changed\n" msgstr " %i inod(er) ej sparad(e) (ingen inode/fil ändrad)\n" #: src/dar_suite/dar.cpp:1136 #, c-format msgid " %i inode(s) not saved (no inode/file change)\n" msgstr " %i inod(er) ej sparad(e) (ingen inode/fil ändrad)\n" #: src/dar_suite/dar.cpp:1137 #, c-format msgid " %i inode(s) failed to be saved (filesystem error)\n" msgstr " %i nod(er) 
misslyckades att spara (filsystemfel)\n" #: src/dar_suite/dar.cpp:1138 src/dar_suite/dar.cpp:1168 #: src/dar_suite/dar.cpp:1228 #, c-format msgid " %i inode(s) ignored (excluded by filters)\n" msgstr " %i inod(er) ignorerad(e) (uteslutna av filter)\n" #: src/dar_suite/dar.cpp:1139 #, c-format msgid " %i inode(s) recorded as deleted from reference backup\n" msgstr "" " %i inoder(er) noterad(e) som borttagen/borttagna frÃ¥n " "referenssäkerhetskopian\n" #: src/dar_suite/dar.cpp:1141 src/dar_suite/dar.cpp:1172 #: src/dar_suite/dar.cpp:1236 #, c-format msgid " Total number of inode(s) considered: %i\n" msgstr " Totalt antal övervägda inoder: %i\n" #: src/dar_suite/dar.cpp:1144 src/dar_suite/dar.cpp:1232 #, c-format msgid " EA saved for %i inode(s)\n" msgstr " EA sparat för %i inoder(er)\n" #: src/dar_suite/dar.cpp:1146 src/dar_suite/dar.cpp:1234 #, fuzzy, c-format msgid " FSA saved for %i inode(s)\n" msgstr " EA sparat för %i inoder(er)\n" #: src/dar_suite/dar.cpp:1164 #, c-format msgid " %i inode(s) restored\n" msgstr " %i inod(er) Ã¥terställd(a)\n" #: src/dar_suite/dar.cpp:1165 #, c-format msgid " including %i hard link(s)\n" msgstr " inkluderande %i hÃ¥rd länk(s)\n" #: src/dar_suite/dar.cpp:1166 #, c-format msgid " %i inode(s) not restored (not saved in archive)\n" msgstr " %i inod(er) ej Ã¥terställd(a) (ej sparad(e) i arkiv)\n" #: src/dar_suite/dar.cpp:1167 #, c-format msgid " %i inode(s) not restored (overwriting policy decision)\n" msgstr " %i inod(er) ej Ã¥terställd(a) (beslut enl. 
överskrivningspolicyn)\n" #: src/dar_suite/dar.cpp:1169 #, c-format msgid " %i inode(s) failed to restore (filesystem error)\n" msgstr " %i inod(er) gick inte att Ã¥terställa (filsystemfel)\n" #: src/dar_suite/dar.cpp:1170 #, c-format msgid " %i inode(s) deleted\n" msgstr " %i inod(er) borttagen/borttagna\n" #: src/dar_suite/dar.cpp:1175 #, c-format msgid " EA restored for %i inode(s)\n" msgstr " EA Ã¥terställt för %i inod(er)\n" #: src/dar_suite/dar.cpp:1177 #, fuzzy, c-format msgid " FSA restored for %i inode(s)\n" msgstr " EA Ã¥terställt för %i inod(er)\n" #: src/dar_suite/dar.cpp:1189 src/dar_suite/dar.cpp:1206 #, c-format msgid " %i item(s) treated\n" msgstr " %i företeelse(r) behandlad(e)\n" #: src/dar_suite/dar.cpp:1190 #, c-format msgid " %i item(s) do not match those on filesystem\n" msgstr " %i inod(er) matchar ej motsvarande i filsystem\n" #: src/dar_suite/dar.cpp:1191 src/dar_suite/dar.cpp:1208 #, c-format msgid " %i item(s) ignored (excluded by filters)\n" msgstr " %i företeelse(r) ignorerad(e) (uteslutna av filter)\n" #: src/dar_suite/dar.cpp:1193 src/dar_suite/dar.cpp:1210 #, c-format msgid " Total number of items considered: %i\n" msgstr " Totalt antal övervägda poster: %i\n" #: src/dar_suite/dar.cpp:1207 #, c-format msgid " %i item(s) with error\n" msgstr " %i företeelse(r) med fel\n" #: src/dar_suite/dar.cpp:1226 #, c-format msgid " %i inode(s) added to archive\n" msgstr " %i inod(er) lagda till arkiv\n" #: src/dar_suite/dar.cpp:1227 #, c-format msgid " with %i hard link(s) recorded\n" msgstr " med %i hÃ¥rd(a) länk(ar) noterad(e)\n" #: src/dar_suite/dar.cpp:1229 #, c-format msgid " %i inode(s) recorded as deleted\n" msgstr " %i inod(er) noterade som borttagna\n" #: src/dar_suite/crit_action_cmd_line.cpp:115 msgid "Missing } in conditional statement: " msgstr "Saknat } i villkorssats: " #: src/dar_suite/crit_action_cmd_line.cpp:119 msgid "Missing [ after } in conditional statement: " msgstr "Saknat [ efter } i villkorssats: " #: 
src/dar_suite/crit_action_cmd_line.cpp:122 msgid "Missing ] in conditional statement: " msgstr "Saknat ] i villkorssats: " #: src/dar_suite/crit_action_cmd_line.cpp:200 #, c-format msgid "Unknown policy for data '%c' in expression %S" msgstr "Okänd policy för data '%c' i uttrycket %S" #: src/dar_suite/crit_action_cmd_line.cpp:233 #, c-format msgid "Unknown policy for EA '%c' in expression %S" msgstr "Okänd policy för EA '%c' i uttrycket %S" #: src/dar_suite/crit_action_cmd_line.cpp:243 msgid "Unknown expression in overwriting policy: " msgstr "Okänt uttryck i överskrivningspolicy: " #: src/dar_suite/crit_action_cmd_line.cpp:426 msgid "Unknown atomic operator, or atomic not allowed with an argument: " msgstr "Okänd 'atomic' operator eller 'atomic' ej tillåten med ett argument: " #: src/dar_suite/crit_action_cmd_line.cpp:508 msgid "Unknown character found while parsing conditional string: " msgstr "Okänt tecken hittat under kontroll av villkorssträng: " #: src/dar_suite/crit_action_cmd_line.cpp:517 msgid "Unknown expression found while parsing conditional string: " msgstr "Okänt uttryck hittat under kontroll av villkorssträng: " #: src/dar_suite/hide_file.cpp:32 msgid "hide_file cannot be initialized with write-only file" msgstr "'hide_file' kan ej initieras med 'write-only'-fil" #: src/dar_suite/command_line.cpp:364 #, fuzzy msgid "User target found on command line or included file(s):" msgstr "Analysfel i kommandoraden (eller i inkluderade filer): " #: src/dar_suite/command_line.cpp:373 msgid "No user target found on command line" msgstr "Inget användarmål funnet på kommandorad" #: src/dar_suite/command_line.cpp:391 #, c-format msgid "Given user target(s) could not be found: %S" msgstr "" #: src/dar_suite/command_line.cpp:395 #, c-format msgid "Missing -c -x -d -t -l -C -+ option, see `%S -h' for help" msgstr "Saknar -c -x -d -t -l -C -+ alternativ, se %S -h för hjälp" #: src/dar_suite/command_line.cpp:397 msgid "" "Slicing (-s option), is not compatible with 
archive on standard output (\"-" "\" as filename)" msgstr "" "Delning (-s alternativ), är inte kompatibelt med arkiv pÃ¥ 'standard " "output' (\"-\" som filnamn)" #: src/dar_suite/command_line.cpp:418 msgid "-af option is only available with -c" msgstr "-af alternativ är endast tillgängligt med -c" #: src/dar_suite/command_line.cpp:420 msgid "-A option is not available with -l" msgstr "-A alternativ är ej tillgängligt med -l" #: src/dar_suite/command_line.cpp:422 #, fuzzy msgid "-T option is only available with -l" msgstr "-af alternativ är endast tillgängligt med -c" #: src/dar_suite/command_line.cpp:424 msgid "with -C option, -A option is mandatory" msgstr "med -c alternativ är -A alternativ obligatorisk" #: src/dar_suite/command_line.cpp:426 msgid "with -+ option, -A option is mandatory" msgstr "med -+ alternativ är -A alternativ obligatorisk" #: src/dar_suite/command_line.cpp:428 msgid "-wa is only useful with -x option" msgstr "-wa alternativ är användbart bara med alternativet -x" #: src/dar_suite/command_line.cpp:431 msgid "-o is mandatory when using \"-A -\" with \"-c -\" \"-C -\" or \"-+ -\"" msgstr "" "-o är obligatorisk tillsammans med \"-A -\" with \"-c -\" \"-C -\" or \"-+ -" "\"" #: src/dar_suite/command_line.cpp:441 #, fuzzy msgid "-z option needs only to be used with -c -C or -+ options" msgstr "-z eller -y behöver bara användas med -c -C eller -+" #: src/dar_suite/command_line.cpp:443 msgid "-S option requires the use of -s" msgstr "-S alternativ kräver användning av -s" #: src/dar_suite/command_line.cpp:445 msgid "ignoring -O option, as it is useless in this situation" msgstr "ignorerar -O alternativ, dÃ¥ det är värdelöst i denna situation" #: src/dar_suite/command_line.cpp:448 msgid "-F is only useful with -A option, for the archive of reference" msgstr "-F är endast användbart med -A alternativ, för referensarkivet" #: src/dar_suite/command_line.cpp:451 msgid "-J is only useful with -A option, for the archive of reference" msgstr "-J är endast 
användbart med -A alternativ, för referensarkivet" #: src/dar_suite/command_line.cpp:454 msgid "-f in only available with -x option, ignoring" msgstr "-f är endast tillgängligt med -x alternativ, ignorerar" #: src/dar_suite/command_line.cpp:456 msgid "-m is only useful with -c" msgstr "-m är bara användbart med -c" #: src/dar_suite/command_line.cpp:462 msgid "-H is only useful with -A option when making a backup" msgstr "-H är endast användbart med -A alternativ dÃ¥ en säkerhetskopia görs" #: src/dar_suite/command_line.cpp:468 msgid "-H is only useful with -r option when extracting" msgstr "-H är endast användbart med -r alternativ dÃ¥ extrahering görs" #: src/dar_suite/command_line.cpp:472 msgid "-H is only useful with -c, -d or -x" msgstr "-H är endast användbart med -c, -d eller -x" #: src/dar_suite/command_line.cpp:476 msgid "-as is only available with -l, ignoring -as option" msgstr "-as är endast tillgängligt med -l, ignorerar -as alternativ" #: src/dar_suite/command_line.cpp:478 msgid "-e is only useful with -x, -c or -+ options" msgstr "-e alternativ är användbart bara med -x, -c eller -+ alternativ" #: src/dar_suite/command_line.cpp:480 msgid "-ac is only useful with -c or -d" msgstr "-ac är endast användbart med -c eller -d" #: src/dar_suite/command_line.cpp:482 msgid "-M is only useful with -c" msgstr "-M är endast användbart med -c" #: src/dar_suite/command_line.cpp:484 msgid "The snapshot backup (-A +) is only available with -c option, ignoring" msgstr "Snabb-backup (-A +) är bara tillgängligt med -c aalternativ, ignorerar" #: src/dar_suite/command_line.cpp:486 msgid "" "The Cache Directory Tagging Standard is only useful while performing a " "backup, ignoring it here" msgstr "" "\"Cache Directory Tagging Standard\" är bara användbart medan en " "säkerhetskopiering görs, tar ej hänsyn till det här" #: src/dar_suite/command_line.cpp:489 msgid "-@ is only available with -+ and -c options" msgstr "-@ är bara tillgängligt med -+ och -c alternativ" #: 
src/dar_suite/command_line.cpp:491 msgid "-$ is only available with -+ option and -c options" msgstr "-$ är bara tillgängligt med -+ och -c alternativ" #: src/dar_suite/command_line.cpp:493 #, fuzzy msgid "-~ is only available with -+ and -c options" msgstr "-@ är bara tillgängligt med -+ och -c alternativ" #: src/dar_suite/command_line.cpp:495 #, c-format msgid "-%% is only available with -+ option" msgstr "-%% är bara tillgängligt med -+ alternativ" #: src/dar_suite/command_line.cpp:498 msgid "" "-$ is only useful with -@ option, for the auxiliary archive of reference" msgstr "" "-$ är bara nyttigt med -@ alternativ, för det hjälpande referensarkivet" #: src/dar_suite/command_line.cpp:500 #, c-format msgid "" "-%% is only useful with -@ option, for the auxiliary archive of reference" msgstr "" "-%% är bara nyttigt med -@ alternativ, för det hjälpande referensarkivet" #: src/dar_suite/command_line.cpp:502 msgid "" "-~ is only useful with -@ option, for the auxiliary archive of reference" msgstr "" "-~ is är bara nyttig med -@ alternativ, för det hjälpande referensarkivet" #: src/dar_suite/command_line.cpp:505 msgid "-ak is only available while merging (operation -+), ignoring -ak" msgstr "" "-ak är bara tillgängligt under sammanfogning (operation -+), ignorerar -ak" #: src/dar_suite/command_line.cpp:510 #, fuzzy msgid "" "Compression option (-z option) is useless and ignored when using -ak option" msgstr "" "Kompressionsalternativ (-z eller -y) är oanvändbart och ignoreras när -ak " "alternativ används" #: src/dar_suite/command_line.cpp:517 msgid "" "--sparse-file-min-size only available while saving or merging archives, " "ignoring" msgstr "" "--sparse-file-min-size är bara tillgängligt under arkivs sparande eller " "ihopslagning. 
" #: src/dar_suite/command_line.cpp:520 msgid "" "To use --sparse-file-min-size while merging archive, you need to use -ah " "option too, please check man page for details" msgstr "" "För att utnyttja --sparse-file-min-size under arkivihopslagning, måste du " "använda -ah valet också, se man page för detaljer" #: src/dar_suite/command_line.cpp:526 msgid "-k option is only useful with -x option" msgstr "-k alternativ är användbart bara med alternativet -x" #: src/dar_suite/command_line.cpp:529 msgid "-konly and -kignore cannot be used at the same time" msgstr "\"-konly\" och \"-kignore\" kan ej användas samtidigt" #: src/dar_suite/command_line.cpp:532 msgid "-p and -Q options are mutually exclusives" msgstr "" #: src/dar_suite/command_line.cpp:535 #, fuzzy msgid "-vf is only useful with -c option" msgstr "-wa alternativ är användbart bara med alternativet -x" #: src/dar_suite/command_line.cpp:540 #, fuzzy msgid "-A option is required with -y option" msgstr "-k alternativ är användbart bara med alternativet -x" #: src/dar_suite/command_line.cpp:542 #, fuzzy msgid "'-A +' is not possible with -y option" msgstr "-%% är bara tillgängligt med -+ alternativ" #: src/dar_suite/command_line.cpp:544 #, fuzzy msgid "-af is not possible with -y option" msgstr "-wa alternativ är användbart bara med alternativet -x" #: src/dar_suite/command_line.cpp:546 #, fuzzy msgid "-k option is not possible with -y option" msgstr "-k alternativ är användbart bara med alternativet -x" #: src/dar_suite/command_line.cpp:548 msgid "" "-X, -I, -P, -g, -], -[ and any other file selection relative commands are " "not possible with -y option" msgstr "" #: src/dar_suite/command_line.cpp:550 #, fuzzy msgid "-D option is useless with -y option" msgstr "-w alternativ är oanvändbart med -n" #: src/dar_suite/command_line.cpp:552 #, fuzzy msgid "-r option is useless with -y option" msgstr "-w alternativ är oanvändbart med -n" #: src/dar_suite/command_line.cpp:554 msgid "" "-u, -U, -P, -g, -], -[ and any 
other EA selection relative commands are not " "possible with -y option" msgstr "" #: src/dar_suite/command_line.cpp:556 #, fuzzy msgid "-O option is not possible with -y option" msgstr "-k alternativ är användbart bara med alternativet -x" #: src/dar_suite/command_line.cpp:558 #, fuzzy msgid "-H option is useless with -y option" msgstr "-w alternativ är oanvändbart med -n" #: src/dar_suite/command_line.cpp:560 #, fuzzy msgid "-as option is useless with -y option" msgstr "-w alternativ är oanvändbart med -n" #: src/dar_suite/command_line.cpp:562 #, fuzzy msgid "-ae option is useless with -y option" msgstr "-w alternativ är oanvändbart med -n" #: src/dar_suite/command_line.cpp:564 #, fuzzy msgid "-ad option is useless with -y option" msgstr "-w alternativ är oanvändbart med -n" #: src/dar_suite/command_line.cpp:566 #, fuzzy msgid "-asecu option is useless with -y option" msgstr "-w alternativ är oanvändbart med -n" #: src/dar_suite/command_line.cpp:568 #, fuzzy msgid "-ai option is useless with -y option" msgstr "-w alternativ är oanvändbart med -n" #: src/dar_suite/command_line.cpp:570 msgid "--alter=do-not-compare-symlink-mtime option is useless with -y option" msgstr "" #: src/dar_suite/command_line.cpp:572 #, fuzzy msgid "-M option is useless with -y option" msgstr "-w alternativ är oanvändbart med -n" #: src/dar_suite/command_line.cpp:574 #, fuzzy msgid "-@ option is useless with -y option" msgstr "-w alternativ är oanvändbart med -n" #: src/dar_suite/command_line.cpp:576 #, fuzzy msgid "-/ option is useless with -y option" msgstr "-w alternativ är oanvändbart med -n" #: src/dar_suite/command_line.cpp:578 #, fuzzy msgid "-< and -> options are useless with -y option" msgstr "-k alternativ är användbart bara med alternativet -x" #: src/dar_suite/command_line.cpp:580 #, fuzzy msgid "-5 option is useless with -y option" msgstr "-w alternativ är oanvändbart med -n" #: src/dar_suite/command_line.cpp:582 #, fuzzy msgid "-8 option is useless with -y option" msgstr "-w 
alternativ är oanvändbart med -n" #: src/dar_suite/command_line.cpp:584 #, fuzzy msgid "-{ and -} options are useless with -y option" msgstr "-k alternativ är användbart bara med alternativet -x" #: src/dar_suite/command_line.cpp:586 #, fuzzy msgid "-\\ option is useless with -y option" msgstr "-w alternativ är oanvändbart med -n" #: src/dar_suite/command_line.cpp:588 #, fuzzy msgid "compression (-z option) cannot be changed with -y option" msgstr "" "Kompressionsalternativ (-z eller -y) är oanvändbart och ignoreras när -ak " "alternativ används" #: src/dar_suite/command_line.cpp:590 #, fuzzy msgid "-ak option is useless with -y option" msgstr "-w alternativ är oanvändbart med -n" #: src/dar_suite/command_line.cpp:592 #, fuzzy msgid "-ah option is useless with -y option" msgstr "-w alternativ är oanvändbart med -n" #: src/dar_suite/command_line.cpp:594 #, fuzzy msgid "--sequential-read is useless with -y option" msgstr "-r är oanvändbart med -n" #: src/dar_suite/command_line.cpp:596 msgid "--alter=tape-marks is impossible with -y option" msgstr "" #: src/dar_suite/command_line.cpp:641 #, fuzzy msgid "" "-Y and -Z are only useful with compression (-z option), ignoring any -Y and -" "Z option" msgstr "" "-Y och -Z är endast användbara med komprimering (-z eller -y till exempel), " "ignorerar alla -Y och -Z alternativ" #: src/dar_suite/command_line.cpp:643 #, fuzzy msgid "-m is only useful with compression (-z option), ignoring -m" msgstr "" "-m är endast användbart med komprimering (-z eller -y alternativ), ignorerar " "-m" #: src/dar_suite/command_line.cpp:693 msgid "" "-= option is valid only while saving files, thus in conjunction with -c " "option, ignoring" msgstr "" "-= alternativ är bara giltigt option under sparande av filer, sÃ¥lunda i " "samband med -c alternativ, ignorerar" #: src/dar_suite/command_line.cpp:695 msgid "" "-= option will be ignored as it is useless if you do not specify to which " "files or directories this backup hook is to be applied, 
thanks to -< and -> " "options. See man page for more details." msgstr "" "-= valet kommer att ignoreras då det är oanvändbart om du inte specificerar " "till vilka filer eller mappar denna koppling skall tillämpas, tack vare -< " "och -> valen. Se man page för ytterligare detaljer." #: src/dar_suite/command_line.cpp:701 msgid "" "backup hook feature (-<, -> or -= options) is only available when saving " "files, ignoring" msgstr "" "'backup hook' funktion (-<, -> eller -= alternativ) är bara tillgänglig vid " "sparande av filer, ignorerar" #: src/dar_suite/command_line.cpp:769 msgid "-/ option is only useful with -+ option, ignoring" msgstr "-/ alternativ är användbart bara med -+ alternativ, ignoreras" #: src/dar_suite/command_line.cpp:780 msgid "" "-. option is only useful when merging, creating or isolating an archive, " "ignoring" msgstr "" "-. alternativ är endast användbart vid sammanslagning, skapande eller " "isolering av arkiv, ignorerar" #: src/dar_suite/command_line.cpp:785 #, c-format msgid "" "The following user comment will be placed in clear text in the archive: %S" msgstr "" "Följande användarkommentar kommer att placeras i klartext i arkivet: %S" #: src/dar_suite/command_line.cpp:804 msgid "" "Warning: libdar multi-threading is an experimental and unsupported feature, " "read man page about -G option for more information" msgstr "" #: src/dar_suite/command_line.cpp:809 msgid "Parse error: " msgstr "Analysfel: " #: src/dar_suite/command_line.cpp:854 msgid " Only one option of -c -d -t -l -C -x or -+ is allowed" msgstr " Endast ett alternativ av -c -d -t -l -C -x eller -+ är tillåtet" #: src/dar_suite/command_line.cpp:907 msgid "Only one -A option is allowed" msgstr "Endast ett -A alternativ är tillåtet" #: src/dar_suite/command_line.cpp:934 msgid "Error while parsing -A argument as a date: " msgstr "Fel vid analys av -A-argument som ett datum: " #: src/dar_suite/command_line.cpp:1015 msgid "Choose only one compression algorithm" msgstr 
"Välj bara en komprimeringsalgoritm" #: src/dar_suite/command_line.cpp:1021 msgid "-w option is useless with -n" msgstr "-w alternativ är oanvändbart med -n" #: src/dar_suite/command_line.cpp:1033 msgid "Unknown argument given to -w: " msgstr "Okänt argument givet till -w: " #: src/dar_suite/command_line.cpp:1053 src/dar_suite/command_line.cpp:1060 #: src/dar_suite/command_line.cpp:1067 msgid "\"-k\" (or \"-kignore\") and \"-konly\" are not compatible" msgstr "\"-k\" (eller \"-kignore\") och \"-konly\" är ej kompatibla" #: src/dar_suite/command_line.cpp:1071 #, c-format msgid "Unknown argument given to -k : %s" msgstr "Okänt argument givet till -k : %s" #: src/dar_suite/command_line.cpp:1075 msgid "Only one -R option is allowed" msgstr "Endast ett -R alternativ är tillÃ¥tet" #: src/dar_suite/command_line.cpp:1102 src/dar_suite/dar_xform.cpp:228 msgid "Only one -s option is allowed" msgstr "Endast ett -s alternativ är tillÃ¥tet" #: src/dar_suite/command_line.cpp:1127 src/dar_suite/command_line.cpp:1145 #: src/dar_suite/dar_xform.cpp:253 src/dar_suite/dar_xform.cpp:271 msgid "Only one -S option is allowed" msgstr "Endast ett -S alternativ är tillÃ¥tet" #: src/dar_suite/command_line.cpp:1135 msgid "" "Giving to -S option the same value as the one given to -s option is useless" msgstr "" "Om man ger till -S alternativet samma värde, som det som man ger till -s " "alternativet, är det oanvändbart" #: src/dar_suite/command_line.cpp:1210 msgid "-r is useless with -n" msgstr "-r är oanvändbart med -n" #: src/dar_suite/command_line.cpp:1291 msgid "Invalid argument given to -T option, expecting [:]" msgstr "" #: src/dar_suite/command_line.cpp:1307 #, fuzzy, c-format msgid "Invalid hash algorithm provided to -T opton: %s" msgstr "Ogiltigt tal givet till -; alternativ: %s" #: src/dar_suite/command_line.cpp:1401 #, c-format msgid "" "File inclusion loop detected. 
The file %s includes itself directly or " "through other files (-B option)" msgstr "" "Slinga i filinkludering har upptäckts. Filen %s inkluderar sig själv direkt " "eller genom andra filer (-B alternativ)" #: src/dar_suite/command_line.cpp:1418 #, fuzzy, c-format msgid "Error reading included file (%s): " msgstr "I ingÃ¥ende filer %S: " #: src/dar_suite/command_line.cpp:1423 #, fuzzy, c-format msgid "Error in included file (%s): " msgstr "I ingÃ¥ende filer %S: " #: src/dar_suite/command_line.cpp:1439 src/dar_suite/command_line.cpp:2841 #: src/dar_suite/command_line.cpp:2923 #, c-format msgid "In included file %S: " msgstr "I ingÃ¥ende filer %S: " #: src/dar_suite/command_line.cpp:1482 #, c-format msgid "" "%d is the default value for -m, no need to specify it on command line, " "ignoring" msgstr "" "%d är standardvärdet för -m, inget behov att specificera det pÃ¥ " "kommandoraden, ignorerar" #: src/dar_suite/command_line.cpp:1499 msgid "" "--nodump feature has not been activated at compilation time, it is thus not " "available" msgstr "" "--nodump-funktionen har ej aktiverats vid kompileringstillfället, sÃ¥lunda är " "den ej tillgängligt" #: src/dar_suite/command_line.cpp:1515 msgid "Argument given to -H is not a positive integer number" msgstr "Det argument, som ges till -H, är inte ett positivt heltal" #: src/dar_suite/command_line.cpp:1521 src/dar_suite/dar_xform.cpp:318 msgid "-a option requires an argument" msgstr "-a alternativ kräver ett argument" #: src/dar_suite/command_line.cpp:1568 msgid "" "-ak option need not be specified more than once, ignoring extra -ak options" msgstr "" "-ak alternativ behöver inte specificeras mer än en gÃ¥ng, ignorer extra ak " "alternativ" #: src/dar_suite/command_line.cpp:1574 msgid "-af must be present before -A option not after!" msgstr "-af mÃ¥ste finnas före -A alternativ inte efter!" 
#: src/dar_suite/command_line.cpp:1576 msgid "" "-af option need not be specified more than once, ignoring extra -af options" msgstr "" "-af alternativ behöver inte specificeras mer än en gÃ¥ng, ignorer extra af " "alternativ" #: src/dar_suite/command_line.cpp:1614 #, c-format msgid "Unknown argument given to -a : %s" msgstr "Okänt argument givet till -a : %s" #: src/dar_suite/command_line.cpp:1631 msgid "libthreadar required for multithreaded execution" msgstr "" #: src/dar_suite/command_line.cpp:1659 msgid "Only one -@ option is allowed" msgstr "Endast ett -@ alternativ är tillÃ¥tet" #: src/dar_suite/command_line.cpp:1737 msgid "Syntax error in overwriting policy: " msgstr "Syntaxfel i överskrivningspolicy: " #: src/dar_suite/command_line.cpp:1770 #, c-format msgid "" "%d is the default value for --sparse-file-min-size, no need to specify it on " "command line, ignoring" msgstr "" "%d är standardvärdet för --sparse-file-min-size, inget behov att specificera " "det pÃ¥ kommandoraden, ignorerar" #: src/dar_suite/command_line.cpp:1791 #, c-format msgid "Unknown argument given to -2 : %s" msgstr "Okänt argument givet till -2 : %s" #: src/dar_suite/command_line.cpp:1828 src/dar_suite/dar_xform.cpp:336 msgid "Missing argument to --hash" msgstr "Saknar argument till --hash" #: src/dar_suite/command_line.cpp:1830 src/dar_suite/dar_xform.cpp:343 msgid "Unknown parameter given to --hash option: " msgstr "Okänd parameter givet till --hash alternativ: " #: src/dar_suite/command_line.cpp:1843 msgid "Error while parsing --min-digits option: " msgstr "Fel vid analys av --min-digits alternativ: " #: src/dar_suite/command_line.cpp:1849 msgid "Missing argument to --backup-hook-execute" msgstr "Saknar argument till --backup-hook-execute" #: src/dar_suite/command_line.cpp:1920 #, fuzzy msgid "Missing argument to --delta" msgstr "Saknar argument till --hash" #: src/dar_suite/command_line.cpp:1942 #, fuzzy msgid "missing function name argument in string" msgstr "Saknar argument till 
-i alternativ" #: src/dar_suite/command_line.cpp:1948 #, fuzzy msgid "missing multiplier argument in string" msgstr "Saknar argument till -i alternativ" #: src/dar_suite/command_line.cpp:1966 src/dar_suite/command_line.cpp:1977 msgid "too large value provided for the min block size" msgstr "" #: src/dar_suite/command_line.cpp:1982 msgid "unexpected extra argument in string" msgstr "" #: src/dar_suite/command_line.cpp:1988 #, fuzzy msgid "Unknown parameter given to --delta option: " msgstr "Okänd parameter givet till -a alternativ: " #: src/dar_suite/command_line.cpp:2035 #, fuzzy msgid "Unknown parameter given to --modified-data-detection option: " msgstr "Okänd parameter givet till -a alternativ: " #: src/dar_suite/command_line.cpp:2040 src/dar_suite/command_line.cpp:2042 #, fuzzy, c-format msgid "Unknown option -%c" msgstr "Ignorerar okänt alternativ -%c" #: src/dar_suite/command_line.cpp:2059 #, c-format msgid "" "usage: %s [ -c | -x | -d | -t | -l | -C | -+ ] [/] " "[options...]\n" msgstr "" "Användning: %s [ -c | -x | -d | -t | -l | -C | -+ ] [/] " "[alternativ...]\n" #: src/dar_suite/command_line.cpp:2063 #, c-format msgid "Commands are:\n" msgstr "Kommandona är:\n" #: src/dar_suite/command_line.cpp:2064 #, c-format msgid " -c creates an archive\n" msgstr " -c skapar ett arkiv\n" #: src/dar_suite/command_line.cpp:2065 #, c-format msgid " -x extracts files from the archive\n" msgstr " -x extraherar filer frÃ¥n arkivet\n" #: src/dar_suite/command_line.cpp:2066 #, c-format msgid " -d compares the archive with the existing filesystem\n" msgstr " -d jämför arkivet med existerande filsystem\n" #: src/dar_suite/command_line.cpp:2067 #, c-format msgid " -t tests the archive integrity\n" msgstr " -t testar arkivets integritet\n" #: src/dar_suite/command_line.cpp:2068 #, c-format msgid " -l lists the contents of the archive\n" msgstr " -l listar innehÃ¥llet i arkivet\n" #: src/dar_suite/command_line.cpp:2069 #, c-format msgid " -C isolates the catalogue from an 
archive\n" msgstr " -C isolerar katalogen frÃ¥n ett arkiv\n" #: src/dar_suite/command_line.cpp:2070 #, c-format msgid " -+ merge two archives / create a sub archive\n" msgstr " -+ sammanfoga tvÃ¥ arkiv / skapa ett underarkiv\n" #: src/dar_suite/command_line.cpp:2071 #, fuzzy, c-format msgid " -y repair a truncated archive\n" msgstr " -c skapar ett arkiv\n" #: src/dar_suite/command_line.cpp:2073 #, c-format msgid " -h displays this help information\n" msgstr " -h visar denna hjälpinformation\n" #: src/dar_suite/command_line.cpp:2074 #, c-format msgid " -V displays version information\n" msgstr " -V visar versionsinformation\n" #: src/dar_suite/command_line.cpp:2077 #, fuzzy, c-format msgid " -v[s|t|d|m|f|a] verbose output\n" msgstr " -v\t\t detaljerade utdata\n" #: src/dar_suite/command_line.cpp:2078 #, c-format msgid " -q\t\t suppress final statistics report\n" msgstr " -q\t\t undertryck slutgiltig statistikrapport\n" #: src/dar_suite/command_line.cpp:2079 #, c-format msgid " -vs\t\t display skipped files\n" msgstr " -vs\t\t visa överhoppade filer\n" #: src/dar_suite/command_line.cpp:2080 #, c-format msgid " -R \t filesystem root directory (current dir by default)\n" msgstr " -R \t filsystemets rotmapp (gällande mapp som standard)\n" #: src/dar_suite/command_line.cpp:2081 #, c-format msgid "" " -X \t files to exclude from the operation (none by default)\n" msgstr "" " -X \t filer som skall uteslutas frÃ¥n Ã¥tgärden (inga som " "standard)\n" #: src/dar_suite/command_line.cpp:2082 #, c-format msgid " -I \t files to include in the operation (all by default)\n" msgstr "" " -I \t filer som skall inkluderas i Ã¥tgärden (alla som standard)\n" #: src/dar_suite/command_line.cpp:2083 #, c-format msgid " -P \t subdirectory to exclude from the operation\n" msgstr " -P \t mapp som skall utelutas i Ã¥tgärden\n" #: src/dar_suite/command_line.cpp:2084 #, c-format msgid " -g \t subdirectory to include in the operation\n" msgstr " -g \t mapp som skall inkluderas i Ã¥tgärden\n" #: 
src/dar_suite/command_line.cpp:2085 #, c-format msgid " -[ filename contains a list of files to include\n" msgstr "" " -[ \t filnamn innehÃ¥ller en lista pÃ¥ filer som skall tas med\n" #: src/dar_suite/command_line.cpp:2086 #, c-format msgid " -] \t filename contains a list of files to exclude\n" msgstr "" " -] \t filnamn innehÃ¥ller en lista pÃ¥ filer som skall " "uteslutas\n" #: src/dar_suite/command_line.cpp:2087 src/dar_suite/dar_xform.cpp:429 #, c-format msgid " -n\t\t don't overwrite files\n" msgstr " -n\t\t skriv ej över filer\n" #: src/dar_suite/command_line.cpp:2088 src/dar_suite/dar_xform.cpp:430 #, c-format msgid " -w\t\t don't warn before overwriting files\n" msgstr " -w\t\t varna inte före överskrivning av filer\n" #: src/dar_suite/command_line.cpp:2089 #, c-format msgid " -wa\t\t don't warn before overwriting and removing files\n" msgstr " -wa\t\t varna inte vid överskrivning och borttagande av filer\n" #: src/dar_suite/command_line.cpp:2090 src/dar_suite/dar_xform.cpp:431 #, c-format msgid " -b\t\t ring the terminal bell when user action is required\n" msgstr "" " -b\t\t ring pÃ¥ terminalens klocka när en användarÃ¥tgärd mÃ¥ste till\n" #: src/dar_suite/command_line.cpp:2091 #, c-format msgid "" " -O[ignore-owner | mtime | inode-type] do not consider user and group\n" msgstr "" " -O[ignorera-ägare | mtime | inode-typ] beakta inte ägare och grupp\n" #: src/dar_suite/command_line.cpp:2092 #, c-format msgid "\t\t ownership\n" msgstr "\t\t ägarskap\n" #: src/dar_suite/command_line.cpp:2093 #, c-format msgid " -H [N]\t ignore shift in dates of an exact number of hours\n" msgstr " -H [N]\t ignorera datumbyte pÃ¥ en fixt antal timmar\n" #: src/dar_suite/command_line.cpp:2094 #, c-format msgid " -E \t command to execute between slices\n" msgstr " -E \t kommadon som skall utföras mellan delar\n" #: src/dar_suite/command_line.cpp:2095 #, c-format msgid " -F \t same as -E but for the archive of reference\n" msgstr " -F \t spara som -E men för referensarkivet\n" 
#: src/dar_suite/command_line.cpp:2096 #, c-format msgid " -u \t mask to ignore certain EA\n" msgstr " -u \t mask för att ignorera viss EA\n" #: src/dar_suite/command_line.cpp:2097 #, c-format msgid " -U \t mask to allow certain EA\n" msgstr " -U \t mask för att tillÃ¥ta viss EA\n" #: src/dar_suite/command_line.cpp:2098 #, c-format msgid " -K \t use as key to encrypt/decrypt\n" msgstr "" " -K \t använd som nyckel för att kryptera/dekryptera\n" #: src/dar_suite/command_line.cpp:2099 #, c-format msgid "" " -J \t same as -K but it does concern the archive of reference\n" msgstr " -J \t samma som -K men det avser referensarkivet\n" #: src/dar_suite/command_line.cpp:2100 #, c-format msgid " -# encryption block size\n" msgstr " -# \t krypteringsblockstorlek\n" #: src/dar_suite/command_line.cpp:2101 #, c-format msgid " -* same as -# but for archive of reference\n" msgstr " -* \t samma som -# men för referensarkivet\n" #: src/dar_suite/command_line.cpp:2102 #, c-format msgid " -B read options from given file\n" msgstr " -B \t läs alternativ frÃ¥n angiven fil\n" #: src/dar_suite/command_line.cpp:2103 #, c-format msgid " -N\t\t do not read ~/.darrc nor /etc/darrc configuration file\n" msgstr "" " -N\t\t läs inte ~/.darrc eller /etc/darrc inställningsparameterfil\n" #: src/dar_suite/command_line.cpp:2104 #, c-format msgid " -e\t\t dry run, fake execution, nothing is produced\n" msgstr " -e\t\t torrsim, fejkad exekvering, inget är producerat\n" #: src/dar_suite/command_line.cpp:2105 #, c-format msgid " -Q\t\t suppress the initial warning when not launched from a tty\n" msgstr " -Q\t\t undertyck begynnelsevarningen om ej startad frÃ¥n en tty\n" #: src/dar_suite/command_line.cpp:2106 #, c-format msgid " -aa\t\t do not try to preserve atime of file open for reading.\n" msgstr " -aa\t\t bevara inte 'atime' hos en läsöppnad fil.\n" #: src/dar_suite/command_line.cpp:2107 #, c-format msgid " -ac\t\t do not try to preserve ctime (default behavior).\n" msgstr " -ac\t\t bevara inte 
'ctime' (standarduppträdande).\n" #: src/dar_suite/command_line.cpp:2108 #, c-format msgid " -am\t\t set ordered mode for all filters\n" msgstr " -am\t\t ställ in 'ordered mode' för alla filter\n" #: src/dar_suite/command_line.cpp:2109 #, c-format msgid " -an\t\t the masks that follow are now case insensitive\n" msgstr "" " -an\t\t efterföljande masker är okänsliga för liten/stor bokstav\n" #: src/dar_suite/command_line.cpp:2110 #, c-format msgid " -acase\t the masks that follow are now case sensitive\n" msgstr "" " -acase\t efterföljande masker är känsliga för liten/stor bokstav\n" #: src/dar_suite/command_line.cpp:2111 #, c-format msgid " -ar\t\t set the following masks to be regex expressions\n" msgstr " -ar\t\t ställ in följande masker att vara reguljära uttryck\n" #: src/dar_suite/command_line.cpp:2112 #, c-format msgid " -ag\t\t set the following masks to be glob expressions\n" msgstr " -ag\t\t ställ in följande masker att vara 'glob expressions'\n" #: src/dar_suite/command_line.cpp:2114 #, fuzzy, c-format msgid "" "Saving/Isolation/merging/repairing options (to use with -c, -C, -+ or -y):\n" msgstr "" "Sparande- / Isolations- / sammanfogningsalternativ (att användas med -c, -C " "eller -+):\n" #: src/dar_suite/command_line.cpp:2115 #, c-format msgid " -A [path/] archive to take as reference\n" msgstr " -A [sökväg/] arkiv att ta som referens\n" #: src/dar_suite/command_line.cpp:2116 #, c-format msgid " -@ [path/] auxiliary archive of reference for merging\n" msgstr " -@ [sökväg/] referenshjälparkiv för sammanslagning\n" #: src/dar_suite/command_line.cpp:2117 #, c-format msgid " -$ \t encryption key for auxiliary archive\n" msgstr " -$ \t krypteringsnyckel för hjälparkiv\n" #: src/dar_suite/command_line.cpp:2118 #, c-format msgid " -~ \t command between slices of the auxiliary archive\n" msgstr " -~ \t kommando mellan avsnitt hos hjälparkivet\n" #: src/dar_suite/command_line.cpp:2119 #, fuzzy, c-format msgid " -z [[algo:]level]\t compress data in archive. 
-z = -z9 = -zgzip:9\n" msgstr " -z [nivÃ¥]\t komprimera data i ett arkiv med gzip-algoritm\n" #: src/dar_suite/command_line.cpp:2120 #, c-format msgid "" " Available algo: gzip,bzip2,lzo,xz. Exemples: -zlzo -zxz:5 -z1 -z\n" msgstr "" #: src/dar_suite/command_line.cpp:2121 src/dar_suite/dar_xform.cpp:426 #, c-format msgid "" " -s split the archive in several files of size \n" msgstr " -s \t klyv arkivet i flera filer av storlek \n" #: src/dar_suite/command_line.cpp:2122 #, c-format msgid " -S first file size (if different from following ones)\n" msgstr "" " -S \t första filens storlek (om skild frÃ¥n de efterföljandes)\n" #: src/dar_suite/command_line.cpp:2123 #, fuzzy, c-format msgid " -aSI \t slice size suffixes k, M, T, G, etc. are powers of 10\n" msgstr " -aSI \t delstorlekssuffix k, M, T, G, etc. är tio-potenser\n" #: src/dar_suite/command_line.cpp:2124 #, fuzzy, c-format msgid " -abinary\t slice size suffixes k, M, T, G, etc. are powers of 2\n" msgstr " -abinary\t delstorlekssuffix k, M, T, G, etc. 
är tvÃ¥-potenser\n" #: src/dar_suite/command_line.cpp:2125 src/dar_suite/dar_xform.cpp:428 #, c-format msgid " -p\t\t pauses before writing to a new file\n" msgstr " -p\t\t gör en paus innan skrivning till en ny fil\n" #: src/dar_suite/command_line.cpp:2126 #, c-format msgid " -D\t\t excluded directories are stored as empty directories\n" msgstr " -D\t\t uteslutna mappar lagras som tomma mappar\n" #: src/dar_suite/command_line.cpp:2127 #, c-format msgid " -Z \t do not compress the matching filenames\n" msgstr " -Z \t komprimera inte de matchande filnamnen\n" #: src/dar_suite/command_line.cpp:2128 #, c-format msgid " -Y \t do only compress the matching filenames\n" msgstr " -Y \t komprimera endast de matchande filnamnen\n" #: src/dar_suite/command_line.cpp:2129 #, c-format msgid " -m \t do not compress file smaller than \n" msgstr " -m \t komprimera inte filer mindre än \n" #: src/dar_suite/command_line.cpp:2130 #, c-format msgid " --nodump\t do not backup, files having the nodump 'd' flag set\n" msgstr " --nodump\t tag ej med filer med 'nodump'-flaggan 'd' satt\n" #: src/dar_suite/command_line.cpp:2131 #, c-format msgid "" " -@ [path/] Do on-fly catalogue isolation of the resulting " "archive\n" msgstr "" " -@ [sökväg/] Gör katalogisolation av det resulterande arkivet " "i flykten\n" #: src/dar_suite/command_line.cpp:2132 #, c-format msgid " -M\t\t stay in the same filesystem while scanning directories\n" msgstr " -M\t\t stanna i samma filsysytem medan mappar avsöks\n" #: src/dar_suite/command_line.cpp:2133 #, c-format msgid " -,\t\t ignore directories that follow the Directory Tagging\n" msgstr " -,\t\t ignorera mappar som följer \"Directory Tagging\"\n" #: src/dar_suite/command_line.cpp:2134 #, c-format msgid "\t\t Standard\n" msgstr "\t\t Standard\n" #: src/dar_suite/command_line.cpp:2135 #, c-format msgid "" " -/ \t which way dar can overwrite files at archive merging or\n" msgstr "" " -/ \t pÃ¥ vilket sätt dar kan skriva över filer vid ihopslagning " "av arkiv 
eller\n" #: src/dar_suite/command_line.cpp:2136 #, c-format msgid "\t\t extraction time\n" msgstr "\t\t uthämtningstillfälle\n" #: src/dar_suite/command_line.cpp:2137 src/dar_suite/dar_xform.cpp:436 #, c-format msgid " -^ \t permission[:user[:group]] of created slices\n" msgstr " -^ \t permission[:användare[:grupp]] för skapade bitar\n" #: src/dar_suite/command_line.cpp:2138 #, c-format msgid "" " -8 sig\t add delta signature to perform binary delta if used as ref." msgstr "" #: src/dar_suite/command_line.cpp:2140 #, c-format msgid "Restoring options (to use with -x) :\n" msgstr "Ã…terställningsalternativ (att användas vid -x) :\n" #: src/dar_suite/command_line.cpp:2141 #, c-format msgid " -k\t\t do not remove files destroyed since the reference backup\n" msgstr "" " -k\t\t tag ej bort filer, förstörda sedan referenssäkerhetskopian\n" #: src/dar_suite/command_line.cpp:2142 #, c-format msgid " -r\t\t do not restore file older than those on filesystem\n" msgstr " -r\t\t Ã¥terställ ej filer som är äldre än de i filsystemet\n" #: src/dar_suite/command_line.cpp:2143 #, c-format msgid " -f\t\t do not restore directory structure\n" msgstr " -f\t\t Ã¥terställ ej mappstrukturen\n" #: src/dar_suite/command_line.cpp:2145 #, c-format msgid "Reading options (to use with -x, -d, -t, -l, -A)\n" msgstr "Läsalternativ (att användas vid -x, -d, -t, -l, -A)\n" #: src/dar_suite/command_line.cpp:2146 #, c-format msgid "" " -i pipe to use instead of std input to read data from " "dar_slave\n" msgstr "" " -i rör som används i stället för 'std input' till att " "läsa data frÃ¥n 'dar_slave'\n" #: src/dar_suite/command_line.cpp:2147 #, c-format msgid "" " -o pipe to use instead of std output to orders dar_slave\n" msgstr "" " -o rör som används i stället för 'std output' till att " "berordra 'dar_slave'\n" #: src/dar_suite/command_line.cpp:2149 #, c-format msgid "Listing options (to use with -l):\n" msgstr "Listalternativ (att användas vid -l):\n" #: src/dar_suite/command_line.cpp:2150 #, 
c-format msgid " -T\t\t tree output format\n" msgstr " -T\t\t trädformat pÃ¥ utmatning\n" #: src/dar_suite/command_line.cpp:2151 #, c-format msgid " -as\t\t only list files saved in the archive\n" msgstr " -as\t\t lista bara filer som sparats i arkivet\n" #: src/dar_suite/command_line.cpp:2152 #, c-format msgid "" "\n" "\n" msgstr "" "\n" "\n" #: src/dar_suite/command_line.cpp:2153 #, fuzzy, c-format msgid "" "Type \"man dar\" for more details and for all other available options.\n" msgstr "" "Skriv \"man dar\" för ytterligare detaljer och för alla tillgängliga " "alternativ.\n" #: src/dar_suite/command_line.cpp:2529 #, c-format msgid " Using libdar %u.%u.%u built with compilation time options:" msgstr "" " Använder libdar med %u.%u.%u byggt med kompileringstillfällets alternativ:" #: src/dar_suite/command_line.cpp:2530 #, c-format msgid " Using libdar %u.%u built with compilation time options:" msgstr "" " Använder libdar med %u.%u byggt med kompileringstillfällets alternativ:" #: src/dar_suite/command_line.cpp:2753 #, c-format msgid "Arguments read from %S :" msgstr "Argument läses frÃ¥n %S :" #: src/dar_suite/command_line.cpp:2875 #, fuzzy, c-format msgid "Failed reading %S: " msgstr "Fel vid läsning frÃ¥n fil: " #: src/dar_suite/command_line.cpp:2956 #, fuzzy, c-format msgid "Warning: Failed reading %S: " msgstr "Fel vid läsning frÃ¥n fil: " #: src/dar_suite/command_line.cpp:3246 #, c-format msgid "" "%s does not name a compression \"[algorithm][:][level]\" , like for examples " "\"gzip\", \"lzo\", \"bzip2\", \"lzo:3\", \"gzip:2\", \"8\" or \"1\". Please " "review the man page about -z option" msgstr "" "%s namnger inte en kompression \"[algoritm][:][nivÃ¥]\" , sÃ¥som t. ex. \"gzip" "\", \"lzo\", \"bzip2\", \"lzo:3\", \"gzip:2\", \"8\" eller \"1\". 
Titta pÃ¥ " "man page om -z alternativ" #: src/dar_suite/command_line.cpp:3265 msgid "Compression level must be between 1 and 9, included" msgstr "KomprimeringsnivÃ¥n mÃ¥ste ligga mellan 1 och 9 inklusive" #: src/dar_suite/command_line.cpp:3294 msgid "unknown FSA family: " msgstr "" #: src/dar_suite/command_line.cpp:3326 #, c-format msgid "" "User target named \"%s\" is not allowed (reserved word for conditional " "syntax)" msgstr "" "AnvändarmÃ¥l kallat \"%s\" är ej tillÃ¥tet (reseverat ord för villkorssyntax)" #: src/dar_suite/dar_xform.cpp:230 msgid "Missing argument to -s" msgstr "Saknar argument till -s" #: src/dar_suite/dar_xform.cpp:241 msgid "Invalid size for option -s" msgstr "Ogiltig storlek för -s alternativ" #: src/dar_suite/dar_xform.cpp:248 msgid "Missing argument to -S" msgstr "Saknar argument till -S" #: src/dar_suite/dar_xform.cpp:261 msgid "Giving -S option the same value as the one given to -s is useless" msgstr "" "Att ge -S alternativet samma värde som det som gavs till -s är oanvändbart" #: src/dar_suite/dar_xform.cpp:265 msgid "Invalid size for option -S" msgstr "Ogiltig storlek för -S alternativ" #: src/dar_suite/dar_xform.cpp:302 msgid "Missing argument to -E" msgstr "Saknar argument till -E" #: src/dar_suite/dar_xform.cpp:310 msgid "Missing argument to -F" msgstr "Saknar argument till -F" #: src/dar_suite/dar_xform.cpp:325 msgid "Unknown parameter given to -a option: " msgstr "Okänd parameter givet till -a alternativ: " #: src/dar_suite/dar_xform.cpp:331 msgid "Missing argument to -^" msgstr "Saknar argument till -^" #: src/dar_suite/dar_xform.cpp:366 msgid "" "Missing source or destination argument on command line, see -h option for " "help" msgstr "" "Saknar käll- eller mÃ¥l-argument pÃ¥ kommandorad, se -h alternativ för hjälp" #: src/dar_suite/dar_xform.cpp:381 msgid "Invalid argument as source archive" msgstr "Ogiltigt argument som källarkiv" #: src/dar_suite/dar_xform.cpp:388 msgid "Invalid argument as destination archive" msgstr 
"Ogiltigt argument som mÃ¥larkiv" #: src/dar_suite/dar_xform.cpp:394 msgid "Archive on stdout is not compatible with slicing (-s option)" msgstr "Arkiv pÃ¥ 'std output' är inte kompatibelt med delning (-s alternativ)" #: src/dar_suite/dar_xform.cpp:419 #, c-format msgid "\t\t the first non options argument is the archive to read\n" msgstr "" "\t\t det första icke alternativargumentet är det arkiv som skall läsas\n" #: src/dar_suite/dar_xform.cpp:421 #, c-format msgid "\t\t the second non option argument is the archive to create\n" msgstr "" "\t\t det andra icke alternativargumentet är det arkiv som skall skapas\n" #: src/dar_suite/dar_xform.cpp:425 #, c-format msgid " -V\t\t displays version information\n" msgstr " -V\t\t visar versionsinformation\n" #: src/dar_suite/dar_xform.cpp:427 #, c-format msgid " -S first file size\n" msgstr " -S första filens storlek\n" #: src/dar_suite/dar_xform.cpp:432 #, c-format msgid "" " -E \t command to execute between slices of destination archive\n" msgstr "" " -E \t kommando, som skall utföras mellan delar hos mÃ¥larkivet\n" #: src/dar_suite/dar_xform.cpp:433 #, c-format msgid " -F \t command to execute between slice of source archive\n" msgstr "" " -F \t kommando, som skall utföras mellan delar i källarkivet\n" #: src/dar_suite/dar_xform.cpp:434 #, c-format msgid " -aSI \t slice size suffixes k, M, T, G, etc. are power of 10\n" msgstr " -aSI \t delstorlekssuffix k, M, T, G, etc. är tio-potenser\n" #: src/dar_suite/dar_xform.cpp:435 #, c-format msgid " -abinary\t slice size suffixes k, M, T, G, etc. are power of 2\n" msgstr " -abinary\t delstorlekssuffix k, M, T, G, etc. 
är tvÃ¥-potenser\n" #: src/python/pybind11_libdar.cpp:262 #, fuzzy msgid "FATAL error from operating system, aborting operation: " msgstr "FATALT fel, avbryter Ã¥tgärd" #: src/check/all_features.cpp:54 msgid "-" msgstr "" #~ msgid "CRC failed for table of contents (aka \"catalogue\")" #~ msgstr "" #~ "CRC misslyckades för innehÃ¥llsförtäckning (ocksÃ¥ känts som \"katalog\")" #, fuzzy #~ msgid "aborting" #~ msgstr "avbryt" #, fuzzy #~ msgid " Special allocation scheme : %s\n" #~ msgstr " Särskilt tilldelningsschema: %s\n" #~ msgid "Cannot get mtime: %s" #~ msgstr "Kan inte hämta mtime: %s" #~ msgid "[--- REMOVED ENTRY ----]" #~ msgstr "[--- BORTAGNA DATA ----] " #~ msgid "" #~ "Archive number | Data | status || " #~ "EA | status \n" #~ msgstr "" #~ "Arkivnummer | Data | status || " #~ "EA | status \n" #, fuzzy #~ msgid "" #~ "---------------+-------------------------+--------+" #~ "+-------------------------+----------\n" #~ msgstr "--------------+-------------------------+-----------------------\n" #~ msgid "Cannot remove file " #~ msgstr "Kan ej ta bort fil " #~ msgid "Cannot copy a wrapperlib object (NOT IMPLEMENTED)" #~ msgstr "Kan ej kopiera ett 'wrapperlib'-objekt (EJ IMPLEMENTERAT)" #~ msgid "argument must be a relative path" #~ msgstr "argumentet mÃ¥ste vara en relativ sökväg" #~ msgid "Archive copy constructor is not implemented" #~ msgstr "\"Archive copy constructor\" är ej implementerad" #~ msgid "Archive assignment operator is not implemented" #~ msgstr "\"Archive assignment operator\" är ej implementerad" #~ msgid "Cannot add another archive, database is full" #~ msgstr "Kan ej lägga till ytterligare arkiv, databasen full" #~ msgid "Absolute value too high for an archive number: %d" #~ msgstr "Absolutvärdet för stort för arkiv nummer: %d" #, fuzzy #~ msgid "" #~ "File ownership will not be restored as %s has not the CHOWN capability " #~ "nor is running as root. 
to avoid this message use -O option" #~ msgstr "" #~ "Filägarskap kommer ej att Ã¥terställas dÃ¥ %s inte körs som rot. För att " #~ "undvika detta meddelande använd -O alternativ" #, fuzzy #~ msgid "" #~ "Furtive read mode has been disabled as %s has not the FOWNER capability " #~ "nor is running as root" #~ msgstr "Läsning i smyg har kopplats bort dÃ¥ dar inte körs som root" #, fuzzy #~ msgid "nullptr argument given to \"sauv_path\"" #~ msgstr "Tomt argument give till \"sauv_path\"" #, fuzzy #~ msgid "nullptr argument given to \"filename\"" #~ msgstr "Tomt argument givet till \"filename\"" #, fuzzy #~ msgid "nullptr argument given to \"extension\"" #~ msgstr "Tomt argument givet till \"extension\"" #~ msgid "Strong encryption support" #~ msgstr "Stöd för stark kryptering" #~ msgid " -j\t\t ask user what to do when memory is exhausted\n" #~ msgstr "" #~ " -j\t\t frÃ¥ga användaren vad som skall göras dÃ¥ minnet är slut\n" #, fuzzy #~ msgid "" #~ "------------------+----------------" #~ "+----------------------------------------+-------------------------+" #~ msgstr "--------------+-------------------------+-----------------------\n" #, fuzzy #~ msgid "" #~ "---------------+------+-------+-------+-------------------------------" #~ "+------------------------------+-----------\n" #~ msgstr "--------------+-------------------------+-----------------------\n" #, fuzzy #~ msgid "" #~ "-----------------------------+------------+-------+-------+-------" #~ "+-------------------------------+------------\n" #~ msgstr "--------------+-------------------------+-----------------------\n" #, fuzzy #~ msgid "" #~ "+-----------------+----------------" #~ "+----------------------------------------+-------------------------+" #~ msgstr "--------------+-------------------------+-----------------------\n" #~ msgid "Preparing the archive contents for isolation..." #~ msgstr "Förbereder arkivinnehÃ¥llet för isolering..." 
#~ msgid "Current implementation does not support this (new) crypto algorithm" #~ msgstr "Aktuell implementering stödjer ej denna (nya) krypteringsalgoritm" #~ msgid "Not supported flag or archive corruption" #~ msgstr "Ej understödd flagga eller förstört arkiv" #~ msgid "" #~ "too low value (< 10) given as observation_read_number argument while " #~ "initializing cache" #~ msgstr "" #~ "för litet värde (< 10) givet som observation_read_number- argument medan " #~ "cache initieras" #~ msgid "" #~ "too low value (< 10) given as observation_write_number argument while " #~ "initializing cache" #~ msgstr "" #~ "för litet värde (< 10) givet som observation_write_number- argument " #~ "medan cache initieras" #~ msgid "" #~ "too high value (> 50) given as unused_read_ratio argument, while " #~ "initializing cache" #~ msgstr "" #~ "för stort värde (> 50) angivet som argumentet 'unused_read_ratio', medan " #~ "cachen initieras" #~ msgid "" #~ "unused_write_ratio must be less than max_size_hit_write_ratio, while " #~ "initializing cache" #~ msgstr "" #~ "'unused_write_ratio' mÃ¥ste vara mindre än 'max_size_hit_write_ratio', " #~ "medan cachen initieras" #, fuzzy #~ msgid "" #~ "max_size must be greater or equal to initial_size, while initializing " #~ "cache" #~ msgstr "" #~ "fel värde givet till argumentet initial_size medan cachen initialiseras" #~ msgid "incompatible Zlib version" #~ msgstr "ej kompatibel Zlib-version" #~ msgid "" #~ "Could not find archive information at the end of the last slice, assuming " #~ "an old archive and trying to read at the beginning of the first slice..." #~ msgstr "" #~ "Kunde inte finna arkivinformationen i slutet an den sista delen. Antar " #~ "ett gammalt arkiv och försöker läsa frÃ¥n början av den första delen..." #~ msgid "" #~ "LAX MODE: Failed to read the archive header, I will need your help to " #~ "know what is the missing information." #~ msgstr "" #~ "LAX MODE: Misslyckades med att läsa arkivhuvudet. 
Behöver din hjälp för " #~ "att fÃ¥ veta vilken information, som saknas." #~ msgid "" #~ "LAX MODE: Archive format revision found is [%s] but the higher version " #~ "this binary can handle is [%s]. Thus, assuming the archive version is " #~ "corrupted and falling back to the higher version this binary can support " #~ "(%s)" #~ msgstr "" #~ "LAX MODE: Arkivets formatrevision hittat är [%s] men den högsta version " #~ "detta program kan hantera är [%s]. SÃ¥lunda antar att arkivversion är " #~ "förstörd och Ã¥tergÃ¥r till den högsta version detta program kan stödja " #~ "(%s)." #~ msgid "LAX MODE: Archive format revision found is [version %s]" #~ msgstr "LAX MODE: Hittad arkivformatsrevision är [version %s]" #~ msgid "LAX MODE: is it correct, seen the table at the following URL: %s ?" #~ msgstr "LAX MODE: är det korrekt, sett tabellen vid följande: %s ?" #, fuzzy #~ msgid "Cannot determine the permission to use for hash files: " #~ msgstr "Kan ej Ã¥terställe behörigheter för %s : %s" #~ msgid "Error opening file " #~ msgstr "Fel vid filöppning " #~ msgid "Error opening file %s : %s" #~ msgstr "Fel vid filöppning %s : %s" #~ msgid "Position out of range" #~ msgstr "Position utan för omrÃ¥de" #~ msgid "Read-write mode not supported for \"trivial_sar\"" #~ msgstr "Läs-skriv-läge ej understött för \"trivial_sar\"" #~ msgid "Cannot create database %S : %s" #~ msgstr "Kan ej skapa databas %S : %s" #~ msgid "" #~ "Thread-safe not initialized for libdar, read manual or contact maintainer " #~ "of the application that uses libdar" #~ msgstr "" #~ "'thread'safe' inte initierat för libdar, läs manualen eller kontakta den " #~ "som underhÃ¥ller libdar-tillämpningen" #~ msgid "Cannot initialize mutex: " #~ msgstr "Kan ej initiera 'mutex': " #~ msgid "Aborting operations for the EA of %S : error while adding EA %s : %s" #~ msgstr "Avbryter Ã¥tgärder för EA till %S : fel vid tillägg av EA %s : %s" #~ msgid "Aborting operations for the EAs of %S : error while removing %s : %s" #~ 
msgstr "Avbryter Ã¥tgärder för EA till %S : fel vid bortagande av EA %s : %s" #~ msgid "" #~ " -z [bzip2[:level]] compress data in archive using bzip2 algorithm.\n" #~ msgstr "" #~ " -z [bzip2[:level]] komprimera data i ett arkiv med bzip2-algoritm\n" #~ msgid "" #~ "No more (virtual) memory available, you have the opportunity to stop un-" #~ "necessary applications to free up some memory. Can we continue now ?" #~ msgstr "" #~ "Inget ytterligare (virtuellt) minne tillgängligt. du har möjligheter att " #~ "stoppa onödvändiga tillämpningar sÃ¥ att minne kan frigöras. Kan vi " #~ "fortsätta nu?" #~ msgid "Cannot open file %S : %s" #~ msgstr "Kan ej öppna fil %S : %s" #~ msgid "User target found on command line:" #~ msgstr "AnvändarmÃ¥l funnet pÃ¥ kommandorad:" #~ msgid "" #~ "Note that -y option is deprecated it will be removed in future release, " #~ "please use -z option instead (read man page for details)" #~ msgstr "" #~ "Märk att -y alternativet är ersatt och kommer att tas bort i framtida " #~ "utgÃ¥vor." 
#~ msgid "Choose either -z or -y not both" #~ msgstr "Välj antingen -z eller -y ej bägge" #~ msgid "" #~ "-G option is obsolete, use -@ option instead, see man page for details" #~ msgstr "" #~ "-G alternativet är utgÃ¥nget, använd -@ alternativet i stället, se man " #~ "page för detaljer" #~ msgid "NULL given as argument" #~ msgstr "Tomt givet som argument" #~ msgid "NULL argument given to \"fs_root\"" #~ msgstr "Tomt argument givet till \"fs_root\"" #~ msgid "NULL argument given to \"selection\"" #~ msgstr "Tomt argument givet till \"selection\"" #~ msgid "NULL argument given to \"subtree\"" #~ msgstr "Tomt argument givet till \"subtree\"" #~ msgid "NULL argument given to \"execute\"" #~ msgstr "Tomt argument givet till\"utför\"" #~ msgid "NULL argument given to \"compr_mask\"" #~ msgstr "Tomt argument givet till \"compr_mask\"" #~ msgid "NULL argument given to \"min_compr_size\"" #~ msgstr "Tomt argument givet till \"min_compr_mask\"" #~ msgid "NULL argument given to \"ea_mask\"" #~ msgstr "Tomt argument givet till \"ea_mask\"" #~ msgid "Cannot determine location of the end of cyphered data: " #~ msgstr "Kan ej bestämma slutet pÃ¥ chiffrerade data: " #~ msgid "Argument given to \"major\" is a NULL pointer" #~ msgstr "Argument givet till \"major\" är en NULL-pekare" #~ msgid "Argument given to \"minor\" is a NULL pointer" #~ msgstr "Argument givet till \"minor\" är en NULL-pekare" #~ msgid "Argument given to \"medium\" is a NULL pointer" #~ msgstr "Argument givet till \"medium\" är en NULL-pekare" #~ msgid "argument given to \"minor\" is a NULL pointer" #~ msgstr "argument givet till \"minor\" är en NULL-pekare" #~ msgid "invalid NULL argument given as mask option" #~ msgstr "ogiltigt NULL-argument givet som maskalternativ" #~ msgid "invalid NULL argument given as crit_action option" #~ msgstr "ogiltigt NULL-argument givet som crit_cation alternativ" #~ msgid "-~ is only available with -+ option and -c options" #~ msgstr "-~ är bara tillgängligt med -+ och -c 
alternativ" #~ msgid "End of file reached while skipping to the begin of a word" #~ msgstr "Filslut uppnÃ¥tt vid överhoppande till början av ett ord" #, fuzzy #~ msgid "Reached end of file while reading a word" #~ msgstr "NÃ¥tt filslut vid läsning av delsäkerhetskopiehuvud" #~ msgid "" #~ "Archive format older than \"08\" (release 2.4.0) cannot be read through a " #~ "single pipe, only using dar_slave or normal plain file (slice) method" #~ msgstr "" #~ "Arkivformat äldre än \"08\" (utgÃ¥va 2.4.0) kan ej läsas genom ett ensamt " #~ "rör, bara genom dar_slave eller normal planfilsmetod" #~ msgid "unary operator" #~ msgstr "unary operator" #~ msgid "Corrupted database, empty entry found" #~ msgstr "Förstörd databas, tom post funnen" #~ msgid "Reached End of File while reading CRC data" #~ msgstr "NÃ¥tt filslut vid läsning av CRC-data" #~ msgid "Error checking for presence of file " #~ msgstr "Fel vid kontroll av filbefintlighet " #~ msgid "Extended Attribute of file not found in database: " #~ msgstr "Extended Attribute för fil ej hittad i databas: " #~ msgid "" #~ "Could not create caching layer to improve performance while writing down " #~ "the archive contents, using the defaut slow method" #~ msgstr "" #~ "Kunde inte skapa ett 'caching'-lager för att förbättra prestanda under " #~ "skrivning av arkivinnehÃ¥llet, använder den lÃ¥ngsamma standardmetoden" #~ msgid "\"-k\" and \"-konly\" are not compatible" #~ msgstr "\"-k\" och \"-konly\" är ej kompatibla" #~ msgid "Reading config file: " #~ msgstr "Läser inställningsfil: " #~ msgid " Total number of item(s) considered: %i\n" #~ msgstr " Total antal övervägda företeelse(r): %i\n" #~ msgid "Unknown TLV type" #~ msgstr "Okänd TLV-typ" #~ msgid "different owner" #~ msgstr "annan ägare" #~ msgid " Extended Attribute: [" #~ msgstr " Extended Attribut: [" #~ msgid "LAX MODE: Failed to read the catalogue (zero byte length archive)" #~ msgstr "" #~ "LAX MODE: Misslyckades med att läsa katalogen (arkiv med längden 0 byte)" 
#~ msgid "Empty string is an invalid label, cannot clear it" #~ msgstr "Tom sträng är en ogiltigt etikett, kan ej rensa den" #~ msgid "An empty string is an invalid label, cannot add it" #~ msgstr "En tom sträng är en ogiltigt etikett, kan ej lägga till" #~ msgid "Label already used in stack, cannot add it" #~ msgstr "Etikett redan använd i stack, kan ej lägga till" #~ msgid "Bad file descriptor given" #~ msgstr "Felaktig fildeskriptor erhÃ¥llen" #~ msgid "%s cannot be restricted to %s" #~ msgstr "%s kan ej begränsas till %s" #~ msgid "Adding hard link to archive: " #~ msgstr "Lägger till hÃ¥rd länk till arkiv: " #~ msgid "can't read CRC data" #~ msgstr "kan inte läsa CRC-data" #~ msgid "" #~ "The iterator is not indexing the object it has been asked to insert byte " #~ "into" #~ msgstr "" #~ "'Iteratorn' indexerar inte det objekt den har blivit ombedd att sätta in " #~ "byte i" #~ msgid "zip compression not implemented" #~ msgstr "zip-komprimering inte implementerad" #~ msgid "Not overwritten dar_manager_show_version() method has been called!" #~ msgstr "" #~ "Ingen överskriven \"dar_manager_show_version()\"-metod har anropats!" #~ msgid "" #~ "Not overwritten listing() method called with: (%S, %S, %S, %S, %S, %S, " #~ "%S, %s, %s)" #~ msgstr "" #~ "Ingen överskriven listing()-metod anropad med: (%S, %S, %S, %S, %S, %S, " #~ "%S, %s, %s)" #~ msgid "Not overwritten dar_manager_show_files() method has been called!" #~ msgstr "Ingen överskriven \"dar_manager_show_files()\"-metod har anropats!" #~ msgid "Not overwritten dar_manager_contents() method has been called!" #~ msgstr "Ingen överskriven \"dar_manager_contents()\"-metod har anropats!" #~ msgid "Not overwritten dar_manager_statistics() method has been called!" #~ msgstr "Ingen överskriven \"dar_manager_statistics()\"-metod har anropats!" 
#~ msgid "%%%c is not implemented in tools_printf format argument" #~ msgstr "%%%c är inte implementerat i 'tools_printf'-s formatargument" #~ msgid "ERROR removing file %s : %s" #~ msgstr "FEL vid borttagande av fil %s : %s" #~ msgid "Cannot get inode information for: " #~ msgstr "Kan ej fÃ¥ inodinformation för : " #~ msgid "Reached EOF while reading a word" #~ msgstr "NÃ¥tt EOF vid läsning av ett ord" #, fuzzy #~ msgid "Invalid archive numnber" #~ msgstr "Ogiltigt arkivnummer: " #~ msgid "No backup file is present in " #~ msgstr "Ingen säkerhetskopiefil finns i " #~ msgid "-G option is only available with -c or -+ options" #~ msgstr "-G alternativ är endast tillgängligt med -c or -+ alternativ" #~ msgid "\"-\" not allowed with -G option" #~ msgstr "\"-\" ej tillÃ¥tet med alternativ -G" #~ msgid "NULL argument given to \"pass\"" #~ msgstr "Tomt argument givet till \"pass\"" #~ msgid "Cannot convert the string to integer, overflow" #~ msgstr "Kan inte omvandla sträng till heltal, spill" #~ msgid "Some file could not be saved" #~ msgstr "NÃ¥gon fil kunde inte sparas" #, fuzzy #~ msgid "" #~ " with %i hard link(s) treated (including those excluded by filters)\n" #~ msgstr " %i inod(er) ignorerad(e) (uteslutna av filter)\n" #~ msgid " total number of hard links : %i\n" #~ msgstr " antal hÃ¥rda länkar : %i\n" #~ msgid "" #~ "SSL returned Message Authentication Code (MAC) has an incoherent size " #~ "with provided parameters" #~ msgstr "" #~ "SSL svarade \"Message Authentication Code (MAC)\" har en icke passande " #~ "storlek jämfört med lämnade parametrar" #~ msgid "New blowfish implementation support" #~ msgstr "Nytt stöd för 'blowfish" #~ msgid "libssl call failed: EVP_DigestInit_ex failed" #~ msgstr "libssl-anrop misslyckades: EVP_DigestInit_ex failed" #~ msgid " New Blowfish implementation: %s\n" #~ msgstr " Ny Blowfish-implementering: %s\n" #~ msgid "" #~ "Warning, the following options -[ , -], -P and -g are not used with -l " #~ "(listing) operation" #~ msgstr 
"" #~ "Varning, följande alternativ -[ , -], -P och -g används ej med -l " #~ "(listnings) operation" #~ msgid "Unknown argument : %s" #~ msgstr "Okänt argument : %s" #~ msgid "Cannot open file %s : %s" #~ msgstr "Kan ej öppna fil %s : %s" #~ msgid " %i inode(s) less recent than the one on filesystem\n" #~ msgstr " %i inod(er) mindre nya än den/de i filsystem\n" #~ msgid "All EA for %S are about to be erased before restoration, continue ?" #~ msgstr "" #~ "Alla EA för %S skall till att raderas före Ã¥terställande, fortsätta ?" #~ msgid "EA for %S will not be restored (overwriting not allowed)" #~ msgstr "EA för %S kommer inte att Ã¥terställas, (överskrivning ej tillÃ¥ten)" #~ msgid "%S will not be remove from filesystem, overwriting not allowed" #~ msgstr "" #~ "%S kommer ej att flyttas frÃ¥n filsystemet, överskrivning ej tillÃ¥ten" #~ msgid "%S has not been overwritten (action not allowed)" #~ msgstr "%S har ej skrivits över (Ã¥tgärden ej tillÃ¥ten)" #~ msgid "%S is about to be removed and replaced by a directory, OK ?" #~ msgstr "%S skall till att tas bort och ersättas med en mapp, OK ?" 
#~ msgid "" #~ "%S could not be restored, because a file of that name exists and " #~ "overwrite is not allowed" #~ msgstr "" #~ "%S kunde ej Ã¥terställas, ty en fil med samma namn finns och överskrivning " #~ "är ej tillÃ¥ten" #, fuzzy #~ msgid "Cannot temporary change permissions of %S : " #~ msgstr "Kan ej temporärt ändra behörigheter för %S : " #~ msgid "[ REMOVED ]" #~ msgstr "[ FLYTTAD ]" #~ msgid "cannot dump CRC data to file" #~ msgstr "kan inte skriva CRC-data till fil" #~ msgid "Removing file: " #~ msgstr "Tar bort fil: " #~ msgid "Error while restoring EA for " #~ msgstr "Fel vid Ã¥terställande av EA för " #~ msgid "Badly formatted SAR header" #~ msgstr "Felaktigt formaterat SAR-huvud" #~ msgid "Cannot write data to file" #~ msgstr "Kan ej skriva data till fil" #~ msgid "Cannot open file %s: %s" #~ msgstr "Kan ej öppna fil %s : %s" #~ msgid "Cannot read file %s : %s" #~ msgstr "Kan ej läsa fil %s : %s" #~ msgid "" #~ "Both arguments are not NULL, please choose one or the other, not both" #~ msgstr "" #~ "Bägge argumenten är inte tomma, välj det ena eller det andra, ej bägge" #~ msgid "Cannot read from file, both arguments are NULL" #~ msgstr "Kan ej läsa frÃ¥n fil, bägge argumenten är NULL" #~ msgid "Error reading data: " #~ msgstr "Fel vid läsning av data: " #~ msgid "Cannot write any byte to file, filesystem is full? Please check!" #~ msgstr "Kan ej skriva nÃ¥gon byte till fil, fullt filsystem?, Kontrollera!" #~ msgid "" #~ "Position out of range, must call \"skip\" method from trivial_sar object " #~ "not from its \"reference\"" #~ msgstr "" #~ "Position är utanför gränser, mÃ¥ste anropa \"skip\"-metod frÃ¥n trivial_sar-" #~ "objekt inte frÃ¥n dess \"referens\"" #~ msgid "Error while calling execvp:" #~ msgstr "Fel vid anrop av execvp:" #~ msgid "Extracting contents of the archive..." #~ msgstr "Extraherar innehÃ¥llet i arkivet..." 
#, fuzzy #~ msgid "Cannot skip in an (anonymous) pipe" #~ msgstr "Kan ej hoppa över vid filslut" #, fuzzy #~ msgid "Error while writing data to anonymous pipe: " #~ msgstr "Fel vid skrivning av data till ett rör: " #~ msgid "" #~ "-E is not possible (and useless) without slicing (-s option), -E will be " #~ "ignored" #~ msgstr "" #~ "-E är ej möjligt (och oanvändbart) utan delning (-s alternativ), -E " #~ "ignoreras" #~ msgid "Compressing and writing back database header to file..." #~ msgstr "Komprimerar och skriver tillbaka databas till fil..." #~ msgid "NULL argument given to \"hourshift\"" #~ msgstr "Tomt argument givet till \"hourshift\"" #~ msgid "Archive contents size in archive : %i bytes\n" #~ msgstr "Arkivets totala storlek är : %i byte\n" #~ msgid "root has no parent directory, cannot change to it" #~ msgstr "rot har inte nÃ¥gon föräldramapp, kan ej byta till den" #~ msgid "Archive number | Data | EA\n" #~ msgstr "Arkiv nummer | Data | EA\n" #~ msgid "" #~ "Concerning file %S : archive #%d contains the most recent data and some " #~ "old EA while archive #%d contains the most recent EA only." #~ msgstr "" #~ "AngÃ¥ende fil %S : arkiv #%d innehÃ¥ller de senaste data och nÃ¥gra gamla " #~ "EA medan arkiv #%d innehÃ¥ller det senaste EA enbart." #~ msgid "" #~ "Dar manager will always restore files from the archives in the order they " #~ "have been added in database, thus, for this file, last EA version, will " #~ "be overwritten by the older version associated with saved data." #~ msgstr "" #~ "Dar kommer alltid att Ã¥terställa filer frÃ¥n arkiven i den ordning de " #~ "lades till i databasen, sÃ¥lunda för denna fil kommer senaste EA version " #~ "att skrivas över av den äldre versionen sammakopplad med sparade data." 
#~ msgid "" #~ "To avoid such type of problem, reorder the archive in the database to " #~ "have the most recent ones having the higher index number" #~ msgstr "" #~ "För att undvika dylika problem bör arkiven i databasen sorteras om sÃ¥ att " #~ "det senaste har det högsta indextalet" #~ msgid "CALLING DAR: " #~ msgstr "ANROPAR DAR: " #~ msgid "Removing references to saved data from catalogue..." #~ msgstr "Tar bort referenser till sparade data frÃ¥n katalog..." #~ msgid " File already present in archive" #~ msgstr "Fil finns redan i arkiv" #~ msgid "A problem occurred while opening header of file %s: %S. Try again?" #~ msgstr "" #~ "Ett problem uppstod bid öppning filhuvud till filen %s: %S, Försöka igen?" #~ msgid " . Ignore it and continue ?" #~ msgstr " . Ignorera detta och fortsätta ?" #~ msgid "unknown substitution string at end of string: %" #~ msgstr "okänd utbytessträng vid strängslut: %" dar-2.6.8/po/ChangeLog0000644000175000017520000000075613617552277011437 000000000000002020-02-08 gettextize * Makefile.in.in: New file, from gettext-0.19.8.1. * Rules-quot: New file, from gettext-0.19.8.1. * boldquot.sed: New file, from gettext-0.19.8.1. * en@boldquot.header: New file, from gettext-0.19.8.1. * en@quot.header: New file, from gettext-0.19.8.1. * insert-header.sin: New file, from gettext-0.19.8.1. * quot.sed: New file, from gettext-0.19.8.1. * remove-potcdate.sin: New file, from gettext-0.19.8.1. * POTFILES.in: New file. 
dar-2.6.8/po/sv.gmo0000644000175000017520000037126713617552353011024 00000000000000Þ•»ôýÌ;ÀOÁOÐOæO;öO2P=KP‰P  PÁP!ÃPåP(èPÐQ âQƒRŸR)¾R7èR; S1\S?ŽSCÎSJTO]TC­T2ñT5$U@ZU0›U.ÌU-ûU?)V<iVH¦V4ïVK$WEpW>¶W?õWC5XJyX9ÄXOþXANYAYHÒY;ZDWZCœZ#àZF[K[)i[$“['¸[$à[F\8L\7…\@½\=þ\><]A{]B½]C^<D^:^:¼^,÷^:$_;__0›_>Ì_I `)U``9š`+Ô`7a-8aBfa9©a)ãa& b 4bPUbL¦bCóbG7c)c=©c;çc #dIDdJŽdBÙd.eFKe+’e=¾e*üe:'fHbf#«f*ÏfLúf:Gg!‚g.¤gKÓg<h'\hB„h%ÇhDíh2iQQi£i3¹i2íi+ j1Lj8~j.·j!æj7k@kWk-kk*™kÄkÙk&ñkAlAZl3œlÐlæl mm&'m NmXmkm‰m‰¤m‹.n6ºnñn_oYfoDÀo)p&/p8Vp9p;Ép<qBqK]q©q8Áq$úq2r8Rr‹rB¨rërs2'sZsts?s Ðs$Üs%t5't7]t•t4µtOêt/:u/ju/šu9Êu=vIBv"Œv)¯v4Ùvw,wHEwlŽwhûwjdxÏxcìxOPy¬ y(Mz+vz'¢zÊz2ázH{$]{I‚{UÌ{P"|Qs|1Å|X÷|½P}+~":~-]~>‹~5Ê~#0$>U” ®Ï î/€$?€Kd€@°€Kñ€2=+p:œ-×'‚3-‚a‚>{‚º‚Ђ!í‚Hƒ Xƒyƒ8ƒºƒÕƒGèƒ:0„fk„9Ò„ …K*…Lv…%Ã…)é… †;4†#p† ”†•µ†K‡f‡‡%š‡À‡<ч=ˆ*Lˆ5wˆ­ˆ:ƈ"‰!$‰.F‰u‰=“‰щ?ê‰/*ŠZŠ#zŠžŠºŠÑŠ2늋Š5‹HÀ‹' Œ#1ŒUŒLkŒK¸ŒF=KO‰>ÙŽ31Ž2eŽ(˜ŽÁŽ!ÙŽûŽ/,%\(‚«Â?×&G>†1Ÿ%Ñ6÷%.‘T‘n‘3‚‘¶‘%Ñ‘.÷‘-&’T’m’’%¤’Ê’á’&“''“ O“@p“!±“%Ó“6ù“0” ?”J”([”0„”3µ”2锕4•-S•+•"­• ЕCÛ• –-–,L–y–—–³–7È–—H—+b—NŽ—Ý—/î—3˜5R˜.ˆ˜&·˜Þ˜6ä˜J™7f™@ž™ß™Fÿ™+FšNršJÁš ›-%›0S›:„›a¿œ6!/X¤ˆb-ž¬ž=Ÿ]Ÿ=nŸ%¬Ÿ*ÒŸ&ýŸ$ j² ¡=¡]¡N}¡2Ì¡mÿ¡"m¢>¢Ï¢Ô¢7Ú¢+£ >£:_£š£$¸£9Ý£+¤C¤!b¤?„¤]Ĥk"¥*Ž¥%¹¥ߥ_ü¥&\¦2ƒ¦.¶¦å¦ÿ¦)§>§*T§§™§"¶§#Ù§Yý§$W¨%|¨ ¢¨6è@ú¨8;©*t©/Ÿ©/Ï©ÿ©?ª(]ª%†ª'¬ª!Ôª#öª;«>V«)•«)¿« é«O ¬&Z¬/¬.±¬)à¬+ ­S6­çŠ­-r® ®À®/à®;¯L¯c¯+€¯¬¯À¯/Ú¯' °+2°D^°£°"Á°ä°-±30±8d± ²,©²4Ö²_ ³Vk³4³÷³"´9´É´bÙ´h<µe¥µ# ¶/¶J¶3g¶c›¶ÿ¶·&·@·KZ·'¦·ηÖ·è·A¸KI¸+•¸"Á¸?ä¸N$¹9s¹(­¹$Ö¹û¹º*-ºXºCtº@¸ºBùºI<»3†»@º»û»# ¼0¼G¼X¼h¼y¼*Œ¼·¼̼'å¼" ½0½#M½q½Œ½§½½â½¾"¾B¾b¾,€¾­¾)Ǿ ñ¾÷ü¿©ôÀ¡žÁ@ÂíÑÂe¿Ã%ÄæBÄŒ)ź¶Å÷qÆfiÇ¡ÐÇrÈ&tÉf›É;Êw>ÊL¶Ê<Ëú@Ë3;ÌKo̬»Ì¿hÍ`(Î?‰ÎxÉÎ?BÏ$‚ÏÆ§Ï3nÐc¢ÐHÑŸOÑËïÑÖ»Ò3’Ó]ÆÓ{$Ô^ ÔÿÕCÖ;RÖ>ŽÖ"ÍÖðÖP ×]^×¼×9Ú×,Ø$AØ0fØ)—ØÁØ ÜØýØÙ2ÙIÙ`ÙwٕٳÙ*ÊÙõÙ`ÚqÚN‘Ú-àÚ$Û3Û'6Û+^ÛLŠÛ$×ÛüÛ'Ü8ÜXVܯÜÉÜ*éÜ9Ý+NÝ)zݤÝfµÝbÞ¡Þ$!ß Fßgß…ß<™ß Öß$àß+à"1àTàYà_àzà˜à¶àÔàòàUá%fáŒáªá,Åá5òá-(â>VâQ•â çâ ãˆã1›ã Íã#ÛãBÿãMBäDäNÕä@$åeå€åEŸå3åå1æIKæ1•æ.Çæ;öæ!2çTç*pç1›çÍçLëç(8èaèuè‰è&¨è:Ïè éAé0]éŽéQ©é‘ûéê£ê%»ê3áê„ë 
šë-»ëyéë3cì—ì샽ì$AíYfíEÀíXî_îS|î¤ÐîuïO•ï,åïð11ð1cð$•ð_ºðYñrtñVçñ[>òJšò\åò…BóQÈóFôEaô?§ô<çô§$õÌÌõ+™öEÅö- ÷Ê9÷?ø=DøW‚øyÚøITù?žù;Þù!úv<úš³ú9Nû+ˆû'´ûLÜû&)ü$PüÓuüQIý›ý5²ý èý" þ.,þ[þ!sþ!•þ!·þÙþAøþ::ÿuÿ…ÿ–ÿ¯ÿÅÌÿ’;¯*ë!,8"e*ˆ&³+Ú-4H g#ˆ)¬+ÖLVOO¦}öMtÂ}ÖÜT51Cg]«I xS.Ì#û .#  R  \  f p x € ˆ ¨ ® µ &¾ Xå *> i  m "z  7£ 7Û 2  F 9R Œ #¦ Ê ç <ö <3 *p ›  » 2É 0ü - K !_ '  ©  Ê  Ô 4ß #8+?*k*– ÁÏOÕ%*4<6q¨»JÚ'%'M&u-œ0Ê=û9Oi9n¨ÇÎÒâ å&C bmr%u&›Â$ßX]4b—± ¹ ÅÐØ6á/:Hƒ ’œ¤­Æ&Ï%ö9B I1S…Bœ?ßwO— çó!!) K!Y"{Pž&ï& =CHŒàq€J­øH[-s¡£À,Ãòð´ã˜#³*×6894r:§>â^! Y€ CÚ 4!5S!L‰!5Ö!* "(7"3`"5”"LÊ"<#MT#L¢#9ï#9)$Lc$>°$Mï$Q=%9%HÉ%H&9[&C•&GÙ&(!'MJ'$˜'/½'#í'$(!6(OX(9¨(7â(L)Ng)E¶)@ü)7=*Du*7º*Iò*B<+5+Hµ+Bþ+2A,Lt,GÁ,> -H-2a-6”-;Ë-(.90.;j.'¦.%Î."ô.k/jƒ/Hî/C70${06 07×01d/1c”1[ø16T2H‹21Ô2C30J3/{3A«3"í3/4I@43Š4#¾43â4V5Bm5&°5=×516IG6‘6Q°6!72$77W7077À7Eø74>8$s8Q˜8ê89096O9 †9§9&Ã9Cê9A.:4p:¥:´:Ð:%ä:7 ;B;I;"_;‚;¡;’/<BÂ<=m=`Š=Pë=%<>%b>Jˆ>JÓ>M?Ml?º?RØ?+@5K@%@<§@7ä@ADB‰B'˜B(ÀB8éB8"C&[C6‚CW¹C/D0AD0rD<£D=àDNE#mE-‘E;¿E ûEFE8Fi~FsèFt\G"ÑGpôG\eH£ÂH1fI-˜I%ÆIìI2JJ:J,…JL²JdÿJXdKe½K?#LncLÏÒL2¢M)ÕM+ÿMA+NHmN-¶NAäNA&O hO'‰O"±O*ÔO1ÿO.1PW`PL¸PWQ=]QA›QJÝQ8(R6aR2˜RËRHêR3S%NS7tSL¬SùSTBT]TwTQŠT;ÜTqUBŠU%ÍURóUSFV/šVÊV éV@ W)KW$uW‘šW ,X MX!nX*X »XGÉXAY5SY;‰YÅYMàY*.ZYZ4tZ©ZIÉZ[8.[7g[#Ÿ[$Ã[è[\\14\f\„\M]!R]t]”]K¨]Jô]Q?^2‘^JÄ^>_N_8m_;¦_'â_ `"`=`R`/n`#ž`(Â`ë`a?a#\aU€aÖa/ëa-b=Ib.‡b¶bÔb@æb'c9Fc,€c7­cåcdd!7dYdud)•d'¿dçd3e#9e#]e>eÀe ÑeÝe2ôe4'f9\f9–fÐf!çf& g-0g ^g gFŒgÓgâg*h -h!Nhph5…h»hIÑh5iQQi £i7±i3éi7j+Uj#j¥j-«j]ÙjL7kK„k ÐkIñk4;lLplI½lm.&m-UmƒmNœn4ën1 o¦Rocùo±]p q0q/Bq)rq3œq'Ðq•øqnŽr ýrs>sQ^s4°shås!Nt@pt±t¶t;¼t*øt(#u@Lu$u,²u=ßu.vLv$lvH‘vbÚvt=w7²w(êw!x|5x+²x9Þx&y?y[y4vy«y/Èyøy!z#7z)[zf…z*ìz+{C{5c{F™{Dà{-%|5S|1‰|»|DØ|'} E}&f}}%¬}CÒ}@~&W~*~~©~LÆ~$28&k+’-¾Sìé@€-*X!u2—GÊ‚!+‚,M‚z‚Œ‚5£‚,Ù‚+ƒL2ƒƒ)“ƒ½ƒ.Úƒ$ „_.„ Ž…*š…4Å…[ú…dV†F»†"‡(%‡­N‡ ü‡r ˆp}ˆuîˆ1d‰–‰°‰7ʉSŠVŠlŠŠ!žŠtÀŠ$5‹Z‹a‹*s‹Mž‹lì‹$YŒ(~ŒF§ŒBîŒE1(w& Çç.þ"-ŽAPŽD’ŽG׎JNjF¹";Q`p€+ŸËáø ‘9‘.T‘ƒ‘"¢‘"Å‘ è‘! 
’ +’ L’m’!’-¯’Ý’)ø’"“1”–7•ªÎ•y–ëù–nå—T˜íq˜™_™Áù™:»šaö››Xœôœ.žl5žI¢žeìž[RŸ&®ŸèÕŸG¾ V¡Û]¡Å9¢Yÿ¢<Y£l–£J¤%N¤­t¤?"¥€b¥b㥖F¦³Ý¦ë‘§S}¨tѨ¤F©Eë© 1«F=«:„«H¿«'¬0¬LN¬h›¬#­<(­!e­‡­1¡­*Ó­þ­!®<®"T®w®®§®"¿®"⮯)¯G¯j]¯$ȯOí¯.=°l°†°"а-­°SÛ°#/±S±)f±%±Q¶±"²$+²0P²;²0½²%î²³s)³j³¯´*¸´"ã´µ%µ4<µ qµ"{µ;žµ&Úµ¶¶ ¶&)¶&P¶&w¶&ž¶&ŶYì¶F·f·„·(¡·=Ê·0¸I9¸Pƒ¸Ô¸ ò¸{ÿ¸7{¹ ³¹"¿¹Kâ¹N.ºD}ºJºL »Z»t»BŒ»-Ï»*ý»H(¼1q¼5£¼-Ù¼(½0½2I½6|½³½OȽ%¾>¾R¾f¾ƒ¾3£¾×¾Gç¾2/¿b¿[¿¡Ý¿À—À4±À:æÀ†!Á"¨Á2ËÁvþÁ.u¤«„ÊÂ*OÃezÃlàÃ_MÄ#­ÄSÑÄŸ%Å#ÅÅUéÅ(?ÆhÆ1‚Æ1´Æ#æÆx ÇWƒÇfÛÇdBÈm§ÈIÉ\_É—¼É`TÊKµÊKËBMË@˼ÑËøŽÌ)‡ÍH±Í'úÍð"ÎIÏP]Ïb®ÏcÐfuÐ9ÜÐ6Ñ%MÑ~sÑ´òÑB§Ò'êÒ.ÓsAÓ#µÓ ÙÓÑúÓOÌÔÕ0-Õ^Õ|Õ,œÕÉÕ"ÜÕ"ÿÕ""ÖEÖGeÖ8­Ö æÖ òÖÿÖ×Ä5×ú×9Ø(MØvØ+•Ø&ÁØ/èØ+Ù)DÙ+nÙšÙ©ÙÂÙ!âÙ*Ú,/ÚN\Úi«Ú`Û—vÛ^Ü mÜyyÝùóÝ:íÞ7(ß[`ßK¼ßxà1à*³àÞà:áà á &á 0á;áCáKá'Tá|á ƒáá(ŸápÈá,9âfâ kâ&vââ;£â9ßâ3ã Mã@Yãšã%¸ã!Þãä@ä@Pä$‘ä!¶äØä.íä/åLå eårå"‘å´åÔå Ùå3åå,æFæ-Kæ-yæ5§æ ÝæëæXóæLçQç1dçB–çÙç éçV è,aè,Žè+»èFçè@.éCoé³éÉéæéHìé5êOêUêYêjê'nê–ê*±ê"Üê ÿê ëë)ë)@ëjë!ˆëdªëìDìZìyì ‚ì ì ™ì £ì<°ì3íì4!íVíeí uí í‰í¡í"¨í"Ëíîí îî î)%îOîdmîMÒïy ðXšð óðÿð ñ4ñOñ`ñ!yñ\›ñ0øñ0)òZòHkò´òH_ËE¥ò© 8IU Ž|ª«ÓV./¹8p×tâSäH|éT¶L¡¿’ÙH6ŽæW¤<=ïˆì¼Úó„™”ÚT^Ÿ{ka{Ühf˜Ñ¦r%£)ô@Ö¢öÄpÒxþ7tQï1‰á×nçg5Z»,Ô¤«Å*— ؈“Këo‰´.~=¨äð*¯ÿ >Ã8™¥ŠCãÈ~¹F&§]®yCð`¶õ'¶¤üÕ]½Ì ?ïŸY"+SAVÛ1I:k¾ÜÏ2—Q3ÊÞwKK•g´¦V&uèºÕÔb‡#;êk63ÇÖ{ùÌÔU°P¼Ž y»ƒ5痞ƕ§uq¸Ñøf,LT!ýÅ™vm:oì>΀bÛá_ú€ã ˜¡S[M[ a<óW¾4lW½ßßÒ· “…p©=Ÿ‚uDdŠøûH8‡Â´¹@|¿r)ñ?·£A…¯!1R”Ù3•0f&ŠRjЬÍmå6•t(9Í…dMoQa±"[Õ“¬ØqŒå'"· ê )µ_®;5pÁU±cÌ ”4`›B†NÖõªìƒ0>¸2¯n€œ‰m Pz¬PE]ùâ/îlò’sÜ ZLžµVb¥B‘º‡$z„ íwÑy Ÿ(O›À ÒF^löø÷4î«Gº*ql÷6È9a¥åÃ4ú…®Â‚MÏ 7Ó5JtMZ˜C=ú_DEwÉÊŠ[sJí° ºÝ¸IkЈ*+Þ³°ÃÄQ`.y ›z“#žhƒF+0¡¨§B×!Pè»êw-£€ݳxÍà/-ZOLXn!ËFÇO&‹æY‰žiˆ@#ûŒà–b%n—C³JÿÈfµ$es„” ˜É#¨-Aµ ¢šITÚ  }àvü†}²‘v²›š+\9‘~²ò^œ½¬ÀmäX­ëx2Á)rŒß«j¢–icge¨iG(ûÇ–B%'ª¦œvùéÆôšƪοiæ^ðj}ã÷þN"G­.†jƒÝ3<±²1‚¸?Ä~ç¤h@N\oü­X$œ‚Û\:þWuÁ‹KŒÙ¶Ž0c\U,zd ñ놮Ïâ¼AËh]}:7è©-£YÀý­> ÞE¦<±°scõRx(DN9É‘šé$íÿ–qÐ,§„‹;eÅ '’ÎýádJóö»³RD`e·‡¡ Ó©|Ê7/’O´Ør‹¢ñ¯;S?¹ô¾g2YG%î™{X Standard extraction time ownership the first non options argument is the archive to read the provided file. 
the second non option argument is the archive to create No pause in output Pause each %d line of output Executing batch file line: %S Dar Manager Database used [%s] : %S Your decision about file's EA: [p]reserve [o]verwrite mark [s]aved and preserve mark saved and overwri[t]e [m]erge EA and preserve merge EA a[n]d overwrite [r]emove [*] keep undefined [a]bort Your choice? Your decision about file's data: [P]reserve [O]verwrite mark [S]aved and preserve mark saved and overwri[T]e [R]emove [*] keep undefined [A]bort Your Choice? Extended Attribute: [ including %i hard link(s) -# encryption block size -$ encryption key for auxiliary archive -* same as -# but for archive of reference -+ merge two archives / create a sub archive -, ignore directories that follow the Directory Tagging --nodump do not backup, files having the nodump 'd' flag set -/ which way dar can overwrite files at archive merging or -@ [path/] Do on-fly catalogue isolation of the resulting archive -@ [path/] auxiliary archive of reference for merging -A add an archive to the database -A [path/] archive to take as reference -B specify the database to use (read or modify) -B read options from given file -C isolates the catalogue from an archive -C creates an empty database -D excluded directories are stored as empty directories -D delete an archive from the database -E command line to execute between slices of the archive -E command to execute between slices -E command to execute between slices of destination archive -F command to execute between slice of source archive -F same as -E but for the archive of reference -H [N] ignore shift in dates of an exact number of hours -I files to include in the operation (all by default) -J same as -K but it does concern the archive of reference -K use as key to encrypt/decrypt -L execute on a given database a batch of action as defined by -M stay in the same filesystem while scanning directories -N do not read ~/.darrc nor /etc/darrc configuration file 
-O[ignore-owner | mtime | inode-type] do not consider user and group -P subdirectory to exclude from the operation -Q suppress the initial warning when not launched from a tty -R filesystem root directory (current dir by default) -S first file size -S first file size (if different from following ones) -T tree output format -U mask to allow certain EA -V displays software version -V displays version information -V displays version information -X files to exclude from the operation (none by default) -Y do only compress the matching filenames -Z do not compress the matching filenames -[ filename contains a list of files to include -] filename contains a list of files to exclude -^ permission[:user[:group]] of created slices -aSI slice size suffixes k, M, T, G, etc. are power of 10 -aa do not try to preserve atime of file open for reading. -abinary slice size suffixes k, M, T, G, etc. are power of 2 -ac do not try to preserve ctime (default behavior). -acase the masks that follow are now case sensitive -ag set the following masks to be glob expressions -am set ordered mode for all filters -an the masks that follow are now case insensitive -ar set the following masks to be regex expressions -as only list files saved in the archive -b ring the terminal bell when user action is required -b change the basename to use for the give archive number -c check database for dates order -c creates an archive -d compares the archive with the existing filesystem -d specify the path to dar -e dry run, fake execution, nothing is produced -f do not restore directory structure -f list the archives where the given file is present -g subdirectory to include in the operation -h displays this help information -h displays this help information -i user interactive mode -i pipe to use instead of std input to read data from dar_slave -i pipe to use instead of std input to read orders from dar -k do not remove files destroyed since the reference backup -l gives information about the archive 
compiled in the database -l lists the contents of the archive -m do not compress file smaller than -m move an archive within a given database. -n don't overwrite files -o pipe to use instead of std output to orders dar_slave -o pipe to use instead of std output to write data to dar -o specify a list of option to always pass to dar -p pauses before writing to a new file -p change the path to use for the given archive number -q suppress final statistics report -r do not restore file older than those on filesystem -r restores the given files -s shows the number of most recent file by archive -s split the archive in several files of size -t tests the archive integrity -u mask to ignore certain EA -u list the most recent files contained in the given archive -v display more information about what is going on -vs display skipped files -w don't warn before overwriting files -w only with -r, restores in state just before the given date -wa don't warn before overwriting and removing files -x extracts files from the archive -~ command between slices of the auxiliary archive including %i hard link(s) treated %i byte(s) have been wasted in the archive to resave changing files %i inode(s) added to archive %i inode(s) changed at the moment of the backup and could not be saved properly %i inode(s) deleted %i inode(s) failed to be saved (filesystem error) %i inode(s) failed to restore (filesystem error) %i inode(s) ignored (excluded by filters) %i inode(s) not restored (not saved in archive) %i inode(s) not restored (overwriting policy decision) %i inode(s) not saved (no inode/file change) %i inode(s) recorded as deleted %i inode(s) recorded as deleted from reference backup %i inode(s) restored %i inode(s) saved %i item(s) do not match those on filesystem %i item(s) ignored (excluded by filters) %i item(s) treated %i item(s) with error %s comes with ABSOLUTELY NO WARRANTY; %s comes with ABSOLUTELY NO WARRANTY; for details type `%s -W'. 
%s comes with ABSOLUTELY NO WARRANTY; for details type `dar -W'. %s is part of the Disk ARchive suite (Release %s) , please provide it. . Ignoring file or directory . Retry ? . Retry command-line ? . Retry to launch dar as previously ? Choice: Continue anyway ? EA restored for %i inode(s) EA saved for %i inode(s) IF POSSIBLE TRY TO PRODUCE THIS ERROR, A SCENARIO THAT CAN REPRODUCE IT WOULD HELP MUCH IN SOLVING THIS PROBLEM. THANKS IF POSSIBLE TRY TO REPRODUCE THIS ERROR, A SCENARIO THAT CAN REPRODUCE IT WOULD HELP MUCH IN SOLVING THIS PROBLEM. THANKS Only one option of -c -d -t -l -C -x or -+ is allowed Ready to continue ? THANKS TO REPORT THE PREVIOUS OUTPUT TO MAINTAINER GIVING A DESCRIPTION OF THE CIRCUMSTANCES. This is free software, and you are welcome to redistribute it under certain conditions; This is free software, and you are welcome to redistribute it under Total number of inode(s) considered: %i Total number of items considered: %i Using libdar %u.%u built with compilation time options: Using libdar %u.%u built with compilation time options: Using libdar %u.%u.%u built with compilation time options: Using libdar %u.%u.%u built with compilation time options: [return = YES | Esc = NO] a : Save as n : pause each 'n' line (zero for no pause) c : check date order certain conditions; type `dar -L | more' for details. compiled the %s with %s version %s d : path to dar o : options to dar f : give file localization m : modify archive order for details type `dar -W'. is a slice from another backup, please provide the correct slice. is about to be overwritten. is not present in the archive l : list database contents A : Add an archive not merged (user choice) not restored (user choice) p : modify path of archives b : modify basename of archives q : quit type `%s -L | more' for details. type `dar -L | more' for details. 
u : list archive contents D : Remove an archive w : write changes to file s : database statistics with %i hard link(s) recorded "-k" (or "-kignore") and "-konly" are not compatible"first_file_size" cannot be different from zero if "file_size" is equal to zero# E X I T I N G ! ## NOT CAUGHT EXCEPTION, ## UNEXPECTED EXCEPTION, #%S already exists, and overwritten is forbidden, aborting%S did not exist before specified date and cannot be restored%S is about to be deleted (required by overwriting policy), do you agree?%S is about to be overwritten, OK?%S is about to be overwritten, continue ?%S is about to be removed from filesystem, continue?%S is an not a valid path: %S%S is not a valid number%S must be removed, but does not match expected type, remove it anyway ?%S: Overwriting policy (Data) is undefined for that file, do not know whether overwriting is allowed or not!%S: Overwriting policy (Data) is undefined for that file, do not know whether removal is allowed or not!%S: Overwriting policy (EA) is undefined for that file, do not know whether overwriting is allowed or not!%d is not a valid block size%d is the default value for --sparse-file-min-size, no need to specify it on command line, ignoring%d is the default value for -m, no need to specify it on command line, ignoring%s does not name a compression "[algorithm][:][level]" , like for examples "gzip", "lzo", "bzip2", "lzo:3", "gzip:2", "8" or "1". Please review the man page about -z option%s type is not a signed type as expected%s type is not an unsigned type as expected%s type length is not %d byte(s) but %d, ready to continue ? 
-$ is only available with -+ option and -c options-$ is only useful with -@ option, for the auxiliary archive of reference-%% is only available with -+ option-%% is only useful with -@ option, for the auxiliary archive of reference--nodump feature has not been activated at compilation time, it is thus not available--sparse-file-min-size only available while saving or merging archives, ignoring-. option is only useful when merging, creating or isolating an archive, ignoring-/ option is only useful with -+ option, ignoring-= option is valid only while saving files, thus in conjunction with -c option, ignoring-= option will be ignored as it is useless if you do not specify to which files or directories this backup hook is to be applied, thanks to -< and -> options. See man page for more details.-@ is only available with -+ and -c options-A option is not available with -l-B option cannot be given inside a batch file-F is only useful with -A option, for the archive of reference-H is only useful with -A option when making a backup-H is only useful with -c, -d or -x-H is only useful with -r option when extracting-J is only useful with -A option, for the archive of reference-M is only useful with -c-S option requires the use of -s-a option requires an argument-ac is only useful with -c or -d-af must be present before -A option not after!-af option is only available with -c-af option need not be specified more than once, ignoring extra -af options-ak is only available while merging (operation -+), ignoring -ak-ak option need not be specified more than once, ignoring extra -ak options-as is only available with -l, ignoring -as option-e is only useful with -x, -c or -+ options-e option is only available when using -r option, aborting-f in only available with -x option, ignoring-k option is only useful with -x option-konly and -kignore cannot be used at the same time-m is only useful with -c-o is mandatory when using "-A -" with "-c -" "-C -" or "-+ -"-r is useless with -n-w 
option is useless with -n-wa is only useful with -x option-~ is only useful with -@ option, for the auxiliary archive of reference: file not present in filesystemA problem occurred while reading this archive contents: Aborting batch operation: Aborting program. Aborting program. An error occurred concerning user command execution: Aborting program. An error occurred while calling libdar: Aborting program. The requested operation needs a feature that has been disabled at compilation time: Aborting program. User refused to continue while asking: Adding Hard link to archive: Adding an explicit escape sequence of type seqt_not_a_sequence is forbiddenAdding reference to files that have been destroyed since reference backup...All files asked could not be restoredAll layers have been created successfullyArchive %S requires a password: Archive basename (or extracted catalogue basename) to add: Archive delayed termination engagedArchive fast termination engagedArchive format older than "08" (release 2.4.0) cannot be read through a single pipe. 
It only can be read using dar_slave or normal plain file (slice)Archive number to modify: Archive number to move: Archive number to remove: Archive number who's path to modify: Archive number: Archive on stdout is not compatible with slicing (-s option)Archive version too high, use a more recent version of libdarAre you sure to remove archive number %d ?Argument given to -H is not a positive integer numberArguments read from %S :Arguments to -r must be relative path (never begin by '/')Asking for an element out of arrayBad CRC, data corruption occurredBad answer from peer, while closing connectionBad formatted date expressionBadly formatted SAR header (unknown TLV type in slice header)Badly formatted databaseBadly formatted terminator, cannot extract catalogue location: Badly formed "infinint" or not supported formatCAUGHT A NON (LIB)DAR EXCEPTIONCRC error detected while reading EACRC error: data corruption.Can't read saved data.Cancellation no confirmedCannot allocate memory for buffer while reading %SCannot block signals: Cannot cleanly abort the operation, thread-safe support is missing, will thus abruptly stop the program, generated archive may be unusableCannot compare EA: EA support has not been activated at compilation timeCannot compare files in write only modeCannot create database, file existsCannot create inode: Cannot evaluate this crit_and criterium as no criterium has been added to itCannot evaluate this crit_or criterium as no criterium has been added to itCannot extract from the internal catalogue the list of files to removeCannot get catalogue of reference as it has not been providedCannot get data name of the archive, this archive is not completely initializedCannot get ext2 attributes (and nodump flag value) for %S : %sCannot get file size: %sCannot get full path of current working directory: Cannot get inode information about file to remove Cannot get inode information for %s : %sCannot open catalogue: Cannot open destination file : %sCannot 
open file : Cannot open source file : %sCannot read arguments on command line, abortingCannot read directory contents: %s : Cannot read file information for %s : %sCannot read inode for Cannot read on inputCannot receive that much data in regard to the allocated memoryCannot recurs in a non directory entryCannot reduce the string to a size that is larger than its current sizeCannot remove directory Cannot remove nonexistent entry %S from catalogueCannot restore any file, nothing doneCannot restore file %S : non existent file in databaseCannot restore permissions of %s : %sCannot seek back one charCannot seek in fileCannot set last access and last modification time: Cannot skip at end of fileCannot skip forward to restore a holeCannot skip to the current position in "tronc"Cannot sync write on a read-only generic_fileCannot unblock signals: Cannot write down the archive: Cannot write on outputCannot write to destination, abortingCannot write to outputCaught a none libdar exceptionCaught an unknown Egeneric exception: Changing database header information...Changing database information...Checking chronological ordering of files between the archives...Checking file's dates ordering...Choose only one compression algorithmClearing file's EA (requested by overwriting policy): Commands are: Commands: Common options: Communication problem with peer, retry ?Compressing and writing back database to file...Compression level must be between 1 and 9, includedCompression_level must be between 1 and 9 includedComputing statistics...Considering cyphering layer...Considering the (first) archive of reference:Consistency check failed for archive headerContinue listing archive contents?Continue? Continuing the action under process which is to exit... so we exit!Continuing...Copied data does not match CRCCopy finished. 
Missing %.0f byte(s) of data Corrupted data read from pipeCorrupted data read on pipeCorrupted database :Corrupted entry following an escape mark in the archiveCould not create inode: Could not open directory %s in furtive read mode (%s), using normal modeCould not restore original file ownership: Could read a catalogue data structure at offset %i, it contains the following:Creating file...Crypto block size must be greater than 10 bytesCurrent Working Directory cannot be a relative pathCyphering algorithm not available in libgcrypt: %s/%sDAR sub-process has terminated with exit code DAR terminated upon signal reception: DIFF Data corruption may have occurred, cannot decrypt dataData corruption met at end of slice, forbidden flag found at this positionData corruption met at end of slice, unknown flag foundData corruption or unknown sparse_file mark found in file's dataData full saved : %S %SData of file %S from first archive has been preserved from overwritingData of file %S is about to be %S, proceed?Data of file %S taken from the first archive of reference has been overwrittenData of file %S taken from the first archive of reference has been removedData should be read-onlyDatabase has been successfully created empty.Database not saved, Do you really want to quit ?Dates are not increasing for all files when database's archive number grows, working with this database may lead to improper file's restored version. Please reorder the archive within the database in the way that the older is the first archive and so on up to the most recent archive being the last of the databaseDates of file's %S are not increasing when database's archive number grows. Concerned file is: %SDecompressing and loading database header to memory...Decompressing and loading database to memory...Decremental mode is useless when merging is not applied to both an archive of reference and an auxiliary archive of reference. 
Ignore decremental mode and continue?Detected Cache Directory Tagging Standard for %s, the contents of that directory will not be savedDirectory %S cannot be restored: overwriting not allowed and a non-directory inode of that name already exists, all files in that directory will be skipped for restoration:Dirty file : %S %SDivision by zeroDo you want to ignore the same type of error for other files?EA for %S are about to be merged, OK?EA for %S are about to be overwritten, OK?EA for %S are about to be removed, OK?EA for %S have not been cleared as requested by the overwriting policy because this file is a hard link pointing to an already restored inodeEA for %S have not been overwritten because this file is a hard link pointing to an already restored inodeEA full saved : %S %SEA more recent : %S %SEA number : %i %iEA of file %S from first archive have been dropped and marked as already savedEA of file %S from first archive have been removedEA of file %S from first archive have been updated with those of the same named file of the auxiliary archiveEA of file %S has been overwrittenEA of file %S has been overwritten and marked as already savedERR ERR Empty string as subdirectory does not make a valid pathEmpty string is an invalid archive basenameEmpty string is not a valid pathEnter each argument line by line, press return at the end Entry information CRC failureEntry information CRC failure for %SEntry information CRC failure for %S. Ignore the failure?Entry information: "in place" "to be added"Entry type : %s %sError creating Unix socket file: Error creating hard link %s : %s Trying to duplicate the inodeError creating hard link : %s , the inode to link with [ %s ] has disappeared, re-creating itError creating hard link : %s , the inode to link with [ %s ] is not present, cannot restore this hard linkError during user command line execution: Error getting file reading position: Error getting size of file: Error met while opening the last slice: %S. 
Trying to open the archive using the first slice...Error met while processing operation: Error met while reading line %S from file %S: %SError opening directory in furtive read mode: Error opening directory: Error opening pipe: Error performing the requested action: %SError reading EA for Error reading attribute %s of file %s : %sError reading character: Error reading database %S : Error reading directory contents: Error reading inode of file %s : %sError reading source file (we are at %.2f %% of data copied), trying to read further: %s Error retrieving EA list for %s : %sError saving Extended Attributes for Error transforming the archive :Error while activating libgcrypt's memory guard: %s/%sError while assigning key to libgcrypt key handle (essiv): %s/%sError while assigning key to libgcrypt key handle: %s/%sError while calling fork() to launch dar: Error while changing user terminal properties: Error while comparing archive with filesystem: Error while considering file Error while converting UID/GID to string for backup hook file: Error while creating ESSIV handle: %s/%sError while creating anonymous pipe: Error while creating hash handle: %s/%sError while cyphering data: %s/%sError while decyphering data: %s/%sError while derivating key from password (HMAC open): %s/%sError while derivating key from password (HMAC set key): %s/%sError while fetching archive properties: Error while fetching information for %S: Error while generating IV: %s/%sError while initializing hash: Hash algorithm not available in libgcrypt: %s/%sError while listing archive contents: Error while opening libgcrypt key handle: %s/%sError while opening the archive of reference: Error while parsing --min-digits option: Error while parsing -A argument as a date: Error while reading CRC for EA from the archive: No escape mark found for that fileError while reading archive's header, this may be because this archive is an old encrypted archive or that data corruption took place, Assuming it is an 
old archive, we have to read the header at the beginning of the first slice...Error while reading data for a secure memory:Error while reading from file: Error while reading from pipe: Error while reading user answer from terminal: Error while resetting encryption key for a new block: %s/%sError while restoring Error while restoring data: Error while restoring the following files: Error while saving Error while saving data: Error while setting IV for current block: %s/%sError while setting file permission: %sError while setting file user ownership: %sError while telling libgcrypt that initialization is finished: %s/%sError while testing archive: Error while writing data to pipe: Error while writing to file: Error! SHA1 not available in libgcrypt: %s/%sError, catalogue of reference has not been providedEscape sequences used for reading lead the archive to place some files out of the specified root. To overcome this problem, try reading the archive in direct mode (not using sequential reading), try repairing the archive using Parchive if redundancy data has been created or in last resort try using the lax modeEscaping...Existing EA for %S could not be preserved : Existing EA for %S could not be read and preserved: Failed opening the cache layer, lack of memory, archive read performances will not be optimizedFailed retrieving from libgcrypt the block size used by the cyphering algorithm: %s/%sFailed to open %S while checking for nodump flag: %sFailed writing down the hash: Fatal error on user command line: File %S has changed during backup and is probably not saved in a valid state ("dirty file"), do you want to consider it for restoration anyway?File %S line %dFile had changed during backup and had been copied another time, restoring the next copy of file: File has disappeared while we were reading it, cannot check whether it has changed during its backup: %SFile inclusion loop detected. 
The file %s includes itself directly or through other files (-B option)File mode is neither read nor writeFile must be a directory: File not found in database: File recorded as removed at this date in database: File selection has been aborted. Now building the resulting archive with the already selected filesFile size too smallFile to look for: Finished writing to file First file size too smallFirst slice size is too small to even just be able to drop the slice headerFormatting file as an empty database...Found 'Furtive read modeGiven date must be in the pastGiving -S option the same value as the one given to -s is uselessGiving to -S option the same value as the one given to -s option is uselessHard links of directories are not supportedHow much line to display at once: INTERNAL ERROR, PLEASE REPORT THE PREVIOUS OUTPUT TO MAINTAINERIf you really want to abort the archive creation hit CTRL-C, then press enter.Ignore previous error on user command line and continue ?Ignoring extra arguments on command lineIgnoring file with NODUMP flag set: Ignoring unknown option -%cIn included file %S: In which position to insert this archive: Incoherent answer from peerIncoherent catalogue structure: duplicated hard linked inode's dataIncoherent catalogue structure: hard linked data is not an inodeIncoherent catalogue structure: hard linked inode's data not foundIncoherent catalogue structure: unknown status flag for hard linked inodeIncoherent slice header: First slice size too smallIncoherent structure in data carrying sparse files: unknown markIncomplete labelIncorrect archive range in databaseIncorrect day of monthIncorrect minuteIncorrect monthIncorrect secondIndex out of rangeInitialization problem for liblzo2 libraryInput cannot be readInvalid archive number: Invalid argument as destination archiveInvalid argument as source archiveInvalid number in string: %SInvalid path, path must be relativeInvalid size for CRC widthInvalid size for option -SInvalid size for option -sIs 
directory : %S %SIs hard linked : %S %SIs inode : %S %SIs plain file : %S %SIterator does not point to dataKey cannot be an empty stringLAX MODE: "%S" is not a valid archive formatLAX MODE: %i %% remainingLAX MODE: %i is not a valid percent valueLAX MODE: Archive directory structure is corrupted, it would lead to place some files out of the specified root directory. Restoring different directory contents at the root not out of it, which will put files of different directories in the specified root directoryLAX MODE: Archive is flagged as having escape sequence (which is normal in recent archive versions). However if this is not expected, shall I assume a data corruption occurred in this field and that this flag should be ignored? (If unsure, refuse)LAX MODE: Archive seems to be ciphered, but you did not have provided any encryption algorithm, assuming data corruption and considering that the archive is not cipheredLAX MODE: Archive seems to be only an isolated catalogue (no data in it), Can I assume data corruption occurred and consider the archive as being a real archive?LAX MODE: Beginning search of the catalogue (from the end toward the beginning of the archive, on %i %% of its length), this may take a while...LAX MODE: CRC failed for catalogue, the archive contents is corrupted. This may even lead dar to see files in the archive that never existed, but this will most probably lead to other failures in restoring files. Shall we proceed anyway?LAX MODE: Cannot skip at the end of the archive! Using current position to start the catalogue searchLAX MODE: Caught exception: LAX MODE: Could not find a whole catalogue in the archive. 
If you have an isolated catalogue, stop here and use it as backup of the internal catalogue, else continue but be advised that all data will not be able to be retrieved...LAX MODE: Do you want to bypass some sanity checks and try again reading the archive contents (this may take some time, this may also fail)?LAX MODE: Do you want to try finding portions of the original catalogue if some remain (this may take even more time and in any case, it will only permit to recover some files, at most)?LAX MODE: Due to probable data corruption, dar could not determine the correct size of slices in this archive. For recent archive, this information is duplicated in each slice, do you want to try opening another slice to get this value if present?LAX MODE: Error met building a catalogue entry, skipping this entry and continuing. Skipped error is: LAX MODE: Escape sequence could not be found, it may have been corrupted or out of the scanned portion of the archive, trying to find the catalogue the other wayLAX MODE: Escape sequence seems present in this archive. I have thus two different methods, either I look for the escape sequence indicating the start of the catalogue or I try each position in turn in the hope it will not be data that look like a catalogueLAX MODE: Failed to read the catalogueLAX MODE: Failed to read the catalogue (0 bytes of the archive length asked to look for the catalogue)LAX MODE: Failed to read the catalogue (no data to inspect)LAX MODE: Good point! 
I could find the escape sequence marking the beginning of the catalogue, now trying to read it...LAX MODE: In spite of a the absence of a known slice size, continuing anywayLAX MODE: Please provide an strictly positive integer numberLAX MODE: Please provide the archive format: You can use the table at %s to find the archive format depending on the release version, (for example if this archive has been created using dar release 2.3.4 to 2.3.7 answer "6" without the quotes here): LAX MODE: Please provide the slice number to read: LAX MODE: Reached the end of the area to scan, FAILED to find any catalogueLAX MODE: Slice flag corrupted, but a slice of higher number has been seen, thus the header flag was surely not indicating this slice as the last of the archive. ContinuingLAX MODE: The catalogue (table of contents) usually takes a few percents of the archive at its end, which percentage do you want me to scan (answer by an *integer* number between 0 and 100)? LAX MODE: The end of the archive is corrupted, cannot get the archive contents (the "catalogue")LAX MODE: Trying to locate the escape sequence (safer choice) ?LAX MODE: Unexpected saved status for end of directory entry, assuming data corruption occurred, ignoring and continuingLAX MODE: Unknown data in slice header, ignoring and continuingLAX MODE: Using archive format "%d"?LAX MODE: catalogue label does not match archive label, as if it was an extracted catalogue, assuming data corruption occurred and fixing the catalogue to be considered an a plain internal catalogueLAX MODE: closing slice %i, header properly fetchedLAX MODE: first slice size is not possible to read, (lack of virtual memory?), continuing anyway...LAX MODE: first slice size is not possible to read, continuing anyway...LAX MODE: found unknown catalogue entry, assuming data corruption occurred, cannot read further the catalogue as I do not know the length of this type of entryLAX MODE: found unknown catalogue entry, assuming data corruption 
occurred. Skipping to the next entry, this may lead to improper directory structure being restored, if the corrupted data was a directoryLAX MODE: internal name of the slice leads dar to consider it is not member of the same archive. Assuming data corruption occurred and relying on the filename of this slice as proof of its membership to the archiveLAX MODE: opening slice %i to read its slice headerLAX MODE: slice size is not possible to read, (lack of virtual memory?), continuing anyway...LAX MODE: slices %i and %i are both recorded as last slice of the archive, keeping the higher number as the real last sliceLAX MODE: the requested sequential read mode relies on escape sequence which seem to be absent from this archive. Assuming data corruption occurred. However, if no data corruption occurred and thus no escape sequence are present in this archive, do not use sequential reading mode to explore this archive else you will just get nothing usable from itLack of MemoryLack of SECURED memory to achieve the operation, aborting operationLack of memory to achieve the operation, aborting operationLibrary used for blowfish encryption does not respect RFC 3962Listing options (to use with -l): Locating archive contents...Looking in archives for requested files, classifying files archive by archive...Mask_list's prefix must be an absolute path or start with "" string for archive mergingMemory allocation failed : %sMissing -c -x -d -t -l -C -+ option, see `%S -h' for helpMissing [ after } in conditional statement: Missing ] in conditional statement: Missing archive basename, see -h option for helpMissing argument to --backup-hook-executeMissing argument to --hashMissing argument to --min-digitsMissing argument to -EMissing argument to -E optionMissing argument to -FMissing argument to -SMissing argument to -^Missing argument to -i optionMissing argument to -o optionMissing argument to -sMissing argument to command line, abortingMissing catalogue in file.Missing hashing 
algorithms support (which is part of strong encryption support, using libgcrypt)Missing parameter to option -%cMissing source or destination argument on command line, see -h option for helpMissing strong encryption support (libgcrypt)Missing } in conditional statement: NONO FILE IN THAT DIRECTORY CAN BE SAVED.NOT YET IMPLEMENTED FEATURE has been used: Negative number or zero not allowed when moving an archive inside a databaseNew basename for archive number %d: New database name: New path to give to archive number %d: No action specified, abortingNo backup file is present in %S for archive %S, please provide the last file of the set.No cyphering layer openedNo database specified, abortingNo exception allowed from libdar callbacksNo file in this directory will be considered for merging.No file in this directory will be restored.No mask in the list of mask to operate onNo problem foundNo space left for inode, you have the opportunity to make some room now. When done : can we continue ?No space left on device, you have the opportunity to make room now. When ready : can we continue ?No terminal found for user interaction. 
All questions will be assumed a negative answer (less destructive choice), which most of the time will abort the program.No user target found on command lineNon existent archive in databaseNon existent file in databaseNon existent file: Non fatal error while writing to destination file, retrying Not SavedNot a zero terminated string in fileNot enough data to initialize storage fieldNow performing on-fly isolation...OK OK OK, keeping %S as basenameOnly one -@ option is allowedOnly one -A option is allowedOnly one -R option is allowedOnly one -S option is allowedOnly one -s option is allowedOpening a pair of pipes to read the archive, expecting dar_slave at the other ends...Opening and reading the batch file...Opening construction layer...Opening cyphering layer...Opening escape sequence abstraction layer...Opening named pipe %S as input to read the archive...Opening standard input to read the archive...Opening the archive using the multi-slice abstraction layer...Opening the compression abstraction layer (compression algorithm used is none)...Opening the compression layer...Options: Overwriting not allowed while a slice of a previous archive with the same basename has been found in the %s directory, Operation abortedParse error on command line (or included files): Parse error: Partial request received, aborting Path to dar (empty string to use the default from PATH variable): Pipe's other end is not known, cannot close any filedescriptor pointing on itPipe's other end is not known, cannot provide a filedescriptor on itPlease answer by the character between brackets ('[' and ']') and press returnPlease check documentation or upgrade your software if availablePlease check your hardwarePlease confirm your password: Probable bug in liblzo2: lzo1x_*_compress returned unexpected code %dProgram has been aborted for the following reason: Reached End of File while reading archive versionReached End of File, no correct data could be found after the last error Reached end of 
file before all data could be readReached end of file while reading slice headerReached premature end of file while reading archive versionReading a write only generic_fileReading archive contents...Reading catalogue of the archive to add...Reading options (to use with -x, -d, -t, -l, -A) Reading the archive header...Reading the archive of reference from pipe or standard input is not possibleReady to start writing down the archive?Received signal: %dReceived signal: %sReceived unknown special orderRecording hard link into the archive: Reference storage of the iterator is empty or non existentRemoving file %sRemoving file (reason is file recorded as removed in archive): %SRemoving file (reason is overwriting policy): %SRemoving the dirty file %SReplacing %s in the -R option by the directory pointed to by this symbolic link: Resetting the sequential reading process of the archive contents while it is not finished, will make all data unread so far becoming inaccessibleRestoring file's EA: Restoring file's data: Restoring options (to use with -x) : Running batch file from a batch file is not allowedSANITY CHECK: AT LEAST ONE THREAD_CANCELLATION OBJECT HAS NOT BEEN DESTROYED AND REMAINS IN MEMORY WHILE THE PROGRAM REACHED ITS ENDSEEMS TO BE A HARDWARE PROBLEM: SKIPPED (hard link in sequential read mode): Same data but CRC value could not be verified because we did not guessed properly its width (sequential read restriction)Same data but stored CRC does not match the data!?!SavedSaving Extended Attributes for Sequential read asked, but this archive is flagged to not have the necessary embedded escape sequences for that operation, abortingSize too large for an elastic bufferSkipping done (missing %.0f byte(s)), found correct data to read, continuing the copy... 
Slice size is too small to even just be able to drop the slice headerSlicing (-s option), is not compatible with archive on standard output ("-" as filename)Some file comparisons failedSome files are corrupted in the archive and it will not be possible to restore themSome files do not follow chronological order when archive index increases withing the database, this can lead dar_manager to restored a wrong version of these filesSparse file : %S %SSubtracting an "infinint" greater than the first, "infinint" cannot be negativeSyntax error in --retry-on-change argument: Syntax error in batch file: %SSyntax error in batch file: -C option not allowedSyntax error in batch file: -i option not allowedSyntax error in overwriting policy: The Cache Directory Tagging Standard is only useful while performing a backup, ignoring it hereThe archive %S is encrypted and no encryption cipher has been given, cannot open archive.The archive and the isolated catalogue do not correspond to the same data, they are thus incompatible between themThe basename '+' is reserved for special a purpose that has no meaning in this contextThe entry to recurs in does not exist, cannot add further entry to that absent subdirectoryThe following user comment will be placed in clear text in the archive: %SThe format version of the archive is too high for that software version, try reading anyway?The format version of this database is too high for that software version, use a more recent software to read or modify this databaseThe given path %s must be a directory (or symbolic link to an existing directory)The iterator is not indexing the object it has been asked to read fromThe iterator is not indexing the object it has been asked to write toThe iterator is not indexing the object it has been defined forThe iterator position is not inside the storage of referenceThe just restored file %S has been marked as dirty (sequential reading can only detect the dirty status after restoration), do we remove this 
just restored dirty file?The just restored file %S has been marked as dirty (sequential reading can only detect the dirty status after restoration), removing the just restored dirty file as it is asked to ignore this type of fileThe last file of the set is not present in The snapshot backup (-A +) is only available with -c option, ignoringThe two passwords are not identical. AbortingThis archive contains an isolated catalogue, it cannot be used for this operation. It can only be used as reference for a incremental/differential backup or as backup of the original archive's catalogueThis archive has slices and is not possible to read from a pipeThis archive is not exploitable, check documentation for moreThis archive is not exploitable, check the archive class usage in the API documentationThis archive is not single sliced, more data exists in the next slices but cannot be read from the current pipe, abortingThis is an old archive, it can only be opened starting by the first sliceThread cancellation requested, aborting as properly as possibleThread cancellation requested, aborting as soon as possibleTo terminate enter an empty line To use --sparse-file-min-size while merging archive, you need to use -ah option too, please check man page for detailsToo large block of compressed data: Either due to data corruption or current system limitation where SSIZE_MAX value implied smaller buffers than requiredToo many argument on command line, see -h option for helpToo much line in file %S (integer overflow)Tried to write out of size limited fileTwo different slices (%i and %i) are marked as the last slice of the backup!Unbalanced parenthesis in expression: Uncomplete answer received from peerUncompleted archive! Assuming it has been interrupted during the backup process. 
If an error has been reported just above, simply ignore it, this is about the file that was saved at the time of the interruption.Unexpected answer from slave, communication problem or bug may hang the operationUnexpected end of fileUnexpected error while waiting for dar to terminate: Unexpected exception from libdarUnexpected value found in databaseUnexpected value while reading archive versionUnknown TLV record typeUnknown argument given to -2 : %sUnknown argument given to -a : %sUnknown argument given to -k : %sUnknown argument given to -w: Unknown atomic operator, or atomic not allowed with an argument: Unknown character found while parsing conditional string: Unknown choice Unknown choice: Unknown crypto algorithmUnknown encryption algorithmUnknown entry found in slice header (type = %d), option not supported. The archive you are reading may have been generated by a more recent version of libdar, ignore this entry and continue anyway?Unknown escape sequence typeUnknown expression found while parsing conditional string: Unknown expression in overwriting policy: Unknown file type! file name is: Unknown header option in database, aborting Unknown macro %%%d in user commentUnknown parameter given to --hash option: Unknown parameter given to -a option: Unknown policy for EA '%c' in expression %SUnknown policy for data '%c' in expression %SUnknown record typeUnknown substitution string: %Unknown suffix [%c] in string %SUpdating database with catalogue...User Decision requested for EA of file %SUser Decision requested for data of file %SUser target named "%s" is not allowed (reserved word for conditional syntax)Using sequential reading mode for archive source is not possible for merging operationWARNING! File modified while reading it for backup, but no more retry allowed: WARNING! File modified while reading it for backup. No more retry for that file to not exceed the wasted byte limit. File is WARNING! File modified while reading it for backup. 
Performing retry %i of %iWARNING! The archive is located in the directory to backup, this may create an endless loop when the archive will try to save itself. You can either add -X "%S.*.%S" on the command line, or change the location of the archive (see -h for help). Do you really want to continue?WARNING! This is an isolated catalogue, no data or EA is present in this archive, only the catalogue structure can be checkedWARNING: support for secure memory was not available at compilation time, in case of heavy memory load, this may lead the password you are about to provide to be wrote to disk (swap space) in clear. You have been warned!Warning! No file in that directory will be restored: Warning! No file in this directory will be considered for merging: Warning, %S seems more to be a slice name than a base name. Do you want to replace it by %S ?Warning, are you sure you want to abort (please answer "%S" to confirm)? We have linked with an incompatible version of libdar. Expecting version %d.%d.x but having linked with version %d.%d.%dWhich is %.2f %% of the total amount of data Writing to a read only generic_fileYESZero is not a valid size for an elastic buffer[ ][ EA ][ Saved ][DIRTY][InRef][Saved]a binary digit is either 0 or 1abortabsentabsent an empty string is an invalid argumentbackup hook feature (-<, -> or -= options) is only available when saving files, ignoringbadly structured inode: unknown inode flagbigblock deviceblowfish strong encryption supportbzip2can't read data CRC: No escape mark found for that filecannot evaluate an empty chain in an overwriting policycannot provide data from a "not saved" file objectchar devicecompressReset called but compressInit never called beforecompressed data CRC errorcompressed data corruption detectedcompressed data is corruptedcorrupted filedata corruption detected: Incoherence in LZO compressed datadata corruption detected: Too large block of compressed datadata has been cleaned, object is now emptydate before 1970 
is not alloweddeleted entrydevices have not the same major number: %d <--> %ddifference of last modification date: %S <--> %Sdifferent Extended Attributesdifferent file typedifferent owner (uid): %i <--> %idifferent owner group (gid): %i <--> %idifferent permission: %S <--> %Sdirectorydoor inodedropped from the archive and marked as already savedelastic buffer incoherent structureerror!execution of [ %S ] returned error code: %dexecve() failed. (process table is full ?)file exists, and DONT_ERASE option is set.file not openfirstgid value is too high for this system for libdar be able to restore it properlygziphard linked inodehide_file cannot be initialized with write-only fileignoring -O option, as it is useless in this situationin file %S line %Sincoherent catalogue structureincoherent data after escape sequence, cannot read internal data set labelincoherent endian between U_16 and U_32incoherent endian between U_16 and U_64incoherent endian between U_16 and U_Iincoherent slice header: Slice size too smallincomplete data set name found in a slice headerinode last change date (ctime) greater, EA might be differentinvalid decimal digitit seems to be a bug herelastlibgcrypt not initialized and libdar not allowed to do solisting() method must be givenlittlelzolzo compressionmemerged with possible overwritingmissing data to buildmissing data to build a special devicemissing data to build an inodenamed pipenextnono Extended Attribute to compare withno Extended Attributes to compare withno current directory definedno current reading directory definednodump flag feature has not been activated at compilation time, it is thus not availablenonenot enough space provided to dump the elastic buffernot same size: %i <--> %inotlastoverwrittenplain filepresentpresent provided password is too long for the allocated memoryre-enabling all signal handlers and continuing reached End of File before all expected data could be readread and writeread onlyremovedremoved removed from the 
archivereplacedroot directory has no parent directoryroot does not have a parent directoryroot has no parent directorysaved secondsoft linksymbolic link does not point to the same target: system() call failed: the "Keep file compressed" feature is not possible when merging two archives using different compression algorithms (This is for a future version of dar). You can still merge these two archives but without keeping file compressed (thus you will probably like to use compression (-z or -y options) for the resulting archivetoo large elastic buffer or elastic buffer incoherent structuretype %s is neither big nor little endian! Do not know how to handle integer in a portable manner on this host, abortinguid value is too high for this system for libdar be able to restore it properlyunix socketunknown compressionunknown compression algorithm: %Sunknown cryptographic algorithm: unknown entryunknown type of data in catalogueusage : %s usage: %s [ -c | -x | -d | -t | -l | -C | -+ ] [/] [options...] with -+ option, -A option is mandatorywith -C option, -A option is mandatorywrite onlywrong value given as initial_size argument while initializing cacheyesProject-Id-Version: sv Report-Msgid-Bugs-To: http://sourceforge.net/tracker/?group_id=65612MSGID_BUGS_ADDRESS =atid=511612 POT-Creation-Date: 2020-02-08 16:12+0100 PO-Revision-Date: 2012-04-15 11:28+0200 Last-Translator: Peter Landgren Language-Team: Swedish Language: sv MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit X-Generator: Lokalize 1.0 Plural-Forms: nplurals=2; plural=(n != 1); Standard uthämtningstillfälle ägarskap det första icke alternativargumentet är det arkiv som skall läsas den lämnade filen. 
det andra icke alternativargumentet är det arkiv som skall skapas Ingen paus i utdata Gör en paus för var %d -te rad i utdata Utför batch-fil rad: %S Dar Manager Database använde [%s] : %S ditt beslut om filens EA: [p] bevara [o] skrivöver [s] märk som sparad och bevara [t] märk som sparad och överskriv [m] slÃ¥ ihop EA och bevara [n] slÃ¥ ihop EA och överskriv [r] tag bort [*] behÃ¥ll oförändrad [a] avbryt Ditt val? Ditt beslut om fildata: [P] bevara [O] skriva över [S] märk som sparad och bevara [T] märk som sparad och överskriv [R] tag bort [*] behÃ¥ll odefinierad [A] avbryt Ditt val? Extended Attribut: [ inkluderande %i hÃ¥rd länk(s) -# krypteringsblockstorlek -$ krypteringsnyckel för hjälparkiv -* samma som -# men för referensarkivet -+ sammanfoga tvÃ¥ arkiv / skapa ett underarkiv -, ignorera mappar som följer "Directory Tagging" --nodump tag ej med filer med 'nodump'-flaggan 'd' satt -/ pÃ¥ vilket sätt dar kan skriva över filer vid ihopslagning av arkiv eller -@ [sökväg/] Gör katalogisolation av det resulterande arkivet i flykten -@ [sökväg/] referenshjälparkiv för sammanslagning -A lägg till ett arkiv till databasen -A [sökväg/] arkiv att ta som referens -B specificerar använd databas (läsning eller modifiering) -B läs alternativ frÃ¥n angiven fil -C isolerar katalogen frÃ¥n ett arkiv -C skapar en tom databas -D uteslutna mappar lagras som tomma mappar -D tag bort ett arkiv frÃ¥n databasen -E kommandorad, som skall utföras mellan delar i ett arkiv -E kommadon som skall utföras mellan delar -E kommando, som skall utföras mellan delar hos mÃ¥larkivet -F kommando, som skall utföras mellan delar i källarkivet -F spara som -E men för referensarkivet -H [N] ignorera datumbyte pÃ¥ en fixt antal timmar -I filer som skall inkluderas i Ã¥tgärden (alla som standard) -J samma som -K men det avser referensarkivet -K använd som nyckel för att kryptera/dekryptera -L utför pÃ¥ en given databas en serie Ã¥tgärder definierad av -M stanna i samma filsysytem medan mappar avsöks 
-N läs inte ~/.darrc eller /etc/darrc inställningsparameterfil -O[ignorera-ägare | mtime | inode-typ] beakta inte ägare och grupp -P mapp som skall utelutas i Ã¥tgärden -Q undertyck begynnelsevarningen om ej startad frÃ¥n en tty -R filsystemets rotmapp (gällande mapp som standard) -S första filens storlek -S första filens storlek (om skild frÃ¥n de efterföljandes) -T trädformat pÃ¥ utmatning -U mask för att tillÃ¥ta viss EA -V visar programvaruversion -V visar versionsinformation -V visar versionsinformation -X filer som skall uteslutas frÃ¥n Ã¥tgärden (inga som standard) -Y komprimera endast de matchande filnamnen -Z komprimera inte de matchande filnamnen -[ filnamn innehÃ¥ller en lista pÃ¥ filer som skall tas med -] filnamn innehÃ¥ller en lista pÃ¥ filer som skall uteslutas -^ permission[:användare[:grupp]] för skapade bitar -aSI delstorlekssuffix k, M, T, G, etc. är tio-potenser -aa bevara inte 'atime' hos en läsöppnad fil. -abinary delstorlekssuffix k, M, T, G, etc. är tvÃ¥-potenser -ac bevara inte 'ctime' (standarduppträdande). 
-acase efterföljande masker är känsliga för liten/stor bokstav -ag ställ in följande masker att vara 'glob expressions' -am ställ in 'ordered mode' för alla filter -an efterföljande masker är okänsliga för liten/stor bokstav -ar ställ in följande masker att vara reguljära uttryck -as lista bara filer som sparats i arkivet -b ring pÃ¥ terminalens klocka när en användarÃ¥tgärd mÃ¥ste till -b byt basnamnet att användas för det givna arkivnummret -c kontrollera databasen med avseende pÃ¥ datumordning -c skapar ett arkiv -d jämför arkivet med existerande filsystem -d specificera sökvägen till dar -e torrsim, fejkad exekvering, inget är producerat -f Ã¥terställ ej mappstrukturen -f lista de arkiv där den givna filen finns -g mapp som skall inkluderas i Ã¥tgärden -h visa denna hjälpinformation -h visar denna hjälpinformation -i interaktiv användarmod -i rör som används i stället för 'std input' till att läsa data frÃ¥n 'dar_slave' -i rör som änvänds i stället för 'std input' till att läsa beordringar frÃ¥n dar -k tag ej bort filer, förstörda sedan referenssäkerhetskopian -l lämna information om det arkiv 'kompilerat' i databasen -l listar innehÃ¥llet i arkivet -m komprimera inte filer mindre än -m flytta ett arkiv inom en given databas. 
-n skriv ej över filer -o rör som används i stället för 'std output' till att berordra 'dar_slave' -i rör som används i stället för 'std output' till att skriva data till dar -o specificera en lista med alternativ som alltid skall lämnas till dar -p gör en paus innan skrivning till en ny fil -p byt sökvägen att användas för det givna arkivnummret -q undertryck slutgiltig statistikrapport -r Ã¥terställ ej filer som är äldre än de i filsystemet -r Ã¥terställer de givna filerna -s visa antalet senaste filer mot arkiv -s klyv arkivet i flera filer av storlek -t testar arkivets integritet -u mask för att ignorera viss EA -u lista det senaste filerna innehÃ¥llna i det givna arkivet -v visar mer information om vad som pÃ¥gÃ¥r -vs visa överhoppade filer -w varna inte före överskrivning av filer -w endast med -r, Ã¥terställer status alldeles före det givna datumet -wa varna inte vid överskrivning och borttagande av filer -x extraherar filer frÃ¥n arkivet -~ kommando mellan avsnitt hos hjälparkivet inkluderande %i hÃ¥rd(a) länk(ar) behandlade %i byte(s) har förbrukats i arkivet för att Ã¥terspara ändrande filer %i inod(er) lagda till arkiv %i inode(er) ändrade vid säkerhetskopieringstillfället och kunde inte sparas %i inod(er) borttagen/borttagna %i nod(er) misslyckades att spara (filsystemfel) %i inod(er) gick inte att Ã¥terställa (filsystemfel) %i inod(er) ignorerad(e) (uteslutna av filter) %i inod(er) ej Ã¥terställd(a) (ej sparad(e) i arkiv) %i inod(er) ej Ã¥terställd(a) (beslut enl. överskrivningspolicyn) %i inod(er) ej sparad(e) (ingen inode/fil ändrad) %i inod(er) noterade som borttagna %i inoder(er) noterad(e) som borttagen/borttagna frÃ¥n referenssäkerhetskopian %i inod(er) Ã¥terställd(a) %i inod(er) sparade %i inod(er) matchar ej motsvarande i filsystem %i företeelse(r) ignorerad(e) (uteslutna av filter) %i företeelse(r) behandlad(e) %i företeelse(r) med fel %s kommer med ABSOLUT INGA GARANTIER; %s kommer med ABSOLUT INGEN GARANTI, för detaljer skriv '%s -W'. 
%s medför ABSOLUT INGA GARANTIER; för detaljer skriv 'dar -W'. %s är en del av Disk ARchieve sviten (UtgÃ¥va %s) . hämta den. . Ignorerar fil eller mapp : Försöka igen ? . Försöka med kommandoraden igen ? . Försök att starta dar pÃ¥ samma sätt som förut ? Val: Fortsätta likväl ? EA Ã¥terställt för %i inod(er) EA sparat för %i inoder(er) OM MÖJLIGT FÖRSÖK ATT SKAPA DETTA FEL, ETT SCENARIO SOM KAN Ã…TERSKAPA DET ÄR TILL STOR HJÄLP VID PROBLEMSLÖSNINGEN TACK OM MÖJLIGT FÖRSÖK ATT Ã…TERSKAPA DETTA FEL, ETT SCENARIO SOM KAN Ã…TERSKAPA DET ÄR TILL STOR HJÄLP VID PROBLEMSLÖSNINGEN TACK Endast ett alternativ av -c -d -t -l -C -x eller -+ är tillÃ¥tet Klar att fortsätta ? TACKSAM FÖR RAPPORT OM FÖREGÃ…ENDE UTMATNING TILL UNDERHÃ…LLAREN MED EN BESKRIVNING AV OMSTÄNDIGHETERNA. Detta är fri programvara, och du är välkommen att Ã¥terdistribuera den under vissa villkor; Detta är fri programvara, och du är välkommen att Ã¥terdistribuera den under Totalt antal övervägda inoder: %i Totalt antal övervägda poster: %i Använder libdar med %u.%u byggt med kompileringstillfällets alternativ: Använder libdar %u.%u byggd med alternativ vid kompileringstillfället: Använder libdar med %u.%u.%u byggt med kompileringstillfällets alternativ: Använder libdar %u.%u.%u byggd med alternativ vid kompileringstillfället: [vagnretur = JA | Esc = NEJ] a : Spara som n : gör paus var n-te rad (noll ger ingen paus) c : kontrollera datumordning vissa villkor; skriv 'dar -L | more' för detaljer. kompilerad den %s med %s version %s d : sökväg till dar o : alternativ hos dar f : ange filplats m : modifiera arkiveringsordning för detaljer skriv 'dar -W'. är en del frÃ¥n en annan säkerhetskopia, hämta den rätta delen. skall till att skrivas över. är inte närvarande i arkivet l : listar databasinnehÃ¥ll A : lägg till ett arkiv ej ihopslagen (användares val) ej Ã¥terställd (användares val) p : modifiera sökväg för arkiven b : modifiera basnamn för arkiven q : avsluta skriv '%s -L | more' för detaljer. 
skriv 'dar -L | more' för detaljer. u : listar databasinnehÃ¥ll D : Tar bort ett arkiv w : skriv ändringar till fil s : databasstatistik med %i hÃ¥rd(a) länk(ar) noterad(e) "-k" (eller "-kignore") och "-konly" är ej kompatibla"första_filstorleken" kan inte vara skild frÃ¥n noll om "filstorlek" är lika med noll# A V B B Y T E R ! ## ICKE FÃ…NGAT AVBROTT, ## OVÄNTAT AVBROTT, #%S finns redan och överskrivning är ej tillÃ¥ten, avbryter%S fanns ej före specificerat datum och kan ej Ã¥terställas%S skall till at tas bort (krav frÃ¥n överskrivningspolicy), är du överens?%S skall till att överskrivas, OK?%S skall till att skrivas över, fortsätta ?%S skall till att tas bort frÃ¥n filsystemet, fortsätta ? %S är ej en giltig sökväg: %S%S är inte ett giltigt tal%S mÃ¥ste tas bort, men matchar inte förvänta typ, ta bort ändÃ¥ ?%S: Överskrivningspolicy är obestämd för denna fil, vet ej huruvida överskrivning tillÃ¥ts eller ej!%S: Överskrivningspolicy (Data) är odefinierat för denna fil, vet ej huruvida bortagande är tillÃ¥tet eller ej!%S: Överskrivningspolicyn (EA) är obestämd för denna fil, vet ej huruvida överskrivning är tillÃ¥ten eller ej!%d är inte en giltig blockstorlek%d är standardvärdet för --sparse-file-min-size, inget behov att specificera det pÃ¥ kommandoraden, ignorerar%d är standardvärdet för -m, inget behov att specificera det pÃ¥ kommandoraden, ignorerar%s namnger inte en kompression "[algoritm][:][nivÃ¥]" , sÃ¥som t. ex. "gzip", "lzo", "bzip2", "lzo:3", "gzip:2", "8" eller "1". Titta pÃ¥ man page om -z alternativ%s type är ej ett tal med tecken som förväntat%s typ är ej tal utan tecken som förväntat%s typlängd är ej %d byte(s) men %d, beredd att fortsätta ? 
-$ är bara tillgängligt med -+ och -c alternativ-$ är bara nyttigt med -@ alternativ, för det hjälpande referensarkivet-%% är bara tillgängligt med -+ alternativ-%% är bara nyttigt med -@ alternativ, för det hjälpande referensarkivet--nodump-funktionen har ej aktiverats vid kompileringstillfället, sÃ¥lunda är den ej tillgängligt--sparse-file-min-size är bara tillgängligt under arkivs sparande eller ihopslagning. -. alternativ är endast användbart vid sammanslagning, skapande eller isolering av arkiv, ignorerar-/ alternativ är användbart bara med -+ alternativ, ignoreras-= alternativ är bara giltigt option under sparande av filer, sÃ¥lunda i samband med -c alternativ, ignorerar-= valet kommer att ignoreras dÃ¥ det är oanvändbart om du inte specificerar till vilka filer eller mappar denna koppling skall tillämpas, tack vare -< och -> valen. Se man page för ytterligare detaljer.-@ är bara tillgängligt med -+ och -c alternativ-A alternativ är ej tillgängligt med -l-B alternativ kan ej ges inuti en batch-fil-F är endast användbart med -A alternativ, för referensarkivet-H är endast användbart med -A alternativ dÃ¥ en säkerhetskopia görs-H är endast användbart med -c, -d eller -x-H är endast användbart med -r alternativ dÃ¥ extrahering görs-J är endast användbart med -A alternativ, för referensarkivet-M är endast användbart med -c-S alternativ kräver användning av -s-a alternativ kräver ett argument-ac är endast användbart med -c eller -d-af mÃ¥ste finnas före -A alternativ inte efter!-af alternativ är endast tillgängligt med -c-af alternativ behöver inte specificeras mer än en gÃ¥ng, ignorer extra af alternativ-ak är bara tillgängligt under sammanfogning (operation -+), ignorerar -ak-ak alternativ behöver inte specificeras mer än en gÃ¥ng, ignorer extra ak alternativ-as är endast tillgängligt med -l, ignorerar -as alternativ-e alternativ är användbart bara med -x, -c eller -+ alternativ-e alternativ är bara tillgängligt när -r alternativ används, avbryter-f är endast 
tillgängligt med -x alternativ, ignorerar-k alternativ är användbart bara med alternativet -x"-konly" och "-kignore" kan ej användas samtidigt-m är bara användbart med -c-o är obligatorisk tillsammans med "-A -" with "-c -" "-C -" or "-+ -"-r är oanvändbart med -n-w alternativ är oanvändbart med -n-wa alternativ är användbart bara med alternativet -x-~ is är bara nyttig med -@ alternativ, för det hjälpande referensarkivet: fil finns inte filsystemEtt problem inträffade vid läsning av innehÃ¥let i detta arkiv: Avbryter batch-Ã¥tgärd: Avbryter program. Avbryter program. Ett fel inträffade angÃ¥ende utförande av användarkommando: Avbryter program. Ett fel inträffade vid anrop av libdar: Avbryter program. Den begärda Ã¥tgärden behöver funktioner, som har avaktiverats vid kompileringstillfället: Avbryter program. Användare vägrade fortsätta vid förfrÃ¥gan: Lägger till hÃ¥rd länk till arkiv: Lägga till en tydlig escape-sekvens av typen 'seqt_not_a_sequence' är förbjudetLägger till hänvisning till filer, som har förstörts sedan referensbackuppen...Alla efterfrÃ¥gade filer kunde ej Ã¥terställasAlla lager har skapats korrektArkiv %S kräver ett lösenord: Arkivbasnamn (eller extraherat katalogbasnamn) att lägga till: Fördröjd avslutning pÃ¥ arkiv inkopplatSnabb avslutning pÃ¥ arkiv inkopplatArkivformat äldre än "08" (utgÃ¥va 2.4.0) kan ej läsas genom ett ensamt rör. 
Det kana bara läsas genom dar_slave eller normal planfilsmetodArkivnummer, som skall ändras: Arkivnummer, som skall flyttas: Arkivnummer, som skall tas bort: Arkivnummer, vars sökväg skall ändras: Arkivnummer: Arkiv pÃ¥ 'std output' är inte kompatibelt med delning (-s alternativ)Arkivversion är för hög, använd en senare version av 'libdar'Är du säker pÃ¥ att du vill ta bort arkiv nummer %dDet argument, som ges till -H, är inte ett positivt heltalArgument läses frÃ¥n %S :Argument till -r mÃ¥ste vara en relativ sökväg (fÃ¥r aldrig börja med '/')Begäran om ett element utanför en tabellFel i CRC, data förstörtFel svar frÃ¥n 'peer' vid stängning av förbindelseDÃ¥ligt formaterad datumuttryckFelaktigt formaterad SAR-huvud (okänd TLV-typ i delsäkerhetskopiehuvud)DÃ¥ligt formaterad databasFel utformat avslutning, kan ej extrahera katalogplats: Felaktigt format 'infinint' eller ej understött formatFÃ…NGADE ETT ICKE- (LIB)DAR-AVBROTTCRC-fel upptäckt vid läsning av EACRC-fel: data förstört.Kan ej läsas sparade data.Avbrott ej bekräftatKan ej tilldela buffertminne under läsning av %SKan ej blockera signal: Kan inte avbryta uppdraget rent, 'thread-safe'-stöd saknas, kommer att stoppa programmet abrupt, skapat arkiv kan vara oanvändbartKan ej jämföra EA: EA-stöd har inte aktiverats vid kompileringstillfälletKan ej jämföra filer i skrivmodKan ej skapa databas, fil finnsKan ej skapa inod: Kan ej utvärdera crit_and criterium dÃ¥ inget kriterium har lagts till detKan ej utvärdera crit_or criterium dÃ¥ inget kriterium har lagts till detKan ej extrahera frÃ¥n den interna katalogen lista med filer, som skall tas bort Kan ej fÃ¥ referenskatalog dÃ¥ den ej har lämnatsKan ej hämta datanamn för arkivet, detta arkiv är ej helt initialiseratKan ej fÃ¥ ext2-attribut (och nodump-flaggvärde) för %S : %sKan inte hämta filstorlek: %sKan ej fÃ¥ komplett sökväg till gällande arbetsmapp: Kan ej fÃ¥r inodsinformation om den fil som skall tas bort Kan ej fÃ¥ inodinformation för %s : %sKan ej öppna 
katalog: Kan ej öppna mÃ¥lfil : %sKan ej öppna fil : Kan ej öppna källfil : %sKan ej läsa argument pÃ¥ kommandorad, avbryterKan ej läsa mappinnehÃ¥llet: %s : Kan ej läsa filinformation för %s : %sKan ej läsa inod för Kan ej läsa pÃ¥ 'input'kan ej ta emot sÃ¥ mycket data med hänsyn till allokerat minneKan ej gÃ¥ rekursivt i en icke-mappKan ej reducera en sträng till en storlek, som är större än dess aktuella storlekKan ej ta bort mapp Kan ej flytta obefintligt data %S frÃ¥n katalogKan ej Ã¥terställa nÃ¥gon fil, inget utförtKan ej Ã¥terställa fil %S : icke existerande fil i databasenKan ej Ã¥terställe behörigheter för %s : %sKan ej leta bakÃ¥t ett teckenKan ej leta i filKan ej sätta senaste tillgÃ¥ngtid och senaste modifieringstid: Kan ej hoppa över vid filslutKan ej hoppa över framÃ¥t för att Ã¥terställa ett hÃ¥lKan ej hoppa till aktuell position i "tronc"Kan ej synka skrivning med en skrivskyddad allmän fil.Kan ej frisläppa signal: Kan ej skriva till arkivet:Kan ej skriva pÃ¥ 'output'Kan ej skriva till mÃ¥l, avbryterKan ej skriva till 'output'FÃ¥ngade att icke-libdaravbrottFÃ¥ngade ett okänt 'Egeneric exception:'Ändrar information i databashuvudet...Ändrar databasinformation...Kontrollerar datumordning hos filer mellan arkiv...Kontrollerar filers datumordning...Välj bara en komprimeringsalgoritmNollställning av fils EA (begärd av överskrivninsgpolicy): Kommandona är: Kommandon: Allmänna alternativ: Kommunikationsproblem med 'peer', försöka igen ?Komprimerar och skriver tillbaka databas till fil...KomprimeringsnivÃ¥n mÃ¥ste ligga mellan 1 och 9 inklusiveKompressionsnivÃ¥n mÃ¥ste ligga mellan 1 och 9, inklusiveBeräknar statistik...Funderar pÃ¥ chiffreringsnivÃ¥...Beaktar det (första) referensarkivet:Sundhetskontroll misslyckades för arkivhuvudFortsätta lista arkivinnehÃ¥ll?Fortsätta? Fortsätter aktuell process,vilket är att avsluta... sÃ¥ vi avslutar!Fortsätter...Kopierade data matchar inte CRCKopiering avslutad. 
Saknar %.0f byte data Felaktiga data lästa frÃ¥n rörFelkaktiga data lästa frÃ¥n rörFörstörd databas :Förstörd post följande ett escape-märke i arkivetKunde ej skapa inod: Kunde inte öppna mapp %s i smygläsningsmode (%s), använder normal modeKunde inte Ã¥terställa ursprunglig ägare till fil: Kunde inte läsa en katalogdatastruktur med offset %i, det innehÃ¥ller följande:Skapar fil...Krypteringsblockstorlek mÃ¥ste vara större än 10 byteAktuell arbetsmapp kan ej vara en relativ sökvägCyphering-algoritm inte tillgänglig i libgcrypt: %s/%sDAR underprocess har avslutats med exitkod DAR avslutat vid signalmottagande: DIFF data har förstörts, kan en dechiffrera dataData förstört vid slutet pÃ¥ delsäkerhetskopian, förbjuden flagga hittad i denna positionData förstört vid slutet pÃ¥ delsäkerhetskopian, förbjuden flagga hittadFörstörda data eller ett okänt märke för gles fil hittat i filens dataData helt sparat : %S %SData frÃ¥n fil %S frÃ¥n första arkivet har bevarats frÃ¥n överskrivningData hos filen %S skall till att bli %S, fortsätta?Data frÃ¥n fil %S taget frÃ¥n det första referensarkivet har skrivits överData frÃ¥n fil %S taget frÃ¥n det första referensarkivet har tagits bortData bör vara endast läsningDatabasen har med lyckat resultat skapats tom.Databas ej sparad. Vill du verkligen avsluta?datum är ej i stigande ordning för alla filer, när databasens arkivnummer ökar, arbete med denna databas kan leda till felaktig version för Ã¥terställd fil. Ordna om arkivet inom databasen sÃ¥ att det äldsta är det första arkivet och det senaste är det sista i databasenFils datum %S ökar ej när databasens arkivnummer växer. Berörd fil är: %SDekomprimerar och laddar databashuvud in i minnet...Dekomprimerar och laddar databasen in i minnet...Mod med stegvis minskning är oanvändbar när hopslagning inte är tillämpbar pÃ¥ bÃ¥de arkivreferens och en extra arkivreferens. 
Ignorera denna mod och fortsätta?Upptäckt "Cache Directory Tagging Standard" för %s, innehÃ¥llet i den mappen kommer aj att sparasMappen %S kan ej Ã¥terställas: överskrivning ej tillÃ¥ten och en icke-mapp-inod med det namnet finns redan, alla filer i den mappen kommer att hoppas över vid Ã¥terhämtning:Smutsig fil : %S %SDivision med nollVill du ignorera samma feltyp för andra filer?EA för %S skall till att slÃ¥s ihop, OK?NÃ¥gra EA för %S skall till att skrivas över, OK?EA för %S skall till att tas bort, OK?EA for %S har ej nollställt enligt begäran frÃ¥n överskrivnngspolicyn eftersom denna fil är en hÃ¥rd länk pekande pÃ¥ en redan Ã¥terställd inodEA för %S har ej skrivits över eftersom denna fil är en hÃ¥rd länk pekande pÃ¥ en redan Ã¥terskapad inode.EA helt sparat : %S %SEA senare : %S %SEA tal : %i %iEA för fil %S frÃ¥n första arkivet har släppts och markerats som redan sparadeEA för fil %S frÃ¥n första arkivet har tagits bortEA för fil %S frÃ¥n första arkivet har uppdaterats med dessa med samma filnamn frÃ¥n det extra arkivetEA för fil %S har skrivits överEA för fil %S har skrivits över och markerats som redan sparatERR ERR En tom sträng som undermapp utgör inte en giltig sökvägTom sträng är ett ogiltigt arkivabasnamnEn tom sträng är ej en giltig sökvägMata in varje argument rad för rad, tryck vagnretur vid slutet Inmatad information CRC-misslyckandeInmatad information CRC-misslyckande för %SInmatad information CRC-misslyckande för %S. 
Ignorera detta?Notis information: "i plats" "att läggas tillNotistyp : %s %sFel vid skapande av UNIX sockelfil: Fel vid skapande av hÃ¥rd länk %s : %s Försöker att duplicera inodenFel vid skapande av hÃ¥rd länk %s , inoden till länk med [ %s ] har försvunnit, Ã¥terskapar denFel vid skapande av hÃ¥rd länk %s , inoden till länk med [ %s ] finns inte, kan ej Ã¥terställa denna hÃ¥rda länkFel vid utförande av användarkommandoradsexekvering: Fel vid hämtning av fils läsposition: Fel vid hämtning av filstorlek: Fel vid öppning av sista delsäkerhetskopia: %S. Försöker öppna arkivet genom att använda första delsäkerhetskopia...Fel uppstÃ¥tt vid bearbetande av Ã¥tgärd: Fel pÃ¥träffat vid läsning av rad %S frÃ¥n fil %S: %SFel vid öppning i smygläsningsmode: Fel vid öppnande av mapp: Fel vid öppnade av rör: Fel vid genomförande av den begärda Ã¥tgärden: %SFel vid läsning av EA för Fel vid läsning av attribut %s hos fil %s : %sFel vid läsning av tecken: Fel vid läsning av databas %S : Fel vid läsning av mappinnehÃ¥ll: Fel vid läsning av inod för fil %s : %sFel vid läsning av källfil ( vi är vid %.2f %% av kopierade data), försöker att läsa vidare: %s Fel vid räddning av EA-lista för %s : %sFel vid sparande av Extended Attribut för Fel vid omvandling av arkivet :Fel vid aktivering av 'libgcrypts' minnesskydd: %s/%sFel vid tilldelning av nyckel till libgcrypt key handle (essiv): %s/%sFel vid tilldelning av nyckel till libgcrypts nyckelhanterare: %s/%sFel vid anrop av fork() för att starta dar: Fel vid ändring av användarterminalens egenskaper: Fel vid jämförelse mellan arkiv och filsystem: Fel vid bestämning av fil: Fel vid omvandling av UID/GID till sträng för "backup hook file": Fel vid skapande av ESSIV handle: %s/%sFel vid skapande av anonym pip: Fel vid skapande av hash handle: %s/%sFel vid kodning av data: %s/%sFel vid dechiffrering av data: %s/%sFel vid härledning av nyckel frÃ¥n lösenord (HMAC öppnat): %s/%sFel vid härledning av key frÃ¥n lösenord (HMAC set key): %s/%sFel 
vid hämtning av arkivegenskaper: Fel vid hämtning av information för %S: Fel vi skapande av IV: %s/%sFel vid initiering av hash: Hash algoritm ej tillgänglig i libgcrypt: %s/%sFel vid listning av arkivinnehÃ¥ll: Fel vid öppnande av 'libgcrypt key handle': %s/%sFel vid öppnande av referensarkivet: Fel vid analys av --min-digits alternativ: Fel vid analys av -A-argument som ett datum: Fel vid läsning CRC för EA frÃ¥n arkiv: Inget escape-märke funnet för denna filFel vid läsning av arkivhuvud. Detta kan bero pÃ¥ att arkivet är ett gammalt krypterat arkiv eller att data har förstörts. Under antagande att det är ett gammalt arkiv, mÃ¥ste huvudet läsas frÃ¥n början av den första delen...Fel vid läsning av data frÃ¥n säkert minne:Fel vid läsning frÃ¥n fil: Fel vid läsning frÃ¥n ett rör: Fel vid läsning av användarsvar frÃ¥n terminal: Fel vid Ã¥terställning av krypteringsnyckel för ett nytt block: %s/%sFel vid Ã¥terställning Fel vid Ã¥terställande av data. Fel vid Ã¥terställande av följande filer: Fel vid sparande Fel vid datasparande: Fel vid inställning av IV för aktuellt block: %s/%sFel vid tilldelning av en fils egenskaper:%sFel vid inställning av ägare till fil: %sFel vid meddelande till 'libgcrypt' att initialiseringen är avslutad: %s/%sFel vid arkivtest: Fel vid skrivning av data till ett rör: Fel vid skrivning till fil: Fel SHA1 ej tillgängligt i 'libgcrypt': %s/%sFel, referenskatalog har aj lämnatsEscape-sekvens använd för läsning leder till att arkivet placerar vissa filer utanför den specificerade roten. 
För att klara detta problem, försök att läsa arkivet i direkt mode (använd ej sekvensiell läsning), försök reaparera arkivet med hlälp av Parchive om redundanta data har skapats eller som sista utväg försök använda lax modeAvbryter...Befintlig EA för %S kunde inte bevaras : Befintlig EA för %S kunde inte läsas och bevaras: Misslyckades med att öppna "cache"-nivÃ¥n, minnesbrist, arkivläsförmÃ¥ga blir ej optimalMisslyckades med att hämta frÃ¥n libgcrypt den blockstorlek använd vid cyphering algoritmen: %s/%sMisslyckades att öppna %S medan kontroll av nodump-flagga gjordes: %sMisslyckades med att notera hash: Ödesdigert fel i användarkommandorad: Filen %S har ändrats under säkerhetskopieringen och är sannolikt inte sparad i ett giltigt tillstÃ¥nd ("smutsig fil"), vill du ta med den i Ã¥terställningen i alla fall?Fil %S rad %dFil har ändrats under säkerhetskopiering och har kopierats en annan gÃ¥ng, Ã¥terställer nästa kopia av filen: Fil försvann medan den lästes, kan ej kontrollera huruvida den har ändrats under dess säkerhetskopiering: %SSlinga i filinkludering har upptäckts. Filen %s inkluderar sig själv direkt eller genom andra filer (-B alternativ)FiltillstÃ¥nd är varken läsning eller skrivningFil mÃ¥ste vara en mapp: Fil ej funnen i databas: Fil noterad som borttagen vid detta datum i databasen: Filval har avbrutits. 
Skapar nu det resulterande arkivet med de redan valda filernaFilstorlek för litenFil att leta efter: Klar med skrivning till fil Första filens storlek för litenFörsta delsäkerhetskopiestorlek är för liten för att till och med kunna pÃ¥ plats med delsäkerhetskopiehuvudetFormaterar fil som en tom databas...Fann 'SmygläsningsmodeAngivet datum mÃ¥ste vara i det förgÃ¥gnaAtt ge -S alternativet samma värde som det som gavs till -s är oanvändbartOm man ger till -S alternativet samma värde, som det som man ger till -s alternativet, är det oanvändbartHÃ¥rda länkar för mappar stöds ejHur mycket rad, som skall visas genast: INTERNT FEL, RAPPORTERA FÖREGÃ…ENDE UTDATA TILL PROGRAMUNDERHÃ…LLARENOm du verkligen vill stoppa dar, tryck CTLR-C och sedan vagnretur Ignorera föregÃ¥ende fel frÃ¥n användarkommandorad och fortsätta ?Ignorerer extra argument pÃ¥ kommandoradIgnorerar fil med NODUMP-flagga satt: Ignorerar okänt alternativ -%cI ingÃ¥ende filer %S: PÃ¥ vilkenplats skall detta arkiv läggas in: Osammanhängande svar frÃ¥n 'peer'Ej enhetlig katalogstruktur: duplicerad hÃ¥rdlänkade inodes dataOsammanhängande katalogstruktur: hÃ¥rdlänkade data är ej en inodeOsammanhängande katalogstruktur: hÃ¥rdlänkade inoders data ej hittadeEj enhetlig katalogstruktur: obekant statusflagga för hÃ¥rdlänkad inode.Felaktig delsäkerhetskopiehuvud: första delsäkerhetskopiestorlek för litenIcke översenstämmande struktur i data med glesa filer. 
okänt märkeEj fullständig etikettFelaktigt arkivomrÃ¥de i databasenFelaktig dag i mÃ¥nadFelaktig minutFelaktig mÃ¥nadFelaktig sekundIndex utanför giltigt omrÃ¥deInitieringsproblem för liblzo2-biblioteket'Input' kan ej läsasOgiltigt arkivnummer: Ogiltigt argument som mÃ¥larkivOgiltigt argument som källarkivOgiltigt tal i sträng: %SOgiltig sökväg, sökväg mÃ¥ste vara relativOgiltig storlek för CRC-breddOgiltig storlek för -S alternativOgiltig storlek för -s alternativÄr mappar : %S %SÄr hÃ¥rdlänkad : %S %SÄr inode : %S %SÄr enkel fil : %S %S'Iteratorn' pekar inte pÃ¥ dataNyckel kan ej vara en tom strängLAX MODE: "%S" är ej ett giltigt arkivformatLAX MODE: %i %% Ã¥terstÃ¥rLAX MODE: %i är ej giltigt procentvärdeLAX MODE: mappstrukturen i arkivet är förstört, det skulle leda till vissa filer placeras utanför den angivna rotmappen. Ã…terställning av andra mappars innehÃ¥ll vid roten, ej utanför den, vilket kommer att lägga filer frÃ¥n andra mappar i den angivna rotmappen.LAX MODE: Arkiv är flaggat som om det har 'escape sekvens markeringar' (vilket är normalt i tidigare arkivversioner). Om detta inte är förväntat, skall jag antaga att data förstörts i detta fält och att denna flagga skall ignoreras? (Om osäker, vägra)LAX MODE: Arkivet verkar vara chiffrerat, men du har ej angett nÃ¥gon chifferalgoritm, antar dataförstörelse och betraktar arkivet som ej chiffreratLAX MODE: Arkivet verkar utgöras av bara en isolerad katalog (inga data i det). Kan det antagas att data förstörts och kan det arkivet betraktas som ett riktigt arkiv?LAX MODE: Börjar med att avsöka katalogen (frÃ¥n slutet mot början av arkivet %i %% av arkivet), detta kan ta ett bra tag...LAX MODE: CRC misslyckades, arkivinnehÃ¥llet är förstört. Detta kan leda till att dar ser filer i arkivet, vilka aldrig funnits men detta kommer sannolikt leda till andar fel vid Ã¥terställande av filer. Skall vi fortsätta ändÃ¥?LAX MODE: Kan ej hoppa över i slutet av arkivet! 
Amvänder aktuell position till att börja katalogsökningenLAX MODE: fÃ¥ngade avbrott: LAX MODE: Kunde inte hitta hela katalogen i arkivet. Om du har en isolerad katalog, stanna här och använd den som säkerhetskopia för den interna katalogen, i annat fall fortsätt, men kom ihÃ¥g att alla data inte kan Ã¥terställas...LAX MODE: vill du hoppa över nÃ¥gra vettighetskontroller och försöka igen att läsa arkivinnehÃ¥llet (detta kan lite tid, detta kan även misslyckas)?LAX MODE: vill du försöka finna delar av originalkatalogen om nÃ¥got Ã¥terstÃ¥r (detta kan ta mycket tid och in vilket fall som helst kommer bara nÃ¥gra filer, som mest, att Ã¥terställas.)?LAX MODE: PÃ¥ grund av trolig dataförstörelse, kan 'dar' inte avgöra det rätta storleken pÃ¥ delsäkerhetskopian i detta arkiv. För senare arkiv är denna information dubblerad i varje delsäkerhetskopia, vill du försöka att öppna en annan delsäkerhetskopia för att fÃ¥ tag pÃ¥ detta värde om det finns?LAX MODE: fel vid skapnde av mapp, hoppar över detta data och fortsätter. Överhoppat fel var: LAX MODE: Escape-sekvens kunde inte hittas, den kan ha blivit förstörd or utanför avsökt del av arkivet, försöker hitta katalogen den andra vägen...LAX MODE: escape-sekvenser verkar finnas i detta rakiv. Jag har dÃ¥ tvÃ¥ olika metoder, antingen letar jag efter escape-sekvenser visande pÃ¥ en katalogbörjan eller sÃ¥ försöker jag vid varje position i ordning i hopp om att det inte är data, som ser ut som en katalog.LAX MODE: Misslyckades med att läsa katalogenLAX MODE: Misslyckades med att läsa katalogen (0 byte hos arkivlängden medförde att leta efter katalogen)LAX MODE: Misslyckades med att läsa katalogen (inga data att inspektera)LAX MODE: Bra ide! kunde hitta escape-sekvensen markerande katalogbörjan, försöker nu läsa den...LAX MODE: Trots frÃ¥nvaron av en känd delsäkerhetskopiestorlek, fortsätter hur som helstLAX MODE: Skriv in ett positivt heltalLAX MODE: Upplys om arkivformatet. 
Du kan använda tabellen vid %s att finna arkivformatet beroende pÃ¥ version., till exempel om detta arkiv har skapats med "dar" version 2.3.4 till 2.3.7, svara "6", (utan anföringstecken) här:" LAX MODE: Skriv in numret pÃ¥ den delsäkerhetskopia som skall läsas: LAX MODE: NÃ¥dde slutet pÃ¥ avsökt omrÃ¥de, misslyckades med att hitta nÃ¥gon katalogLAX MODE: delsäkerhetskopieflagga förstörd, men en delsäkerhetskopia med högre nummer har setts, sÃ¥lunda indikerade denna delsäkerhetskopieflagga inte denna delsäkerhetskopia som den sista i arkivet. FortsätterLAX MODE: katalogen (InnehÃ¥llsförteckning) tar normalt upp nÃ¥gra procent av arkivet i dess slut. Hur stor del procentuellt, vill du att jag skall avsöka (svara med ett heltal mellan 0 och 100)?LAX MODE: Slutet pÃ¥ arkivet är förstört, kan ej hämta arkivinnehÃ¥llet ("katalogen")LAX MODE: Försöker hitta escape-sekvensen (säkrare val) ?LAX MODE: Oväntat sparat status för mappslutpost, antar dataförstörelse skett, ignorerar och fortsätterLAX MODE: Okänt data i delsäkerhetskopiehuvud, ignorerar och fortsätterLAX MODE: Använder arkivformat "%d"?LAX MODE: katalogetikett matchar inte arkivetikett, som om det vore en extraherad katalog, antar dataförstörelse skett och fixar till katalogen som en enkel intern katalogLAX MODE: stänger delsäkerhetskopia %i, huvud hämtat korrektLAX MODE: första delsäkerhetskopiestorlek är ej möjlig att läsa, (brist pÃ¥ virtuellt minne?), fortsätter hur som helst...LAX MODE: första delsäkerhetskopiestorlek är ej möjlig att läsa, fortsätter hur som helst...LAX MODE: hittade obekant kataloguppgift, antar att data förstörts, kan ej läsa vidare i katalogen dÃ¥ jag ej vet längden pÃ¥ denna typ av uppgiftLAX MODE: hittat okänd katalogpost antar data förstört. Hoppar över till nästa post, detta kan leda till att felaktig mappstruktur Ã¥terställs om förstört data var en mappLAX MODE: det interna namnet pÃ¥ delsäkerhetskopian leder 'dar' till att besluta att den inte är frÃ¥n samma arkiv. 
Anar dataförstörelse och litande pÃ¥ filnamnet pÃ¥ denna delsäkerhetskopia som bevis pÃ¥ dess medlemskap i arkivetLAX MODE: öppnar delsäkerhetskopia %i för att läsa dess delsäkerhetskopiehuvudLax MODE: delsäkerhetskopiestorlek ej möjlig att läsa, (brist pÃ¥ virtuellt minne?); fortsätter hur som helst...LAX MODE: delsäkerhetskopior %i och %i är bägge noterade som den sista delsäkerhetskopian, behÃ¥ller det högre talet som den verkligt sista delsäkerhetskopianLAX MODE: en begärda läget sekvensiell läsning bygger pÃ¥ escape-sekvenser vilket värkar saknas i detta arkiv. Antar att data förstörts. Men om inga data förstörts och ändÃ¥ ingen escape-sekvens finns i arkivet, använd inte läget sekvensiell läsning för att undersöka detta arkiv du fÃ¥r inget matnyttigt av detMinnesbristBrist pÃ¥ SÄKERT minne för att utföra Ã¥tgärden, avbryter densammaMinnesbrist för att utföra Ã¥tgärden, avbryter densammaBibliotek, som används för blowfish-kryptering respekterar ej RFC 3962Listalternativ (att användas vid -l): Letar efter arkivinnehÃ¥ll...Letar i arkiven för begärda filer, klassificerar filer arkiv för arkiv...Mask_lists prefix mÃ¥ste vara en absolut sökväg eller börja med ""-sträng vid arkivhopslagningMinnestilldelning misslyckades : %sSaknar -c -x -d -t -l -C -+ alternativ, se %S -h för hjälpSaknat [ efter } i villkorssats: Saknat ] i villkorssats: Saknar arkivbasnamn, se -h alternativ för hjälpSaknar argument till --backup-hook-executeSaknar argument till --hashSaknar argument till --min-digitsSaknar argument till -ESaknar argument till -E alternativSaknar argument till -FSaknar argument till -SSaknar argument till -^Saknar argument till -i alternativSaknar argument till -o alternativSaknar argument till -sSaknar argument pÃ¥ kommandorad, avbryterKatalog saknas i fil.Saknar stöd för 'hashing'-algoritm (vilket är en del av stödet för stark kryptering genom 'libgcrypt'Saknar parameter till alternativ -%cSaknar käll- eller mÃ¥l-argument pÃ¥ kommandorad, se -h alternativ för 
hjälpSaknar stöd för stark kryptering (libgcrypt)Saknat } i villkorssats: NEJINGEN FIL I DEN MAPPEN KAN SPARAS.EN INTE IMPLEMENTERAD FUNKTION har använts: Negativt tal eller noll är ej tillÃ¥tet vid flyttning av ett arkiv inom en databasNytt basnamn för arkiv nummer %d: Nytt databasnamn: Ny sökväg att ge till arkif nummer %d: Ingen Ã¥tgärd specificerad, avbryterIngen säkerhetskopiefil finns i %S för arkiv %S, ge de n sista filen i mängdenIngen chiffreringslager öppnat...Ingen databas specificerad, avbryterInget avbrott tillÃ¥tet frÃ¥n libdar 'callbacks'Ingen fil i denna mapp kommer att tas med vid ihopslagning.Ingen fil i denna mapp kommer att Ã¥terställas.Ingen mask i masklistan att jobba medInga problem hittadeInget utrymme kvar för inod, du har möjlighet att skapa en del plats nu. När det är gjort : kan vi fortsätta ?Inget utrymme kvar pÃ¥ enhet, du har möjlighet att skapa plats nu. När du är klar : kan vi fortsätta ?Ingen terminal hittad för användarinteraktion. Alla frÃ¥gor kommer att anses negativt besvarade (minst skadliga val), vilket i det flesta fall kommer att avbryta programmet.Inget användarmÃ¥l funnet pÃ¥ kommandoradIcke existerande arkiv i databasenIcke existerande fil i databasIcke-existerande fil: Icke fatalt fel vid skrivning till mÃ¥lfil, gör om Ej sparadInte en nollavslutad sträng i filInte tillräckligt med data för att initiera lagringsfältNu genomförs 'i flykten'-isolation...OK OK OK, behÃ¥ller %S som basnamnEndast ett -@ alternativ är tillÃ¥tetEndast ett -A alternativ är tillÃ¥tetEndast ett -R alternativ är tillÃ¥tetEndast ett -S alternativ är tillÃ¥tetEndast ett -s alternativ är tillÃ¥tetÖppnar ett par pipes för att läsa arkivet, förväntar mig dar_slave i andra ändan...Öppnar och läser batch-fil...Öppnar konstruktionsnivÃ¥...Öppnar chiffreringslager...Öppnar escape-sekvensabstraktionslagretÖppnar namngiven pipe %S som input för att läsa arkivet...Öppnar standard-input för att läsa arkivet...Öppnar arkivet genom att använda abstraktionsnivÃ¥ 
med 'multi-slice'...Öppnar kompressionsabstraktionslagret ( ingen kompressionsalgoritm används)...Öppnar kompressionslagret...Alternativ: Överskrivning ej tillÃ¥ten dÃ¥ en del av ett tidigare arkiv med samma basnamn har hittats i %s-mappen. Ã…tgärden avbrutenAnalysfel i kommandoraden (eller i inkluderade filer): Analysfel: DelförfrÃ¥gan mottagen, avbryter Sökväg till dar (vid tom sträng används standard frÃ¥n PATH-variabel): Rörs andra ända okänt, kan ej stänga nÃ¥gon filbeskrivning pekande pÃ¥ detRörs andra ända okänt, kan ej erhÃ¥lla en filbeskrivning för detSvara med de tecken mellan hakparenteser ('[' och ']') och tryck vagnreturKontrollera dokumentationen eller uppgradera din programvara om tillgängligKontrollera din hÃ¥rdvaraBekräfta lösenordet: Möjligt fel i liblso2: lzo1x_*_compress lämnade oväntad kod %dProgarmmet har avslutats av följande orsak: NÃ¥tt filslut vid läsning av arkivversionNÃ¥tt filslut, inga korrekta data kunde Ã¥terfinnas efter senaste felet Kommit till filslut innan alla data kunnat läsasNÃ¥tt filslut vid läsning av delsäkerhetskopiehuvudNÃ¥dde filslut under läsning av arkivversionLäsning av en 'write only generic_file'Läser arkivinnehÃ¥ll...Läsning av arkivkatalogen för att lägga till...Läsalternativ (att användas vid -x, -d, -t, -l, -A) Läser arkivhuvud...Läsning av referensarkivet frÃ¥n ett rör eller standard input är ej möjligtKlart att börja skriva till arkivet?Mottagen signal: %dMottagen signal: %sMottaget okänd specialorderSparar hÃ¥rd länk till arkiv: Referenslager hos iteratorn är tomt eller finns ejTar bort fil %sTar bort fil (orsaken är att fil är noterad som bortagen i arkiv): %STar bort fil (orsak är överskrivningspolicy): %STar bort den smutsiga filen %SÃ…terställer %s i -R alternativet genom den mapp, pekad pÃ¥ frÃ¥n denna symboliska länk: Nollställning av den sekvensiella läsprocessen av arkivinnehÃ¥llet medan den inte är avslutad, kommer att göra alla data oläsbara och sÃ¥ lÃ¥ngt oÃ¥tkomligaÃ…terställer fils EA: 
Ã…terställer fils data: Ã…terställningsalternativ (att användas vid -x) : Att köra en batch-fil frÃ¥n en batch-fil är ej tillÃ¥tetSUNDHETSKONTROLL: Ã…TMINSTONE ETT TRÃ…DAT AVBROTTSOBJEKT HAR INTE FÖRSTÖRTS OCH FINNS KVAR I MINNE MEDAN PROGRAMMET NÃ…DDE SITT SLUTVERKAR VARA ETT HÃ…RDVARUPROBLEM: ÖVERHOPPAD (hÃ¥rd länk i sekvensiell läsmode): Samma data men CRC-värdet kunde inte verifieras, ty vi gissade inte korrekt dess bredd (sekvensiell läsbegränsning)Samma data men lagrat CRC matchar inte data!?!SparadSparar Extended Attribut för Sekvensiell läsning begärd, men detta arkiv är flaggat för att inte ha nödvändiga escepsekvenser för denna Ã¥tgärd, avbryterFör stor storlek för en elastisk buffertÖverhoppning gjord (missade %.0f byte), fann riktiga data för läsning, fortsätter kopieringen... Delsäkerhetskopiestorlek är för liten för att till och med kunna pÃ¥ plats med delsäkerhetskopiehuvudetDelning (-s alternativ), är inte kompatibelt med arkiv pÃ¥ 'standard output' ("-" som filnamn)Vissa filjämförelser misslyckadesNÃ¥gra filer i arkivet är förstörda och det är ej möjligt att Ã¥terställa demVissa filer följer inte kronologisk ordning när arkivindex ökas inom databasen. 
Detta kan leda till att dar_manager Ã¥terhämtar fel version av dessa filer.Gles fil : %S %S Subtraktion av en 'infinint' större än det första, 'infinint' kan ej vara negativtSyntaxfel i --retry-on-change argument: Syntaxfel i batch-fil: %SSyntaxfel i batch-fil: -C alternativ ej tillÃ¥tetSyntaxfel i batch-fil: -i alternativ ej tillÃ¥tetSyntaxfel i överskrivningspolicy: "Cache Directory Tagging Standard" är bara användbart medan en säkerhetskopiering görs, tar ej hänsyn till det härArkivet %S är krypterat och ingen krypteringschiffer har getts, kan ej öppna arkivet.Arkivert och den isolerade katalogen mostavar inte samma data, de är sÃ¥lunda inkompatibla med varannBasnamnet '+' är reserverat för speciella ändamÃ¥l, som inte har nÃ¥gon mening i detta sammanhangPosten at gÃ¥ rekursivt in i saknas, kqan ej lägga till ytterligare poster till den obefintliga undermappen.Följande användarkommentar kommer att placeras i klartext i arkivet: %SArkivets formatversion är för högt för denna programvaruversion, försöka läsa ändÃ¥?Formatversionen för denna databas är för hög för denna programvaruversion, använd en senare programvara för att läsa eller ändra denna databasDen uppgivna sökvägen %s mÃ¥ste vara en mapp (eller en symbolisk länk till en befintlig mapp)'Iteratorn' indexerar inte det objekt den har blivit ombedd att läsa frÃ¥n'Iteratorn' indexerar inte det objekt den har blivit ombedd att skriva till'Iteratorn' indexerar inte det objekt som den har definierats förIteratorns position är ej innanför referensens lagringsutrymmeDen precis Ã¥terställda filen %S har markerats som smutsig (sekvensiell läsning kan bara detektera smutsigt status efter Ã¥terställning), skall vi ta bort den precis Ã¥terställd filen?Den precis Ã¥terställda filen %S har markerats som smutsig (sekvensiell läsning kan bara detektera smutsigt status efter Ã¥terställning), genom borttagande av den just Ã¥terställda smutsiga filen som det är frÃ¥gat om att ignorera denna filtypDen sista filen i omgÃ¥ngen 
finns inte i Snabb-backup (-A +) är bara tillgängligt med -c aalternativ, ignorerarDe tvÃ¥ lösenorden är olika. AvbryterDetta arkiv innehÃ¥ller en isolerad katalog, det kan ej användas för denna Ã¥tgärd. Det kan bara användas som referens vid inkrementell/differentiell säkerhetskopiering eller som säkerhetskopiering av det ursprungliga arkivets katalogDetta arkiv har delar och har ej möjlighet att bli lästa frÃ¥n ett rörDetta arkiv kan inte utnyttjas, kontrollera dokumentationen för mer informationDetta arkiv kan inte utnyttjas, kontrollera, hur man använder arkivklassen, i API-dokumentationenDetta arkiv har delar och men det är ej möjlighet att läsa följand del frÃ¥n ett rör, avbryterDetta är ett gammalt arkiv, det kan bara öppnas genom att börja med den första delsäkerhetskopianTrÃ¥d-avbrott begärt, avbryter sÃ¥ snart snyggt möjligtTrÃ¥d-avbrott begärt, avbryter sÃ¥ snart som möjligtAvsluta genom att mata in en tom rad För att utnyttja --sparse-file-min-size under arkivihopslaygnin, mÃ¥ste du använda -ah valet ocksÃ¥se man page för detaljerFör stort block med komprimerade data: antingen pÃ¥ grund av förstörda data eller begränsningar i aktuellt system där SSIZE_MAX-värdet indikerar mindre buffertar än begärdaFör mÃ¥nga argument pÃ¥ kommandorad, se -h alternativ för hjälpFör mycket rad i fil %S (heltalsspill)Försökte skriva 'out of size' begränsad filTvÃ¥ olika delsäkerhetskopior (%i och %i) är markerade som den sista delsäkerhetskopian i denna säkerhetskopia!Obalanserade parenteser i uttryck: Ofullständigt svar frÃ¥n 'peer'Ej komplett arkiv! Antar det har blivit avbrutet vid säkerhetskopieringen. 
Om ett fel har precis rapporterats ovan, strunta i det helt enkelt, detta är om den fil, som sparades vid tidpunkten för avbrottet.Oväntat svar frÃ¥n slav, kommunikationsproblem eller bug kan hänga Ã¥tgärdenOväntat filslutOväntat fel vid väntan pÃ¥ att dar avslutas: Oväntat avbrott frÃ¥n libdarOväntat värde hitta i databasOväntat värde vid läsning av arkivversionOkänd TLV-posttypOkänt argument givet till -2 : %sOkänt argument givet till -a : %sOkänt argument givet till -k : %sOkänt argument givet till -w: Okänd 'atomic' operator eller 'atomic' ej tillÃ¥ten med ett argument: Okänt tecken hittat under kontroll av villkorssträng: Okänt val Okänt val: Okänd krypteringsalgoritmOkänd krypteringsalgoritmOkänd post hittad i delsäkerhetskopiehuvud (typ= = %d), alternativ ej stött. Arkivet du läser kan ha skapats av en senare version av 'libdar', ignorera denna post och fortsätta hur som helst?Okänd escape-sekvenstypOkänt uttryck hittat under kontroll av villkorssträng: Okänt uttryck i överskrivningspolicy: Okänd filtyp! filnamnet är: Okänt alternativ i databashuvud, avbryter Okänt makro %%%d i användarkommentarOkänd parameter givet till --hash alternativ: Okänd parameter givet till -a alternativ: Okönd policy för EA '%c' i uttrycket %SOkänd policy för data '%c' i uttrycket %SOkänd posttypOkänd utbytessträng :%Okänt suffix [%c] i sträng %SUppdaterar databas med katalog...Användarbeslut begärt för EA hos fil %SAnvändarbeslut begärt för data hos fil %SAnvändarmÃ¥l kallat "%s" är ej tillÃ¥tet (reseverat ord för villkorssyntax)Att använda inställningen sekvensiell läsning för källarkiv är ej möjligt vid hopslagningsÃ¥tgärdVARNING! Fil ändrad under läsning för säkerhetskopiering, men inga fler försök tillÃ¥tna: VARNING! Fil ändrad vid läsning för säkerhetskopiering. Inga fler försök för den filen för att inte överskrida bortkastad byt-gräns. Fil är VARNING! Fil ändrad under läsning för säkerhetskopiering. Genomför nytt försök %i av %iVarning! 
Arkivet är placerat i backupmappen, detta kan skapa en ändlös slinga när arkivet försöker spara sig själv. Du kan antingen lägga till -X "%S.*.%S" till kommandoraden eller ändra platsen för arkivet. (se -h för hjälp) Vill du verkligen fortsätta?VARNING! Detta är en isolerad katalog, inga data eller EA finns i detta arkiv, endast katalogstrukturen kan kontrollerasVARNING: stöd för säkert minne var ej tillgängligt vid kompileringstillfället. Vid stor minnesbelastning kan detta leda till att det lösenord du skall till och lämna kan skrivas till disk (växlingsutrymme) i klartext. Du har härmed varnats!Varning! Ingen fil i den mappen kommer att Ã¥terställas: Varning! Ingen fil i den mappen kommer att slÃ¥s ihop: Varning, %S verkar mer vara namn pÃ¥ en del och inte ett basnamn. Vill du byta det mot %S ?Varning, är du säker du vill avbryta (svara med "%S" för att bekräfta)?Vi har länkats med en inkompatibel version av libdar. Förväntad version %d.%d.x men blev länkad med version %d.%d.%dVilket är %.2f %% av det totala mängden data) Skrivning till en 'read only generic-file'JANoll är inte en gÃ¥ngbar storlek för en elastisk buffert[ ][ EA ][ Sparad ][SMUTS][InRef][Sparad]en binär siffra är antingen 0 eller 1avbrytfrÃ¥nvarandefrÃ¥nvarande en tom sträng är ett ogiltigt argument'backup hook' funktion (-<, -> or -= options) är bara tillgänglig vid sparande av filer when saving, ignorerardÃ¥ligt strukturerad inod: okänd inodflaggastorblockenhetstöd för 'blowfish' stark krypteringbzip2kan ej läsa CRC: inget escape-märke hittat för denna filkan ej utvärdera en tom kedja i en överskrivningspolicykan inte förse data frÃ¥n en "not saved"-filobjektteckenenhet'compressReset' anropad men 'compressInit' anropades inte förstCRC-fel hos komprimerade dataförstörda komprimerade data hittadekomprimerade data är förstördaförstörd filförstörda data hittade: oegentligheter i LZO-komprimerade dataförstörda data hittade: För stort block med komprimerade datadata har rensats, objekt är nu 
tomtdatum före 1970 är ej tillÃ¥tetfördröjd inmatningenheter har inte samma 'major'-tal: %d <--> %dskillnad mot senaste ändringsdatum: %S <--> %Sandra Utvidgade Attributannan filtypannan ägare (uid): %i <--> %iannan ägargrupp (gid): %i <--> %iandra behörigheter: %S <--> %Smapp'door' inodsläppt frÃ¥n arkivet och markerad som redan sparadosammanhängande struktur i elastisk buffertfel!utförande av [ %S ] resulterade i felkod: %dexecve() misslyckades. (full processtabell ?)fil finns, och 'DONT_ERASE'-alternativ är inställt.fil ej öppenförstagid-värde för högt för detta system för 'libdar' att kunna Ã¥terställa den säkertgziphÃ¥rdlänkad inode'hide_file' kan ej initieras med 'write-only'-filignorerar -O alternativ, dÃ¥ det är värdelöst i denna situationi fil %S rad %Sosammanhängande katalogstrukturej översenstämmande data efter escape-sekvens, kan ej läsa intern datamängsetikettosammanhängande endian mellan U_16 och U_32osammanhängande endian mellan U_16 och U_64osammanhängande endian mellan U_16 och U_IFelaktig delsäkerhetskopiehuvud: delsäkerhetskopiestorlek för litenej komplett datamängdsnamn hittat i ett delsäkerhetskopiehuvudinods senaste ändringsdatum (ctime) är större, EA kan vara annatogiltig decimalsiffradet verkar vara en bugg härsista'libgcrypt' är ej initialiserad och 'libdar' ej tillÃ¥ten att göra sÃ¥list()-metod mÃ¥ste angeslitenlzolzo-komprimeringjagihopslagning med möjlig överskrivningdata saknas för att byggadata saknas till att skapa en specialenhetdata saknas för att bygga en inodnamnad pipenästanejinga Utvidgade Attribut att jämföra me inga Utvidgade Attribut att jämföra me ingen aktuell mapp definieradingen aktuell läsmapp definierad--nodump-funktionen har ej aktiverats vid kompileringstillfället, sÃ¥lunda är den ej tillgängligtingetinte försedd med tillräckligt med plats att dumpa elastisk buffertinte samma storlek: %i <--> %iej sistaöverskrivenenkel filbefintlignärvarande lämnat lösenord är för lÃ¥ngt för det tilldelade minnetÃ¥terställer alla 
signalhanterare och fortsätter nÃ¥dde filslut innan alla väntade data kunde läsasläs och skrivenbart läsningborttagenborttagenborttagen frÃ¥n arkivetersattrotmappen har ingen föräldramapprot har inte nÃ¥gon föräldramapprot har ingen föräldramappsparad andramjuk länksymbolisk länk pekar inte pÃ¥ samma mÃ¥lsystem()-anrop misslyckades: Egenskapen "BehÃ¥lla fil komprimerad" är inte möjlig när man slÃ¥r ihop tvÃ¥ arkiv med olika komprimeringsalgoritmer. (Detta är för en kommande version av dar). Du kan fortfarande slÃ¥ ihop dessa tvÃ¥ arkiv men utan att behÃ¥lla filer komprimerade (med ändÃ¥ vill du kanske använda komprimering (-z eller -y-alternativ) för det resulterande arkivetför stor elastisk buffert eller osammanhängande struktur i elastisk bufferttyp %s är varken big eller little endian! FörstÃ¥r ej hantera heltal pÃ¥ ett flyttbart sätt pÃ¥ denna värd, avbryteruid-värde för högt för detta system för 'libdar' att kunna Ã¥terställa den säkertunix sockelokänd komprimeringokänd komprimeringsalgoritm: %Sokänd krypteringsalgoritmOkänd inmatningokänd datatyp i kataloganvändning ; %s Användning: %s [ -c | -x | -d | -t | -l | -C | -+ ] [/] [alternativ...] 
med -+ alternativ är -A alternativ obligatoriskmed -c alternativ är -A alternativ obligatoriskenbart skrivningfel värde givet till argumentet initial_size medan cachen initialiserasjadar-2.6.8/po/fr.gmo0000644000175000017520000064330713617552353011000 00000000000000Þ•Þ /Óü]H}I}X}n};~}º}=Ó}~ (~I~!K~m~(p~ú™~Д—e€ ý€GžGæ.‚J‚Ei‚)¯‚7Ù‚;ƒ1Mƒ?ƒC¿ƒJ„HN„O—„Cç„2+…5^…@”…0Õ….†-5†?c†<£†Hà†4)‡K^‡Eª‡>ð‡?/ˆCoˆJ³ˆ9þˆO8‰Aˆ‰AʉH Š;UŠD‘ŠCÖŠ#‹F>‹…‹)£‹$Í‹'ò‹$ŒF?Œ8†Œ7¿Œ@÷Œ=8>vAµB÷B:ŽC}ŽDÁŽ<:C:~,¹:æ;!0]>ŽIÍ)‘A‘9\‘+–‘7‘-ú‘B(’9k’)¥’&Ï’ ö’P“Lh“Cµ“Gù“)A”=k”;©” å”I•JP•B›•.Þ•F –+T–=€–*¾–:é–H$—#m—*‘—L¼—: ˜"D˜!g˜.‰˜K¸˜<™'A™"i™DŒ™BÑ™$š$9š$^š$ƒš)¨š+Òš$þš$#›$H›$m›$’›$·›$Ü›$œ'&œ$Nœ$sœ$˜œ$½œ$âœ$$,$Q$v%›?ÁDžFžQež·ž3Íž2Ÿ+4Ÿ1`Ÿ8’Ÿ.ËŸ!úŸ7 T k ( -¨ *Ö ¡¡&.¡AU¡A—¡3Ù¡ ¢#¢>¢Y¢t¢¢0ª¢0Û¢ £'£B£]£ {£†£&ž£.Å£^ô£ S¤]¤p¤ޤ©¤Ȥ‰ä¤‹n¥6ú¥1¦_F¦Y¦¦D§)E§&o§8–§9ϧ; ¨<E¨‚¨K¨é¨8©$:©2_©8’©Ë©Bè©+ªHª2gªšª´ª?Ъ «$«%A«5g«7«Õ«4õ«O*¬/z¬/ª¬/Ú¬G ­"R­9u­=¯­Bí­I0®"z®)®4Ç®ü®J¯e¯M~¯>̯H °T°&e°Œ°l«°h±j±kì±X²p²ƒ²œ² ·²Ųã²c³Od³¬´³(a´+Š´'¶´Þ´%å´ µ µ7µ29µHlµ$µµIÚµ@$¶Ee¶/«¶UÛ¶+1·P]·Q®·1¸#2¸#V¸#z¸3ž¸,Ò¸Xÿ¸½X¹+º#Bº"fº$‰º-®º#ܺ>»5?»#u»0™»#Ê»>î»-¼#G¼(k¼ ”¼#µ¼eÙ¼U?½#•½¹½ ؽ$ù½$¾"C¾/f¾$–¾K»¾$¿$,¿@Q¿$’¿K·¿2À$6À'[À+ƒÀ:¯À-êÀ(Á'AÁ3iÁÁ;·Á>óÁ)2Â\Â#rÂc–Â!úÂ3ÃPÃ!mÃ8Ã,ÈÃ+õÃH!ÄHjÄ ³ÄÔÄ8ÜÄÅÅ4ÅGGÅ:ÅfÊÅ91Æ1kÆtÆÇ+ÇCIÇFÇ6ÔÇ) È.5ÈKdÈ?°È)ðÈLÉ%gÉ)É)·É(áÉ Ê;+Ê#gÊ ‹Ê•¬ÊƒBËyÆË@Í!SÍ)u͟ͺÍÓÍ%îÍÎ%Î<§ÎzäÎ8_Ï-˜ÏÆÏ àÏ)Ð=+Ð*iÐ5”ÐÊÐ:ãÐ"Ñ)AÑÑkÑ=Ò!FÒ.hÒ—ÒµÒ=ÑÒÓ?(Ó/hÓŠ˜ÓE#ÔiÔˆÔÔ%½Ô#ãÔ$Õ=,ÕjÕ†Õ-£Õ2ÑÕÖÖ5Ö2QÖ„ÖI›ÖŠåÖHp×'¹×Ká׎-Ø#¼ØàØDöØL;ÙKˆÙFÔÙ+Ú=GÚO…Ú9ÕÚ>ÛNÛ3gÛ2›Û(ÎÛ'÷Û%Ü'EÜŒmÜúÜ!Ý4ÝHÝ#eÝ/‰Ý%¹Ý(ßÝÞÞ?4Þ&tÞG›ÞãÞüÞ3ß1JßI|ßKÆß%àF8à6à%¶àÜàöàa á"láá3§áÛá%öá'â.Dâ-sâ¡â&ºâáâã%ã>ãuUã/Ëã*ûã&ä&Eä'lä ”ä@µä3öä!*å%Lå6rå©å Æåçåæ#æ 2æ=æ(Næ0wæ)¨æ3ÒæKç2Rço…çpõçfèK~èÊè-éè-é>Eé+„é"°é ÓéCÞé "ê0ê'Oê,wêA¤êæêë ë75ë:më¨ë3ÁëHõë+>ìNjì5¹ì0ïì= í^íYoí?Éíb îBlî/¯î3ßîÉï5Ýï.ð&Bðiðoð6tðJ«ð7öð@.ñoññF¯ñ+öñN"òJqò¼òÕò-õò0#ó:Tóaô6ñô/(õ¤Xõýõ5öRö3oöb£ö¬÷I³÷ý÷bø€ø=‘ø(ÏøUøø2Nú2úq´ú%&û*Lû&wûžûj,ü—ü·ü×üN÷ü2Fýmyý"çý> þIþiþnþÉtþ7>ÿ2vÿ+©ÿ Õÿ:öÿ1$O9t+®Ú*ù!$F0f?—]×k5*¡0Ì1ý0/%`†£)Á#ëJ.Z-‰9·.ñ5 )V(€*©*Ô&ÿ-&T_q/Ñ/&1+X_„2ä$ <$]+‚+®)Ú, 21 9d 9ž =Ø 
/ F .Z ‰ £ )¸ â *ø # = "Z "} #  YÄ  = $X %} 0£  Ô õ  6' ^ @| S½ 8*J/u/¥Õ?ó(3%\'‚!ª#Ì;ð>,)k•)µ ß9O:&ŠJ±/ü.,)[+…S±TIZ¤ç¾-¦Ôô/D;a´+Ñ9ý7K4e/š'Ê+ò5DT™"·Ú-ø3&+Z-†8´ í8ù<2,o4œ-Ñ5ÿJ5y€ú>7?!w™+¹kåQOq3Á#õ9cT_¸#,LP))Çñ ^( V‡ IÞ 7(!&`!6‡!¾!*Û!4"”;"4Ð"#"$#RG#š#*$·:$bò$hU%e¾%#$&H&c&0€&…±&37'ck' Ï'ð'(^(w(eŠ()ð()-2)`) z)›)Kµ)­*'¯*×*­ß*V,'ä- .o.Ž.A­.1ï.!/+@/Al/K®/+ú/"&0?I0N‰09Ø0(1$;1`1|1*’1½1CÙ1@2B^2I¡23ë2@3`3#q3•3¬3»3Ì3Ü3í3*4+4@4[V4&²4Ù4í45'!5"I5Bl5>¯5/î5'6.F6u6’6#£6Ç6â6ý6#7<7\7|7œ7[¼78)88)b8Œ8'ª8 Ò8,à8f 93t9¨:)Â: ì:÷÷;Áï<©±>¡[?ý?íŽ@e|AâAæÿAŒæBºsC÷.Df&E¡E/F=1G&oGf–G;ýGw9HL±HzþH<yIú¶I3±JKåJ¬1K¿ÞK`žL?ÿL|?Mx¼MÅ5N?ûN$;OY`OƺO3PcµPHQŸbQËRÖÎR3¥S]ÙS{7T^³TVC!VeV;|V ¸V>ÙV"W ;W'\W„W"¡WPÄWHX]^X¼X.ÚX9 Y,CY$pY0•Y)ÆYðY Z 'ZHZ_Z}Z”Z«ZÂZàZþZ*[@[*[[%†[.¬[`Û[<\N\\-«\$Ù\þ\']+)]_U]Lµ]$^'^';^0c^”^X²^ _D%_dj_Ï_â_*`9-`+g`“`)¥`)Ï`ù`H aESa‰™af#bbŠb¡íb$c,´c)ác d,dJd<^d ›d$¥d+ÊdHödN?e"Že±e¶e¼e×eÚeøef4fRfUpf%Æfìfg!g,hQVh ¨h Éh.Óhˆi1‹i ½iËi#çi j@,jBmjM°jDþjNCk@’kÓkîk5 lDClEˆlÎlíl m3,mG`m1¨mIÚm1$n.Vn;…n!Án'ãn o*'o1Ro„oL¢oïo(p7pKp_p&~p:¥p@àp!qA2q0tq)¥qÏqQêq#ü®;¯V¯n¯†¯;œ¯"د"û¯%°D° ^°j°Zo°-ʰ!ø°9±ÒT±0'²hX²Á²à²ç²ë²û² þ²k³‹³%¡³&dzî³( ´%6´\´ z´…´Š´%´&³´Ú´ê´$µ ,µ9µXIµ¢µ4§µܵöµ*þµ8)¶ b¶n¶ w¶%‚¶¨¶*·2·1;·6m·/¤·:Ô·¸ ¸(¸0¸9¸R¸&[¸%‚¸¨¸*Ÿð¸ù¸¹<¹Z¹?j¹ ª¹1´¹æ¹ü¹ºB&º?i»/©»Ù»ô»¼w¼OŒ¼ ܼ#è¼ ½½-½!A½!c½ …½1“½8Ž!þ½" ¾PC¾6”¾6˾&¿&)¿ P¿E[¿C¡¿å¿è¿gì¿ÅTÀÂ+)ÂUÂPe¶ÂPÏÂ$ Ã*EÃpÃrÑÃ&”Ã0»Ã;ìÄÔ(ÆóýÆNñÇK@ÈŒÈ ¦ÈYÇÈ1!É>SÉ;’É9ÎÉ@ÊJIÊY”ÊKîÊL:ËF‡Ë/ÎË;þË3:Ì6nÌ(¥Ì2ÎÌNÍ:PÍN‹Í7ÚÍTÎOgÎ<·ÎVôÎHKÏ<”ÏCÑÏWÐTmÐ:ÂÐ]ýÐ<[ÑA˜ÑQÚÑ1,ÒV^Ò'µÒ9ÝÒ"Ó:Ó+VÓH‚Ó1ËÓ1ýÓD/ÔCtÔR¸ÔO ÕO[ÕM«ÕQùÕQKÖGÖTåÖ>:×#y×O×Dí×62Ø=iØO§Ø:÷Ø2Ù;LÙ2ˆÙ<»Ù8øÙG1Ú=yÚ$·Ú!ÜÚþÚfÛ_€ÛBàÛ?#Ü*cÜWŽÜAæÜ$(ÝYMÝa§Ý6 Þ-@ÞSnÞ4ÂÞX÷Þ8PßE‰ßFÏß&à<=àUzàÐà&æà( á86á[oáNËá â#;â1_âP‘â$ââ$ã$,ã$Qã)vã- ã$Îã$óã$ä$=ä$bä$‡ä$¬ä$Ñä'öä$å$Cå$hå$å$²å$×å$üå$!æ$Fæ-kæ>™ælØæ$EçhjçÓçHîç;7è0sè=¤èFâè-)é/WéM‡éÕéïé:êM@ê2ŽêÁêÛê&öêFëFdë6«ëâëúëì8ìWìrì0ì>¾ìýìí3íNíní+…í,±íAÞíƒ î ¤î ®î.Ïî+þî *ïKï”iï”þï<“ðÐðeäðUJñU ñ,öñ-#ò>Qò1òAÂò6ó;óRYó'¬ó+Ôó!ô<"ôA_ô ¡ôFÂô õ!*õ2Lõ&õ&¦õNÍõ 
ö)*ö+Tö;€öJ¼ö+÷83÷Nl÷/»÷/ë÷/øGKø"“ø8¶øBïøF2ùXyù'Òù0úùJ+ú"vúY™úóúWûHfûw¯û'ü*8ü,cü†üŽý¦ý‚7þºþÙþóþÿ +ÿ9ÿYÿixÿUâÿÄ8&ý*$4O„(‹´Ëá0ãS(hS‘@åJ&2qm¤.fAe¨6Ee…K¥3ñS%±y0+\'|+¤VÐ'U>B”#×;û7 UN "¤ Ç -ç ) '? og b×  : #[       Á %â A .J 1y  «  Ì ;í  ) 1J =|  º #Û 0ÿ H0;y1µ+ç8"LQoaÁ1#UujŒ#÷Kg#~0¢3Ó0`8L™(æE\_R”<çf$C‹CÏxŒ ©GÊ]2p-£4ÑW^^=½Mû2I:|1·5é)%I9o@©Áꚬ’GÚ!ð)<![}7Õšé[„àEn2´ç! )) IS 8 1Ö !V"!'y!)¡!òË!¾"1Ç"Kù"E#\#Hs#¼#IÕ#*$–J$Uá$%7%]%t%-”%0Â%1ó%_%&&…&-¬&<Ú&2',J'w'%Š'D°'$õ'r(Î(m\)=Ê)R*®[*? +"J+Sm+WÁ+V,Jp,A»,Hý,jF-O±-E.0G.?x.F¸.9ÿ.99/:s/E®/–ô/$‹0,°0!Ý0'ÿ05'1>]13œ1.Ð1ÿ12G;2Qƒ2aÕ2%73(]38†3F¿3h4eo4CÕ4V5Gp55¸5(î5/6qG69¹6,ó6S 7't73œ7@Ð7@8HR8&›8-Â8 ð89G/9!w9v™90:)A:,k:4˜:2Í:6;R7;AŠ;:Ì;)<J1<"|<)Ÿ<)É<'ó<= 1=>=.R=E=*Ç=>ò=I1>9{>µ>‚7?º?PÖ?-'@0U@2†@5¹@Aï@(1A ZAAfA ¨A/µA/åA3B:IB&„B/«BÛBGøBQ@C"’CIµCXÿC?XDY˜DIòDF¶LžõLY”NKîN@:O¶{O(2P5[P)‘PU»PfQ´xQI-R+wRp£RSG&S)nSS˜S7ìTF$UˆkUDôUB9VD|V ÁVzbW+ÝW+ X+5XgaXJÉX‹Y3 YPÔY+%ZQZVZÿ\ZI\[4¦[-Û[* \H4\E}\Ã\1á\5]*I]2t].§]8Ö].^S>^d’^}÷^Au_C·_Cû_I?`>‰`5È`#þ`="a"`aWƒa<Ûa>bGWb4ŸbIÔb,c2Kc0~c.¯c@Þc0dPd}ld:êd=%e+ce4e…ÄeGJf1’f+Äf0ðf/!g7Qg3‰g8½gGögE>hE„hUÊh8 iYi<ui+²i%Þi5j&:j=aj+Ÿj*Ëj5öj7,k/dkq”k7l0>l/ol6Ÿl(Öl0ÿl&0m(WmD€mÅm\äm}AnL¿n2 o7?oIwo/Áomño8_p/˜pTÈp.q0LqW}qPÕq:&r4ar5–r<ÌrI slSs5ÀspösHgt7°t,èt5uyKuzÅu_@v$ vÅvGÚw'"x+Jx:vx ±xYÒx",y%Oy7uyE­y óy+z@@zTz7Öz1{;@{U|{#Ò{(ö{-|9M|4‡|0¼|7í|W%} }~A‹~FÍ~5IJ6”;Ëp€™x€HL`(­)Ö4‚h5‚*ž‚YÉ‚<#ƒ,`ƒ(ƒ$¶ƒ~Ûƒ~Z„Ù„-õ„g#…7‹…2Ã…$ö…*†ZF†r¡†B‡BW‡>š‡<Ù‡#ˆ.:ˆEiˆ§¯ˆGW‰3Ÿ‰5Ó‰[ Š«eŠ‹ø%‹lŒ‹Œv A‚%ÄêK Ž£VŽIúŽuD!ºÜúy’¨A6‘+x‘1¤‘"Ö‘!ù‘%’sA’½µ’3s“§“ª°“‡[•/ã–—u*—( —LÉ—R˜(i˜<’˜NϘ<™9[™+•™SÁ™nšV„šCÛš(›H›d›(›$ª›YÏ›\)œY†œQàœF2UyÏ,ß žž/žAžPžcž6vž­žÇžqãžUŸsŸ‘Ÿ®Ÿ1ÍŸ,ÿŸI, ]v 6Ô  ¡')¡#Q¡u¡,ˆ¡µ¡&Ñ¡&ø¡*¢+J¢+v¢+¢¢+΢]ú¢)X£)‚£)¬£%Ö£Rü£ O¤6\¤z“¤5¥D¦:_¦¾š¦$Y§u~¨¼ôªȱ«—z¬"­Š5®"À® 㮥í¯Ô“°h±ˆ²Û³`ä³OEµ*•µ\ÀµF¶Šd¶Zï¶J·BÛ·$¸?C¹Xƒ¹Çܹܤºm»Lﻊ<¼}Ǽ E½OQ¾.¡¾aоá2¿LÀ€aÀaâÀÄDÁ ¶ÃIÉÃtÄŸˆÄÈ(ÅñÆAÇEÇ7aÇ!™ÇU»Ç*È%<È+bÈ(ŽÈ"·ÈÚÈTkÉvÀÉ"7Ê0ZÊK‹Ê7×Ê,ËC<Ë,€Ë­ËÍË$ìË#Ì5Ì#RÌ#v̵̚ÌÒÌïÌ7 Í!BÍ<dÍ.¡Í=ÐÍx·ÎC¥Î7éÎ,!ÏNÏ2RÏ<…ÏxÂÏW;Ð#“Ð&·Ð!ÞÐDÑ EÑjfÑ$ÑÑiöÑk`ÒÌÒ(çÒQÓEbÓ0¨ÓÙÓ6ñÓ6(Ô_ÔgxÔpàÔÉQÕ‡Ö}£Öâ!×9Ø=>Ø;|Ø ¸ØÙØùØFÙ 
VÙH`Ù8©Ù\âÙb?Ú)¢ÚÌÚÑÚ!×ÚùÚüÚÛ<Û\Û|ÛtœÛ8ÜJÜ*hÜ)“ÜA½Ü?ÿÜ6?ÝLvÝBÃÝgÞAnÞ °Þ-»ÞséÞE]ߣß,¸ß.åß)à=>àF|àqÃàY5áOáEßá %â%FâBlâI¯âPùâ.Jã-yã)§ã4ÑãTäB[ä]žäGüäLDåB‘å6Ôå@ æ#Læ/pæ: æ%Ûæcç"eç*ˆç³çÄç&Óç,úçA'èCiè­èIÇè4é+FéréS‘é'åéŒ ê$šê%¿ê0åê0ë"Gë>jë&©ë Ðë/ñëm!ì|ì° í,½í&êí1î„CîGÈîï$ïH;ïY„ïjÞï=Ið,‡ð¤´ð)Yñ¥ƒñ+)òZUòG°òjøò‡có&ëóQôÜdôDAõ+†õ ²õkÓõ)?öHiö<²öQïöQA÷3“÷Ç÷dÌ÷q1øm£ø\ùvnùqåùPWú€¨ú´)ûCÞû$"üHGüIü<ÚüLýÙdý>þABÿe„ÿFêÿ71êiYTW®`YgÌÁ[ŽUê9@•z$Š5ÈÀ:‰GÄ9 HF8aÈ^* F‰ )Ð ìú Wç i? © (à Iì '6 &^ <…   à    " C >b R¡ ô !!6ÏX'(UP5¦2Ü0<OŒ- ,Î?û(;?dD¤é#&BiA•;×<8P)‰S³\Ldm±MÀmF.RuSÈ ­ô¸|­>*Sij½K(‹ty'z‚¢Š%<°í$ó &9,`E,Ó( 7) a 4e  š  ¤  ® ¸ wÀ 8!@!H!$P!u!z!!,Š! ·!0Â!0ó!,$"Q"|q"*î")#zC#{¾#:$?$Z$6o$<¦$ã$'é$[%@m%@®%Xï%H&^&r&RŒ& ß&)é&'2'AQ'“'¨'¹'3Ê'þ'(`((P‰(GÚ(,")O)5d)š)&«)BÒ)B*8X*‘*6®*Zå*@+3\+3+%Ä+ ê+õ+ ,.,(G,%p,[–,ò,6û,02-8c-aœ-,þ-+.@0.q.„..Z¨.///9/IS/2/ Ð/Ú/"ö/k0&…0&¬0%Ó0=ù0w71D¯1ô1hý1Zf2Á2Ü2ü23E.32t31§3;Ù3"4 84C4~K4;Ê4&5I-5ðw5:h6ˆ£6&,7S7Y7]7m7!q7e“7!ù7388O8*ˆ8³8Î8(î8 9"9+9*/9*Z9…9¡9/Á9ñ9:m:‚:4ˆ: ½: Þ:(ê:?;S;Z; c;*q;¡œ;><F<DO<=”<FÒ<G=a= u=ƒ= Œ=–=¬=$µ=$Ú=$ÿ=5$>Z>c>ƒ>YŒ>æ>Lü>I?4Y?Ž?£?¹?eÎ?74A:lA#§AËAæA‘ëAY}B×B!æB C#C7C'LC$tC™CC©CQíC(?D"hDP‹D-ÜDQ E0\E0E¾EFÍEIF^FaFaeF»IÔBh‰Ð Eø?Iu^mUIÉï%¾Gxß ¸¢±cÀ¥5ý¦«®¶º´C·'1X#:ù9eÔÝòãJ¨·FÔLJïå_‹8.‰²ñ"ÂŒ¨Ë{Žw*}Âd©*pçwÖÌx÷@\ÝšA(¾„/6_ùi°UŽó˪¶™) t Ú9î\Í\hË‘!Â)“®ï#$ÖNBùÿãÞåeJÓb\áƒn¤}ÅŠó´èSŽÜò<Z,Êö<Å/õmŒÉŠa¨>‹(Kª@?LFµç…™ŸpU–Ü- ¬*½•2ºRitOÊ/òµ»8)õVÆ)#]a‚¥H<­Ïr³ò¿²¾ˆ0׉]uib…-‹(qúÇëüßœTuPžX€ÃjÒFXÓs?81Þ=o^vÖ±Û¤êDhs@æË§Œù…Mf<þ¤s8y¢CÓgšgÐN¤+éâ2ÞÚ`‘iè4ø„‡Jr½;m+†¯gu9zð¸2"³¿‘Ñ~Kí]©ÈõX¨y^aûkX † Õ$+,}˜æ¹Î­Å5àºÍí’4&ð([µÜˆ²q­4Á°.Ï^WÝEM»¾¶£‰¿íèyì¬’Ç M,'"oÊ 3t´Š ÜËBBþÒ!,"HúêA¦å',6 ]R-ÄK9Jk7„ à?Ñ‘x`Ž/Ú­žb „ãÀ¼ð>ÃG`–zOZ’µ~×ÐÆ|†YëüQ _ Q©Ù†|ä!ÆÚ¾LÎ Ÿ?“F K®¯Ã6žô¦,ø|ÍGò>¨ª2‹íkEP3ä!×ApÿGlªíf²”G˜×J;t©"-O^C_%€úM-Èh§râîǺ°”Oƒ¶ÜÎ{ûe⹊Wáš42·N—•FÖ¼LKÂSñwV«ùMNä ‡˜¿î¸oã&‡Dô[#–ªA’„™›ç‰Oè¶ô®Ø[&Í3yæÁ s.Ÿ Žjþqã?PߣÕÛì0'i­¬@›¿VQVï7Þ”ÛŒVÖqná“Ü–›ƒBZؽt¬ëf%ÝC3öÏVIÿ-Öé<Ëà:ÿSŸU”g:tÆÛT¬°é·ûÚüÐÞ¸áôzz Lk× Š·ÔÙœ÷èÔA a (x*©4Ì×°ýK÷!õC0±; vj•ƒLY»7#Œ(W‡xÓÐl=$oTS.=¹ ü΢+lk«­¤ÕÌž)ºæ_Ȭ½`n÷wzOàØÝÈÌœaÆuÓqþÀr$”sÊlê7ļŸ[}RÑjcÕélv¦.˜³‚ 
óm€{‚0¨TW±Ì×—†åÝÒ§Ù¹b@u^¹HUN{~éE›â¼“)ÅöHÆÂÁû³ ´ÉH”/¹bµh+cf™ «1êm!¸Ç$²&A/nNS¯õš¾=¢®úo‚ºW –„ž ÂÒ+Ìç‘=>ïzГQ&ÙŽ0E´:ˆp[6—Ù~æëyóÉ;ˆ%:£ý§â…»{$ÅÑ87öå…QTˆ’pÞ± ]œ6p ¦ÁgC]Q¡š|Ñ'«¢ØßHvBÕ61øö÷±bÑwµ §ÙêdfÒ~û™ÈGqìî\ZßÎÚi΃vs<€‡mw9ý2€Z¦€Àø57 ¡RRYü®Èâ`1î´‘lž £Ïen ÒYD—X©}¥DYäI`ñ3%oˆ80 •³Erá;›Ûú•œë&þ«ç½‹9Ä=d¥c>äÄŸaÏnÿÀ¿| jS#ÔZ4Á—:DÛÍÉÊyRý‚c–5rÓ½Íð5@v÷‰’ŒÊLŠ…P¥Ä~eÕ\ñ;f|ó¼§ Px³²ÇðìFd“5¯Ø¼°•†œÁ¡™.¡W‚>ô[eYc%Ï3É£hØ‹˜¡P'˜¤}›»j£ƒìg¥ªTJš*ÅkdU ¶¡{I¸Àà1*¯Mñ"ÄdD¯_ Standard extraction time ownership the first non options argument is the archive to read the provided file. the second non option argument is the archive to create No pause in output Pause each %d line of output Executing batch file line: %S Dar Manager Database used [%s] : %S WARNING! This archive only contains the catalogue of another archive, it can only be used as reference for differential backup or as rescue in case of corruption of the original archive's content. You cannot restore any data from this archive alone Your decision about file's EA: [p]reserve [o]verwrite mark [s]aved and preserve mark saved and overwri[t]e [m]erge EA and preserve merge EA a[n]d overwrite [r]emove [*] keep undefined [a]bort Your choice? Your decision about file's FSA: [p]reserve [o]verwrite mark [s]aved and preserve mark saved and overwri[t]e [*] keep undefined [a]bort Your choice? Your decision about file's data: [P]reserve [O]verwrite mark [S]aved and preserve mark saved and overwri[T]e [R]emove [*] keep undefined [A]bort Your Choice? date format: [[[year/]month]/day-]hour:minute[:second] Available algo: gzip,bzip2,lzo,xz. 
Exemples: -zlzo -zxz:5 -z1 -z Extended Attribute: [ including %i hard link(s) %i file(s) have been record as destroyed since backup of reference -# encryption block size -$ encryption key for auxiliary archive -* same as -# but for archive of reference -+ merge two archives / create a sub archive -, ignore directories that follow the Directory Tagging --nodump do not backup, files having the nodump 'd' flag set -/ which way dar can overwrite files at archive merging or -8 sig add delta signature to perform binary delta if used as ref. -@ [path/] Do on-fly catalogue isolation of the resulting archive -@ [path/] auxiliary archive of reference for merging -A add an archive to the database -A [path/] archive to take as reference -B specify the database to use (read or modify) -B read options from given file -C isolates the catalogue from an archive -C creates an empty database -D excluded directories are stored as empty directories -D delete an archive from the database -E command line to execute between slices of the archive -E command to execute between slices -E command to execute between slices of destination archive -F command to execute between slice of source archive -F same as -E but for the archive of reference -H [N] ignore shift in dates of an exact number of hours -I files to include in the operation (all by default) -J same as -K but it does concern the archive of reference -K use as key to encrypt/decrypt -L execute on a given database a batch of action as defined by -M stay in the same filesystem while scanning directories -N do not read ~/.darrc nor /etc/darrc configuration file -O[ignore-owner | mtime | inode-type] do not consider user and group -P subdirectory to exclude from the operation -Q suppress the initial warning when not launched from a tty -R filesystem root directory (current dir by default) -S first file size -S first file size (if different from following ones) -T tree output format -U mask to allow certain EA -V displays software version 
-V displays version information -V displays version information -X files to exclude from the operation (none by default) -Y do only compress the matching filenames -Z do not compress the matching filenames -[ filename contains a list of files to include -] filename contains a list of files to exclude -^ permission[:user[:group]] of created slices -aSI slice size suffixes k, M, T, G, etc. are power of 10 -aSI slice size suffixes k, M, T, G, etc. are powers of 10 -aa do not try to preserve atime of file open for reading. -abinary slice size suffixes k, M, T, G, etc. are power of 2 -abinary slice size suffixes k, M, T, G, etc. are powers of 2 -ac do not try to preserve ctime (default behavior). -acase the masks that follow are now case sensitive -ag set the following masks to be glob expressions -am set ordered mode for all filters -an the masks that follow are now case insensitive -ar set the following masks to be regex expressions -as only list files saved in the archive -b ring the terminal bell when user action is required -b change the basename to use for the give archive number -c check database for dates order -c creates an archive -d compares the archive with the existing filesystem -d specify the path to dar -e dry run, fake execution, nothing is produced -f do not restore directory structure -f list the archives where the given file is present -g subdirectory to include in the operation -h displays this help information -h displays this help information -i user interactive mode -i pipe to use instead of std input to read data from dar_slave -i pipe to use instead of std input to read orders from dar -k do not remove files destroyed since the reference backup -l gives information about the archive compiled in the database -l lists the contents of the archive -m do not compress file smaller than -m move an archive within a given database. 
-n don't overwrite files -o pipe to use instead of std output to orders dar_slave -o pipe to use instead of std output to write data to dar -o specify a list of option to always pass to dar -p pauses before writing to a new file -p change the path to use for the given archive number -q suppress final statistics report -r do not restore file older than those on filesystem -r restores the given files -s shows the number of most recent file by archive -s split the archive in several files of size -t tests the archive integrity -u mask to ignore certain EA -u list the most recent files contained in the given archive -v display more information about what is going on -v[s|t|d|m|f|a] verbose output -vs display skipped files -w don't warn before overwriting files -w only with -r, restores in state just before the given date -wa don't warn before overwriting and removing files -x extracts files from the archive -y repair a truncated archive -z [[algo:]level] compress data in archive. -z = -z9 = -zgzip:9 -~ command between slices of the auxiliary archive Delta compression support : %s Detected system/CPU endian : %s Extended Attributes support : %s Furtive read mode support : %s Integer size used : %d bits Integer size used : unlimited Large dir. speed optimi. 
: %s Large files support (> 2GB) : %s Libbz2 compression (bzip2) : %s Liblzma compression (xz) : %s Liblzo2 compression (lzo) : %s Libz compression (gzip) : %s Linux ext2/3/4 FSA support : %s Mac OS X HFS+ FSA support : %s Multiple threads (libthreads): %s %s Posix fadvise support : %s Public key ciphers (gpgme) : %s Remote repository support : %s Restores dates of symlinks : %s Strong encryption (libgcrypt): %s Thread safe support : %s Timestamp read accuracy : %S Timestamp write accuracy : %S ext2fs NODUMP flag support : %s including %i hard link(s) treated archive # | most recent/total data | most recent/total EA %i byte(s) have been wasted in the archive to resave changing files %i inode(s) added to archive %i inode(s) changed at the moment of the backup and could not be saved properly %i inode(s) deleted %i inode(s) failed to be saved (filesystem error) %i inode(s) failed to restore (filesystem error) %i inode(s) ignored (excluded by filters) %i inode(s) not restored (not saved in archive) %i inode(s) not restored (overwriting policy decision) %i inode(s) not saved (no inode/file change) %i inode(s) recorded as deleted %i inode(s) recorded as deleted from reference backup %i inode(s) restored %i inode(s) saved %i inode(s) with only metadata changed %i item(s) do not match those on filesystem %i item(s) ignored (excluded by filters) %i item(s) treated %i item(s) with error %s comes with ABSOLUTELY NO WARRANTY; %s comes with ABSOLUTELY NO WARRANTY; for details type `%s -W'. %s comes with ABSOLUTELY NO WARRANTY; for details type `dar -W'. %s is part of the Disk ARchive suite (Release %s) , please provide it. - Door entries : %i - block devices : %i - character devices : %i - directories : %i - named pipes : %i - number of inode with hard link : %i - number of reference to hard linked inodes: %i - plain files : %i - symbolic links : %i - unix sockets : %i . Ignoring file or directory . Retry ? . Retry command-line ? . Retry to launch dar as previously ? 
: Failed resaving uncompressed the inode data : Resaving uncompressed the inode data to gain space is not possible, keeping data compressed Choice: Continue anyway ? EA restored for %i inode(s) EA saved for %i inode(s) FSA restored for %i inode(s) FSA saved for %i inode(s) IF POSSIBLE TRY TO PRODUCE THIS ERROR, A SCENARIO THAT CAN REPRODUCE IT WOULD HELP MUCH IN SOLVING THIS PROBLEM. THANKS IF POSSIBLE TRY TO REPRODUCE THIS ERROR, A SCENARIO THAT CAN REPRODUCE IT WOULD HELP MUCH IN SOLVING THIS PROBLEM. THANKS Only one option of -c -d -t -l -C -x or -+ is allowed Ready to continue ? THANKS TO REPORT THE PREVIOUS OUTPUT TO MAINTAINER GIVING A DESCRIPTION OF THE CIRCUMSTANCES. This is free software, and you are welcome to redistribute it under certain conditions; This is free software, and you are welcome to redistribute it under Total number of inode(s) considered: %i Total number of items considered: %i Using libdar %u.%u built with compilation time options: Using libdar %u.%u built with compilation time options: Using libdar %u.%u.%u built with compilation time options: Using libdar %u.%u.%u built with compilation time options: [return = YES | Esc = NO] a : Save as n : pause each 'n' line (zero for no pause) c : check date order certain conditions; type `dar -L | more' for details. compiled the %s with %s version %s d : path to dar o : options to dar f : give file localization m : modify archive order for details type `dar -W'. is a slice from another backup, please provide the correct slice. is about to be overwritten. is not present in the archive l : list database contents A : Add an archive not merged (user choice) not restored (user choice) p : modify path of archives b : modify basename of archives q : quit type `%s -L | more' for details. type `dar -L | more' for details. 
u : list archive contents D : Remove an archive w : write changes to file s : database statistics with %i hard link(s) recorded "-k" (or "-kignore") and "-konly" are not compatible"first_file_size" cannot be different from zero if "file_size" is equal to zero# E X I T I N G ! ## NOT CAUGHT EXCEPTION, ## UNEXPECTED EXCEPTION, #%S Can we read it as if it was zero (1st January 1970 at 00:00:00 UTC)?%S [%c] [ REMOVED ENTRY ] (%S) %S%S already exists, and overwritten is forbidden, aborting%S did not exist before specified date and cannot be restored%S has a bad or corrupted header, please provide the correct file.%S is about to be deleted (required by overwriting policy), do you agree?%S is about to be overwritten, OK?%S is about to be overwritten, continue ?%S is about to be removed from filesystem, continue?%S is an not a valid path: %S%S is not a valid file (wrong magic number), please provide the good file.%S is not a valid number%S is required for further operation, please provide the file if you have it.%S is required for further operation, please provide the file.%S must be removed, but does not match expected type, remove it anyway ?%S: %s, aborting%S: %s, do we retry network operation?%S: %s, retrying in %d seconds%S: Overwriting policy (Data) is undefined for that file, do not know whether overwriting is allowed or not!%S: Overwriting policy (Data) is undefined for that file, do not know whether removal is allowed or not!%S: Overwriting policy (EA) is undefined for that file, do not know whether overwriting is allowed or not!%S: Overwriting policy (FSA) is undefined for that file, do not know whether overwriting is allowed or not!%SIs subdir of: %S [%S]%SPath is: %S [%S]%SPath leads to: %S [%S]%Sglob expression: %S [%S]%Snot( %S %S)%Sregular expression: %S [%S]%d is not a valid block size%d is the default value for --sparse-file-min-size, no need to specify it on command line, ignoring%d is the default value for -m, no need to specify it on command line, 
ignoring%s does not name a compression "[algorithm][:][level]" , like for examples "gzip", "lzo", "bzip2", "lzo:3", "gzip:2", "8" or "1". Please review the man page about -z option%s type is not a signed type as expected%s type is not an unsigned type as expected%s type length is not %d byte(s) but %d%s: %s'-A +' is not possible with -y option, compression ratio , ready to continue ? --$ is only available with -+ option and -c options-$ is only useful with -@ option, for the auxiliary archive of reference-%% is only available with -+ option-%% is only useful with -@ option, for the auxiliary archive of reference--------------+-------------------------+-------------------------alter=do-not-compare-symlink-mtime option is useless with -y option--alter=tape-marks is impossible with -y option--nodump feature has not been activated at compilation time, it is thus not available--sequential-read is useless with -y option--sparse-file-min-size only available while saving or merging archives, ignoring-. option is only useful when merging, creating or isolating an archive, ignoring-/ option is only useful with -+ option, ignoring-/ option is useless with -y option-5 option is useless with -y option-8 option is useless with -y option-9 option is only valid with -A option, ignoring it-< and -> options are useless with -y option-= option is valid only while saving files, thus in conjunction with -c option, ignoring-= option will be ignored as it is useless if you do not specify to which files or directories this backup hook is to be applied, thanks to -< and -> options. 
See man page for more details.-@ is only available with -+ and -c options-@ option is useless with -y option-A option is not available with -l-A option is required with -y option-B option cannot be given inside a batch file-D option is useless with -y option-F is only useful with -A option, for the archive of reference-H is only useful with -A option when making a backup-H is only useful with -c, -d or -x-H is only useful with -r option when extracting-H option is useless with -y option-J is only useful with -A option, for the archive of reference-M is only useful with -c-M option is useless with -y option-O option is not possible with -y option-S option requires the use of -s-T option is only available with -l-X, -I, -P, -g, -], -[ and any other file selection relative commands are not possible with -y option-Y and -Z are only useful with compression (-z option), ignoring any -Y and -Z option-\ option is useless with -y option-a option requires an argument-ac is only useful with -c or -d-ad option is useless with -y option-ae option is useless with -y option-af is not possible with -y option-af must be present before -A option not after!-af option is only available with -c-af option need not be specified more than once, ignoring extra -af options-ah option is useless with -y option-ai option is useless with -y option-ak is only available while merging (operation -+), ignoring -ak-ak option is useless with -y option-ak option need not be specified more than once, ignoring extra -ak options-as is only available with -l, ignoring -as option-as option is useless with -y option-asecu option is useless with -y option-e is only useful with -x, -c or -+ options-e option is only available when using -r option, aborting-f in only available with -x option, ignoring-k option is not possible with -y option-k option is only useful with -x option-konly and -kignore cannot be used at the same time-m is only useful with -c-m is only useful with compression (-z option), ignoring 
-m-o is mandatory when using "-A -" with "-c -" "-C -" or "-+ -"-p and -Q options are mutually exclusives-r is useless with -n-r option is useless with -y option-u, -U, -P, -g, -], -[ and any other EA selection relative commands are not possible with -y option-vf is only useful with -c option-w option is only valid with -r option, ignoring it-w option is useless with -n-wa is only useful with -x option-z option needs only to be used with -c -C or -+ options-{ and -} options are useless with -y option-~ is only available with -+ and -c options-~ is only useful with -@ option, for the auxiliary archive of reference... A strong randomly generated key could be found after %d iteration(s): file not present in filesystemA problem occurred while reading this archive contents: ANDAborting batch operation: Aborting program. Aborting program. An error occurred concerning user command execution: Aborting program. An error occurred while calling libdar: Aborting program. The requested operation needs a feature that has been disabled at compilation time: Aborting program. 
User refused to continue while asking: Aborting program: child process died unexpectedlyAccess mode | User | Group | Size | Date |[Data ][D][ EA ][FSA][Compr][S]| FilenameAdding %S to archive: %SAdding Hard link to archive: Adding a new layer on top: Caching layer for better performances...Adding a new layer on top: Escape layer to allow sequential reading...Adding a new layer on top: Strong encryption object...Adding a new layer on top: compression...Adding a new layer on top: scrambler object...Adding an explicit escape sequence of type seqt_not_a_sequence is forbiddenAdding cache layer over pipe to provide limited skippability...Adding only inode metadata to archive: %SAdding reference to files that have been destroyed since reference backup...All files asked could not be restoredAll layers have been created successfullyAn empty string is not a valid group nameAn empty string is not a valid user nameArchive %S requires a password: Archive basename (or extracted catalogue basename) to add: Archive delayed termination engagedArchive fast termination engagedArchive format older than "08" (release 2.4.0) cannot be read through a single pipe. It only can be read using dar_slave or normal plain file (slice)Archive internal catalogue is not identically signed as the archive itself, this might be the sign the archive has been compromisedArchive internal catalogue is properly signed but its content does not match the tape marks used so far for sequentially reading. Possible data corruption or archive compromission occurred! 
if data extracted in sequential read mode does not match the data extracted in direct access mode, consider the sequential data has been been modified after the archive has been generatedArchive is closed.Archive is composed of %i file(s)Archive is signed : %SArchive number to modify: Archive number to move: Archive number to remove: Archive number who's path to modify: Archive number: Archive of reference is not signed properly (no the same signatories for the archive and the internal catalogue), do we continue?Archive on stdout is not compatible with slicing (-s option)Archive repairing completed. WARNING! it is strongly advised to test the resulting archive before removing the damaged oneArchive signature is only possible with gnupg encryptionArchive size is unknown (reading from a pipe)Archive size is: %i bytesArchive total size is : %i bytesArchive version format : %sArchive version too high, use a more recent version of libdarAre you sure to remove archive number %d ?Argument given to -H is not a positive integer numberArguments read from %S :Arguments to -r must be relative path (never begin by '/')Asking for an element out of arrayAsymmetric key encryption used : %SAt least one slice of an old archive with the same name remains in the directory %s. It is advised to remove all the old archive's slices before creating an archive of same name. 
Can I remove these old slices?BROKEN Bad CRC, data corruption occurredBad answer from peer, while closing connectionBad formatted date expressionBadly formated octal numberBadly formatted SAR header (unknown TLV type in slice header)Badly formatted databaseBadly formatted terminator, cannot extract catalogue location: Badly formed "infinint" or not supported formatBirth Time attribute cannot be restored for %s because no FSA familly able to carry that attribute could be activated at compilation time.Both reference archive are nullptr, cannot merge archive from nothingBuilding the catalog object...CATALOGUE CONTENTS :CAUGHT A NON (LIB)DAR EXCEPTIONCRC difference concerning file's dataCRC error detected while reading EACRC error detected while reading FSACRC error met while reading delta signature: data corruption.CRC error: data corruption.CRC failed for the catalogueCalculating delta signature from filesystem: Calculating the signature of the catalogue hash...Can't read saved data.Cancellation no confirmedCannot add an absolute pathCannot allocate memory for buffer while reading %SCannot block signals: Cannot calculate delta signature when merging if keep compressed is askedCannot cleanly abort the operation, thread-safe support is missing, will thus abruptly stop the program, generated archive may be unusableCannot compare EA: EA support has not been activated at compilation timeCannot compare files in write only modeCannot convert negative floating point value to unsigned (positive) integerCannot convert username to uid in statically linked binary, either directly provide the UID or run libdar from a dynamically linked executableCannot create database, file existsCannot create inode: Cannot dup() filedescriptor while copying "fichier_local" object: %sCannot evaluate this crit_and criterium as no criterium has been added to itCannot evaluate this crit_or criterium as no criterium has been added to itCannot extract from the internal catalogue the list of files to 
removeCannot flush read a write-only generic_fileCannot get catalogue of reference as it has not been providedCannot get data name of the archive, this archive is not completely initializedCannot get effective permission given a file descriptor: Cannot get ext2 attributes (and nodump flag value) for %S : %sCannot get file size: %sCannot get full path of current working directory: Cannot get inode information about file to remove Cannot get inode information for %s : %sCannot get inode information for %s: %sCannot get last modification date: %sCannot get size of %S: not a plain fileCannot handle such a too large integer. Use a full version of libdar (compiled to rely on the "infinint" integer type) to solve this problemCannot open catalogue: Cannot open destination file : %sCannot open file : Cannot open source file : %sCannot read "fcntl" file's flags : Cannot read arguments on command line, abortingCannot read directory contents: %s : Cannot read file information for %s : %sCannot read inode for Cannot read on inputCannot receive that much data in regard to the allocated memoryCannot recurs in a non directory entryCannot reduce the string to a size that is larger than its current sizeCannot remove directory Cannot remove file %s: %sCannot remove non-existent file from filesystem: %SCannot remove nonexistent entry %S from catalogueCannot restore a delta binary patch without a file to patch on filesystemCannot restore a inode metadata only without an existing file on filesystemCannot restore any file, nothing doneCannot restore delta diff for %S as exsiting inode is not a plain fileCannot restore file %S : non existent file in databaseCannot restore permissions of %s : %sCannot seek back one charCannot seek in fileCannot sequentially read an archive of reference when delta signature or delta patch is requestedCannot set "fcntl" file's flags : Cannot set birth time: Cannot set last access and last modification time: Cannot skip at end of fileCannot skip forward to 
restore a holeCannot skip to a valid position in fileCannot skip to the current position in "tronc"Cannot sync write on a read-only generic_fileCannot unblock signals: Cannot write down a read-only databaseCannot write down the archive: Cannot write on outputCannot write to destination, abortingCannot write to outputCatalogue computed hash does not match the signed hash of the archive, archive has been modified since it was signed!Catalogue size in archive : %i bytesCatalogue size in archive : N/ACaught a none libdar exceptionCaught an unknown Egeneric exception: Changing database header information...Changing database information...Checking chronological ordering of files between the archives...Checking date ordering of files between archives...Checking file's dates ordering...Choose only one compression algorithmClearing file's EA (requested by overwriting policy): Closing archive low layer...Closing the compression layer...Closing the encryption layer...Closing the escape layer...Commands are: Commands: Common options: Communication problem with peer, retry ?Compressing and writing back database to file...Compression algorithm used : %SCompression level must be between 1 and 9, includedCompression option (-z option) is useless and ignored when using -ak optionCompression_level must be between 1 and 9 includedComputed EA CRC for file %S differs from what was stored in the archive, this file's EA may have been corruptedComputed FSA CRC for file %S differs from what was stored in the archive, this file's EA may have been corruptedComputing statistics...Conflict found while selecting the file to retain in the resulting archive:Considering cyphering layer...Considering date as if it was zero (Jan 1970)Considering the (first) archive of reference:Considering the second (alias auxiliary) archive of reference:Consistency check failed for archive headerContinue listing archive contents?Continue? Continuing the action under process which is to exit... 
so we exit!Continuing...Copied data does not match CRCCopied data does not match expected CRCCopy finished. Missing %.0f byte(s) of data Copying delta signature structure from the archive of reference: Corrupted data read from pipeCorrupted data read on pipeCorrupted database :Corrupted entry following an escape mark in the archiveCorruption met while reading header_version data structureCould not create inode: Could not find tape mark for the internal catalogueCould not open directory %s in furtive read mode (%s), using normal modeCould not restore original file ownership: Could read a catalogue data structure at offset %i, it contains the following:Creating a new thread to run the compression layer...Creating a new thread to run the escape layer...Creating a new thread to run the previously created layers...Creating file...Creating low layer: Writing archive into a black hole object (equivalent to /dev/null)...Creating low layer: Writing archive into a plain file object...Creating low layer: Writing archive into a sar object (Segmentation and Reassembly) for slicing...Creating low layer: Writing archive into standard output object...Crypto block size must be greater than 10 bytesCurrent Working Directory cannot be a relative pathCurrent implementation of blowfish encryption is not compatible with old (weak) implementation, use dar-2.3.x software or later (or other software based on libdar-4.4.x or greater) to read this archiveCyphering algorithm not available in libgcrypt: %s/%sDAR sub-process has terminated with exit code DAR terminated upon signal reception: DIFF DataData corruption may have occurred, cannot decrypt dataData corruption met at end of slice, forbidden flag found at this positionData corruption met at end of slice, unknown flag foundData corruption or unknown sparse_file mark found in file's dataData full saved : %S %SData more recent : %S %SData of file %S from first archive has been preserved from overwritingData of file %S is about to be %S, 
proceed?Data of file %S taken from the first archive of reference has been overwrittenData of file %S taken from the first archive of reference has been removedData should be read-onlyData size : %i %iDatabase has been successfully created empty.Database not saved, Do you really want to quit ?Dates are not increasing for all files when database's archive number grows, working with this database may lead to improper file's restored version. Please reorder the archive within the database in the way that the older is the first archive and so on up to the most recent archive being the last of the databaseDates of file's %S are not increasing when database's archive number grows. Concerned file is: %SDecompressing and loading database header to memory...Decompressing and loading database to memory...Decremental mode is useless when merging is not applied to both an archive of reference and an auxiliary archive of reference. Ignore decremental mode and continue?Delta saving file to archive: Delta signature do not have the same size: %i <--> %iDelta signature do not matchDelta signature have the same size but do not matchDetected Cache Directory Tagging Standard for %s, the contents of that directory will not be savedDirectory %S cannot be restored: overwriting not allowed and a non-directory inode of that name already exists, all files in that directory will be skipped for restoration:Directory component in %S does not exist or is a dangling symbolic link: Dirty file : %S %SDisabling signal handler, the next time this signal is received the program will abort immediatelyDivision by zeroDo you want to ignore the same type of error for other files?Do you want to use it for the operation?Due to data corruption, it is not possible to know if slice %S is the last slice of the archive or not. I need your help to figure out this. 
At the following prompt please answer either one of the following words: "last" or "notlast" according to the nature of this slice (you can also answer with "abort" to abort the program immediately): Dumping delta signature structure for saved file: EA and FSA of file %S are about to be %S, proceed?EA and FSA of file %S from first archive have been updated with those of same named file of the auxiliary archiveEA for %S are about to be merged, OK?EA for %S are about to be overwritten, OK?EA for %S are about to be removed, OK?EA for %S have not been cleared as requested by the overwriting policy because this file is a hard link pointing to an already restored inodeEA for %S have not been overwritten because this file is a hard link pointing to an already restored inodeEA full saved : %S %SEA more recent : %S %SEA number : %i %iEA of file %S from first archive have been dropped and marked as already savedEA of file %S from first archive have been removedEA of file %S from first archive have been updated with those of the same named file of the auxiliary archiveEA of file %S has been overwrittenEA of file %S has been overwritten and marked as already savedEA size : %i %iERR ERR Either archives in database are not properly tidied, or file last modification date has been artificially set to an more ancient date. This may lead improper Extended Attribute restoration for inode %SEmpty string as subdirectory does not make a valid pathEmpty string given as argument to tools_readlink()Empty string is an invalid archive basenameEmpty string is not a valid pathEnter each argument line by line, press return at the end Entry information CRC failureEntry information CRC failure for %SEntry information CRC failure for %S. 
Ignore the failure?Entry information: "in place" "to be added"Entry type : %s %sError code %d to message conversion failedError creating Unix socket file: Error creating archive layers: Error creating data buffer overlay for GPGME: %sError creating hard link %s : %s Trying to duplicate the inodeError creating hard link : %s , the inode to link with [ %s ] has disappeared, re-creating itError creating hard link : %s , the inode to link with [ %s ] is not present, cannot restore this hard linkError during user command line execution: Error found while looking fo GID of group %s: %SError found while looking for GID of group %s: %SError found while looking for UID of user %s: %SError getting file reading position: Error getting size of file: Error in included file (%s): Error met building the rsync hash table: Error met during network transfer: Error met while asking libcurl to consider ~/.netrc for authentication: %sError met while assigning private key file: %sError met while assigning public key file: %sError met while assigning sftp authentication methods: %sError met while checking for capability %S: %sError met while checking the resulting patched file: Error met while creating a libcurl handleError met while creating the hash file: Error met while duplicating libcurl handleError met while feeding data to librsync: Error met while fetching file size: %sError met while listing FTP/SFTP directory %sError met while opening %S: Error met while opening the last slice: %S. 
Trying to open the archive using the first slice...Error met while passing username to libcurl: %sError met while preparing directory listing: %sError met while processing operation: Error met while reading a block of data: %sError met while reading archive of reference slicing layout, ignoring this field and continuingError met while reading line %S from file %S: %SError met while reading next entry: Error met while removing file %SError met while removing file %S: %sError met while resetting URL to handle: %sError met while retrieving current time: %SError met while setting capability %S: %sError met while setting known_hosts file: %sError met while setting libcurl authentication: %sError met while setting libcurl for reading data file: %sError met while setting libcurl for writing data file: %sError met while setting up connection for file %S removal: %sError met while setting verbosity on handle: %sError opening %S : Error opening directory in furtive read mode: Error opening directory: Error opening pipe: Error performing the requested action: %SError reading EA for Error reading attribute %s of file %s : %sError reading character: Error reading database %S : Error reading directory contents: Error reading included file (%s): Error reading inode of file %s : %sError reading source file (we are at %.2f %% of data copied), trying to read further: %s Error releasing librsync job: Error removing file %s: %SError retrieving EA list for %s : %sError saving Extended Attributes for Error saving Filesystem Specific Attributes for Error transforming the archive :Error trying to run %s: %sError unlinking %S: %sError while activating libgcrypt's memory guard: %s/%sError while adding EA %s : %sError while assigning key to libgcrypt key handle (essiv): %s/%sError while assigning key to libgcrypt key handle to check password strength: %s/%sError while assigning key to libgcrypt key handle: %s/%sError while calling fork() to launch dar: Error while changing user terminal 
properties: Error while comparing archive with filesystem: Error while considering file Error while converting UID/GID to string for backup hook file: Error while creating ESSIV handle: %s/%sError while creating anonymous pipe: Error while creating hash handle: %s/%sError while cyphering data: %s/%sError while decyphering data: %s/%sError while derivating key from password (HMAC open): %s/%sError while derivating key from password (HMAC set key): %s/%sError while fetching archive properties: Error while fetching hostname: Error while fetching information for %S: Error while generating IV: %s/%sError while initializing "mutex" for class "statistics": Error while initializing hash: Hash algorithm not available in libgcrypt: %s/%sError while listing archive contents: Error while opening libgcrypt key handle to check password strength: %s/%sError while opening libgcrypt key handle: %s/%sError while opening the archive of reference: Error while parsing --min-digits option: Error while parsing -A argument as a date: Error while reading CRC for EA from the archive: No escape mark found for that fileError while reading CRC for FSA from the archive: No escape mark found for that fileError while reading CRC of delta signature data. 
Data corruption occurredError while reading FSA: Error while reading archive's header, this may be because this archive is an old encrypted archive or that data corruption took place, Assuming it is an old archive, we have to read the header at the beginning of the first slice...Error while reading data for a secure memory:Error while reading from file: Error while reading from pipe: Error while reading user answer from terminal: Error while removing %s : %sError while resetting encryption key for a new block: %s/%sError while restoring Error while restoring data: Error while restoring the following files: Error while retrieving delta signature from the archive: Error while saving Error while saving data: Error while seeking in file on remote repository: %sError while setting IV for current block: %s/%sError while setting file permission: %sError while setting file user ownership: %sError while setting write append mode for libcurl: %sError while telling libgcrypt that initialization is finished: %s/%sError while testing archive: Error while writing data to pipe: Error while writing to file: Error! SHA1 not available in libgcrypt: %s/%sError, catalogue of reference has not been providedError, invalid passphrase given, try again:Error, while sending the passphrase to GPGME:Escape sequences used for reading lead the archive to place some files out of the specified root. 
To overcome this problem, try reading the archive in direct mode (not using sequential reading), try repairing the archive using Parchive if redundancy data has been created or in last resort try using the lax modeEscaping...Exception caught from archive_listing_callback executionException caught from archive_listing_callback execution: %sExisting EA for %S could not be preserved : Existing EA for %S could not be read and preserved: Existing FSA for %S could not be preserved : Existing FSA for %S could not be read and preserved: Existing file is of a different nature, cannot only restore inode metadataExtended Attribute of file found in database but impossible to restore (only found "unchanged" in differential backups): FALSEFATAL error during network communication, aborting operation: FATAL error from operating system, aborting operation: FATAL error, aborting operation: FSA familly : %S %SFSA for %S are about to be overwritten, OK?FSA for %S have not been overwritten because this file is a hard link pointing to an already restored inodeFSA full saved : %S %SFSA of file %S from first archive have been dropped and marked as already savedFSA of file %S from first archive have been removedFailed assigning URL to libcurl: %sFailed creating GPGME context: Failed creating slice %S: Failed driving dar_slave to the end of archive: %S. 
Trying to open the archive from the first bytesFailed opening the cache layer, lack of memory, archive read performances will not be optimizedFailed reading %S: Failed reading CRC for EA and FSA: Failed reading data CRC for %S, file may be damaged and will be marked dirtyFailed reading existing extX family FSA: Failed reading inode information for %s: Failed reading slice %S: Failed retreiving passphraseFailed retrieving from libgcrypt the block size used by the cyphering algorithm (essiv): %s/%sFailed retrieving from libgcrypt the block size used by the cyphering algorithm: %s/%sFailed retrieving from libgcrypt the key length to use (essiv key): %s/%sFailed retrieving from libgcrypt the maximum key lengthFailed sending CR after the passphraseFailed sending the totality of the passphrase to GPGMEFailed set extX family FSA: Failed setting (opening) extX family FSA: Failed setting GPGME context with OpenPGP protocol: Failed setting storage size to zero for this file with missing data CRC, CRC error will be reported for that file while reading the repaired archiveFailed to open %S while checking for nodump flag: %sFailed writing down the hash: Fatal error on user command line: Fetching EA value while listing an archive is not possible in sequential read modeFile %S has changed during backup and is probably not saved in a valid state ("dirty file"), do you want to consider it for restoration anyway?File %S line %dFile found in database but impossible to restore (only found "unchanged" in differential backups, or delta patch without reference to base it on in any previous archive of the base): File had changed during backup and had been copied another time, restoring the next copy of file: File has disappeared while we were reading it, cannot check whether it has changed during its backup: %SFile inclusion loop detected. 
The file %s includes itself directly or through other files (-B option)File mode is neither read nor writeFile must be a directory: File not found in database: File offset too large to be stored in off_t typeFile ownership will not be restored du to the lack of privilege, you can disable this message by asking not to restore file ownershipFile recorded as removed at this date in database: File selection has been aborted. Now building the resulting archive with the already selected filesFile size : %i bytesFile size too smallFile size: %i bytesFile the patch is about to be applied to is not the expected one, aborting the patch operationFile to look for: Filename provided by the operating system seems truncated in directory %s, storing filename as is: %sFilesystem Specific Attribute are missingFinal memory cleanup...Finished Inspecting directory %S , saved %S%SFinished writing to file First file size : %i bytesFirst file size too smallFirst slice size is too small to even just be able to drop the slice headerFor your information, this is the iteration %d for which the randomly generated key is reported to be weak by libgcrypt, continuing generating another random key... patienceFormatting file as an empty database...Found 'Found a correct archive header at the beginning of the archive, which does not stands to be an old archive, the end of the archive is corrupted and thus the catalogue is not readable, aborting. Either retry providing in addition an isolated catalogue of that archive to perform the operation, or try reading the archive in sequential mode or try in lax mode or, last chance, try both lax and sequential read mode at the same timeFound a correct archive header at the beginning of the archive, which does not stands to be an old archive, the end of the archive is thus corrupted. Without external catalogue provided and as we do not read the archive in sequential mode, there is very little chance to retreive something from this corrupted archive. 
Do we continue anyway ?Found negative date (%s) for inode %s .Furtive read modeFurtive read mode requires either root permission and FOWNER capability, falling back to normal filesystem readGPGME engine not available: %sGPGME version requirement is not satisfied, requires version > %sGenerating random key for symmetric encryption...Given date must be in the pastGiven user target(s) could not be found: %SGiving -S option the same value as the one given to -s is uselessGiving to -S option the same value as the one given to -s option is uselessHard links of directories are not supportedHow much line to display at once: INTERNAL ERROR, PLEASE REPORT THE PREVIOUS OUTPUT TO MAINTAINERIf you really want to abort the archive creation hit CTRL-C, then press enter.Ignore previous error on user command line and continue ?Ignoring extra arguments on command lineIgnoring file with NODUMP flag set: Ignoring unknown option -%cIn included file %S: In which position to insert this archive: Incoherent answer from peerIncoherent catalogue structure: duplicated hard linked inode's dataIncoherent catalogue structure: hard linked data is not an inodeIncoherent catalogue structure: hard linked inode's data not foundIncoherent catalogue structure: unknown status flag for hard linked inodeIncoherent slice header: First slice size too smallIncoherent structure in data carrying sparse files: unknown markIncomplete labelIncorrect archive range in databaseIncorrect day of monthIncorrect hourIncorrect minuteIncorrect monthIncorrect secondIndex out of rangeInitialization problem for liblzo2 libraryInput cannot be readInspecting directory Integer too large for libcurl, cannot skip at the requested offset in the remote repositoryInvald nullptr argument given to 'ptr'Invalid Cipher textInvalid archive number: Invalid archive number: %dInvalid argument as destination archiveInvalid argument as source archiveInvalid argument given to -T option, expecting [:]Invalid divisor used for delta signature 
block len calculationInvalid hash algorithm provided to -T opton: %sInvalid nullptr argument given to 'ptr'Invalid nullptr pointer given to close_archiveInvalid number in string: %SInvalid number: Invalid path, path must be relativeInvalid size for CRC widthInvalid size for option -SInvalid size for option -sInvalid wide-char found in string: Is directory : %S %SIs hard linked : %S %SIs inode : %S %SIs plain file : %S %SIsolation with delta signature is not possible on a just created archive (on-fly isolation)Iterator does not point to dataKDF hash algorithm : %SKDF iteration count : %SKey cannot be an empty stringKey found but users are not all trustedKey generatedLAX MODE: "%S" is not a valid archive formatLAX MODE: %S has a bad or corrupted header, trying to guess original values and continuing if possibleLAX MODE: %S is missing, You have the possibility to create a zero byte length file under the name of this slice, to replace this missing file. This will of course generate error messages about the information that is missing in this slice, but at least libdar will be able to continue. Can we continue now?LAX MODE: %i %% remainingLAX MODE: %i is not a valid percent valueLAX MODE: Archive directory structure is corrupted, it would lead to place some files out of the specified root directory. Restoring different directory contents at the root not out of it, which will put files of different directories in the specified root directoryLAX MODE: Archive is flagged as having escape sequence (which is normal in recent archive versions). However if this is not expected, shall I assume a data corruption occurred in this field and that this flag should be ignored? (If unsure, refuse)LAX MODE: Archive is flagged to not have escape sequence which is not the case by default since archive format 8 (release 2.4.x). 
If corruption occurred and an escape sequence is present, this may lead data restoration to fail, answering no at this question will let me consider that an escape sequence layer has to be added in spite of the archive flags. Do you want to continue as suggested by the archive flag, thus without escape sequence layer?LAX MODE: Archive seems to be ciphered, but you did not have provided any encryption algorithm, assuming data corruption and considering that the archive is not cipheredLAX MODE: Archive seems to be only an isolated catalogue (no data in it), Can I assume data corruption occurred and consider the archive as being a real archive?LAX MODE: Beginning search of the catalogue (from the end toward the beginning of the archive, on %i %% of its length), this may take a while...LAX MODE: CRC failed for catalogue, the archive contents is corrupted. This may even lead dar to see files in the archive that never existed, but this will most probably lead to other failures in restoring files. Shall we proceed anyway?LAX MODE: Cannot skip at the end of the archive! Using current position to start the catalogue searchLAX MODE: Caught exception: LAX MODE: Could not find a whole catalogue in the archive. If you have an isolated catalogue, stop here and use it as backup of the internal catalogue, else continue but be advised that all data will not be able to be retrieved...LAX MODE: Do you want to bypass some sanity checks and try again reading the archive contents (this may take some time, this may also fail)?LAX MODE: Do you want to try finding portions of the original catalogue if some remain (this may take even more time and in any case, it will only permit to recover some files, at most)?LAX MODE: Due to probable data corruption, dar could not determine the correct size of slices in this archive. 
For recent archive, this information is duplicated in each slice, do you want to try opening another slice to get this value if present?LAX MODE: Error met building a catalogue entry, skipping this entry and continuing. Skipped error is: LAX MODE: Escape sequence could not be found, it may have been corrupted or out of the scanned portion of the archive, trying to find the catalogue the other wayLAX MODE: Escape sequence seems present in this archive. I have thus two different methods, either I look for the escape sequence indicating the start of the catalogue or I try each position in turn in the hope it will not be data that look like a catalogueLAX MODE: Failed to read the archive header's format version.LAX MODE: Failed to read the catalogueLAX MODE: Failed to read the catalogue (0 bytes of the archive length asked to look for the catalogue)LAX MODE: Failed to read the catalogue (no data to inspect)LAX MODE: Good point! I could find the escape sequence marking the beginning of the catalogue, now trying to read it...LAX MODE: In spite of a the absence of a known slice size, continuing anywayLAX MODE: In spite of its name, %S does not appear to be a dar slice, assuming a data corruption took place and continuingLAX MODE: Please provide an strictly positive integer numberLAX MODE: Please provide the archive format: You can use the table at %s to find the archive format depending on the release version, (for example if this archive has been created using dar release 2.3.4 to 2.3.7 answer "6" without the quotes here): LAX MODE: Please provide the slice number to read: LAX MODE: Reached the end of the area to scan, FAILED to find any catalogueLAX MODE: Slice flag corrupted, but a slice of higher number has been seen, thus the header flag was surely not indicating this slice as the last of the archive. 
ContinuingLAX MODE: The catalogue (table of contents) usually takes a few percents of the archive at its end, which percentage do you want me to scan (answer by an *integer* number between 0 and 100)? LAX MODE: The end of the archive is corrupted, cannot get the archive contents (the "catalogue")LAX MODE: Trying to locate the escape sequence (safer choice) ?LAX MODE: Unexpected saved status for class "cat_detruit" object, assuming data corruption occurred, ignoring and continuingLAX MODE: Unexpected saved status for end of directory entry, assuming data corruption occurred, ignoring and continuingLAX MODE: Unknown compression algorithm used, assuming data corruption occurred. Please help me, answering with one of the following words "none", "gzip", "bzip2", "lzo" or "xz" at the next prompt:LAX MODE: Unknown data in slice header, ignoring and continuingLAX MODE: Using archive format "%d"?LAX MODE: catalogue computed hash does not match the signed hash of the archive, ignoringLAX MODE: catalogue label does not match archive label, as if it was an extracted catalogue, assuming data corruption occurred and fixing the catalogue to be considered an a plain internal catalogueLAX MODE: closing slice %i, header properly fetchedLAX MODE: first slice size is not possible to read, (lack of virtual memory?), continuing anyway...LAX MODE: first slice size is not possible to read, continuing anyway...LAX MODE: found unknown catalogue entry, assuming data corruption occurred, cannot read further the catalogue as I do not know the length of this type of entryLAX MODE: found unknown catalogue entry, assuming data corruption occurred. Skipping to the next entry, this may lead to improper directory structure being restored, if the corrupted data was a directoryLAX MODE: internal name of the slice leads dar to consider it is not member of the same archive. 
Assuming data corruption occurred and relying on the filename of this slice as proof of its membership to the archiveLAX MODE: opening slice %i to read its slice headerLAX MODE: slice size is not possible to read, (lack of virtual memory?), continuing anyway...LAX MODE: slices %i and %i are both recorded as last slice of the archive, keeping the higher number as the real last sliceLAX MODE: the requested sequential read mode relies on escape sequence which seem to be absent from this archive. Assuming data corruption occurred. However, if no data corruption occurred and thus no escape sequence are present in this archive, do not use sequential reading mode to explore this archive else you will just get nothing usable from itLack of MemoryLack of SECURED memory to achieve the operation, aborting operationLack of Secured MemoryLack of memory to achieve the operation, aborting operationLast file size : %i bytesLibrary used for blowfish encryption does not respect RFC 3962Listing options (to use with -l): Loading catalogue into memory...Loading isolated catalogue in memory...Locating archive contents...Long options support : %s Looking in archives for requested files, classifying files archive by archive...Making room in memory (releasing memory used by archive of reference)...Mask_list's prefix must be an absolute path or start with "" string for archive mergingMemory allocation failed : %sMerging/filtering files from the %s archive...Missing -c -x -d -t -l -C -+ option, see `%S -h' for helpMissing [ after } in conditional statement: Missing ] in conditional statement: Missing archive basename, see -h option for helpMissing argument to --backup-hook-executeMissing argument to --deltaMissing argument to --hashMissing argument to --min-digitsMissing argument to -EMissing argument to -E optionMissing argument to -FMissing argument to -SMissing argument to -^Missing argument to -i optionMissing argument to -o optionMissing argument to -sMissing argument to command line, 
abortingMissing catalogue in file.Missing data for encrypted symmetrical keyMissing data to initiate a TLV objectMissing data while reading slice_layout objectMissing hashing algorithms support (which is part of strong encryption support, using libgcrypt)Missing parameter to option -%cMissing source or destination argument on command line, see -h option for helpMissing strong encryption support (libgcrypt)Missing } in conditional statement: NONO FILE IN THAT DIRECTORY CAN BE SAVED.NOT YET IMPLEMENTED FEATURE has been used: Need to activate sparse file detection in order to calculate delta signature for sparse file %SNegative number or zero not allowed when moving an archive inside a databaseNew basename for archive number %d: New database name: New path to give to archive number %d: No Filesystem Specific Attribute to compare withNo action specified, abortingNo backup file is present in %S for archive %S, please provide the last file of the set.No cyphering layer openedNo cyphering layer opened, adding cache layer for better performanceNo data found in that archive, sequentially reading the catalogue found at the end of the archive...No data to decryptNo database specified, abortingNo exception allowed from libdar callbacksNo file in this directory will be considered for merging.No file in this directory will be restored.No mask availableNo mask in the list of mask to operate onNo mask to operate on in the list of maskNo problem foundNo recipient remain with a valid key, encryption is impossible, abortingNo signatory remain with a valid key, signing is impossible, abortingNo slice layout of the archive of reference for the current isolated catalogue is available, cannot provide slicing information, abortingNo space left for inode, you have the opportunity to make some room now. When done : can we continue ?No space left on device, you have the opportunity to make room now. When ready : can we continue ?No terminal found for user interaction. 
All questions will be assumed a negative answer (less destructive choice), which most of the time will abort the program.No user target found on command lineNo valid encryption key could be find for %SNo valid signing key could be find for %SNon existent archive in databaseNon existent file in databaseNon existent file: Non fatal error while writing to destination file, retrying Not SavedNot a zero terminated string in fileNot enough data to initialize storage fieldNot setting FSA extX IMMUTABLE flags for %s due to of lack of capabilityNot setting FSA extX SYSTEM RESOURCE flags for %s due to of lack of capabilityNow performing on-fly isolation...OK OK OK, keeping %S as basenameOROnly one -@ option is allowedOnly one -A option is allowedOnly one -R option is allowedOnly one -S option is allowedOnly one -s option is allowedOpening a pair of pipes to read the archive, expecting dar_slave at the other ends...Opening and reading the batch file...Opening archive %s ...Opening construction layer...Opening cyphering layer...Opening escape sequence abstraction layer...Opening named pipe %S as input to read the archive...Opening standard input to read the archive...Opening the archive of reference %s to retreive the isolated catalog ... 
Opening the archive using the multi-slice abstraction layer...Opening the compression abstraction layer (compression algorithm used is none)...Opening the compression layer...Options: Out of range index requested for a secu_stringOverwriting not allowed while a slice of a previous archive with the same basename has been found in the %s directory, Operation abortedParse error on command line (or included files): Parse error: Parse error: Unmatched `%c'Partial request received, aborting Passphrase required for key %s :Patch data does not match its CRC, archive corruption took placePath to dar (empty string to use the default from PATH variable): Pipe's other end is not known, cannot close any filedescriptor pointing on itPipe's other end is not known, cannot provide a filedescriptor on itPlease answer by the character between brackets ('[' and ']') and press returnPlease check documentation or upgrade your software if availablePlease check your hardwarePlease confirm your password: Please provide the password for login %S at host %S: Previous archive size does not include headers present in each sliceProbable bug in liblzo2: lzo1x_*_compress returned unexpected code %dProcessing files for backup...Processing files for fixing...Processing files for merging...Program has been aborted for the following reason: Reached End of File while reading archive header_version data structureReached End of File while reading archive versionReached End of File, no correct data could be found after the last error Reached end of file before all data could be readReached end of file while reading slice headerReached premature end of file while reading archive versionReading a write only generic_fileReading ahead a write only generic_fileReading archive contents...Reading catalogue of the archive to add...Reading options (to use with -x, -d, -t, -l, -A) Reading the archive header...Reading the archive of reference from pipe or standard input is not possibleReading the archive 
trailer...Ready to start writing down the archive?Received signal: %dReceived signal: %sReceived unknown special orderRecording hard link into the archive: Reference storage of the iterator is empty or non existentRemoving existing immutable flag in order to restore data for %SRemoving file %sRemoving file (reason is file recorded as removed in archive): %SRemoving file (reason is overwriting policy): %SRemoving information from the database...Removing the dirty file %SReplacing %s in the -R option by the directory pointed to by this symbolic link: Resaving file without compression: Resetting the sequential reading process of the archive contents while it is not finished, will make all data unread so far becoming inaccessibleRestoration of EA for %S aborted: Restoration of FSA for %S aborted: Restoration of linux immutable FSA for %S aborted: Restoring file's EA: Restoring file's FSA: Restoring file's data using a delta patching: Restoring file's data: Restoring linux immutable FSA for Restoring options (to use with -x) : Running batch file from a batch file is not allowedSANITY CHECK: AT LEAST ONE THREAD_CANCELLATION OBJECT HAS NOT BEEN DESTROYED AND REMAINS IN MEMORY WHILE THE PROGRAM REACHED ITS ENDSECURITY WARNING! SUSPICIOUS FILE %S: ctime changed since archive of reference was done, while no other inode information changedSEEMS TO BE A HARDWARE PROBLEM: SKIPPED (hard link in sequential read mode): Salt size : %d byte%cSame data but CRC value could not be verified because we did not guessed properly its width (sequential read restriction)Same data but stored CRC does not match the data!?!SavedSaving Extended Attributes for Saving Filesystem Specific Attributes for Saving/Isolation/merging/repairing options (to use with -c, -C, -+ or -y): Scrambling is a very weak encryption algorithm, this is a non-sens to use with asymmetric encryptionSecured string can only be read from a terminalSee man page for more options. 
Sequential read asked, but this archive is flagged to not have the necessary embedded escape sequences for that operation, abortingSequential reading marks : %sSequential reading of the archive of reference is not possible when delta difference is requested, you need to read the archive of reference in direct access mode (default mode)Size too large for an elastic bufferSkipping done (missing %.0f byte(s)), found correct data to read, continuing the copy... Slice %S has an unknown flag (neither terminal nor non_terminal file).Slice size is too small to even just be able to drop the slice headerSlicing (-s option), is not compatible with archive on standard output ("-" as filename)Some file comparisons failedSome files are corrupted in the archive and it will not be possible to restore themSome files do not follow chronological order when archive index increases withing the database, this can lead dar_manager to restored a wrong version of these filesSorry, file size is unknown at this step of the program.Sparse file : %S %SStarting the copy of %u byte(s)Subtracting an "infinint" greater than the first, "infinint" cannot be negativeSymmetric key encryption used : %SSyntax error in --retry-on-change argument: Syntax error in batch file: %SSyntax error in batch file: -C option not allowedSyntax error in batch file: -i option not allowedSyntax error in overwriting policy: TRUEThe Cache Directory Tagging Standard is only useful while performing a backup, ignoring it hereThe archive %S is encrypted and no encryption cipher has been given, cannot open archive.The archive and the isolated catalogue do not correspond to the same data, they are thus incompatible between themThe basename '+' is reserved for special a purpose that has no meaning in this contextThe catalogue will be filled while sequentially reading the archive, preparing the data structure...The entry to recurs in does not exist, cannot add further entry to that absent subdirectoryThe following user comment 
will be placed in clear text in the archive: %SThe format version of the archive is too high for that software version, try reading anyway?The format version of this database is too high for that software version, use a more recent software to read or modify this databaseThe given path %s must be a directory (or symbolic link to an existing directory)The global data compression ratio is: The iterator is not indexing the object it has been asked to read fromThe iterator is not indexing the object it has been asked to write toThe iterator is not indexing the object it has been defined forThe iterator position is not inside the storage of referenceThe just restored file %S has been marked as dirty (sequential reading can only detect the dirty status after restoration), do we remove this just restored dirty file?The just restored file %S has been marked as dirty (sequential reading can only detect the dirty status after restoration), removing the just restored dirty file as it is asked to ignore this type of fileThe last file of the set is not present in The overall archive size includes %i byte(s) wasted due to bad compression ratioThe snapshot backup (-A +) is only available with -c option, ignoringThe two passwords are not identical. AbortingThis archive contains an isolated catalogue, it cannot be used for this operation. 
It can only be used as reference for a incremental/differential backup or as backup of the original archive's catalogueThis archive has slices and is not possible to read from a pipeThis archive is not exploitable, check documentation for moreThis archive is not exploitable, check the archive class usage in the API documentationThis archive is not single sliced, more data exists in the next slices but cannot be read from the current pipe, abortingThis database has been corrupted probably due to a bug in release 2.4.0 to 2.4.9, and it has not been possible to cleanup this corruption, please rebuild the database from archives or extracted "catalogues", if the database has never been used by one of the previously mentioned released, you are welcome to open a bug report and provide as much as possible details about the circumstancesThis is an old archive, it can only be opened starting by the first sliceThread cancellation requested, aborting as properly as possibleThread cancellation requested, aborting as soon as possibleTo calculate delta signatures of files saved as sparse files, you need to activate sparse file detection mechanism with merging operationTo terminate enter an empty line To use --sparse-file-min-size while merging archive, you need to use -ah option too, please check man page for detailsToo large block of compressed data: Either due to data corruption or current system limitation where SSIZE_MAX value implied smaller buffers than requiredToo large value give for key derivation interation countToo many argument on command line, see -h option for helpToo much line in file %S (integer overflow)Too old version for libgcrypt, minimum required version is %sTried to write out of size limited fileTwo different slices (%i and %i) are marked as the last slice of the backup!Type "man dar" for more details and for all other available options. Unbalanced parenthesis in expression: Uncomplete answer received from peerUncompleted archive! 
Assuming it has been interrupted during the backup process. If an error has been reported just above, simply ignore it, this is about the file that was saved at the time of the interruption.Unexepected value for boolean FSA, data corruption may have occurredUnexpected answer from slave, communication problem or bug may hang the operationUnexpected end of fileUnexpected error reported by GPGME: Unexpected error while waiting for dar to terminate: Unexpected exception from libdarUnexpected value found in databaseUnexpected value while reading archive versionUnknown TLV record typeUnknown argument given to -2 : %sUnknown argument given to -a : %sUnknown argument given to -k : %sUnknown argument given to -w: Unknown atomic operator, or atomic not allowed with an argument: Unknown character found while parsing conditional string: Unknown choice Unknown choice: Unknown crypto algorithmUnknown encryption algorithmUnknown entry found in slice header (type = %d), option not supported. The archive you are reading may have been generated by a more recent version of libdar, ignore this entry and continue anyway?Unknown escape sequence typeUnknown expression found while parsing conditional string: Unknown expression in overwriting policy: Unknown file type! 
file name is: Unknown groupUnknown header option in database, aborting Unknown macro %%%d in user commentUnknown option -%cUnknown parameter given to --delta option: Unknown parameter given to --hash option: Unknown parameter given to --modified-data-detection option: Unknown parameter given to -a option: Unknown policy for EA '%c' in expression %SUnknown policy for data '%c' in expression %SUnknown protocol: %SUnknown record typeUnknown substitution string: %Unknown suffix [%c] in string %SUnknown time unitUnknown userUpdating database with catalogue...User Decision requested for EA of file %SUser Decision requested for FSA of file %SUser Decision requested for data of file %SUser comment : %SUser target found on command line or included file(s):User target named "%s" is not allowed (reserved word for conditional syntax)Using sequential reading mode for archive source is not possible for merging operationUsing user provided modified slicing (first slice = %i bytes, other slices = %i bytes)WARNING! File modified while reading it for backup, but no more retry allowed: WARNING! File modified while reading it for backup. No more retry for that file to not exceed the wasted byte limit. File is WARNING! File modified while reading it for backup. Performing retry %i of %iWARNING! Incorrect signature found for archive, continue anyway?WARNING! The archive is located in the directory to backup, this may create an endless loop when the archive will try to save itself. You can either add -X "%S.*.%S" on the command line, or change the location of the archive (see -h for help). Do you really want to continue?WARNING! This is an isolated catalogue, no data or EA is present in this archive, only the catalogue structure can be checkedWARNING: support for secure memory was not available at compilation time, in case of heavy memory load, this may lead the password you are about to provide to be wrote to disk (swap space) in clear. 
You have been warned!WARNING: support for secure memory was not available at compilation time, in case of heavy memory load, this may lead the password/passphrase provided to be wrote to disk (swap space) in clear. You have been warned!Warning! %s Filesystem Specific Attribute support have not been activated at compilation time and could not be restored for %sWarning! No file in that directory will be restored: Warning! No file in this directory will be considered for merging: Warning, %S seems more to be a slice name than a base name. Do you want to replace it by %S ?Warning, are you sure you want to abort (please answer "%S" to confirm)? Warning, the archive %S has been encrypted. A wrong key is not possible to detect, it would cause DAR to report the archive as corruptedWarning: FSA %s/%s support has not been found at compilation time, cannot restore it for inode %sWarning: Failed reading %S: Warning: libdar multi-threading is an experimental and unsupported feature, read man page about -G option for more informationWe have linked with an incompatible version of libdar. 
Expecting version %d.%d.x but having linked with version %d.%d.%dWhich is %.2f %% of the total amount of data WorseWriting down archive contents...Writing down archive trailer...Writing down the archive header...Writing down the first archive terminator...Writing down the initial elastic buffer through the encryption layer...Writing down the second archive terminator...Writing down the signed hash of the catalogue...Writing to a read only generic_fileYESZero is not a valid size for an elastic buffer[ ][ EA ][ Saved ][DIRTY][Data ][D][ EA ][FSA][Compr][S]| Permission | User | Group | Size | Date | filename[InRef][Inode][Saved]a binary digit is either 0 or 1abortabsentabsent an empty string is an invalid argumentappend onlyappending data after the end of a secure_memoryappending data over secure_memory its endarchive # | path | basenameatime, data access timebackup hook feature (-<, -> or -= options) is only available when saving files, ignoringbadly structured inode: unknown inode flagbadly structured inode: unknown inode flag for FSAbe advised that a CRC error will be reported for the EA of that file while sequentially reading the repaired archivebe advised that a CRC error will be reported for the FSA of that file while sequentially reading the repaired archivebigbinay delta patch : %iblock deviceblowfish strong encryption supportbuilding delta signature with block size of %d bytesbzip2can't find mark for delta signaturecan't read data CRC: No escape mark found for that filecannot evaluate an empty chain in an overwriting policycannot provide data from a "not saved" file objectcannot use 'none' as hashing algorithm for key derivation functioncase in-sensitivecase sensitivechar devicecompressReset called but compressInit never called beforecompressedcompressed data CRC errorcompressed data corruption detectedcompressed data is corruptedcompression (-z option) cannot be changed with -y optioncompression used: %Scorrupted filecreation datectime, inode change timedar 
options : %Sdar path : %Sdata corrupted when attempting to read delta signature block sizedata corruption detected: Incoherence in LZO compressed datadata corruption detected: Too large block of compressed datadata has been cleaned, object is now emptydatabase version: %Sdate before 1970 is not alloweddeleted entrydestroyed entries informationdevices have not the same major number: %d <--> %ddevices have not the same minor number: %d <--> %ddifference of last modification date: %S <--> %Sdifferent Extended Attributesdifferent Filesystem Specific Attributesdifferent file data, offset of first difference is: %idifferent file typedifferent owner (uid): %i <--> %idifferent owner group (gid): %i <--> %idifferent permission: %S <--> %Sdirectorydistribution of inode(s)door inodedropped from the archive and marked as already savedelastic buffer incoherent structureerror met while creating archive: %Serror restoring Unix socket %s, path too long to be stored properly, socket will be created as %s instead, do you confirm?error!execution of [ %S ] returned error code: %dexecve() failed. (process table is full ?)execvp() failed but did not returned error codefailed openning slice %S: %S. 
Will try to erase it first, if allowedfailed reading CRC from file: falsefile exists, and DONT_ERASE option is set.file not openfirstfully saved : %igid value is too high for this system for libdar be able to restore it properlygziphard linked inodehard links informationheader only mode askedhide_file cannot be initialized with write-only fileignoring -O option, as it is useless in this situationimmutablein file %S line %Sincoherent catalogue structureincoherent data after escape sequence, cannot read internal data set labelincoherent endian between U_16 and U_32incoherent endian between U_16 and U_64incoherent endian between U_16 and U_Iincoherent slice header: Slice size too smallincompatible compression library version or unsupported feature required from compression libraryincomplete data set name found in a slice headerinode inode last change date (ctime) greater, EA might be differentinode last change date (ctime) greater, FSA might be differentinode metadata only : %iinvalid FSA family flaginvalid FSA nature flaginvalid decimal digitinvalid hash algorithm provided for key derivation functioninvalid length for FSA family flaginvalid length for FSA nature flaginvalid number given to -9 option: %sit seems to be a bug herejournalizedlastlast char of user command-line to execute is '%', (use '%%' instead to avoid this message)libcurl does not allow skipping in write modelibcurl initialization failed: %slibgcrypt not initialized and libdar not allowed to do solibgcrypt version < %s. Ligcrypt used has a bug that leads md5 and sha1 hash results to be erroneous for files larger than 256 Gio (gibioctet), do you really want to spend CPU cycles calculating a useless hash?libthreadar required for multithreaded executionline exceeding the maximum of %d characters in listing file %S, aborting. 
Concerned line starts with: %slisting() method must be givenlittlelzolzo compressionmemerged with possible overwritingminimum size should be lesser or equal than maximum size when specifying delta signature block size formulamissing data to buildmissing data to build a cat_directorymissing data to build a special devicemissing data to build an inodemissing function name argument in stringmissing multiplier argument in stringmtime, data modification timenamed pipenextnono Extended Attribute to compare withno Extended Attributes to compare withno atime updateno current directory definedno current reading directory definedno dump flagno tail mergingnodump flag feature has not been activated at compilation time, it is thus not availablenonenot enough space provided to dump the elastic buffernot same size: %i <--> %inotlastnullptr argument given to tools_readlink()nullptr given as argument of user_interaction_callback()overwrittenpatch plain fileplease answer with a single characterplease indicate the hash algoritm to use for key derivation function '1' for sha1, '5' for sha512, 'm' for md5, or 'q' to abort: presentpresent protocol %S is not supported by libcurl, abortingprovided password is too long for the allocated memoryre-enabling all signal handlers and continuing reached End of File before all expected data could be readread and writeread onlyremovedremoved removed from the archivereplacedroot directory has no parent directoryroot does not have a parent directoryroot has no parent directoryroot's entrepot must be an absolute path: saved scrambling (weak encryption)secondsecu_string randomization requested exceeds storage capacitysecure deletionslicing focused output is not available in sequential-read modesoft linksymbolic link does not point to the same target: synchronous directorysynchronous updatesystem() call failed: the "Keep file compressed" feature is not possible when merging two archives using different compression algorithms (This is for a future 
version of dar). You can still merge these two archives but without keeping file compressed (thus you will probably like to use compression (-z or -y options) for the resulting archivetoo large elastic buffer or elastic buffer incoherent structuretoo large value provided for the min block sizetop of directory hierarchytotal number of inode : %itruetype %s is neither big nor little endian! Do not know how to handle integer in a portable manner on this host, abortinguid value is too high for this system for libdar be able to restore it properlyundeletableunexpected extra argument in stringunix socketunknown FSA family: unknown compressionunknown compression algorithm: %Sunknown cryptographic algorithm: unknown entryunknown hash algorithm corresponding to char `%c'unknown name give for delta signature block len functionunknown type of data in catalogueusage : %s usage: %s [ -c | -x | -d | -t | -l | -C | -+ ] [/] [options...] user/group ownership not supported for this repositoryvalid hash algoritm needed for key derivation functionwith -+ option, -A option is mandatorywith -C option, -A option is mandatorywrite onlywriting down the final elastic buffer through the encryption layer...wrong value given as initial_size argument while initializing cachexzyes| Signature Status| Key Status | Finger Print | Signature Date |Project-Id-Version: DAR 2.6.0 Report-Msgid-Bugs-To: http://sourceforge.net/tracker/?group_id=65612MSGID_BUGS_ADDRESS =atid=511612 POT-Creation-Date: 2020-02-08 16:12+0100 PO-Revision-Date: 2020-01-12 15:30+0100 Last-Translator: Denis Corbin Language-Team: French Language: fr MIME-Version: 1.0 Content-Type: text/plain; charset=ISO-8859-1 Content-Transfer-Encoding: 8bit Plural-Forms: nplurals=2; plural=(n > 1); Standard ou lors de l'extraction d'une archive propriété le premier argument qui n'est pas une option est le nom de l'archive à lire le fichier fourni. 
le second argument qui n'est pas une option et le nom de l'archive à créer Pas de pause lors de l'affichage Pause toutes les %d lignes d'affichage Exécution de la ligne : %S Base de donnée utilisée [%s] : %S ATTENTION ! Cette archive ne contient que la liste du contenu d'une autre archive et ne peut être utilisé qu'en tant que référence pour une sauvegarde différentielle ou comme secours en cas de corruption de l'archive d'origine. Il est impossible de restaurer des données à partir de cette archive seule Votre décision pour les AE du fichier : [p] préserver [o] écraser [s] marquer comme déjà sauvé et préserver [t] marquer comme déjà sauvé et écraser [m] fusionner les EA et préserver [n] fusionner les EA et écraser [r] supprimer tous les EA [*] laisser l'action non définie [a] arrêter l'opération Votre choix ? Votre décision à propos des FSA du fichier : [p] préserver [o] écraser marquer comme déjà [s]auvé et préserver [t] marquer comme déjà sauvé et écraser [*] laisser non défini [a]rrêter l'opération Votre choix ? Votre décision à propos des données du fichier : [P] préserver [O] écraser [S] marquer comme déjà sauvé et préserver [T] marquer comme déjà sauvé et écraser [R] supprimer l'entrée [*] laisser non défini [A] arrêter l'opération Votre choix ? format de date: [[[année/]mois]/jour-]heure:minute[:seconde] Algorithmes disponibles : gzip, bzip2, lzo, xz. Exemples -zlzo:5 -z1 Attribut Étendu : [ avec %i lien(s) physique(s) %i fichier(s) ont été enregistré(s) comme détruit(s) depuis la sauvegarde de référence -# taille des blocs de chiffrement -$ clef de chiffrement de l'archive auxiliaire -* comme -# mais pour l'archive de référence -+ fusion de deux archive / création de sous-archive -, ignore les répertoires marqués comme répertoire cache --nodump ne pas sauver les fichiers ayant le drapeau 'd' positionné -/ défini que quelle façon dar peut écraser des fichiers lors de la fusion -8 sig génère signature pour diff binaire si utilisé comme ref. 
-@ [chemin/] Réalise une isolation à la volée de l'archive crée -@ [chemin/] archive à prendre comme référence auxiliaire -A ajoute une archive à la base -A [chemin/] archive à prendre comme référence -B base de donnée à utiliser -B lit les options à partir du fichier -C isole le catalogue d'une archive -C création d'une base vide -D les répertoires exclus sont enregistrés comme des répertoires vides -D suppression d'archives de la base -E ligne de commande à exécuter entre les tranches d'une archive -E commande exécutée entre les tranches -E commande à exéctuer après chaque tranche de l'archive destination -F commande à exécuter avant chaque tranche de l'archive source -F comme -E mais pour l'archive de référence -H [N] considère identiques des dates qui diffèrent d'un nombre entier d'heures -I fichiers à inclure dans l'opération (tous par défaut) -J comme -K mais pour l'archive de référence -K mot de passe à utiliser pour chiffrer/déchiffrer -L exécute un fichier de traitement par lot sur une base tel défini par -M reste dans le même système de fichiers lors du parcours de l'arborescence -N ignore ~/.darrc et /etc/darrc s'ils sont présents -O[ignore-owner | mtime | inode-type] ne pas prendre en compte la propriété des fichiers -P sous-répertoires à exclure de l'opération -Q supprime le message initial si non lancé depuis un tty -R répertoire à considérer comme racine (rep. 
courant par défaut) -S taille de la première tranche -S taille de la première tranche (si elle doit différente des autres) -T affichage sous forme d'arbre -U masque d'exclusion d'Attributs Étendus -V Informations de version -V affiche la version -V affiche les informations de version -X fichiers à exclure de l'opération (aucun par défault) -Y ne compresser que ces fichiers -Z ne pas compresser ces fichiers -[ fichier contenant une liste de fichiers à inclure -] fichier contenant une liste de fichiers à exclure -^ permission[:utilisateur[:groupe]] pour la création des tranches -aSI signification des suffixes k, M, T, G, etc. comme puissances de 10 -aSI signification des suffixes k, M, T, G, etc. comme puissances de 10 -aa ne cherche pas à conserver atime des fichiers ouverts en lecture. -abinary signification des suffixes k, M, T, G, etc. comme puissances de 2 -abinary signification des suffixes k, M, T, G, etc. comme puissances de 2 -ac ne cherche pas à conserver ctime (comportement par défaut). 
-acase dans les masques qui suivent majuscules et minuscules sont différents -ag bascule les masques suivants comme expression glob -am filtres en mode ordonné -an dans les masques suivants majuscules et minuscules sont équivalents -ar bascule les masques suivants comme expression régulières -as affichage uniquement des fichiers sauvegardés -b fait sonner le terminal pour solliciter l'utilisateur -b change le nom de base de l'archive dont le numéro est donnée -c vérifie la base pour l'ordonnancement des dates -c créer une archive -d comparaison de l'archive avec le fichiers existants -d chemin de la commande dar -e exécution à vide (rien n'est écrit sur le disque) -f ne pas restaurer l'arborescence de répertoire -f liste les archives où est présent le fichier donnée -g sous-répertoire à inclure dans l'opération -h affiche cette page d'aide -h affiche cette page d'aide -i mode interactif -i tube à utiliser à la place de l'entrée standard pour lire les données de dar_slave -i tube à utiliser à la place de l'entrée standard pour lire les ordres de dar -k non suppression des fichiers enregistrés comme détruits -l affiche les archives utilisées dans la base de donnée -l affichage du contenu d'une archive -m ne pas compresser les fichiers de taille inférieure à octets -m change la position d'une archive dans une base. 
-n ne pas écraser de fichier -o tube à utiliser au lieu de la sortie standard pour ordonner dar_slave -o tube à utiliser en place de la sortie standard pour envoyer les données à dar -o liste d'options à transmettre à dar -p arrêt (pause) après chaque tranche -p change le chemin associé à l'archive du numéro donné en argument -q supprime le rapport final de statistiques -r ne pas restaurer de fichiers plus anciens que ceux sur le système de fichiers -r restauration des fichiers spécifiés -s affiche la quantité de fichiers les plus récent par archive -s coupe l'archive en tranches de taille octets -t test l'intégrité d'une archive -u masque de sélection des Attributs Étendus -u liste les fichiers les plus récents contenus dans l'archive donnée -v mode verbeux -v[s|t|d|m|f|a] affichage détaillé -V affiche les fichiers ignorés -w ne pas avertir avant un écrasement de fichier -w uniquement avec -r, restaure les fichiers dans leur état à la date donnée -wa ne pas avertir ni avant écrasement ni avant suppression de fichier -x restauration de fichiers -y répare une archive tronquée -z [[algo:]niveau] compression des données -~ commande à exécuter entre les tranches d'une archive auxiliaire Compression delta : %s Boutisme du système/CPU : %s Support Attributs Étendus : %s Support de lecture furtive : %s Entiers utilisés : %d bits Entiers utilisés : sans limite Optim. grands répertoires : %s Support fichiers géants : %s Compression libbz2 (bzip2) : %s Compression liblzma (xz) : %s Compression liblzo2 (lzo) : %s Compression libz (gzip) : %s Support FSA ext2/3/4 : %s Support FSA HFS+ de Mac OS X : %s Plusieurs taches(libthreadar): %s %s Support de Posix fadvise : %s Chiffrement clef pub.(gpgme) : %s Support répertoires distants : %s Symlinks, dates restorées : %s Chiffrement fort (libgcrypt) : %s Support environ. multi-tâches: %s Précision de lecture du temps: %S Précision d'écriture du temps: %S Support drapeau NODUMP : %s avec %i lien(s) physique(s) enregistré(s) Num. 
archive | derniers fichiers/total | EA plus récent/total%i octet(s) ont été gaspillé(s) dans l'archive lors de re-tentatives de sauvegarde de fichiers ayant changés %i inode(s) ajoutée(s) à l'archive %i inode(s) modifiée(s) au moment de sa (leurs) sauvegarde n'a (n'ont) pas pu être sauvée correctement %i inode(s) supprimée(s) %i sauvegarde(s) d'inode(s) échouée(s) (erreur du système de fichiers) %i inode(s) n'ayant pu être restaurée(s) (erreur système) %i inode(s) ignoré(s) (exclus par les filtres) %i inode(s) non restaurée(s) (non sauvée(s) dans l'archive) %i inode(s) non restaurée(s) (décision de la politique d'écrasement) %i inode(s) non sauvées (pas de changement) %i inode(s) enregistrée(s) comme supprimée(s) %i fichier(s) enregistré(s) comme supprimé(s) depuis l'archive de référence %i inode(s) restauré(s) %i inode(s) sauvées %i inode(s) ayant seulement un changement de métadonnées %i entrée(s) ne correspond(ent) pas à ce qui est sur le système de fichiers %i entrée(s) ignorée(s) (exclus par les filtres) %i entrée(s) traitée(s) %i entrée(s) avec erreur %s est fourni sans AUCUNE GARANTIE ; %s est fourni SANS AUCUNE GARANTIE ; pour les détails taper '%s -W'. %s est fourni SANS AUCUNE GARANTIE; pour les détails taper `dar -W'. %s fait partie de la suite Disk ARchive (Release %s) , veuillez le fournir. - entrées Portes (Door) : %i - fichiers spéciaux bloc : %i - fichiers spéciaux char : %i - répertoires : %i - tubes nommés : %i - nombre d'inode ayant des liens durs : %i - nombre de référence à des inodes ayant des liens durs : %i - fichiers normaux : %i - liens symboliques : %i - prises unix : %i . Fichier ou répertoire ignoré . Essayer à nouveau ? . Essayer à nouveau la ligne de commande ? . Doit-on relancer dar comme précédemment ? 
: Echec de la sauvegarde sans compression des données de l'inode : Resauver sans compression les données de l'inode pour gagner de la place n'est pas possible, on conserve les données compressées Choix : Doit-on continuer malgré tout ? Attributs Étendus restaurés pour %i inode(s) Attributs Étendus sauvés pour %i inode(s) FSA restaurés pour %i inode(s) FSA sauvés pour %i inode(s) SI POSSIBLE ESSAYEZ DE REPRODUIRE L'ERREUR, UN SCÉNARIO QUI PERMET DE LA REPRODUIRE SERAIT UTILE POUR CORRIGER CE PROBLÈME. MERCI SI POSSIBLE ESSAYEZ DE REPRODUIRE L'ERREUR, UN SCÉNARIO QUI PERMET DE LA REPRODUIRE SERAIT UTILE POUR CORRIGER CE PROBLÈME. MERCI Une seule option parmi -c -d -t -l -C, -x ou -+ est permise Prêt à continuer ? MERCI DE REPORTER L'AFFICHAGE PRÉCÉDENT AU MAINTENEUR EN DONNANT UNE DESCRIPTION DES CIRCONSTANCES. Ceci est un logiciel libre, vous pouvez le redistribuer sous certaines conditions ; Ceci est un logiciel libre, vous pouvez le redistribuer sous certaines conditions ; Nombre total d'inode(s) considérée(s) : %i Nombre total d'entrée(s) considérée(s) : %i Libdar %u.%u utilisée, construite avec les options suivantes : Libdar %u%u utilisé, compilé avec les options : Libdar %u.%u.%u utilisée, construite avec les options suivantes : Libdar %u.%u.%u utilisée, compilé avec les options : [Entrée = OUI | Echap = NON] a : Sauver sous n : pause chaque 'n' ligne d'affichage (zéro pour aucune pause) c : vérification de l'ordre des dates taper `dar -L | more' pour les détails. compilé le %s avec %s version %s d : chemin de dar o : option passées à dar f : localisation d'un fichier m : change l'ordre des archives Pour les détails taper `dar -W'. est une tranche d'une autre archive, merci de fournir le bon fichier. est sur le point d'être écrasé. 
n'est pas présent dans l'archive l : contenu de la base A : Ajout d'une archive non fusionné (choix de l'utilisateur) non restauré (choix de l'utilisateur) p : modifie le chemin d'un archive b : modifie le nom de base d'une archive q : Sortir taper '%s -L | more' pour les détails. taper `dar -L | more' pour les détails. u : contenu d'une archive D : Suppression d'une archive w : enregistre les changements s : statistiques de la base de données avec %i lien(s) physique(s) enregistré(s) "-k" (ou "-kignore") et "-konly" ne sont pas compatibles"first_file_size" ne peut être différent de zéro si "file_size est égal à zéro# A B A N D O N ! ## EXCEPTION NON ATTRAPÉE, ## EXCEPTION NON ATTENDUE, #%S Pouvons-nous la considérer nulle (1er janvier 1970 à 00:00:00 UTC) ?%S [%c] [ ENTREÉ SUPPR. ] (%S) %S%S existe déjà et l'écrasement est interdit, j'abandonne%S n'existait pas avant la date spécifiée et ne peut être restauré%S a un en-tête corrompu ou malformé, merci de fournir le bon fichier.%S est sur le point d'être supprimé (action requise par la politique d'écrasement), OK ?%S est sur le point d'être écrasé, OK ?%S est sur le point d'être écrasé, on continue ?%S est sur le point d'être supprimé du système de fichier, continue t-on ?%S n'est pas un chemin valide : %S%S n'est pas un fichier valide (mauvais numéro magique), merci de fournir le bon fichier.%S n'est pas nombre valide%S est requis pour la suite des opérations, merci de fournir ce fichier si vous l'avez.%S est requis pour la suite des opérations, merci de fournir ce fichier.%S est sur le point d'être supprimé, mais le type de fichier ne correspond pas avec ce qui est attendu, l'efface t-on ?%S : %s, abandon%S : %s, retente-t-on l'opération réseau ?%S : %s, nouvelle tentative dans %d secondes%S : La politique d'écrasement (Données) reste non définie pour ce fichier, impossible de savoir si l'écrasement est autorisé ou pas !%S : La politique d'écrasement (Données) reste non définie pour ce fichier, impossible donc de 
savoir si la suppression est autorisée ou non !%S : La politique d'écrasement (Attributs Étendus) reste non définie pour ce fichier, impossible de savoir si l'écrasement est autorisé ou pas !%S : La politique d'écrasement (FSA) reste non définie pour ce fichier, impossible de savoir si l'écrasement est autorisé ou pas !%SSous-répertoire de : %S [%S]%SLe chemin est : %S [%S]%SLe chemin mène à : %S [%S]%Sexpression glob: %S [%S]%Snon( %S %S)%Sexpression régulière: %S [%S]%d n'est pas une taille valide%d est la valeur par défaut pour --sparse-file-min-size, il n'est pas nécessaire de préciser cette valeur%d est la valeur par défaut pour -m, il n'est pas nécessaire de préciser cette valeur%s ne désigne pas une compression "[algo][:][niveau]", comme par exemple "gzip", "lzo", "bzip2", "lzo:3", "gzip:2", "8" ou "1". Merci de revoir la description de l'option -z dans la page de manuel%s n'est pas un type signé tel attendu%s n'est pas un type non signé tel attendula longueur du type %s n'est pas %d octet(s) mais %d%s: %s'-A +' est indisponible avec l'option -y, taux de compression , prêt à continuer ? --$ n'est disponible qu'avec les options -x et -cL'option -$ n'est utile qu'avec l'option -@, pour l'archive de référence auxiliaire-%% n'est disponible qu'avec l'option -+L'option -%% n'est utile qu'avec l'option -@ pour l'archive auxiliaire de référence--------------+-------------------------+-----------------------l'option --alter=do-not-compare-symlink-mtime est inutile avec l'option -y--alter=tape-marks est impossible avec l'option -yLa fonctionnalité --nodump n'a pas été activée lors de la compilation, cette option n'est donc pas disponiblel'option --sequential-read est inutile avec -y--sparse-file-min-size n'est disponible que lors de sauvegarde ou fusion d'archives et sera ignoré icil'option -. 
n'est utile que lors de fusion, de création ou d'isolation d'archives et sera ignorée icil'option -/ n'est utile qu'avec -+ et sera ignorée icil'option -/ est inutile avec -yl'option -5 est inutile avec -yl'option -8 est inutile avec -yl'option -9 n'est valide uniquement qu'avec l'option -A et sera ignorée iciles options -< et -> sont inutiles avec l'option -yl'option -= n'est valide que lors de la sauvegarde (option -c) et sera ignorée, icil'option -= sera ignorée car inutile faute d'indication des fichiers ou répertoires auxquelles l'appliquer. Voir les options -< et -> dans la page de manuel pour plus de détails-@ n'est disponible qu'avec les options -+ et -cl'option -@ est inutile avec -y-A n'est disponible qu'avec l'option -ll'option -A est nécessaire avec l'option -yl'option -B ne peut pas être utilisée à l'intérieur d'un fichier de traitement par lot-D est inutile avec -yL'option -F n'est utile qu'avec l'option -A, car elle concerne l'archive de référenceLors d'une sauvegarde, l'option -H n'est utile qu'avec l'option -A-H n'est utile qu'avec -c, -d ou -xLors d'une restauration, -H n'est utile qu'avec l'option -r-H est inutile avec -yL'option -J n'est utile qu'avec l'option -A, car elle concerne l'archive de référence-M n'est utile qu'avec l'option -cl'option -M est inutile avec -yl'option -0 est incompatible avec l'option -yl'option -S nécessite l'utilisation de -s-T n'est disponible qu'avec l'option -l-X, -I -P, -g, -], -[ et autres commandes relatives à la selection de fichier sont impossibles avec l'option -y-Y et -Z ne sont utiles qu'avec la compression de données (option -z), -Y et -Z seront ignorés icil'option -\ est inutile avec -yIl manque un argument à l'option -a-ac n'est utile qu'avec -c ou -dl'option -ad est inutile avec -yl'option -ae est inutile avec -y-af est indisponible avec l'option -yL'option -af doit être placée avant l'option -A, et non pas après-af n'est disponible qu'avec l'option -c ou -+Inutile de spécifier plus d'une fois l'option 
-afl'option -ah est inutile avec -yl'option -ai est inutile avec -y-ak n'est disponible qu'avec l'option -+ et sera ignoré icil'option -ak est inutile avec -yinutile de spécifier l'option -ak plus d'une fois-as n'est disponible qu'avec l'option -l, -as sera ignoré icil'option -as est inutile avec -yl'option -asecu est inutile avec -y-wa n'est utile qu'avec les options -x, -c ou -+-e n'est disponible qu'avec l'option -r, abandon de l'opération en cours-f n'est disponible qu'avec l'option -x, -f sera ignoré icil'option -k n'est pas compatible avec l'option -yl'option -k n'est utile qu'avec l'option -x-konly et kignore ne peuvent être utilisés en même temps-m n'est utile qu'avec l'option -c-m n'est utile qu'avec la compression de données (option -z), -m sera ignorés icil'option -o est obligatoire quand "-A -" est utilisé conjointement avec "-c -", "-C -" ou "-+ -"les options -p et -Q sont mutuellement exclusivesL'option -r est inutile avec -n-r est inutile avec -y-u, -U, -P, -g, -], -[ et autres commandes relatives à la selection d'EA n'est impossible avec l'option -y-vf n'est utile qu'avec l'option -cl'option -w n'est valide uniquement qu'avec l'option -r et sera ignorée ici-w est inutile avec -n-wa n'est utile qu'avec l'option -x-z n'est utile qu'avec les options -c , -C ou -+les options -{ et -} sont inutiles avec l'option -y-~ n'est disponible qu'avec les options -+ et -cL'option -~ n'est utile qu'avec l'option -@, car elle concerne l'archive auxiliaire de référenceUne clef forte générée aléatoirement a pu être trouvée après %d iteration(s) : fichier absent du système de fichiersUne erreur est survenue lors de la lecture du contenu de l'archive : ETAbandon du traitement par lot :Arrêt du programme. Abandon. une erreur est survenue lors de l'exécution d'une commande utilisateur : Abandon. Une erreur est survenue lors d'un appel à libdar : Abandon. L'opération demandée nécessite une fonctionnalité qui n'a pas été activée à la compilation : Abandon. 
L'utilisateur a refusé de continuer lors de la question : Arrêt du programme : le processus fils est mort de façon inattendueDroit d'accès |Propr.|Groupe | Taille | Date |[Infos][D][ EA ][FSA][Compr][S]| Nom de fichierAjout de %S à l'archive : %SAjout de lien dur à l'archive : Ajout d'une nouvelle couche : Cache pour de meilleures performances ...Ajout d'une nouvelle couche : Couche d'échappement pour permettre la lecture séquentielle ...Ajout d'une nouvelle couche : Chiffrement fort ...Ajout d'une nouvelle couche : Compression ...Ajout d'une nouvelle couche : Chiffrement faible ...L'ajout explicite d'une séquence d'échappement de type seqt_not_a_sequence est interditAjout d'une couche de cache au-dessus du tube pour fournir une capacité de déplacement limitéeEnregistrement uniquement des métadonnées dans l'archive : %SAjout des fichiers qui ont été détruits depuis la sauvegarde de référence ...Tous les fichiers demandés n'ont pu être restaurésToutes les couches d'abstraction ont été crées avec succèsUne chaîne vide n'est pas un nom de groupe valideUne chaîne vide n'est pas un nom d'utilisateur valideL'archive %S nécessite un mot de passe : Nom de base de l'archive à ajouter : Mise en route de la procédure de terminaison de l'archiveMise en route de la procédure de terminaison rapide de l'archiveUn format d'archive plus ancien que "08" (release 2.4.0) ne peut être lu à partir d'un seul tube, seule la lecture avec dar_slave ou à partir de fichiers (c'est à dire de tranches) est possibleLe catalogue interne de l'archive n'a pas été signé de la même façon que l'archive elle-même, c'est probablement le signe d'une compromission de l'archiveLe catalogue interne de l'archive est correctement signé mais ne correspond pas aux marques de bandes utilisée pour la lecture séquentielle. Il est possible qu'une corruption de donnée ou une compromission de l'archive ait eu lieu! 
Si les données extraites en mode séquentiel ne correspondent pas à celle extraites en mode drect, on peut considérer que l'archive a été modifié suite depuis sa création.L'archive est fermée.Archive composée de %i fichier(s)Signature de l'archive : %SNuméro d'archive à modifier : Numéro de l'archive à déplacer : Numéro d'archive à supprimer : Numéro de l'archive dont le chemin doit être modifié : Numéro d'archive : L'archive de référence n'est pas signée correctement (difference de signataire entre le catalogue interne et l'archive elle-même), est-ce qu'on continue ?La production d'archive sur stdout n'est pas compatible avec plusieurs tranches (option -s)Réparation d'archive terminée. ATTENTION ! Il est fortement recommandé de tester l'archive resultante avant de supprimer celle qui est abîméeLa signature d'un archive n'est possible qu'avec le chiffrement gnupgTaille d'archive inconnue (lecture depuis un tube)Taille de l'archive : %i octetsTaille totale : %i octetsVersion du format de l'archive : %sVersion d'archive trop élevé, utiliser une version plus récente de libdarÉtes vous sûr de vouloir supprimer l'archive numéro %d ?L'argument donné à -H doit être un entier positifArguments lus depuis %S :L'argument donnée à l'option -r doit être un chemin relatif (jamais commencer par '/')Un élément hors des limites est demandéClef de chiffrement asymmétrique : %SAu moins une tranche d'une ancienne archive ayant le même nom reste présente dans le répertoire %s. Il est conseillé de supprimer toutes les anciennes tranches avant de créer une archive de même nom. 
Peut-on supprimer les anciennes tranches ?CASSÉ Mauvais CRC, une corruption des données a eu lieuRéponse incohérente du correspondant pendant la terminaison de la connexionMauvais format de dateNombre octal malforméeEn-tête SAR mal-formé (type de TLV inconnu dans l'en-tête d'une tranche)Base de donnée malforméeTerminateur malformé, la position du catalogue ne peut être déterminée : "infinint" malformé ou format non supportéL'attribut date de création ne peut pas être restauré pour %s car aucune famille de FSA capable de le supporté n'a été activée lors de la compilation.Les deux archives de référence sont nullptr, impossible de fusionner à partir de rienConstruction de l'objet catalogue ...CONTENU DU CATALOGUE :EXCEPTION NON (LIB)DAR ATTRAPÉEDifference de CRC pour les données du fichierErreur de CRC détectée lors de la lecture des AEErreur de CRC détectée lors de la lecture des FSAErreur de CRC lors de la lecture de la signature de différence binaire : Corruption de données.Erreur de parité : données corrompues.Erreur de CRC lors de la lecture du catalogueCalcul de signature delta à partir du système de fichiers : Calcul de la signature du hashage du catalogue ...Impossible de lire les données sauvegardées.Arrêt non confirméImpossible d'ajouter un chemin absoluImpossible d'allouer de mémoire pour un tampon lors de la lecture %SImpossible de bloquer les signaux : Impossible de calculer les signatures delta lors d'une fusion s'il a été demandé de garder les fichiers compressésImpossible d'arrêter proprement l'opération, l'absence de support pour un environnement multi-tâches arrêtera abruptement le programme, en conséquence l'archive générée risque fortement d'être inutilisable.Comparaison d'AE impossible : le support pour les Attributs Étendus n'a pas été activé lors de la compilationImpossible de comparer des fichiers ouverts en écriture seuleImpossible de convertir un entier flotant négatif en un entier non signé (positif)Impossible de convertir un nom 
d'utilisateur en numéro (UID) dans un executable lié statiquement. Fournir soit directement l'UID soit utiliser un exécutable lié dynamiquementImpossible de créer la base, un fichier de même nom existe déjàCréation d'une inode impossible : Impossible de dup()liquer un descripteur pendant la copie d'un "fichier_local" : %sNe peut pas évaluer cet objet crit_and car aucun autre objet criterium n'y a été ajoutéNe peut pas évaluer cet objet crit_or car aucun autre objet criterium n'y a été ajoutéImpossible d'extraire du catalogue interne la liste des fichiers à effacerNe peut pas purger la lecture d'un generic_file en écriture seuleImpossible d'obtenir le catalogue de référence car il n'a pas été fourniImpossible d'obtenir le nom de données de cette archive ; cette archive n'est pas complètement initialiséeImpossible d'obtenir les droits d'accès à partir d'un description de fichier : Impossible d'accéder aux attributs ext2 (drapeau nodump) pour %S : %sImposible de connaître la taille du fichier : %sImpossible d'obtenir le chemin complet du répertoire courant : Impossible d'obtenir d'information sur l'inode d'un fichier à effacer Impossible d'obtenir des informations sur l'inode %s : %sImpossible d'obtenir des informations sur l'inode %s : %sImpossible d'obtenir la date de dernière modification : %sImpossible de connaître la taill de %S: ce n'est pas un fichier pleinImpossible de gérer un entier aussi grand. 
Utiliser la version complète de libdar (compilé pour utiliser le type "infinint") pour résoudre ce problèmeOuverture du catalogue impossible : Fichier destination impossible à ouvrir : %sImpossible d'ouvrir le fichier : Fichier source impossible à ouvrir : %sImpossible de lire les drapeaux "fcntl" du fichier : Impossible de lire les arguments en ligne de commande, abandonImpossible de lire le contenu du répertoire : %s : Impossible d'obtenir d'information sur %s : %sImpossible de lire l'inode de Lecture impossible en entréeNe peut recevoir autant de donnée en regard de la place mémoire allouéeImpossible de poursuivre la récursion dans une entrée qui n'est pas un répertoireImpossible de réduire la taille du chaîne à une valeur qui est plus grande que sa taille actuelleSuppression de répertoire impossible Suppression du fichier %s impossible: %sImpossible de supprimer un fichier qui n'existe pas : %SNe peut pas supprimer l'entrée %S du catalogue car elle n'y existe pasImpossible de restaurer un batch binaire (delta patch) sans fichier à patcher dans le système de fichierImpossible de restaurer les métadonnées d'une inode sans une inode présente sur le système de fichierAucun fichier ne peut être restauré, aucune action n'a été réaliséeImpossible de restaurer le patch pour %S car l'inode existante n'est un fichier normalImpossible de restaurer le fichier %S : fichier inexistant dans le baseRestauration des droits d'accès de %s impossible : %sNe peut pas lire un caractère en arrièreNe peut pas déplacer la lecture dans le fichierImpossible de lire séquentiellement une archive de référence quand des signature delta ou des patch sont demandésImpossible de modifier les drapeaux "fcntl" du fichier : Impossible de définir la date de naissance: Impossible de positionner les dates de dernier accès et de dernière modification : Ne peut pas atteindre la fin du fichierNe peut peut aller plus loin pour restaurer un trouImpossible de se positionner à un endroit valide dans le 
fichierImpossible de se déplacer à la position courante dans un "tronc"Ne peut pas synchroniser l'écriture sur un generic_file en lecture seuleImpossible de débloquer les signaux : Impossible d'écrire une base en lecture seuleImpossible d'écrire l'archive : Écriture impossible en sortieImpossible d'écrire dans le fichier destination, abandon de l'opérationImpossible d'écrire sur la sortieLe hashage du catalogue ne correspond pas au hashage signé de l'archive, l'archive a été modifiée depuis sa création !Taille du catalogue dans l'archive : %i octetsTaille du catalogue dans l'archive : NCUne exception non "libdar" a été interceptéeUne exception Egeneric inconnue a été interceptée : Mise à jour des en-têtes de la base de données ...Mise à jour des informations de la base de données ...Vérification de l'ordonnancement chronologique des fichiers entre les archives ...Vérification de l'ordonnancement des dates entre les archives ...Vérification de l'ordonnancement des dates de fichiers ...Choisir un seul algorithme de compressionEffacement des attributs étendus (requis par la politique d'écrasement) : Fermeture de la couche de base ...Fermeture de la couche de compression ...Fermeture de la couche de chiffrement ...Fermeture de la couche d'échappement...Les commandes sont : Commandes : Options communes : Problème de communication, essayer à nouveau ?Compression et enregistrement de la base de donnée dans le fichier...Algorithme de compression utilisé : %Sle niveau de compression doit être compris entre 1 et 9 inclusLa compression (option -z) est inutile et ignorée lorsque -ak est utilisé"Compression_level" doit être compris entre 1 et 9 inclusLe CRC calculé pour sur EA du fichier %S diffère de celui stocké dans l'archive, les EA de ce fichier sont probablement corrompusLe CRC calculé sur les FSA du fichier %S diffère de celui stocké dans l'archive, les FSA de ce fichier sont probablement corrompusCalcul des statistiques ...Conflit lors de la sélection du fichier à 
conserver dans l'archive résultante : Considération d'une couche de chiffrement ...On considère la date comme nulle (Janvier 1970<)Traitement de la (première) archive de référence :Prise en compte de l'archive de référence auxilaire :Échec de la vérification de cohérence pour l'en-tête de l'archiveContinuer avec le contenu de l'archive ?Continuer ?On continue l'action en cours qui est de sorti ... donc on sort !Poursuite...Les données copiées ne correspondent pas au CRCLes données copiées ne correspondent pas au CRCCopie terminée. Il manque %.0f octet(s) de données Copie des signature delta depuis l'archive de référence : Données corrompues lues depuis un tubeLes données lues depuis un tube sont corrompuesBase de données corrompue : Entrée corrompue dans l'archive à la suite d'une séquence d'échappementCorruption rencontrée lors de la lecture de la structure de donnée header_versionCréation d'une inode impossible : Impossible de trouver la séquence d'échappement pour le catalogue interneÉchec lors de l'ouverture du répertoire %s en mode furtif (%s), ouverture en mode normalAffection du fichier à son propriétaire d'origine impossible : La structure de donnée du catalogue a pu être lue à %i de l'archive, elle contient ceci :Création d'une nouvelle file d'exécution pour le couche de compression...Création d'une nouvelle file pour exécuter la couche d'échappement ...Création d'une nouvelle file pour exécuter les couches précédemment créées ...Création de fichier ...Création de la couche de base : Ecriture de l'archive dans un trou noir (équivalent à /dev/null) ...Création de la couche de base : Ecriture de l'archive dans un fichier normal ...Création de la couche de base : Ecriture de l'archive vers un objet sar (Segmentation and Reassemblement) pour le découpage en tranches ...Création de la couche de base : Ecriture de l'archive vers la sortie standard ...La taille d'un bloc de chiffrement doit être supérieure à 10 octetsLe répetoire courant ne peut pas être un 
chemin relatifL'implémentation actuelle du chiffrement blowfish n'est pas compatible avec l'ancienne implémentation (boguée), utiliser dar-2.3.x (ou autre logiciel basé sur libdar-4.4.x) pour lire cette archiveAlgorithme de chiffrement non disponible dans libgcrypt : %s/%sLe sous-processus DAR a terminé avec le code de sortie Arrêt de DAR suite à la reception du signal : DIFF DonnéesUne corruption de données semble avoir eu lieu, impossible de déchiffrer les donnéesCorruption de données rencontré en fin de tranche, drapeau interdit à cet endroitCorruption de données rencontrée en fin de tranche, drapeau inconnuCorruption de donnée ou mark inconnue au sein des données d'un fichier à trouDonnées sauvées : %S %SDonnées plus récentes : %S %SLes données du fichier %S de la première archive ont été préservés de l'écrasementLe fichier %S est sur le point d'être %S, OK?Les données du fichier %S pris de la première archive de référence ont été écraséesLes données du fichier %S pris de la première archive de référence ont été retirésLes données devraient être en lecture seuleTaille des données : %i %iLa base de donnée a été initialisée avec succès.Base de données non sauvegardée, voulez-vous vraiment sortir ?Les dates ne croissent pas avec le numéro des archives pour tous les fichiers, l'utilisation de cette base de donnée peut conduire à la restauration d'une mauvaise version d'un tel fichier. S'il s'agit d'une erreur d'ordonnancement des archives dans la base, il est recommandé de le corriger afin d'avoir les archives les plus anciennes aux numéros les plus petits et les plus récentes aux numéros les plus grands Les dates de %S ne sont pas croissante avec le numéro des archives. Fichier concerné : %SDécompression et chargement de l'en-tête de la base de donnée en mémoire...Décompression et chargement de la base de données en mémoire ...Le mode décrémental est inutile lorsqu'il n'est pas appliqué à la fois à une archive de référence et à une archive auxiliaire de référence. 
Ignorer le mode décrémental et continuer ?Sauvegarde sous forme de batch binaire: tailles différentes des signatures delta : %i <--> %iLes signatures delta ne correspondent pasLes signatures delta ont bien la même taille mais leurs contenus ne correspondent pasDétection d'une marque de répertoire cache pour %s, le contenu de ce répertoire ne sera pas sauvegardéLe répertoire %S ne peut pas être restauré : écrasement interdit et une inode n'étant pas un répertoire existe déjà sous ce nom. Aucun fichiers de ce répertoire ne sera restauré : Le répertoire %S n'existe pas ou est un lien symbolic pointant nulle partInode sale : %S %SDésactivation du gestionnaire de signal. La prochaine réception de ce signal arrêtera immédiatement le programmeDivision par zéroVoulez-vous ignorer le même type d'erreur pour les prochains fichiers ?Voulez-vous l'utiliser pour l'opération ?A cause d'une corruption des données, il n'est pas possible si la tranche %S est ou non la dernière de l'archive. Merci de votre aide pour le déterminer. A l'invite qui suit, merci de répondre par "dernier" ou "non dernier" selon la nature de cette tranche (vous pouvez-aussi répondre par "stop" pour arrêter le programme immédiatement) : Enregistrement de la signature delta pour le fichier : Les EA et FSA du le fichier %S sont sur le point d'être %S, d'accord ?Les EA et FSA du fichier %S de la première archive ont été mis à jour avec ceux du fichier de même nom provenant de l'archive auxiliaireLes Attributs Étendus de %S sont sur le point d'être fusionnés, OK ?Des Attributs Étendus de %S sont sur le point d'être écrasés, OK ?Les Attributs Étendus de %S sont sur le point d'être supprimés, OK ?Les Attributs Étendus pour %S n'ont pas été effacés tel prévu par la politique d'écrasement car ce fichier est un lien dur pointant sur une inode déjà restauréeLes Attributs Étendus pour %S n'ont pas été écrasés à car ce fichier est un lien dur pointant sur une inode déjà restauréeAttributs Étendus sauvés : %S %SAttr. 
Etend. plus récents : %S %SNombre d'Attributs Étendus : %i %iLes Attributs Étendus du fichier %S de la première archive ont été retirés et marqués comme déjà sauvésLes attributs étendus du fichier %S de la première archive ont été retirésLes Attributs Étendus du fichier %S de la première archive ont été mis à jour avec ceux du fichier de même nom pris de l'archive auxiliaireLes Attributs Étendus du fichier %S ont été écrasésLes Attributs Étendus du fichier %S ont été écrasés et marqués comme déjà sauvésTaille totale des Attr. Eten.: %i %iERR ERR Soit les archives dans la base ne sont pas correctement rangées, soit la date de dernière modification du fichier a été artificiellement modifiée à une valeur plus ancienne. Ceci peut mener à une restauration incorrecte des Attributs Étendus de l'inode %SUne chaîne vide en tant que sous-répertoire ne donne pas un chemin valideValeur NULL donnée à en argument de tools_readlink()Une chaîne vide est un nom d'archive invalideUne chaîne vide n'est pas un chemin valideEntrer chaque argument ligne par ligne puis appuyer sur entrée à la fin Erreur de CRC sur l'information d'une entrée dans la table de contenuErreur de CRC sur l'entrée %SErreur de CRC sur l'entrée %S. 
Ignorer l'erreur ?Informations sur les entrées : "en place" "à ajouter"Type d'entrée : %s %sErreur lors de la conversion du code %d en messageErreur lors de la création de la prise Unix : Erreur pendant la création des couches pour l'archive : Erreur de création d'un tampon pour GPGME: %s Erreur lors de la création du lien dur %s : %s Tentative de duplication de l'inodeErreur lors de la création du lien dur : %s , l'inode à lier [ %s ] a disparue, elle va être recrééeErreur lors de la création du lien dur : %s , l'inode à lier [ %s ] n'est pas présente, le lien dur ne peut pas être restauréErreur pendant l'exécution de la ligne de commande utilisateur : Erreur recontrée lors de la recherche de GID pour le groupe %s : %SErreur recontrée lors de la recherche de GID pour le groupe %s : %SErreur rencontrée lors de la recherche de l'UID pour l'utilisateur %s: %SErreur lors de l'obtention de la position du fichier à lire : Erreur pendant l'obtention de la taille du fichier : Erreur dans le fichier inclus %s : Erreur lors de la construction de la table de hashage rsync: Erreur lors du transfert réseau : Erreur lors de la demande à libcurl de considérer ~/.netrc pour l'authentification : %sErreur lors de la désignation du fichier de clef privée : %sErreur lors de la désignation du fichier de clef publique : %sErreur lors de l'assignation de la méthode d'authentification sftp : %sErreur lors de la vérifiation de la capacité %S : %sErreur lors du contrôle du fichier résultant de l'application du patch : Erreur lors de la cration d'un objet libcurlErreur lors de la cration du fichier de hashage : Erreur lors de la duplication d'un objet libcurlErreur lors de l'envoi de données à librsync: Erreur lors lors de la récupération de la taille du fichier : %sErreur lors de lecture du répertoire FTP/SFTP %sErreur d'ouverture de %S : Erreur rencontrée lors de l'ouverture de la dernière tranche : %S . 
Tentative d'ouverture à partir de la première tranche ...Erreur lors du passage du nom d'utilisateur à libcurl : %sErreur lors de préparation d'un répertoire pour lecture : %s"Erreur lors du traitement de l'opération : Erreur lors de la lecture d'un block de données : %sErreur lors de la lecture des informations de découpage de l'archive de référence. Erreur ignorée, on continue sans cette informationErreur lors de la lecture de la ligne %S à partir du fichier %S : %SErreur lors de la lecture de l'entrée suivante : Erreur lors de la suppression du fichier %SErreur lors de la suppression du fichier %S : %sErreur lors de la réinitialisation de l'URL: %sErreur lors de la récupération de l'heure courante : %SErreur lors de l'assignation de la capacité %S : %sErreur lors de la désignation du fichier known_file : %sErreur lors de la configuration de l'authentification pour libcurl : %sErreur lors du paramétrage de libcurl pour la lecture du fichier : %sErreur lors du paramétrage de libcurl pour l'écriture du fichier : %sErreur lors de l'établissement de la connexion pour la suppression du fichier %S : %sErreur lors l'activation de la verbosité d'un objet : %sErreur d'ouverture de %S : Erreur lors de l'ouverture d'un répertoire en mode furtif : Erreur lors de l'ouverture du répertoire : Erreur lors de l'ouverture du tube : Erreur pendant l'exécution de l'action demandée : %S Erreur lors de la lecture des AE pour Erreur lors de la lecture de l'attribut %s du fichier %s : %sErreur pendant la lecture d'un caractère : Erreur lors de la lecture de la base %S : Erreur lors de la lecture du contenu du répertoire : Erreur pendant la l'analyse d'un fichier inclus (%s) : Erreur lors de la lecture de l'inode de %s : %sErreur lors de la lecture du fichier source (%.2f %% des données a été copiée), on essaie de lire plus loin : %s Erreur lors de la libération d'un travail de librsync: Erreur lors de la suppression du fichier %s : %SErreur lors de l'inventaire des AE pour %s : %sErreur 
lors de la sauvegarde des Attributs Étendus de Erreur lors de la sauvegarde des FSA de Erreur lors de la transformation de l'archive : Erreur lors de l'execution de %s : %sErreur lors de la suppression de %S : %sErreur lors de l'activation du garde de mémoire de libgcrypt : %s/%sErreur d'ajout de l'EA %s : %sErreur lors de l'assignation d'une clef à une référence de clef de libgcrypt (essiv) : %s/%sErreur lors de l'assignation d'une clef à un manipulateur de clef lors de la vérification de la force du mot de passe : %s/%sErreur lors de l'assignation d'une clef à une référence de libgcrypt : %s/%sErreur lors de l'appel à fork() pour lancer dar : Erreur lors du changement des propriétés du terminal : Erreur pendant la comparaison de l'archive avec le système de fichiers : Erreur lors de la prise en compte du fichier : Erreur lors de la conversion UID/GID en chaîne pour le crochet d'exécution lors de la sauvegarde d'un fichierErreur lors de la création d'une référence ESSIV : %s/%sErreur lors de la création d'un tube anonyme : Erreur lors de la création d'une référence pour générer un somme de contrôle : %s/%sErreur lors du chiffrement des données : %s/%sErreur lors du déchiffrement des données : %s/%sErreur lors de la création de la clef à partir du mot de passe (ouverture HMAC) : %s/%sErreur lors du calcul de la clef à partir du mot de passe (HMAC set key) : %s/%sErreur lors de la recherche des propriétés de l'archive : Erreur lors de recherche du nom d'hôte (hostname) : Erreur lors de la recherche d'informations pour %S : Erreur lors de la génération du vecteur initial (IV) : %s/%sErreur lors de l'initialisation de "mutex" pour la classe "statistics" : Erreur lors de l'initialisation d'une fonction de hashage : Algorithme non disponible dans libgcrypt : %s/%sErreur lors de l'affichage du contenu de l'archive : Erreur lors de la création d'un manipulateur de clef lors de la vérification de la force du mot de passe : %s/%sErreur lors de l'ouverture d'une référence de 
clef pour libgcrypt: %s/%sErreur lors de l'ouverture de l'archive de référence : Erreur de syntaxe concernant --min-digits : Erreur de syntaxe dans la date donné à l'option -A : Erreur lors de la lecture du CRC pour des AE à partir de l'archive : Aucune séquence d'échappement trouvé pour ce fichierErreur lors de la lecture du CRC pour des FSA à partir de l'archive : Aucune séquence d'échappement trouvé pour ce fichierErreur de CRC lors de la lecture de la signature de différence binaire : Corruption de données.Erreur lors de la lecture des FSA : Erreur lors de l'ouverture de l'en-tête de l'archive. Ce peut être dû à une ancienne archive chiffrée ou à une corruption de l'archive. On supposons qu'il s'agit d'une ancienne archive, pour laquelle pour laquelle l'en-tête n'est présent qu'au début de la première tranche ...Erreur lors de la lecture de donnée destinée à une mémoire sécurisée : Erreur lors de la lecture du fichier : Erreur lors de la lecture depuis un tube : Erreur lors de la lecture de la réponse sur le terminal : Erreur de suppression de %s : %sErreur lors de la réinitialisation de la clef de chiffrement pour un nouveau bloc : %s/%sErreur lors de la restauration de Erreur lors de la restauration de : "Erreur pendant la restauration des fichiers suivants : Erreur lors de la récupération de signature delta depuis l'archive : Erreur lors de la sauvegarde de Erreur lors de la sauvegarde des données : Erreur de positionnement dans un fichier d'un dépot distant : %sErreur lors de la détermination du vecteur initial (IV) pour le bloc courant : %s/%sErreur lors de la configuration des droits d'accès : %sErreur lors de la changement de propriétaire : %sImpossible de passer libcurl en mode ajout en écriture : %sErreur lors de la communication à libgcrypt que l'initialisation est terminée : %s/%sErreur lors du test de l'archive : Erreur lors de l'écriture sur un tube : Erreur lors de l'enregistrement du fichier : Erreur ! 
SHA1 n'est pas disponible dans libgcrypt : %s/%sErreur, le catalogue de référence n'a pas été fourniErreur, phrase secrète invalide, nouvel essai : Erreur lors de l'envoie de la phrase secrète à GPGME : Les séquences d'échappement utilisées pour lire l'archive conduisent à placer des fichiers en dehors de l'aborescence spécifiée. Pour résoudre ce problème, essayer de lire l'archive en mode direct, essayez de réparer l'archive avec Parchive si des fichiers de redondance ont été crées ou en dernier lieu essayez de lire l'archive en mode relaxAnnulation...Exception attrapée depuis l'execution de archive_listing_callbackException attrapée depuis l'execution de archive_listing_callback : %sles AE existants de %S n'ont pas pu être préservés : les Attributs Étendus existants de %S n'ont pas pu être lu ni préservé : les FSA existants de %S n'ont pas pu être préservés : les FSA existants de %S n'ont pas pu être lu ni préservé : Impossible de restaurer les seules métadonnées car le fichier présent est de nature différente de celui restauréAttributs Étendus du fichier trouvés dans la base mais impossibles à restaurer (ne sont présents que comme "inchangés" dans des archives différentielles)FAUXErreur FATALE lors de la communication réseau, abandon de l'opération : Erreur FATALE provenant du système d'exploitation, abandon de l'opération : Erreur FATALE, abandon de l'opération : Famille de FSA : %S %SLes FSA de %S sont sur le point d'être écrasés, OK ?Les FSA de %S n'ont pas été écrasés car ce fichier est un lien dur pointant sur une inode déjà restauréeFSA sauvés complètement : %S %SLes FSA du fichier %S de la première archive ont été retirés et marqués comme déjà sauvésLes FSA du fichier %S de la première archive ont été retirésImpossible d'assigner une URL à libcurl : %sÉchec de création d'un contexte GPGME : Échec de création d'une tranche %S: dar_slave n'est pas arrivé à atteindre la fin de l'archive: %S. 
Tentantive d'ouverture de l'archive depuis les premiers octetsEchec de l'ouverture de la couche de cache, faute de mémoire disponible, l'archive sera lue sans optimisation des performancesEchec de lecture pour %S : Echec de lecture du CRC pour les EA et FSA : Echec de lecture du CRC de données pour %S, le fichier est probablement abîmé et sera marqué comme saleEchec de lecture des FSA existant de la famille extX : Echec de lecture des informations d'inode de %s : Echec de lecture de la tranche %S : Echec de récupération de la phrase secrèteImpossible de connaître la taille de bloc utilisée par l'algorithme de chiffrement : %s/%sÉchec lors de la récupération de la taille de bloc utilisée par libgcrypt pour l'algorithme de chiffrement : %s/%sÉchec de la récupération de la longueur de clef à utiliser : %s/%sÉchec de la récupération de la longueur maximum possible des clefsImpossible d'envoyer un retour-chariot après la phrase secrèteEchec de l'envoi de la totalité de la phrase secrète à GPGMEEchec d'affectation des FSA extX : Echec d'affectation (ouverture) des FSA extX: Echec de paramétrage d'un contexte GPGME avec le protocole OpenPGP : Impossible d'enregistrer une taille de stockage nulle pour ce fichier sans CRC, une erreur de CRC sera signalée pour ce fichier lors de la lecture de l'archive réparéeImpossible d'ouvrir %S pendant la consultation du drapeau "nodump" : %sÉchec lors de l'écriture de la somme de contrôle : Erreur fatale sur la ligne de commande utilisateur : La récupération d'EA lors de la lecture d'une archive n'est pas possible en mode séquentielLe fichier %S a changé lors de sa sauvegarde and n'est probablement pas sauvé un état valide ("fichier sale"), voulez-vous malgré tout le considérer pour la restauration ?Fichier %S ligne %dFichier trouvé dans la base mais impossible à restaurer (n'est présent que comme "inchangé" dans des archives différentielles ou bien comme "delta patch" mais sans la référence sur laquelle le patch s'applique présente 
dans une archive de la base):Le fichier a changé lors de sauvegarde mais a été sauvé plusieurs fois, restauration de la copie suivante : Le fichier a disparu alors que nous étions en train de le lire, impossible de vérifier s'il a changé lors de sa sauvegarde : %SBoucle dans l'inclusion des fichiers. Le fichier %s s'inclut lui-même directement ou via d'autres fichiers (option -B)Le mode d'ouverture du fichier n'est ni en lecture ni en écritureLe fichier doit être un répertoire : Fichier non trouvé dans la baseIndexe du fichier trop grand pour être stocké var un variable de type off_tLe propriétaire des fichiers ne sera pas restauré faute de privilège pour ce faire. Vous pouvez supprimer ce message en demandant à ne pas restaurer ces propriétésFichier enregistré comme supprimé à cette date dans la base de données : La sélection des fichiers a été interrompue. Construction de l'archive résultante avec les fichiers déjà sélectionnésTaille des fichiers : %i octetsTaille de fichier trop petiteTaille du fichier : %i octetsLe fichier sur lequel le patch est sur le point d'être appliqué ne correspond pas à celui attendu, Abandon de l'opérationFichier à chercher : Le nom de fichier fourni par le système d'exploitation semble coupé au niveau du répertoire %s, enregistrement du nom de fichier tel quel: %sLes attributs spécifiques au système de fichier (FSA) font défautFinalisation du nettoyage de la mémoire ...Fin de l'inspection du répertoire %S , sauvé %S%SÉcriture terminée dans le fichier Fichier initial : %i octetsTaille du premier fichier trop petiteLa taille de la première tranche est trop petite pour être en mesure contenir ne serait-ce que l'en-tête de tranchePour information, c'est l'itération n°%d pour laquelle la clef aléatoire générée est considérée comme faible par libgcrypt. La rechercher d'une nouvelle clef aléatoire continue ... 
patienceMise en forme du fichier en base de donnée vide ...Trouvé 'Un en-tête d'archive a bien été trouvé en début d'archive, il n'indique pas que l'archive est d'un ancien format, la fin de l'archive est donc belle et bien corrompue. Vous pouvez soit réessayer en fournissant un catalogue isolé en complément de cette operation ou tenter de lire l'archive en mode sequentiel, voire tenter le mode laxiste comme dernier recours, en utilisant le mode laxiste et le mode sequentiel en même tempsUn en-tête d'archive a bien été trouvé en début d'archive, il n'indique pas que l'archive est d'un ancien format, la fin de l'archive est donc belle et bien corrompue. Sans la fourniture d'un catalogue externe et comme nous ne lisons pas l'archive en mode sequentiel, il n'y a que peu de chances d'arriver à récupérer quelque chose de cette archive corrompue. Doit-on continuer malgré tout ?Date négative rencontrée (%s) pour l'inode %s .Mode de lecture furtifLe mode de lecture furtive requièrt soit les droits root soit la capacité FOWNER, retour à une un mode lecture normalle moteur GPGME n'est pas disponible: %sprérequis insatisfait pour GPGME, une version supérieure à %s est nécessaireGénération d'une clef de chiffrement aléatoire pour le chiffrement symmétrique ...la date fournie doit être dans le passéeLes cibles utilisateurs suivantes n'ont pas été trouvées: %SDonner à l'option -S la même valeur que celle donnée à l'option -s est inutileDonner à -S la même valeur que celle donnée à -s est inutileLes liens durs pour les répertoires ne sont pas supportésNombre de ligne à afficher à chaque fois : ERREUR INTERNE, MERCI DE RAPPORTER L'AFFICHAGE PRÉCÉDENT AU MAINTENEUR DU PROGRAMMESi vous voulez vraiment arrêter la création de l'archive, appuyez sur CTRL+C puis appuyez sur la touche entréeIgnorer l'erreur précédente concernant la ligne de commande utilisateur et continuer ?Les arguments supplémentaires sur la ligne de commande sont ignorésFichier sans le drapeau NODUMP ignoré : Option 
inconnue -%c ignoréeDans le fichier inclus %S : Position à laquelle insérer l'archive : Réponse incohérente du correspondantIncohérence dans le catalogue : duplication des données associées à une inode liée en durstructure incohérente du catalogue : les données liées en dur ne sont pas celles d'une inodestructure incohérente du catalogue : données non trouvée pour une inode ayant un lien durIncohérence dans le catalogue : drapeau d'état inconnu pour une inode liée en durEn-tête de tranche incohérent : taille de première tranche trop petiteIncohérence dans la structure de données gérant les fichiers à trou : marque inconnuelabel incompletIntervalle d'archive incorrecte pour la baseQuantième inconnuHeure incorrecteMinute incorrecteMois incorrectSeconde incorrecteIndex hors limitesProblème d'initialisation pour la bibliothèque liblzo2l'entrée ne peut être lueInspection d'un répertoire Entier trop grand pour libcurl, impossible de placer le curseur à la position demandée dans le répertoire distantvaleur nullptr donnée à "ptr"Texte de chiffrement invalideNuméro d'archive invalide : Numéro d'archive invalide : %dArgument invalide donné comme archive destinationArgument invalide donné comme archive sourceArgument invalide transmis à l'option -T, [:] est attenduValeur incorrecte du diviseur pour la détermination de la taille des block de signature deltaAlgorithm de hashage inconnu fourni à l'option -T : %svaleur nullptr donnée à "ptr"valeur nullptr donnée à "close_archive"Nombre invalide dans la chaîne : %SNuméro invalide : Chemin invalide, le chemin doit être relatifTaille invalide pour un CRCTaille invalide donnée via l'option -STaille invalide donnée via l'option -sCaractère large invalide dans la chaîne : Est un répertoire : %S %SEst un lien dur : %S %SEst une inode : %S %SEst un fichier : %S %SL'isolation avec signature delta est impossible en une seule opération (isolation à la volée)L'itérateur ne pointe pas sur des donnéesAlgorithme de dérivation de clef : 
%Sitérations pour dérivation de clef : %SUne clef ne peut être une chaîne videClef trouvée mais tous les utilisateurs n'ont pas un niveau de confiance suffisantClef généréeMODE RELAX : "%S" n'est pas un format d'archive valideMODE RELAX : %S a un en-tête corrompu, tentative de détermination des valeurs d'origine puis on continue si c'est possibleMODE RELAX : %S est manquant. Vous avez la possibilité de créer un fichier de taille nulle de ce nom pour remplacer le fichier manquant. Ceci génèrera bien sûr des messages d'erreurs à propos des informations manquantes dans cette tranche, mais au moins libdar pourra continuer. Peut on continuer maintenant ?MODE RELAX : %i %% restantMODE RELAX : %i n'est pas une valeur valide de pourcentageLa structure de répertoires de l'archive est corrompue, elle conduirait à placer des fichiers en dehors de sa racine. Restauration du contenu de tels répertoires à la racine et non en dehorsMODE RELAX : l'archive est marquée comme ayant des séquences d'échappement (ce qui est normal pour une version récente d'archive). Cependant, si ce n'est pas attendu, doit-on considérer qu'une corruption des données a eu lieu pour ce champ et ignorer cette marque ? (En cas de doute, refuser)MODE RELAX : L'archive est marquée comme ne possédant pas de séquence d'échappement ce qui n'est plus le cas par défaut depuis le format 8 (release 2.4.x). Si une corruption a eu lieu sur ce champ et qu'effectivement des séquences d'échappement sont présentes, ceci peut conduire à un échec de la restauration. Répondre non à la question suivante permettra d'insérer une couche de traitement des séquences d'échappement lors de la lecture malgré les indications présentes dans l'en-tête de l'archive. 
Voulez-vous continuer comme le suggère l'en-tête de l'archive, c'est-à-dire sans couche de gestion des séquences d'échappement ?MODE RELAX : L'archive semble chiffrée, mais aucun algorithme de chiffrement n'a été fourni, on suppose qu'il s'agit d'une corruption des données et qu'en fait l'archive n'est pas chiffréeMODE RELAX : L'archive semble n'être qu'un catalogue isolé (aucune donnée présente), Doit-on considérer qu'il s'agit plutôt d'une corruption et peut-on considérer que l'archive est une vraie archive ?MODE RELAX : Début de la recherche du catalogue (de la fin vers le début de l'archive pour %i %% de sa longueur), l'opération peut durer longtemps ..."MODE RELAX : erreur de CRC pour le catalogue, le contenu de l'archive est corrompu. Ceci peut même amener dar à voir des fichiers dans l'archive qui n'ont jamais existé, mais conduira plus probablement à d'autres échecs lors de la restauration des fichiers. Doit-on poursuivre malgré tout ?MODE RELAX : Impossible d'aller à la fin de l'archive ! On utilise la position actuelle comme point de départ de la recherche du catalogueMODE RELAX : Exception attrapée : MODE RELAX : Impossible de trouver un catalogue entier dans l'archive. Si vous possédez un catalogue isolé, arrêter à ce stade et utilisez-le en secours du catalogue interne. Sinon continuez mais soyez prévenus que toutes les données ne pourront pas être récupéréesMODE RELAX : Voulez-vous ignorer les tests de cohérence et tenter à nouveau la lecture du contenu de l'archive (ce qui peut prendre du temps et peut aussi échouer) ?MODE RELAX : Voulez-vous rechercher des morceaux du catalogue d'origine s'il en reste (ce qui peut prendre encore plus de temps et en tout état de cause, ne permettra au plus de récupérer que quelques fichiers) ?MODE RELAX : A cause d'une corruption probable des données, libdar ne peut déterminer la taille exacte des tranches. Pour des archives récentes, cette information est dupliquée dans chaque tranche. 
Voulez-vous essayer de lire une autre tranche pour récupérer cette information ?MODE RELAX : Erreur lors de la construction d'une entrée du catalogue, on ignore et on passe à la suivante. L'erreur rencontrée était : MODE RELAX : La séquence d'échappement n'a pas pu être trouvée, elle a soit été corrompue, soit elle est hors du périmètre de recherche dans l'archive. On essaye de trouver le catalogue par l'autre méthode, patience ...MODE RELAX : Séquences d'échappement présentes dans l'archive. Deux méthodes différentes sont disponibles pour localiser le catalogue : soit on recherche la séquence d'échappement indiquant le début du catalogue, soit on essaie les différentes positions tour à tour dans l'espoir que cette portion de l'archive ressemblera à la structure d'un catalogueMODE RELAX : Échec de lecture de la version du format de l'en-tête de l'archiveMODE RELAX : Échec de lecture du catalogueMODE RELAX : Échec de lecture du catalogue (0 octets de l'archive demandé pour sa recherche)MODE RELAX : Échec de lecture du catalogue (aucune donnée à inspecter)MODE RELAX : Bonne nouvelle ! La séquence d'échappement marquant le début du catalogue a été trouvée ! 
On essaie maintenant de le lire ...MODE RELAX : On continue malgré l'absence d'information à propos de la taille des tranchesMODE RELAX : En dépit de son nom, %S ne semble pas être une tranche d'archive, on suppose qu'une corruption des données a eu lieu et on continueMODE RELAX : Merci de fournir un nombre entier strictement positifMODE RELAX : Merci d'indiquer le format de l'archive : Vous pouvez utiliser la table à l'URL %s pour trouver la version de format en fonction de la release de dar/libdar (par exemple si l'archive a été crée en utilisant une version de dar de 2.3.4 à 2.3.7 indiquer "6" sans les guillemets) : MODE RELAX : Merci de fournir le numéro de la tranche à lire : MODE RELAX : Fin de la zone de recherche atteinte, ECHEC de la localisation du catalogueMODE RELAX : Le drapeau de tranche est corrompu, mais comme une tranche de numéro supérieur a déjà été vu, ce drapeau ne pouvait pas indiquer cette tranche comme la dernière de l'archive, on continueMODE RELAX : le catalogue (table des matières) occupe en générale quelques pourcentages de l'archive à sa fin. Quel pourcentage voulez-vous qu'il soit balayé pour sa recherche (répondre par un *entier* entre 0 et 100) ? MODE RELAX : La fin de l'archive est corrompue, impossible d'obtenir le contenu de l'archive (le "catalogue")MODE RELAX : On essaie de localiser la séquence d'échappement (conseillé) ? MODE RELAX : état de sauvegarde inattendu pour un objet de la classe "cat_detruit", corruption de données probable et ignorée. On continueMODE RELAX : état de sauvegarde inattendu pour une fin de répertoire, corruption des données probable et ignorée. On continueMODE RELAX : Un algorithme de compression inconnu a été utilisé, on suppose qu'une corruption des données a eu lieu. 
Merci de m'aider à déterminer le mode de compression qui a été utilisé en saisissant l'un des mots suivants "aucun", "gzip", "bzip2", "lzo" ou "xz" : MODE RELAX : Data inconnue dans un en-tête de tranche, on ignore et on continueMODE RELAX : Suivre le format d'archive "%d" ?LAX MODE: le hashage du catalogue ne correspond pas au hashage signé de l'archive, erreur ignoréeMODE RELAX : l'étiquette du catalogue ne correspond pas à celle de l'archive, comme s'il s'agissait d'un catalogue isolé, on suppose qu'une corruption a eu lieu et que le catalogue est une catalogue normal interne à l'archiveMODE RELAX : fermeture de la tranche %i, l'en-tête a pu être lu correctementMODE RELAX : la taille de la première tranche est impossible à lire (manque de mémoire virtuelle ?), on continue malgré tout ...MODE RELAX : la taille de la première tranche est impossible à lire, on continue malgré tout ... MODE RELAX : Entrée inconnue dans le catalogue, corruption de données probable qui interdit de lire plus avant le contenu de l'archive car aucune la longueur de cet enregistrement n'est pas connueMODE RELAX : entrée inconnue trouvée dans le catalogue, on considère qu'une corruption de données a eu lieu. Passage à l'entrée suivante, ce qui peut conduire à une mauvaise restauration de la structure de répertoires, si cette entrée corrompue était un répertoireMODE RELAX : le nom interne de la tranche laisse à penser qu'elle ne fait pas partie de la même archive. 
On suppose qu'il s'agit d'une corruption de données et on ignore cette erreurMODE RELAX : ouverture de la tranche %i afin de lire l'en-tête de trancheMODE RELAX : Impossible de lire la taille des tranches, (manque de mémoire virtuelle ?), on continue malgré tout ...MODE RELAX : les tranches %i et %i sont toutes les deux marquées comme la dernière de l'archive, on considère que la dernière est celle de numéro le plus élevéMODE RELAX : La lecture séqentielle demandée a besoin que l'archive contienne de séquences d'échappement ce qui ne semble pas être le cas ici. On suppose qu'il s'agit d'une corruption de données. Cependant, si tel n'est pas le cas, et si aucune séquence d'échappement n'est effectivement présente dans cette archive, il ne faut pas utiliser le mode de lecture séquentielle pour lire cette archive, aucune information pertinente ne pourrait en être extraiteManque de mémoirePas assez de mémoire SÉCURISÉE pour terminer l'opération, abandonManque de mémoire sécuriséePas assez de mémoire pour terminer l'opération, abandonFichier final : %i octetsLa bibliothèque de chiffrement pour l'algorithme blowfish ne respecte pas la RFC 3962Options de listing (à utiliser avec -l) : Chargement en mémoire du catalogue...Chargement en mémoire du catalogue isolé...Localisation du contenu de l'archive ...Options longues supportées : %s Recherche des archives contenant les versions les plus récentes des fichiers et tri des fichiers par archive à utiliser pour la restauration ...Nettoyage mémoire (libération de la mémoire utilisée par l'archive de référence) ...Le préfixe pour une mask_list doit être un chemin absolu ou commencer par la chaîne "" pour la fusion d'archivesÉchec de l'allocation mémoire : %sFusion/Filtrage des fichiers de l'archive %s ...Il manque l'une des options -c -x -d -t -l -C -+, tapez '%S -h' pour l'aideIl manque un [ après un } dans la condition suivante : Il manque un ] dans la condition suivante : Il manque le nom de base de l'archive, voir l'option 
-h pour l'aideArgument manquant pour --backup-hook-executeIl manque un argument à --deltaIl manque un argument à --hashIl manque un argument à --min-digitsIl manque un argument à l'option -EPas d'argument à l'option -EIl manque un argument à l'option -FIl manque un argument à l'option -SIl manque un argument à -^Pas d'argument à l'option -iPas d'argument à l'option -oIl manque un argument à -sPas assez d'arguments sur la ligne de commande, abandonPas de catalogue dans le fichier.Données absentes pour le chiffrement asymétrique des donnéesil manque des données initialiser un objet TLVDéfaut de données lors de la lecture d'un object slice_layoutAucun support pour les algorithmes de hashage (partie intégrante du support pour le chiffrement fort, grâce à libgcrypt)Pas d'argument à l'option -%cIl manque la source ou la destination, voir l'option -h pour l'aideAbsence de support pour le chiffrement fort (libgcrypt)Il manque un } dans la condition suivante : NONAUCUN FICHIER DE CE RÉPERTOIRE NE PEUT ÊTRE SAUVÉ.une FONCTIONNALITÉ NON ENCORE IMPLÉMENTÉE a été sollicitée :Il est nécessaire d'activer la détection des fichiers creux afin de calculer la signature delta pour le fichier creux %SNombre négatif ou nul non autorisé pour le déplacement d'une archive au sein de la baseNouveau numéro pour l'archive %d : Nouveau nom pour la base de données : Nouveau chemin de l'archive %d : Aucun attribut spécifique au système de fichier avec lequel comparerAucune action spécifiée, abandonAucune tranche n'est présent dans %S pour l'archive %S, merci de fournir la dernière tranche de l'archive.Aucune couche de chiffrement ouverteAucune couche de chiffrement n'a été ouverte. 
Ajout d'une couche de cache pour améliorer les performancesAucune donnée trouvée dans cette archive, lecture séquentielle du catalogue situé à la fin de l 'archive...Aucune donnée à déchiffrerPas de base de donnée spécifiée, abandonAucune exception n'est autorisée en provenance d'une fonction de rappel de libdarAucun fichier de ce répertoire ne sera pris en compte pour la fusion.Aucun fichier de ce répertoire ne sera restauré.Aucun masque disponibleAucun masque sur lequel opérer dans la liste de masqueAucun masque sur lequel opérer dans la liste de masqueAucun problème rencontréAucun destinataire avec une clef valide ne demeure, le chiffrement est impossible, arrêt de l'opérationAucun signataire avec une clef valide ne demeure, la signature de l'archive est impossible, arrêt de l'opérationAucune information de découpage en tranche n'est disponible à propos de l'archive dont a été isolé le présent catalogue. Impossible de localiser les fichiers au sein de l'archive, arrêt des opérations.Plus d'espace disponibles pour les inodes, vous avez la possibilité d'en faire maintenant. Quand ce sera fait, pouvons-nous continuer ?Plus d'espace disque disponible, vous avez la possibilité d'en faire maintenant. Quand ce sera fait, pouvons-nous continuer ?Aucun terminal n'a été trouvé pour interagir avec l'utilisateur. 
On considérera une réponse négative à toutes les questions posées à l'utilisateur (choix le moins destructeur), ce qui la plupart du temps arrêtera le programme.Aucune cible utilisateur trouvée sur la ligne de commandeAucune clef de chiffrement valide n'a pu être trouvée pour %SAucune clef de signature valide n'a pu être trouvée pour %SArchive inexistante dans la baseFichier inexistant dans la baseFichier inexistant : Erreur non fatale lors de l'écriture des données, on essaie à nouveau Non SauvéLa chaîne lue depuis un fichier n'est pas terminée par le caractère zéroPas assez de données pour initialiser le champ "storage"Pas d'affectation du FSA extX IMMUTABLE pour %s à cause du défaut de privilège pour le fairePas d'affectation du FSA extX SYSTEM RESOURCE pour %s à cause du défaut de privilège pour le faireRéalisation de l'isolation à la volée ...OK OK OK, on garde %S comme nom de baseOUUne seule option -@ est permiseUne seule option -A est permiseUne seule option -R est permiseUne seule option -S est permiseUne seule option -s est permiseOuverture d'une paire de tubes pour lire l'archive, dar_slave doit être utilisé à l'autre extrémité de ces tubes ...Ouverture et lecture du fichier de traitement par lot...Ouverture de l'archive %s ...Ouverture d'une couche de construction ...Ouverture d'une couche de chiffrement ...Ouverture d'une couche de gestion des séquences d'échappement ...Ouverture du tube nommé %S comme source pour lire l'archive ...Ouverture de l'entrée standard pour lire l'archive ...Ouverture de l'archive de référence %s pour récupérer le catalogue isolé ...Ouverture de l'archive par la couche d'abstraction des tranches...Ouverture de la couche d'abstraction gérant la compression (aucun algorithme de compression utilisé)...Ouverture de la couche de d'abstraction gérant la compression ...Options : Index hors limite demandé pour un secu_stringÉcrasement interdit alors qu'une tranche de même nom de base a été trouvée dans le répertoire %s. 
Opération avortéeErreur de syntaxe en ligne de commande (ou dans un fichier inclus) : Erreur de syntaxe : Erreur de syntaxe : `%c' sans correspondanceRequête partielle reçue, opération abandonnée Phrase secrète requise pour la clef %s : Le patch ne correspond pas à son CRC, l'archive est corrompueChemin de dar (chemin vide pour l'utilisation de la variable PATH) : L'autre extrémité du tube est inconnue, impossible de fermer un quelconque descripteur de fichier pointant dessusL'autre extrémité du tube est inconnue, impossible d'en fournir un descripteur de fichierMerci de répondre avec un des caractères entre crochets et appuyer sur "entrée"Merci de vérifier la documentation et/ou de mettre à jour ce logicielMerci de vérifier votre matérielMerci de confirmer le mot de passe : Merci de fournir le mot de passe pour le login %S sur l'hôte %S : La taille de l'archive ne tient pas compte des en-têtes de chaque trancheBogue probable dans liblzo2 : lzo1x_*_compress a retourné un code non prévu : %dTraitement des fichiers pour la sauvegarde ...Traitement des fichier pour la réparation ...Traitement des fichier pour la fusion ...Le programme a été arrêté pour la raison suivante : Fin de fichier atteinte pendant la lecture de la structure de données header_versionFin de fichier atteinte pendant la lecture de la version d'archiveFin de fichier atteinte, aucune donnée correcte n'a pu être trouvée après la dernière erreur Fin de fichier atteinte alors que toutes les données n'ont pu être luesFin de fichier atteinte lors de la lecture d'un en-tête de tranche d'archiveFin de fichier atteinte lors de la lecture de la version d'archiveLecture sur un "generic_file" ouvert en écriture seuleLecture par anticipation sur un "generic_file" en écriture seuleLecture du contenu de l'archive ...Lecture du catalogue de l'archive à ajouter ...Options de lecture (à utiliser avec -x, -d, -t, -l, -A) : Lecture de l'en-tête de l'archive ...La lecture de l'archive de référence à partir d'une tube ou 
de l'entrée standard n'est pas possibleLecture de la fin de l'archive ...Prêt à commencer l'écriture de l'archive ?Signal reçu : %dSignal %s reçul'ordre ordre spécial reçu est inconnuEnregistrement de lien dur dans l'archive : Le "storage" de référence de l'itérateur est vide ou non existantSuppression du drapeau immutable afin de restorer les données de %SSuppression du fichier %sSuppressin du fichier (fichier marqué comme supprimé dans l'archive) : %SSuppression du fichier (politique d'écrasement) : %SSuppression des informations de la base ...Suppression du fichier sale %SRemplacement de %s dans l'option -R par le répertoire pointé par ce lien symboliqueSauve à nouveau mais sans compression: Remettre à zéro le processus de lecture séquentielle alors qu'il n'est pas terminé, rendra inaccessible toute les données non lues jusque làRestauration des EA de %S arrêtée : Restauration des FSA de %S arrêtée : Restauration du FSA immutable pour %S arrêtée : Restauration des Attributs Étendus du fichier : Restauration des FSA du fichier : Restauration des données du fichier à l'aide de delta patch : Restauration des données du fichier : Restauration des FSA du fichier Options de restauration (à utiliser avec -x) : Lancer un fichier de traitement par lot à partir d'un autre fichier de traitement par lot n'est pas autoriséeAUTO-CONTROL : AU MOINS UNE FILE D'EXÉCUTION N'A PAS ÉTÉ DÉTRUITE ET RESTE EN MÉMOIRE ALORS QUE LE PROGRAMME TOUCHE À SA FINAVERTISSEMENT DE SECURITÉ ! 
FICHIER SUSPECT %S : la date ctime a changé depuis que l'archive de référence a été faite, alors qu'aucune autre propriété de cette inode n'a changéIL SEMBLE QU'IL Y AIT UN PROBLÈME MATÉRIEL :SAUTÉ (lien dur en mode sequentiel) : Taille du grain de sel : %d octet%cDonnées identiques mais CRC non vérifié, car sa largeur n'a pas été devinée correctement (restriction due à la lecture sequentielle)Même donnée, mais le CRC enregistré ne correspond pas à ces données !?!SauvéSauvegarde des Attributs Étendus de Sauvegarde des attributs spécifiques au système de fichier Étendus pour Options de sauvegarde/d'isolation/fusion/réparation (à utiliser avec -c, -C, -+ ou -y) : Le chiffrement "scramble" est très faible. C'est un non-sens de l'utiliser avec le chiffrement asymétriqueUne chaine sécurés ne peut être saisie que depuis un terminalVoir la page de manuel pour plus d'options. Lecture séquentielle demandée, mais cette archive est marquée comme ne possédant pas de séquence d'échappement nécessaire pour cette opération. Opération abandonnéeMarques pour lecture séquentielle : %sLa lecture sequentielle d'une archive est impossible quand une comparaison des deltas binaires est demandée, l'archive doit être lue en mode direct (mode par defaut)taille trop grande pour un tampon élastiqueSaut effectué (manque %.0f octet(s)), données correctes trouvées, on continue la copie... 
La tranche %S possède un drapeau inconnu (ni terminal ni non-terminal).La taille de tranche est trop petite pour être en mesure de contenir ne serait-ce que l'en-tête de trancheLe découpage en tranches (option -s) n'est pas compatible avec la production de l'archive sur la sortie standard ("-" en nom d'archive)Des comparaisons de fichier ont échouéCertains fichiers sont corrompus dans l'archive et ne pourront pas être restaurésCertains fichiers ne sont pas rangés par ordre chronologique en suivant des indices croissants d'archive au sein de la base de donnée, ceci peut amener dar_manager à ne pas restaurer la bonne version d'un de ces fichiersDésolé, la taille du fichier est inconnue à cette étape du programmeFichier à trou : %S %SDébut de la copie de %u octet(s)Soustraction d'un "infinint" par un autre plus grand que le premier. Un "infinint" ne peut pas être négatifClef de chiffrement symmétrique : %SErreur de syntaxe dans l'argument fourni à l'option --retry-on-change : Erreur de syntaxe dans le fichier de traitement par lot : %SErreur de syntaxe dans le fichier de traitement par lot : option -C non autoriséeErreur de syntaxe dans le fichier de traitement par lot : option -i non autoriséeErreur de syntaxe dans la politique d'écrasement : VRAIL'utilisation du standard de marquage des répertoires de cache n'est utile que lors d'une sauvegardeL'archive %S est chiffrée mais aucune algorithme de chiffrement n'a été donné. Ouverture de l'archive impossible.L'archive et le catalogue isolé ne correspondent pas aux mêmes données, ils sont donc incompatibles entre euxLe nom de base '+' est réservé pour un usage particulier qui n'a aucun sens dans ce contexteLe catalogue sera construit au fil de la lecture séquentielle de l'archive. Préparation de la structure de données ...L'entrée dans laquelle poursuivre la récursion n'existe pas. 
Impossible d'ajouter celle-ci à un répertoire absentLes commentaires utilisateur suivants seront placés en clair dans l'archive : %SLa version du format ce cette archive est trop grand pour cette version d'application. On essaye quand même lire cette archive ?La version du format de cette base de données et trop élevé pour cette version de logiciel. Utiliser une version du logiciel plus récente pour lire ou modifier cette base de donnéeLe chemin %s doit être un répertoire (ou un lien sur un répertoire)La taux de compression global est : L'itérateur n'indexe pas l'objet sur lequel il lui a été demandé de lireL'itérateur n'indexe pas l'objet sur lequel il lui a été demandé d'écrireL'itérateur n'indexe pas l'objet pour lequel il a été définiLa position de l'itérateur n'est pas contenue dans le "storage" de référenceLe fichier juste restauré %S a été marqué comme sale (en lecture séquentielle cet état ne peut être détecté qu'après la restauration du fichier), voulez-vous supprimer le fichier sale qui vient juste d'être restauré ?Le fichier juste restauré %S a été marqué comme sale (en lecture séquentielle cet état ne peut être détecté qu'après la restauration du fichier), suppression du fichier sale qui vient juste d'être restauré, puisqu'il a été demandé d'ignorer ce type de fichierLe dernier fichier (tranche) de l'archive n'est pas présent dans La taille totale de l'archive inclut %i octet(s) gaspillé(s) à cause d'un mauvais taux de compressionL'option de "photographie" (-A +) n'est disponible qu'avec l'option -cLes deux mots de passes ne sont pas identiques. AbandonCette archive ne contient qu'un catalogue isolé et ne peut pas être utilisée pour cette opération. 
Elle ne peut servir que de référence pour une sauvegarde incrémentale/décrémentale ou comme secours du catalogue de l'archive d'origineCette archive est constituée de plusieurs tranches et ne peut pas être lue depuis un tubeCette archive n'est pas exploitable, vérifier la documentation pour plus d'informationsCette archive n'est pas exploitable, vérifier la documentation de l'API pour plus d'informationsCette archive est constituée de plusieurs tranches et ne peut pas être lue depuis un tubeCette base de donnée a été corrompue probablement à cause d'un bogue présent dans les versions 2.4.0 à 2.4.9, et il n'a pas été possible de corriger cette corruption. Merci de reconstruire cette base à partir des archives (ou catalogues extraits). Si cette base n'a jamais été utilisée avec l'une quelconque des versions mentionnées précédemment, merci d'ouvrir un rapport de bogue en fournissant le plus de détails concernant les circonstances de ce problème.Ceci est une ancienne archive, elle doit être ouverte en lisant d'abord la première trancheDemande d'arrêt de file d'exécution reçue, on termine proprement l'opération en coursDemande d'arrêt de file d'exécution reçue, arrêt immédiatAfin de calculer les signature delta des fichier creux sauvés, vous devez activer la redétection des fichiers creux lors de cette opération de fusionPour terminer entrer une ligne vide L'utilisation de --sparse-file-min-size lors requière l'utilisation conjointe de l'option -ah, voir la page de manuel pour plus de détailsBloc de données compressées trop grand : Soit à cause d'une corruption de données soit à cause d'une limitation système où la valeur de SSIZE_MAX implique un tampon mémoire plus petit que celui requisValeur trop grande pour l'intération de dérivation de clefTrop d'arguments sur la ligne de commande, voir l'option -h pour l'aideTrop de lignes dans le fichier %S (dépassement d'entiers)Version trop ancienne pour libgcrypt, la version minimale requise est %sTentative d'écriture hors 
d'un fichier de taille limitéeDeux tranches différentes %i et %i sont toutes les deux marquées comme la dernière de l'archive !Taper "man dar" pour plus de détails et pour connaître toutes les autres options disponibles. Parenthèses non équilibrées correctement dans l'expression suivante : Réponse incomplète reçue du correspondantArchive incomplète ! On considère qu'elle a été interrompue lors du processus de sauvegarde. Si une erreur a été reportée juste avant, on peut l'ignorer, il s'agit du fichier qui était en cours de sauvegarde au moment de l'interruption.Valeur inattendue pour un FSA boolean, une corruption de données est peut-être survenueRéponse inattendue de l'esclave, problème un de communication ou un bogue risque de suspendre l'opérationFin de fichier inattendueErreur inattendue retournée par GPGME : Erreur inattendue reçue lors de l'attente de la fin d'execution de dar : Exception non attendue levée par libdarValeur inattendue trouvée dans la baseValeur inattendue pendant la lecture de la version d'archiveType d'enregistrement inconnuArgument inconnu donné à -2 : %sArgument inconnu donné à -a : %sArgument inconnu donné à -k : %sArgument inconnu donné à -w : Opérateur unitaire inconnu ou non autorisé avec un argument : Caractère inconnu trouvé lors de l'analyse de la chaîne conditionnelle suivante : Choix inconnu Choix inconnu : Algorithme de chiffrement inconnuAlgorithme de chiffrement inconnuEntrée inconnue trouvé dans l'en-tête de tranche (type = %d), option non supportée. L'archive lue peut avoir été crée par une version plus récente de libdar, est-ce qu'on ignore cette entrée et on continue ?Type de séquence d'échappement inconnueExpression inconnue trouvée lors de l'analyse de la chaîne conditionnelle suivante : Expression inconnue dans la politique d'écrasement : Type de fichier inconnu ! 
Le nom du fichier est : Groupe inconnuEn-tête inconnu dans la base de donnée, abandon Macro %%%d inconnue dans le cadre de commentaire utilisateurOption inconnue -%cParamètre inconnu donné à l'option --delta : Paramètre inconnu donné à l'option --hash : Paramètre inconnu donné à l'option --modified-data-detection : Paramètre inconnu donné à l'option -a : La politique '%c' est inconnue pour les AE dans l'expression %SLa politique '%c' est inconnue pour les données dans l'expression %SProtocole inconnu : %SType d'enregistrement inconnuChaîne de substitution inconnue : %Suffixe inconnu [%c] dans la chaîne %SUnité de temps inconnueUtilisateur inconnuAjout des informations issues du catalogue à la base de donnée...Décision de l'utilisateur requise pour les AE du fichier %SDécision de l'utilisateur requise pour les FSA du fichier %SDécision de l'utilisateur requise pour les données de %SCommentaires de l'utilisateur : %SCible(s) utilisateur(s) trouvée(s) en ligne de commande ou dans un fichier inclus :La cible utilisateur "%s" n'est pas autorisée (cible réservé pour la syntaxe conditionnelle)le mode de lecture séquentiel n'est pas possible lors de la fusion d'archiveUtilisation du découpage fourni par l'utilisateur (première tranche = %i octets, autres tranches = %i octets)ATTENTION ! Fichier modifié pendant sa sauvegarde, plus de nouvelle tentativeATTENTION ! Fichier modifié lors de la lecture pour sa sauvegarde. Pas de nouvelle tentative pour ce fichier afin de de pas dépasser la taille maximale de gaspillage autorisée. Le fichier est ATTENTION ! Fichier modifié pendant sa sauvegarde. Tentative %i sur %iATTENTION ! Signature incorrecte trouvée pour l'archive, on continue malgré tout ?ATTENTION ! L'archive se trouve dans un des répertoires à sauvegarder, cela peut créer une boucle sans fin lorsque l'archive essaiera de se sauvegarder elle-même. 
Il est soit possible d'ajouter -X "%S.*.%S" sur la ligne de commande ou bien il faut changer l'archive de place (voir l'option -h pour l'aide). Voulez-vous vraiment continuer ?ATTENTION ! Ceci est un catalogue isolé, aucune donnée ou AE n'est présent dans cette archive, seule la structure du catalogue peut être validéeATTENTION : le support pour la mémoire sécurisé n'a pas été disponible lors de la compilation. En case d'utilisation importante de la mémoire, le mot de passe sur le point d'être fourni peut être écrit sur disque en clair (fichier d'échange). Vous avez été prévenu !ATTENTION : le support pour la mémoire sécurisé n'a pas été disponible lors de la compilation. En cas d'utilisation importante de la mémoire, les mots de passe peuvent être écrit sur disque en clair (fichier d'échange). Vous avez été prévenus !Attention ! Le support des FSA %s n'a pas été activé lors de la compilation. Ces attributs ne peuvent être restaurés pour %sAttention ! Aucun fichier de ce répertoire ne sera restauré : Attention ! Aucun fichier de ce répertoire ne sera pris en compte pour la fusion : Attention, %S semble être un nom de fichier plutôt qu'un nom de base. Souhaitez-vous le remplacer par %S ?Attention, êtes-vous sûr de vouloir arrêter (répondre "%S" pour confirmer)?Attention, l'archive %S a été chiffrée. Une mauvaise clef est impossible à détecter et conduira libdar a signaler l'archive comme corrumpueAttention: le support des FSA %s/%s n'a pas été activé lors de la compilation. Impossible de le restaurer pour l'inode %sAttention : échec de lecture pour %S : Attention: L'option -G est expérimentale et non supportée. Lire la page de manuel à propos de cette option pour plus d'informationNous utilisons une version incompatible de libdar. 
La version attendue est %d.%d.x alors que la version actuellement utilisée est %d.%d.%dCe qui correspond à %.2f %% de la quantité totale de donnés Pire Ecriture du contenu de l'archive ...Ecriture de la fin d'archive ...Ecriture de l'en-tête de l'archive ...Ecriture du premier terminateur d'archive...Ecriture du tampon élastique initial via la couche de chiffrement ...Ecriture du second terminateur d'archive ...Ecriture de la signature du catalogue..."Écriture sur un "generic_file" ouvert en lecture seuleOUIZéro n'est pas une taille valide de tampon élastique[ ][Attrib.][ Sauvé ][SALE ][Data ][D][ EA ][FSA][compr][S]| Accès |Proprio|Groupe | Taille | Date | Nom de fichier[EnRef][Inode][Sauvé]Un chiffre binaire est soit 0 soit 1stopabsentabsent une chaîne vide n'est pas un argument valideajout seulajout de données après la fin d'un secure_memoryajout de données après la fin d'un secure_memorynum archive | chemin | nom d'archiveatime, date d'accès aux donnéesLa fonctionnalité d'action de sauvegarde (options-< -> et -=) n'est disponible que lors de la sauvegarde et sera ignorée iciinode mal-formée : drapeau d'inode inconnuinode mal-formée : drapeau de FSA inconnuNota bene : une erreur de CRC sera signalée pour les EA de ce fichier lors d'une lecture séquentielle de l'archive réparéeNota bene : une erreur de CRC sera signalée pour les FSA de ce fichier lors d'une lecture séquentielle de l'archive réparéegrosfichiers patchés : %ifichier spécial blocSupport pour le chiffrement avec l'algorithme blowfishconstruction de signature delta avec des blocks de %d octetsbzip2Marque pour signature delta introuvableNe peut lire le CRC sur les données : aucune séquence d'échappement trouvée pour ce fichierne peut pas évaluer une chaîne vide comme politique d'écrasementimpossible de fournir des données pour un fichier non sauvegardé'none' n'est pas une algorithme de hashage valide pour la fonction de dérivation de clefinsensible à la cassesensible à la cassefichier spécial 
caractère"compressReset appelé alors que "compressInit" n'a jamais été invoqué au préalablecompresséerreur de CRC sur les données compresséesdonnées compressées corrompuesdonnées compressées corrompuesla compression (option -z) ne peut être modifiée avec l'option -ycompression : %Sfichier corrompudate de créationctime, date de modification des métadonnées d'inodeoptions de dar : %Schemin de dar : %Scorruption de donnée rencontrée lors de la lecture de la taille des blocks de la signature deltaCorruption de données détectée : incohérence dans les données compressées en LZOCorruption de données détectée : Bloc de données compressées trop granddonnées purgées, l'objet est maintenant videVersion B.de D. : %Sles dates antérieurs à l'an 1970 ne sont pas permisesentrée suppriméeinformation sur les entrées suppriméesles fichiers spéciaux n'ont pas le même numéro majeur : %d <--> %dles fichiers spéciaux n'ont pas le même numéro mineur : %d <--> %ddifférence de date de dernière modification : %S <--> %SAttributs Étendus différentsattributs spécifiques au système de fichier différentsles donnes ne correspondent pas: position de la première différence au sein du fichier: %itypes de fichier différentsgroupes propriétaires différents (uid) : %i <--> %igroupes propriétaires différents (gid) : %i <--> %idroits d'accès différents: %S <--> %Srépertoirerépartition des inodesinode Porteretiré de l'archive et marqué comme déjà sauvéStructure incohérent du tampon élastiqueErreur lors du test de l'archive : %SErreur de création de la prise Unix %s: chemin trop long, la prise sera crée comme %s, OK ?Erreur !l'exécution de [ %S ] a retourné un code d'erreur : %dexecve() a échoué. (table de processus pleine ?)execvp() a échoué mais n'a pas retourné de code d'erreurEchec d'ouverture de la tranche %S: %S. 
Nouvelle tentative avec écrasement préalable, si autoriséEchec de lecture du CRC depuis le fichier : fauxle fichier existe et l'option contre l'écrasement a été activée.Fichier non ouvertpremièreinode sauvegardée : %ivaleur de GID trop grande pour ce système pour que libdar puisse le restaurer correctementgzipinode liée en durinformation sur les liens dursmode en-tête seul demandé"hide_file" ne peut pas être initialisé avec un fichier en écriture seuleoption -O ignorée car inutile dans cette situationimmutabledans le fichier %S ligne %Sstructure incohérente du catalogueincohérence à la suite d'une séquence d'échappement : impossible de lire le label interne du jeu de donnéesboutisme incohérent entre U_16 et U_32boutisme incohérent entre U_16 et U_64boutisme incohérent entre U_16 et U_UEn-tête de tranche incohérent : taille de tranche trop petiteversion incompatible de la bibliothèque de compression ou fonctionnalité requises incompatible aveec cette bibliothèqueNom incomplet de jeu de données rencontré dans un en-tête de trancheinode date de dernier changement de l'inode (ctime) plus récent, les Attributs Étendus peuvent être différentsdate de dernier changement de l'inode (ctime) plus récent, les FSA peuvent être différentsmétadonnée seulement : %iDrapeau de famille FSA invalidenature de FSA invalidechiffre décimal non valideAlgorithme de hashage invalide pour le fonction de dérivation de clefLe champ de famille de FSA a une longueur invalidele champ de nature de FSA a une longueur invalideL'argument fourni à l'option -9 est un nombre invalide : %sIl semble qu'il y ait un bogue icijournalisédernierLe dernier caractère de la ligne de commande utilisateur à exécuter est '%', (utiliser '%%' à la place pour éviter ce message)libcurl interdit le changement de position en mode écritureEchec d'initialisation de libcurl : %slibgcrypt n'a pas été initialisée et libdar n'est pas autorisé à le faireversion de libgcrypt < %s. 
Cette version de ligcrypt a un bug qui conduit les calculs de hash md5 et sha1 à être faux pour des fichiers plus grands que 256 Gio (gibioctet), voulez-vous vraiment perdre du temps à calculer ces hash inutiles ?libthreadar est nécessaire pour une execution en parallèleune ligne excède le maximum de %d caractères lors du listing du fichier %S, Abandon de l'opération. La ligne concernée commence par : %sla méthode listing() doit être fourniepetitlzocompression lzomoifusionné avec écrasement possiblela taille minimale devrait être supérieure ou égale à la taille maximale des block de signature deltamanque de données pour construireil manque des données pour construire un répertoireil manque des données pour construire un fichier spécialdonnées absentes pour construire une inodeabsence de nom de fonctionAbsence du champ multiplicateurmtime, heure de modification des donnéestube nommésuivantenonaucun Attribut Étendu avec lequel compareraucun Attribut Étendu avec lequel comparerpas de mise à jour de atimeaucun répertoire courant définiaucun répertoire courant défini pour la lecturedrapeau no dumppas d'ajout en finLa fonctionnalité --nodump n'a pas été activée lors de la compilation, cette option n'est donc pas disponibleaucunPas assez de mémoire pour écrire le tampon élastiquetailles différentes : %i <--> %inon dernierValeur nullptr donnée à tools_readlink()l'argument nullptr a été transmis à user_interaction_callback()écrasépatch fichier pleinmerci de répondre avec un unique caractèreMerci d'indiquer l'algorithme de hashage à utiliser pour la fonction de dérivation de clef '1' pour sha1, '5' pour sha512, 'm' pour md5 ou 'q' pour interrompre: présentprésent le protocole %S n'est pas supporté par libcurl, arrêt de l'opérationle mot de passe fourni est trop grand pour la mémoire allouéeréactivation des gestionnaires de signaux et poursuite des opérations Fin de fichier atteinte alors que toutes les données n'ont pu être lueslecture et écriturelecture 
seulesupprimésupprimé supprimé de l'archiveremplacéla racine n'a pas de répertoire pèrela racine n'a pas de répertoire pèrela racine n'a pas de répertoire pèreLa racine de l'entrepot doit être une chemin absolu: sauvé scrambling (chiffrement faible)deuxièmeLa requète de génération d'une chaîne sécurisée aléatoire dépasse sa capacité de stockagesuppression sécuriséeL'affichage focalisé sur les tranches est impossible en lecture séquentiellelien symboliquele lien symbolique ne pointe pas sur la même cible: répertoire synchronemise à jour synchronesystem() a échoué : La fonctionnalité de fusion sans décompression/recompression n'est pas disponible avec des archives utilisant différents algorithmes de compression (Ce sera pour une prochaine version de dar). Vous pouvez cependant fusionner ces deux archives avec décompression/recompression et aurez probablement intérêt à compresser (option -z ou -y) l'archive résultanteTampon élastique trop grand ou de structure incohérenteValeur trop grande fournie comme taille minimale de blockssommet de hiérarchie de répertoiresnombre total d'inode : %ivraile type %s n'est ni grand-boutiste ni petit-boutiste ! Impossible de savoir comment gérer ce type d'entier d'une manière portable sur ce système.valeur d'UID trop grande pour ce système pour que libdar puisse le restaurer correctementinsuppressibleArgument supplémentaire inattenduprise unixFamille d'attributs FSA inconnue : compression inconnuealgorithme de compression inconnue : %Salgorithme de chiffrement inconnu : entrée inconnuele caractère '%c' ne correspond à aucun algorithme de hashage connuNom de fonction inconnu pour le calcul de la taille de block des signatures deltatype de donnée inconnu dans le catalogueusage : %s usage: %s [ -c | -x | -d | -t | -l | -C | -+ ] [/] [options...] 
utilisateur/groupe non supporté pour ce dépôtUn algorithme de hashage valide est requis pour la fonction de dérivation de clefl'option -A est obligatoire quand -+ est utilisél'option -A est obligatoire quand -C est utiliséécriture seuleEcriture du tampon élastique terminal via la couche de chiffrement ...Mauvaise valeur donnée à "initial_size" lors de l'initialisation du cachexzoui|Etat de signature| Etat de la clef | Emprunte de la clef | Date de signaturedar-2.6.8/po/Rules-quot0000644000175000017520000000423313617552277011662 00000000000000# This file, Rules-quot, can be copied and used freely without restrictions. # Special Makefile rules for English message catalogs with quotation marks. DISTFILES.common.extra1 = quot.sed boldquot.sed en@quot.header en@boldquot.header insert-header.sin Rules-quot .SUFFIXES: .insert-header .po-update-en en@quot.po-create: $(MAKE) en@quot.po-update en@boldquot.po-create: $(MAKE) en@boldquot.po-update en@quot.po-update: en@quot.po-update-en en@boldquot.po-update: en@boldquot.po-update-en .insert-header.po-update-en: @lang=`echo $@ | sed -e 's/\.po-update-en$$//'`; \ if test "$(PACKAGE)" = "gettext-tools" && test "$(CROSS_COMPILING)" != "yes"; then PATH=`pwd`/../src:$$PATH; GETTEXTLIBDIR=`cd $(top_srcdir)/src && pwd`; export GETTEXTLIBDIR; fi; \ tmpdir=`pwd`; \ echo "$$lang:"; \ ll=`echo $$lang | sed -e 's/@.*//'`; \ LC_ALL=C; export LC_ALL; \ cd $(srcdir); \ if $(MSGINIT) $(MSGINIT_OPTIONS) -i $(DOMAIN).pot --no-translator -l $$lang -o - 2>/dev/null \ | $(SED) -f $$tmpdir/$$lang.insert-header | $(MSGCONV) -t UTF-8 | \ { case `$(MSGFILTER) --version | sed 1q | sed -e 's,^[^0-9]*,,'` in \ '' | 0.[0-9] | 0.[0-9].* | 0.1[0-8] | 0.1[0-8].*) \ $(MSGFILTER) $(SED) -f `echo $$lang | sed -e 's/.*@//'`.sed \ ;; \ *) \ $(MSGFILTER) `echo $$lang | sed -e 's/.*@//'` \ ;; \ esac } 2>/dev/null > $$tmpdir/$$lang.new.po \ ; then \ if cmp $$lang.po $$tmpdir/$$lang.new.po >/dev/null 2>&1; then \ rm -f $$tmpdir/$$lang.new.po; \ else \ if mv -f 
$$tmpdir/$$lang.new.po $$lang.po; then \ :; \ else \ echo "creation of $$lang.po failed: cannot move $$tmpdir/$$lang.new.po to $$lang.po" 1>&2; \ exit 1; \ fi; \ fi; \ else \ echo "creation of $$lang.po failed!" 1>&2; \ rm -f $$tmpdir/$$lang.new.po; \ fi en@quot.insert-header: insert-header.sin sed -e '/^#/d' -e 's/HEADER/en@quot.header/g' $(srcdir)/insert-header.sin > en@quot.insert-header en@boldquot.insert-header: insert-header.sin sed -e '/^#/d' -e 's/HEADER/en@boldquot.header/g' $(srcdir)/insert-header.sin > en@boldquot.insert-header mostlyclean: mostlyclean-quot mostlyclean-quot: rm -f *.insert-header dar-2.6.8/po/Makevars0000644000175000017520000000657613617552301011353 00000000000000# Makefile variables for PO directory in any package using GNU gettext. # Usually the message domain is the same as the package name. DOMAIN = $(PACKAGE) # These two variables depend on the location of this directory. subdir = po top_builddir = .. # These options get passed to xgettext. XGETTEXT_OPTIONS = --keyword=_ --keyword=N_ # This is the copyright holder that gets inserted into the header of the # $(DOMAIN).pot file. Set this to the copyright holder of the surrounding # package. (Note that the msgstr strings, extracted from the package's # sources, belong to the copyright holder of the package.) Translators are # expected to transfer the copyright for their translations to this person # or entity, or to disclaim their copyright. The empty string stands for # the public domain; in this case the translators are expected to disclaim # their copyright. COPYRIGHT_HOLDER = Free Software Foundation, Inc. # This tells whether or not to prepend "GNU " prefix to the package # name that gets inserted into the header of the $(DOMAIN).pot file. # Possible values are "yes", "no", or empty. If it is empty, try to # detect it automatically by scanning the files in $(top_srcdir) for # "GNU packagename" string. 
PACKAGE_GNU = # This is the email address or URL to which the translators shall report # bugs in the untranslated strings: # - Strings which are not entire sentences, see the maintainer guidelines # in the GNU gettext documentation, section 'Preparing Strings'. # - Strings which use unclear terms or require additional context to be # understood. # - Strings which make invalid assumptions about notation of date, time or # money. # - Pluralisation problems. # - Incorrect English spelling. # - Incorrect formatting. # It can be your email address, or a mailing list address where translators # can write to without being subscribed, or the URL of a web page through # which the translators can contact you. MSGID_BUGS_ADDRESS = http://sourceforge.net/tracker/?group_id=65612MSGID_BUGS_ADDRESS =atid=511612 # This is the list of locale categories, beyond LC_MESSAGES, for which the # message catalogs shall be used. It is usually empty. EXTRA_LOCALE_CATEGORIES = # This tells whether the $(DOMAIN).pot file contains messages with an 'msgctxt' # context. Possible values are "yes" and "no". Set this to yes if the # package uses functions taking also a message context, like pgettext(), or # if in $(XGETTEXT_OPTIONS) you define keywords with a context argument. USE_MSGCTXT = no # These options get passed to msgmerge. # Useful options are in particular: # --previous to keep previous msgids of translated messages, # --quiet to reduce the verbosity. MSGMERGE_OPTIONS = # These options get passed to msginit. # If you want to disable line wrapping when writing PO files, add # --no-wrap to MSGMERGE_OPTIONS, XGETTEXT_OPTIONS, and # MSGINIT_OPTIONS. MSGINIT_OPTIONS = # This tells whether or not to regenerate a PO file when $(DOMAIN).pot # has changed. Possible values are "yes" and "no". Set this to no if # the POT file is checked in the repository and the version control # program ignores timestamps. 
PO_DEPENDS_ON_POT = yes # This tells whether or not to forcibly update $(DOMAIN).pot and # regenerate PO files on "make dist". Possible values are "yes" and # "no". Set this to no if the POT file and PO files are maintained # externally. DIST_DEPENDS_ON_UPDATE_PO = yes dar-2.6.8/man/0000755000175000017520000000000013617552346010067 500000000000000dar-2.6.8/man/Makefile.am0000644000175000017520000000012113606427316012031 00000000000000dist_man_MANS = dar.1 dar_manager.1 dar_slave.1 dar_xform.1 dar_cp.1 dar_split.1 dar-2.6.8/man/dar_xform.10000644000175000017520000001240513606427316012050 00000000000000.TH DAR_XFORM 1 "August 18th, 2018" .UC 8 .SH NAME dar_xform \- disk archive "re-slicer" .SH SYNOPSIS dar_xform [options] [/]source [/]destination .P dar_xform -h .P dar_xform -V .SH DESCRIPTION .B dar_xform changes the size of slices of an existing archive. .PP Source is the basename of the existing archive, destination is the basename of the archive to be created. If source basename is "-", the archive is read from standard input. If the destination basename is "-", the archive is written to standard output and -s option is not available. .SH OPTIONS .PP .TP 20 -h displays help usage. .TP 20 -V displays version information. .TP 20 -b make the terminal ring when user interaction is required (like for example the creation of a new slice when using the -p option) .TP 20 -s Size of the slices in bytes. If the number is followed by k (or K), M, G, T or P the size is in kilobytes, megabytes, gigabytes, terabytes or petabytes respectively. Example: by default "20M" means 20 megabytes it is the same as giving 20971520 as argument (see also -aSI and -abinary options). If -s is not present the backup will be written to a single slice whatever the size of the backup may be (there is probably some filesystem limitation, thus you might expect problems with file size over 2 gigabytes, depending on your filesystem). 
.TP 20 -S -S gives the size of the first slice which may be chosen independently of the size of following slices. This option needs -s and by default, the size of the first slice is the same as the one of the following slices. .TP 20 -p [] pauses before writing to a new slice (this requires -s). By default there is no pause, all slices are output in the same directory, up to the end of the backup or until the filesystem is full. In this later case, the user is informed of the lack of disk space and dar stops for user interaction. As soon as some disk space is available, the user can continue the backup. The optional integer that this option can receive tells dar to only pause very 'n' slice. Giving 3 for 'n' will make dar pause only after slices 3, 6, 9 and so on. If this integer is not specified, the behavior is as if '1' was given as argument which makes dar pause after each slice. .TP 20 -n Do not allow overwriting of any slice. .TP 20 -w Do not warn before overwriting slice. By default (no -n and no -w) overwriting is allowed but a warning is issued before proceeding. .TP 20 -E the string is a command-line to be launched between the slices of the destination archive. See dar(1) man page (same option) for more information. .TP -F the string is a command-line to be launched between the slices of the source archive. See dar(1) man page (same option) for more information. .TP 20 -aSI[-unit[s]] when using k M G T E Z Y prefixes to define a size, use the SI meaning: multiple of 10^3 (a Mega is 1,000,000). .TP 20 -abinary[-unit[s]] when using k M G T E Z Y prefixes to define a size, use the historical computer science meaning: multiple of 2^10 (a Mega is 1,048,576). .P -aSI and -abinary can be used several times, they affect all prefix which follow even those found in file included by -B option up to the next -a... occurrence. 
Note that if in a file included by -B option an -abinary or -aSI is met, it affects all the following prefix even those outside the included files (for example in the following "-B some.dcf -s 1K" 1K may be equal to 1000 or 1024 depending on the presence of an -aSI or -abinary in the file some.dcf. By default (before any -aSI/binary argument has been reached), binary interpretation of suffix is done (for compatibility with older versions). .TP 20 -Q Do not display any message on stderr when not launched from a terminal (for example when launched from an at job or crontab). Remains that any question to the user will be assumed a 'no' answer, which most of the time will abort the program. .TP 20 -^ perm[:user[:group]] defines the permission and ownership to use for created slices. .TP 20 -3 Beside each created slice is generated an on-fly hash file using the specified algorithm. Available algorithm are "md5" and "sha1", by default no hash file is generated. This option description is more detailed in dar man page (where it has the same designation as here). .TP 20 -9 [,] Defines the minimum number of digit to use for the source archive and eventually for the destination archive. If the source has not been defined with a minimum number of digits and you want to specify a number of digits for the destination archive, use zero (or one) as value for src_num. If dst_num is not provided, it defaults to 1. See the same option in dar man page for more details. .SH NOTES Dar_xform is not concerned by encryption or compression. It does not need to be aware of it to be able to change the slice scheme. Thus, it is not able to uncompress or uncipher an archive. .SH EXIT CODES .B dar_xform uses the same exit status as dar does, see dar(1) man page. 
.SH SIGNALS Any signal sent to dar_xform will abort the program immediately, there is no way to have a proper termination before the end of the process .SH SEE ALSO dar(1), dar_slave(1), dar_manager(1), dar_cp(1), dar_split(1) .SH KNOWN BUGS http://sourceforge.net/p/dar/bugs/ .SH AUTHOR .nf http://dar.linux.free.fr/ Denis Corbin France Europe dar-2.6.8/man/dar_manager.10000644000175000017520000003546313606427316012340 00000000000000.TH DAR_MANAGER 1 "August 18th, 2018" .UC 8 .SH NAME dar_manager \- compiles several archives contents in a database to ease file restoration .SH SYNOPSIS dar_manager [-v] -C [/] [-z ] dar_manager [-v] -B [/] -A [/] [-9 ] [[/]] dar_manager [-v] -B [/] -l dar_manager [-v] -B [/] -D [-] dar_manager [-v] -B [/] -b dar_manager [-v] -B [/] -p dar_manager [-v] -B [/] -o [list of options to pass to dar] dar_manager [-v] -B [/] -d [] dar_manager [-v] -B [/] [-N] [-k] [-w ] [-e ""] -r [list of files to restore] dar_manager [-v] -B [/] -u dar_manager [-v] -B [/] -f file dar_manager [-v] -B [/] -s dar_manager [-v] -B [/] -m dar_manager [-v] -B [/] -c dar_manager [-v] -B [/] -i dar_manager [-v] -B [/] -@ { | "-" } dar_manager -h dar_manager -V .SH DESCRIPTION .B dar_manager is part of the Disk Archive suite. Its purpose is to simplify the restoration of a set of few files present in many backup, full or differential. This is achieved by gathering the catalogue of each archive (this has to be done once). At any time you just have to give the relative path to the files you want to restore, dar_manager will call dar with the proper options and restore the last version of each file (or the last version before given date). Note that dar_manager is especially interesting when you have removed some files by accident some time ago and wish to recover them from a set of full and differential backups. It is thus not really adapted/efficient to restore the state a directory tree had at a given time, in particular when some files have to be removed. 
For that you would better use dar directly with the corresponding archive(s) up to the date for which you wish to restore the state of a whole directory tree. .PP You can restore any file by hand without .B dar_manager , but if you have a lot of differential backups, you may spend a long time to find the archive that contains the last version of your files, as dar will not save it if it has not changed since previous backup. .B dar_manager simplify the process by looking in its internal database, built from archive "catalogues" (= table of contents). .SH OPTIONS .TP 20 -C, --create [/] creates an empty database that will collect information about several archives. The is a filename that is required for -B option. To destroy a just remove the file. .TP 20 -B, --base [/] specify the database to read or modify. The file must exist, and have a database structure (see -C option). .TP 20 -i, --interactive use a keyboard interactive text menu to do operations on the given database. So you may avoid reading the other options described in this manual page, if you wish, and just use the interactive option. You will however always have to create an empty database (-C option) and restore files manually (-r option). .TP 20 -A, --add [/] [ [/]] add an archive to the database. An isolated catalogue can also be used only if it has been produced by dar version 1.2.0 or above. Why ? Because, an isolated catalogue produced by older version will always tell that no files are saved in the archive of reference, in that case the solution is to provide the archive itself as argument. An optional second argument is the basename of the archive if it is different from the first argument (need for extraction of files). For example you could have an isolated catalogue in first argument and the basename of the original archive (where is stored the data) as second argument. 
By default, .TP 20 -9, --min-digits the slice number zeroed padding to use to get the slices filename (for more details see dar man page at this same option) .B dar_manager will look for an archive of reference in the command line used to create each archive, but in some cases, it may be necessary to specify the archive name (for example if you've changed its name). .TP 20 -l, --list displays the information about the archives compiled in the database. In particular, a number is given to each archive, which is required to some other option to design a particular archive within the database. Nothing avoids you to feed the database with several archive of the same basename ! You will just have to guess which one is asked under this name. :-) .TP 20 -D, --delete [-] removes an archive (or a range of archive) from the database. The number of the archive (or the min and max number or the archive range) is correspond to those given by the -l option. Note that all archive number greater than the one(s) to be delete will be decremented to keep continuous numbering of the archive inside the database. If a single number is given (not a range), it may be also a negative number, by which it means counting from the end. For example, -1 means the last archive of the base, -2 the penultimate, etc. .TP 20 -b, --base this option allows you to rename the archive basename (used when restoring files from it). Here too, the number may be also a negative number. .TP 20 -p, --path this option allows you to change the location of a given archive (used when restoring files from it). Here too, a negative number is allowed. .TP 20 -o, --options [list of option to pass to dar] Specify the option to use when calling dar. Each call erases the previous setting. Possible .B dar options are all the available ones except "-x" and simple arguments (the [list of path]) which will be added by .B dar_manager itself. .TP 20 -d, --dar [] Set the path to dar. 
If no argument is given, dar is expected to be located in the PATH .TP 20 -r, --restore [list of files or directories to restore] .B dar_manager will restore all (an only) the given files or directories, in their latest recorded status, or before the date give thanks to the -e option. If a directory is given all subfiles and subdirectories are restored recursively in it. You can filter out some files from this recursion thanks to dar usual filtering option (see dar man page) you can provide beside -r using the -e option (see below). Dar_manager lead dar to remove any file, if a file is stored as having been removed at date requested for restoration, it is simply not restored. Thus if you restore in an empty directory you will get all the files and directories you provided to dar_manager in the state they have at the date you asked. File that did not existed at that time will not be restored. However you can restore over an existing installation, dar will then warn you before overwriting files (see -w and -n options for dar) but will still not remove files that were recorded removed from a previous archive of reference. Note that files listed after -r option, must never have an absolute path. They will be restored under the directory specified with -R option of dar (passed to dar using -o or -e options), or by default, under the current directory. .TP 20 -w, --when alters the -r option behavior: still restores the files in the most recent version available but only before the given date (versions of more recent dates are ignored). The must respect the following format [ [ [year/]month/]day-]hour:minute[:second]. For example "22:10" for 10 PM past 10 or the current day, "7-22:10" for 10 PM past 10 the 7th of the current month, "3/07-22:10" for the 7th of march at 22:10 of the current year, "2002/03/31-14:00:00" the date of the first dar's release ;-). 
The given date must be in the past, of course, and is compared to the "last modification" date of the saved files and not to the date at which archives have been done. Thus if a file has been changed long ago but saved in a recent (full) archive, it will be elected for restoration even for dates older than the creation of the archive. In the other way, a file saved long time ago with a mtime that was set to a date in the future will not be elected for restoration when giving the date at which was done the archive. .PP .RS Note that the provided date is relative to the system timezone which is overridden if the TZ environment variable is set (see tzselect(1) for more details) .RE .TP 20 -e, --extra pass some more options to dar. While the -o option takes all that follows on the command line as argument to pass to dar and write these in the database, the -e option does not alter the database and has only one argument. In other words, if you need to pass several options to dar through the use of the -e option, you need to use quotes (simple quotes ' or double quotes ") to enclose these options. Example: .P .RS .RS dar_manager -B database.dmd -e "-w -v -p -b -r -H 1" -r some/files .RE .P while using -o option you must not use quotes: .P .RS dar_manager -B database.dmd -o -w -v -p -b -r -H 1 .RE .RE .TP 20 -u, --used list the files that the given archive owns as last version available. Thus when no file is listed, the given archive is no more useful in database, and can be removed safely (-D option). If is zero, all available files are listed, the status provided for each file present in the database is the most recent status. A negative number is allowed for this option (see -D option for details). .TP 20 -f, --file displays in which archive the given file is saved, and what are the modification date (mtime) and change date (ctime). .TP 20 -s, --stats show the number of most recent files by archive. 
This helps to determine which archive can be safely removed from the database. .TP 20 -m, --move changes the order of archives in the database. The first number is the number of the archive to move, while the second is the place where it must be shifted to. Archive order is important: An old archive must have a smaller index than a recent archive. If you add archive to a database in the order they have been created all should be fine. Else if a file has a more recent version in an archive which index is smaller, a warning will be issued (unless -ai option is used). This can occur if by mistake you added an archive to the database in the wrong order (old archive added after a recent one), in that case simply using the -m option will let you fix this mistake. If instead the problem is relative to a single file (or a small set of file), you should wonder why this file has its modification date altered in a way that it pretends to be older than its really is. Checking for the signs of a rootkit may be a good idea. .TP 20 -c, --check check the database consistency, in particular the date ordering is verified and warning are issued for each file having more recent version located in an archive with a smaller index inside the database. -ai option makes -c option useless. .TP 20 -N, --ignore-options-in-base Do not use the options stored in database when calling dar for restoration. This option is only useful while restoring files from dar_manager, either directly (-r option) or using a batch file (-@ option, see below). .TP 20 -k, --ignore-when-removed By default, dar_manager does not ask dar to restore file that have been removed at the requested date (or in the latest state available). This is useful for example to restore a directory in the state it has at a given date (only files that existed at that time are restored). 
However when you want to restore a file that has been destroyed by accident, you need to use -k option so you don't have to determine at which date that file existed to be be able to ask dar_manager to restore that file in the state it had before that date. In other words, -k option gives a behavior of dar_manager backward compatible with dar_manager released beside version 2.3.x of dar. .TP 20 -ai, --alter=ignore-order avoid dar_manager to issue a warning for each file not following a chronological order of modification date when the archive number in the database is growing. .TP 20 -@, --batch allows you to do several operations on a given database. All operations are defined in the provided and refer to the same database as defined by the -B switch on command line. This batch file, must thus not contain neither -B, -C, -i or -ai option (-ai are global to the batch operation). The batch file expected layout is one command per line, thus several arguments (like -l -v for example) may take place on a given line of the file (-v can be found both on command line for verbose output about the batch operation steps, as well as inside the batch file for verbose output of a particular batched command). Arguments are separated by spaces or tabs, other characters are passed as-is. In consequence, you should only need to use quotes (using " or ') if you intend to use an argument containing space. Last, comments may be placed on any line beginning by a hash character (#). .TP 20 -z, --compression Only available while creating a database (-C option), this option let you set the compression algorithm used to store the database in file. By default gzip is use, but you can use "none" for no compression, "bzip2", "xz" and "lzo" for other compression algorithm. Compression level selection may be added in the future (level 9 is used internally). .TP 20 -Q Do not display any message on stderr when not launched from a terminal (for example when launched from an at job or crontab). 
Remains that any question to the user will be assumed a 'no' answer, which most of the time will abort the program. .TP 20 -v, --verbose displays additional information about what it is doing. .TP 20 -h, --help display help usage .TP 20 -V, --version display software version .SH EXIT CODES .B dar_manager exits with the following code: .TP 10 0 Operation successful. .TP 10 1 see dar manual page for signification .TP 10 2 see dar manual page for signification .TP 10 3 see dar manual page for signification .TP 10 5 see dar manual page for signification .TP 10 7 see dar manual page for signification .TP 10 8 see dar manual page for signification .TP 10 11 and above .B dar called from dar_manager has exited with non zero status. Subtract 10 to this exit code to get dar's exit code. .SH SIGNALS dar_manager acts like dar (see dar man page for list of signals), upon certain signal reception dar aborts cleanly .SH SEE ALSO dar(1), dar_xform(1), dar_slave(1), dar_cp(1), dar_split(1) .SH LIMITATIONS at most 65534 archives can be compiled in a given database, which should be enough for most users. Dar_manager does not support encrypted archives for now and archive cannot neither be encrypted. See the FAQ for a workaround. .SH KNOWN BUGS http://sourceforge.net/p/dar/bugs/ .SH AUTHOR .nf http://dar.linux.free.fr/ Denis Corbin France Europe dar-2.6.8/man/dar_split.10000644000175000017520000001236613606427316012056 00000000000000.TH DAR_SPLIT 1 "August 18th, 2018" .UC 8 .SH NAME dar_split \- dar helper to split an archive over several tapes .SH SYNOPSIS dar_split { split_input | [-s] split_output } .P dar_split -v .P dar_split -h .SH DESCRIPTION .B dar_split is to be used with dar to read or write a large archive from or to several tapes. .SH COMMANDS .B dar_split has two modes of operation: split_input and split_output .TP 20 split_input in this mode dar_split copies to its standard output. 
may be a special device like /dev/tape or any other inode that has the ability to access removable volumes. When dar_split reaches the end of it suspends and waits for the user to press the return key in order to continue. In the meanwhile the user can rewind and change the tape. When dar_split is awakened again, it reopens and sends its content to its standard output as if it was the continuation of what was read so far before reaching the end of file. .TP 20 split_output in this mode dar_split copies its standard input to . However if is full, dar_split suspends and waits for the user to press the return key in order to continue. In the meanwhile, the user can rewind and change the tape. When dar_split is awakened again, it reopens and continues to copy to what was not yet written previously. .SH OPTIONS .B -s is only available for split_output mode and leads dar_split to perform sync writes in place of normal writes. This has the drawback of drastically reducing performance but may be necessary under certain circumstances where the operating system cache reports a write operation as successful while the cache underlying medium is already full. .SH EXAMPLES OF USE creating an archive over several tapes .RS .PP dar -c - ...possibly other dar options... | dar_split split_output /dev/tape .PP .RE Here dar generates the archive to its standard output which is read by dar_split and copied to /dev/tape. When the tape reaches its end, dar_split asks for the user to hit return to continue. reading an archive split over several tapes .RS .PP dar_split split_input /dev/tape | dar -t - --sequential-read ...possibly other dar options... .PP .RE Here dar_split reads /dev/tape and sends its content to dar which *must* be used in --sequential-read mode in order to read an archive from its standard input without the help of dar_slave. When the tape reaches its end, dar_split asks for the user to hit return to continue. 
.PP Note that dar_split can also be used with tar for the same purpose as with dar: .RS .PP tar -cz ... | dar_split split_output /dev/tape .PP .RE without -f option nor TAPE environment variable, tar sends the tar archive to stdout, which gets fed to dar_split for slicing .RS .PP dar_split split_input /dev/tape | tar -tz .PP .RE without -f option nor TAPE environment variable, tar reads the tar archive from stdin, however you must specify the correct option corresponding to the compression used at archive creation time. .PP The advantage here is to be able to save a huge dar or tar archive on several tapes that could be either partially filled or having different sizes, without having to calculate in advance the size of slices to specify. The drawback is that this implies sequential reading for dar (for tar also, but tar cannot do else). .SH With or without filesystem You can use dar_split with floppies, zip/jazz disks or usb key too, but it has many disadvantages compared to the normal way of using this media: doing that way avoids you relying on a filesystem, thus you cannot have direct access to file contents, which dar knows how to take advantage of, and you are instead stuck with sequential read access which is long as it requires reading the whole archive (same way as tar does). .PP When instead using dar alone in normal mode (non sequential reading mode using filenames instead of standard input/output), you have the opportunity to create redundancy data beside dar slices thanks to par2, which is not possible without a filesystem. At restoration time, thanks to dar archive's catalog, dar can directly seek to the file's data you want to restore, which is very quick compared to reading/unciphering/uncompressing the whole archive... .PP Note that copying tape contents generated by dar_split to different files which name would correspond to dar slices, does not make a valid multi-sliced archive because the slice header is missing in slices (except in the first). 
You can however concatenate all the pieces of data generated by dar_split and have a valid single sliced dar archive that you can use in direct access mode. However attention must be paid not to add any extra data after data generated by dar through dar_split (in particular if the last tape was not full), in that case you can only read the archive with --sequential-read mode. .SH EXIT CODES .B dar_split exits with the following codes: .TP 10 0 upon normal execution .TP 10 1 syntax error on command-line .TP 10 2 could not open source or destination files .SH SIGNALS Any signal sent to dar_split except SIG_PIPE will abort the program immediately .SH SEE ALSO dar(1), dar_xform(1), dar_manager(1), dar_slave(1), dar_cp(1) .SH KNOWN BUGS http://sourceforge.net/p/dar/bugs/ .SH AUTHOR .nf http://dar.linux.free.fr/ Denis Corbin France Europe dar-2.6.8/man/dar_slave.10000644000175000017520000000370213606427316012027 00000000000000.TH DAR_SLAVE 1 "August 18th, 2018" .UC 8 .SH NAME dar_slave \- disk archive slave for remote access to a dar archive .SH SYNOPSIS dar_slave [options] [/]source .P dar_slave -h .P dar_slave -V .SH DESCRIPTION .B dar_slave reads an archive and listens for .B dar orders on standard input, sending asked parts of the archive on standard output. .PP Source is the basename of the archive to read. By default, .B dar_slave uses standard input and output to communicate with a .B dar process, which may be running on the local host or on a remote host. It is possible to change the behavior of .B dar_slave to use named pipes instead of standard input and output .SH OPTIONS .PP .TP 20 -h Displays help usage. .TP 20 -V Displays version information. .TP 20 -i reads the orders from the named file instead of standard input. In most cases the filename will be a named pipe. .TP 20 -o sends the data to the named file instead of standard output. In most cases the filename will be a named pipe. .TP 20 -E string is a command to be launched between slices. 
See dar(1) man page (same option) for more information. .TP 20 -Q Do not display any message on stderr when not launched from a terminal (for example when launched from an at job or crontab). Remains that any question to the user will be assumed a 'no' answer, which most of the time will abort the program. .TP 20 -9 Defines the minimum number of digits to use for archive slice numbers. See the same option in dar man page for more details. .SH EXIT CODES .B dar_slave exits with the same codes as dar does, see dar(1) man page. .SH SIGNALS Any signal sent to dar_slave will abort the program immediately, there is no way to have a proper termination before the end of the process .SH SEE ALSO dar(1), dar_xform(1), dar_manager(1), dar_cp(1), dar_split(1) .SH KNOWN BUGS http://sourceforge.net/p/dar/bugs/ .SH AUTHOR .nf http://dar.linux.free.fr/ Denis Corbin France Europe dar-2.6.8/man/dar.10000644000175000017520000051110113606427316010632 00000000000000.TH DAR 1 "May 25th, 2019" .UC 8 .SH NAME dar \- creates, tests, lists, extracts, compares, merges, isolates, repairs dar archives .SH SYNOPSIS dar [-c | -t | -l | -x | -d | -+ | -C | -y] [[]/] [] [] .PP dar -h .PP dar -V .SH DESCRIPTION .B dar is a full featured backup tool, aimed for local and remote disks (floppy, CD-R(W), DVD-R(W), zip, jazz, hard-disks, usb keys, etc. and cloud storage by mean of ftp or sftp protocols) and also adapted to tapes. .PP .B dar can store a backup in several files (called "slices" in the following) of a given size, eventually pausing or running a user command/script before starting the next slice. This can allow for example, the burning of the last generated slice on a DVD-R(W), Blue-ray Disk, or changing of usb key before continuing on the next one. Like its grand-brother, the great "tar" command, .B dar may also use compression, at the difference that compression is used inside the archive to be able to have compressed slices of the defined size. 
.PP But the most important feature of .B dar is its ability to make differential, incremental and decremental backups. In other words, backups that contain only new files or files that have changed from a backup of reference. Binary delta is available but not activated by default: in combination with differential and incremental backups, it leads not only to not save a file that has not changed (thing dar does without binary delta), but also to only save an rsync patch of any modified file, which lead to even smaller backups. .P Moreover with differential backup, .B dar also stores files that have been deleted since the backup of reference. Thus, when restoring, first a full backup, then additional differential backups, at each restoration you get the exact state of the filesystem at the time the differential backup was made. .B dar is the first backup program I know that can also remove files during restoration! By the way, in this document, "archive" and "backup" are used interchangeably, the difference is the purpose you build them for. .PP Unlike the .B tar command, .B dar has not to read a whole archive nor to stick together the different parts (the slices) to access its contents: dar archives contain a table of contents (aka "catalogue") located at the end, so dar can seek into the archive to read only the required data to restore files, thing which is much faster than what tar is used to do. The "catalogue" can be copied out of the archive (operation called isolation) to be used as reference for further backup and as backup of the internal catalogue in case of archive corruption. .PP Dar can also use a sequential reading mode, in which dar acts like tar, just reading byte by byte the whole archive to know its contents and eventually extracting file at each step. 
In other words, the archive contents is located at both locations, a first time all along the archive used for tar-like behavior suitable for sequential access media (tapes) and a second time at the end for faster access, suitable for random access media (disks). However note that tar archive and dar archive are not compatible. Note also that the sequential reading mode let you extract data from a partially written archive (those that failed to complete due to a lack of disk space for example) and since release 2.6.0 such truncated archive can be repaired to become a normal archive (the "catalogue" is rebuilt from inlined information). .PP Dar is able to save and restore to a cloud storage by mean of ftp or sftp network protocols. It can also leverage ssh protocol using dar_slave and dar_xform two auxilliary programs provided beside dar. .PP Dar format is quite robust against corruption: Only the file where the corruption took place in the archive will not be possible to restore. To have the possibility to repair a corrupted archive dar can work with par2 seamlessly just specifying "par2" on command-line (see /etc/darrc). Last a "relax" reading mode is available which let dar to either ignore some incoherence in archive structure, use internal redundant information to overcome data corruption or in last resort asking the user on what to do when some archive structure information is missing (-al option). This relax mode can be used with both sequential and direct access read modes. Note that you should rather use Parchive to protect your data rather than just relying on the "relax" mode, which has to be seen as a the last chance solution. .PP .B dar takes care of POSIX Extended Attributes (EA in short) that are used in particular under Linux to carry File Access Control List (FACL) as well as security attributes for SELinux, and also under MacOS X EA they are used to store file forks. 
EA also have room for user to add any key / value paire to any file, this is known as user EA. These attributes are not specific to any particular filesystem, they exist the same way under ext3/4, HFS+ and any other filesystem. .PP .B dar also takes care of Filesystem Specific Attributes (FSA in short) which are, as you can guess, specific to one or several filesystem(s). For example the Birth date of a file exists for HFS+ and NTFS but not for ext2/3/4 filesystem. The immutable attribute exists for ext2/3/4 but not for NTFS while the nodump files does not exists for NTFS but exists for HFS+, ext2/3/4 and many other Unix filesystems. .PP Sparse files (files with holes that system reports using several hundred gigabytes while they effectively use a few kilobytes on disk) are also well managed by .B dar: they are detected, stored and restored to filesystem properly. .PP Last, .B dar is also able to properly save and restore hard-links .PP .B .PP .SH WARNING This document is to be considered as a full reference of dar/libdar features. It is however not adapted to discover dar, for that purpose some tutorials are provided in dar documentation. Once you have apprehended the basic dar usages you are welcome to read further this document to see all other features you may find useful for your needs. 
.PP .PP .SH DOCUMENT STRUCTURE .PP The rest of this document is organized that way: .RS .TP 5 COMMANDS The eight actions you can performs with dar .TP 5 GENERAL OPTIONS A set of options common to all actions .TP 5 SAVING, ISOLATING, MERGING AND REPAIRING SPECIFIC OPTIONS A set of options that are specific to the operation of backup, catalogue isolation and archive merging .TP 5 RESTORATION SPECIFIC OPTIONS A set of options that are specific to the restoration operation .TP 5 TESTING AND DIFFERENCE SPECIFIC OPTIONS A set of options that are specific to the operation of archive testing and archive comparison with a filesystem .TP 5 LISTING OPTIONS A set of options that are specific to archive listing operation .TP 5 EXPICIT OPTIONAL ARGUMENTS Some system do not allow optional arguments to options, this chapter explain how to overcome this restriction .TP 5 EXIT CODES List of values .B dar returns at end of execution. This chapter should be read if you intend to create scripts relying on dar .TP 5 SIGNALS details the signal and their action on a running dar process .TP 5 FILES List configuration files that dar checks for .TP 5 CONDITIONAL SYNTAX Over command line, command and options can be passed to dar thanks to a plain file (known as DCF file). This plain file can also contain a specific syntax that will let you pass an option to dar only under certain situation/condition. This chapter describes this simple syntax and the different available conditions. .TP 5 USER TARGETS User can add their own conditions known as user targets. This chapter describes what they are and how to use them .TP 5 ENVIRONMENT Dar may rely on environment variables to look for DCF files and DUC files, SFTP private and public key and so on. 
.RE .SH COMMANDS AND OPTIONS .B COMMANDS: .PP Only eight commands define what action will be done by dar: Archive creation, archive extraction, archive listing, archive testing, archive comparison with filesystem, catalogue isolation, archive merging and archive repairing. These commands are described here below. Once defined, a large set of options can be used to modify the way the command is performed. These options are described just after the commands chapter. .PP .B Important note: Not all systems actually support long options (Solaris, FreeBSD, ...). For example --create will not be available on these systems, and you will have to use -c instead. In the same way, not all systems do support optional arguments (FreeBSD without GNU getopt for example), you then need to explicitly give the argument, for example in place of "-z" you will need to give "-z 9", see "EXPLICIT OPTIONAL ARGUMENTS" paragraph near the end of this document for details on that point. .PP A slice is just a simple file which name is composed of a "basename" followed by a dot, then a number, again a dot and the extension (dar) to form the filename of that slice. On the command line you will never have to give the full file name of a slice, just the basename. The number between the dots is the slice number, which starts from 1 and may be arbitrary large (as large as your system can support the corresponding filename). For example "my_first_archive.42.dar" is the 42th slice of the archive which basename is "my_first_archive". .PP .TP 20 -c, --create [[]/] creates a backup with the name based on . All the slices will be created in the directory if specified, details about the syntaxe is explained below at .B Remote repository syntax paragraph. Without nor the current directory is used. If the destination filesystem is too small to contain all the slices of the backup, the -p option (pausing before starting new slices) might be of interest. 
Else, in the case the filesystem is full, dar will suspend the operation, asking for the user to make free space, then continue its operation. To make free space, the only thing you cannot do is to touch the slice being written. If the filename is "-" *and* no slicing is asked for (no -s option) the archive is produced on the standard output allowing the user to send the resulting archive through a pipe (or into a tape device directly or using the dar_split command). .TP 20 -x, --extract [[]/] extracts files from the given backup. Slices are expected to be in the current directory or in the directory given by (see also .B Remote repository syntax below). It is also possible to use symbolic links to gather slices that are not in the same directory. Path may also point to a removable device (floppy, CD, USB key, etc.), in this case, to be able to mount/unmount the device, you must not launch dar from that directory. In other words, the current directory must not on the removable media you plan to unmount (see tutorial for details). The basename may be set to "-", in direct access mode (the default historical mode), you will then need dar_slave to work with dar (see -i and -o options, as well as dar_slave man page). However in sequential read mode (--sequential-read is used on command-line), dar will read the archive from standard input (see also -i option), this can eventually be used in combination with dar_split. .TP 20 -l, --list [[]/] lists the contents of the given backup (see also .B Remote repository syntax below) .B dar will only require the last slice of the archive in direct access mode. If however sequential mode is used, dar will read the overall archive, from the first slice to the last one. "-" can be used as basename, the behavior is the same as with -x option (read just above). .TP 20 -t, --test [[]/] checks the backup integrity. 
Even without compression, dar is able to detect at least one error per file in the archive, thanks to a variable length CRC recorded per file data, file EA and file FSA in the catalogue. Archive structure (slice header, archive header, catalogue) is also protected by CRC to be able to detect any kind of archive corruption. Same remark here, "-" may be used as basename (see -x option above for details). .TP 20 -d, --diff [[]/] compares saved files in the backup with those on the filesystem. may also be "-" (see -x option above for details). Note that the target for this operation is to be seen as a step further than archive testing, where in addition to archive coherence, the archive contents is verified to be the same as what is found on the filesystem. But if new files are present on the filesystem, dar ignores them. If you want to check for changes since a archive has been made, better use dry-run differential backup. .TP 20 -C, --isolate [[]/] isolate a catalogue from its archive (that's to say make a copy of the internal catalogue to its own archive container). The argument is the basename of the file to create which will contain the catalogue's copy. The -A option is mandatory here to give the name of the archive to copy the catalogue from, this archive is not modified at all. Slicing is available (-s -S -p -b etc.). If the filename is "-" *and* no slice is asked (no -s option) the isolated catalogue is produced on the standard output, allowing the user to send the resulting archive through a pipe. Note that there is quite no difference in concept between an isolated catalogue and an archive. Thus you can do all operations on an isolated catalogue, in particular take it in place of the original backup as reference for a differential archive, archive testing, archive comparison. 
Note however that for comparison (-d option) as data is not present in the isolated catalogue, dar relies on embedded CRC rather than comparing data byte by byte (what is done with a plain archive), and no comparison can be performed concerning EA or FSA even if each of them have their own CRC in the catalogue because different ordering as provided by the OS of the items composing EA and FSA may lead the CRC to be different while the EA or FSA are exactly the same, so CRC here is used only to dectect archive corruption. Since release 2.4.0 you can use an isolated catalogue to rescue a corrupted internal catalogue of the archive it has been based on (see -A option). .TP 20 -+, --merge [[]/] create a subset archive from one or two existing archives (the resulting archive name is the argument to this command). The dar file selection mechanism (see GENERAL OPTIONS) let the user decide which files will be present in the resulting archive and which one will be ignored. This option thus let the user merge two archives in a single one (with a filtering mechanism that accepts all files), as well as this option let the user create a smaller archive which data is taken from one or two archives of reference. Note that at no time the contents of the archives of reference is extracted to real files and directories: this is an archive to archive transfer, thus you may lack support for Extended Attribute while you will be able to fully manipulate files with their Extended Attributes from one archive to the resulting one. If the basename is "-" *and* no slice is asked (no -s option), the archive is produced on standard output allowing the user to send the resulting archive through a pipe. The first mandatory archive of reference is provided thanks to the -A option, while the second "auxiliary" (and optional) archive of reference is provided thanks to the -@ option. 
When a tie contention occurs (same file names from both archive have to be merged), the overwriting policy (-/ option) is used to define the one to keep in the resulting archive. By default, archive data selected for merging is uncompressed, and re-compressed. Thus the merging operation can be used to change compression algorithm of given archive as well as change its encryption. But, for better performance it is also possible thanks to the -ak option (see below the -ak option for usage restrictions) to merge files keeping them compressed, thus no decompression/re-compression is performed at all, which make the operation faster. Last it is not possible to merge two isolated catalogues. .TP 20 -y, --add-missing-catalogue [[]/] create a "repaired" archive based on the archive given with -A option. The repairing only concerns the case where an archive has been interrupted and dar could not cleanly end the archive creation process (lack of disk space, power outage, and so on). This operation consists in reading the tape marks in sequential reading mode to gather the content of the archive and once its end is reached, to recreate the missing table of content (aka catalogue) located at the end of the archive. Note that the damaged archive is not modified but a repaired copy is built beside it. Why not just appending the catalogue to the archive? Because first it was simpler to implement allowing to reuse routines of the merging operation, second by precaution for dar to not mess an existing archive due to a bug and last, it would not be compatible with archive signing and gpg encryption under certains conditions (several recipients or the archive is signed and you are not the one who signed it). 
.P .RS During the repairing operation, the repaired archive may have a different slicing (-s and -S options), a different encryption (-K and associated options, including gpg encryption and signing), a different repository slices permissions and ownership (--slice-mode option), user comment (--user-comment), generated hash (--hash) and min digits in slice number (--min-digits), but compression cannot be changed and tape marks cannot be removed (you can do it once reparation has completed using the merging operation). Last, file filtering is not allowed during archive repairing. .RE .TP 20 -h, --help displays help usage. .TP 20 -V, --version displays version information. .PP .B Remote repository syntax for [] .PP .RS for all commands described above as well as some options detailes below (-A and -@ options), the optional argument can be a Unix path like /var/tmp when the archive is located on the host dar runs on. But it can also make use of to define the remote host the archive is to be read or written to. "" follows the usual syntax: .PP .RS .B proto://[login[:password]@]hostname[:port]/path .RE .PP .TP 3 proto is either .B ftp or .B sftp .TP 3 login is optional, if not provided it defaults to .B anonymous. If the login string comports an @ it need to be escaped by \\\\ (a pair of backshashes) to avoid libdar considering it the hostname starting part. Exemple: login is me@here.com host is www.example.org gives: sftp://me\\\\@here.com@www.example.org/some/file. You may also need to escape the same way any other special characters like for exampl ecolumn (:) slash (/) if they are part of the login string. .TP 3 password if login is provided, the associated password may be given after a column (:) but this exposes the secret password to other users of the current system having access the table of process (using top, ps, /proc or other ways). If the login is given without password, the password will be asked interactively by dar at run time, which is much more secure. 
Alternatives are either to rely on ~/.netrc for FTP and also SFTP (!) transfers for that you need to use the --alter=file-authentication option (see below), or for SFTP only on public key authentication (you can also use --alter=file-authentication in that case to avoid a password being asked interactively). Note that passphrase support for sftp key is not (yet) supported. .TP 3 hostname is the name or IP address of the host to connect to. For sftp the server's public key is checked against the ~/.ssh/known_hosts file (or the file pointed to by the environment variable DAR_SFTP_KNOWNHOST_FILE, see more details about that variable at the bottom of this man page), the host must be known and the public key received from the network must match the one in that file, else dar aborts. If thus you want to operate with a new sftp server, first use ssh or sftp commands to do the usual fingerprint verifications which update the known_hosts file accordingly, then run dar/libdar toward this sftp server. .TP 3 port if not provided, dar will use the default/standard port in regard to the protocol specified in the "proto" field .TP 3 path a unix path where resides the archive to read from the remote repository or where to write the archive to in that remote repository. The given path is absolute, in regard to the remote root filesystem available for the given account through the requested protocol. See also --network-retry-delay option below. .RE .PP .B GENERAL OPTIONS: .TP 20 -v, --verbose For backward compatibility, this is an alias to "-vt -vm" (both options set). .TP 20 -vs, --verbose=skipped Display files skipped because of file filtering exclusion specified by the user .TP 20 -vt, --verbose=treated Display treated files because of file filtering inclusion specified by the user or no file filtering specified at all. For each file a message is displayed *before* the file is treated.
This option is not available for archive isolation and is useless for archive listing as it is always set, unless -q is used. .TP 20 -vd, --verbose=dir Display the directory under process. The message shows *before* entering a directory. You can have a less verbose output than -vt while still being able to follow what dar is doing. Note that -vt and -vd are mutually exclusive. .TP 20 -vm, --verbose=messages Display detailed messages about what dar is currently performing but not related to currently treated or skipped files and directories .TP 20 -vf, --verbose=finished Issues a summary *after* each treated directory containing the amount of data backed up in that directory as well as the average compression ratio. This option is only available for archive creation. .TP 20 -va, --verbose=all is equivalent to -vm -vs -vt, see also -Q and -q options below. Note: When using dar from a script better use dar's exit status to know which way the operation has ended (see EXIT CODES at the end of this document). .TP 20 -vmasks, --verbose=masks Display raw information about the masks set by dar and passed to libdar .TP 20 -q, --quiet Suppress the final statistics report. If no verbose output is asked beside this option, nothing is displayed if the operation succeeds. When using dar from a script better use dar's exit status to know which way the operation has ended (see EXIT CODES at the end of this document) .TP 20 -b, --beep makes the terminal ring when user action is required (like for example the creation of a new slice using the -p option) .TP 20 -B, --batch In the file whose name is given in argument to this option, you can put any option or argument as used on command line, that will be parsed as if they were in place of the "-B " option. This way you can overcome the command line size limitation. Commands in the file may be disposed on several lines, and -B option can also be used inside files, leading a file to include other files.
But an error occurs in case of loop (a file that includes itself directly or not) and DAR aborts immediately. Comments are allowed, and must start with a hash `#' character on each line. Note that for a line to be considered as a comment the hash character must be the first character of the line (space or tab can still precede the hash). See .B Conditional Syntax below for a richer syntax in this type of configuration files known as DCF file (Dar Configuration File). See also the environment variable DAR_DCF_PATH in the ENVIRONMENT section at the end of this document. .PP Note that you can use quotes simple (\'arg\') double ("arg") and back-quotes (`arg`) inside such file, but they need to be balanced (have an ending one). To use such character without the meaning of a quote, for example as an apostrophe, you need to escape it using a back-slash ("That\\'s an example"). Of course to add a single back-slash as a normal character in the file you will have to double it ("c:\\\\windows" for example) .TP 20 -N, --noconf Do not try to read either the ~/.darrc or the /etc/darrc configuration files. See .B files section below. .TP 20 -Q Do not display an initial warning on stderr when not launched from a terminal (when launched from a cronjob for example). This means that all questions to the user will be answered by 'no', which most of the time will abort the program. Please note that this option cannot be used in a configuration file (-B option). Since version 2.2.2, giving this option also forces the non-interactive mode, even if dar is launched from a terminal. This makes it possible for dar to run in the background. When you do, it's recommended to also redirect stdout and/or stderr to files: dar -Q ... &> /dev/null & .TP 20 -n, --no-overwrite do not allow overwriting If an overwriting policy is specified (see -/ option) -n option only applies to slice overwriting, the overwriting of files during restoration or merging is handled by the overwriting policy.
Without overwriting policy, -n applies to restored files as well as generated slices. .TP 20 -w, --no-warn Do not warn before overwriting (applied for slice overwriting and for overwriting decision made by the overwriting policy). By default overwriting is allowed but a warning is issued before proceeding. This option may receive 'a' as argument (see just below): .TP 20 -wa, --no-warn=all This implies the -w option, and means that in addition to avoiding the warning for file overwriting, DAR also avoids signaling a file about to be removed when its type is not the expected one. Files are removed when they have been recorded as deleted since the archive of reference. At restoration of the differential archive, if a file of the given name exists, it is removed, but if the type does not match the file that was present at the time of the archive of reference (directory, plain file, fifo, socket, char or block device, etc.), a warning is normally issued to prevent the accidental removal of data that was not saved in the backup of reference. (See also -k option) .TP 20 -A, --ref [[]]/ Depending on the context, it specifies the archive to use as reference, which is mandatory for archive isolation (-C option) and merging operation (-+ option). Else it specifies the rescue catalogue to use when restoring (-x command), testing (-t command) or comparing (-d command) an archive. All slices of the reference backup are expected to be on the same directory given by or the current directory by default. Usually only the last slice is required to extract the catalogue of reference. If necessary the use of symbolic links is also possible here to gather slices that do not reside in the same directory. You can also point to a USB key, DVD-R(W) or any other mounted directory, because .B dar will pause and ask the user for required slices if they are not present.
The argument to -A may be of four types: .RS .RS .TP 5 - An existing archive basename, which will be taken as reference .TP 5 - a dash ("-") in direct access mode (default mode, when --sequential-read is not used) it may imply the use of -o and -i options, this allows the archive of reference to be read from a pair of pipes with dar_slave at the other ends. Dar_slave can be run through ssh on a remote host for example. Note that this type of argument ("-") is only available when -A is used for isolation (-C option) and merging (-+ options). In sequential mode (--sequential-read is used), the archive of reference is read from standard input or from the named pipe specified by -i option. -o option has no use in sequential mode. Note that merging operation (-+ option) cannot read archive of reference in sequential mode. .TP 5 - a plus sign ("+") which makes the reference be the current directory status. This argument is only available for archive creation (-c option). In other words, no file's data will be saved, just the current status of the inodes will be recorded in the catalogue. This feature is known as the "snapshot" backup. A snapshot backup can be used as reference later on to detect or save only the files that have changed since the snapshot was made. .TP 5 - a , if -af option has been placed before -A on the command-line or in an included file (see -B option). For more about that feature see -af option below. This form is only available for archive creation (-c option). .RE .PP During backup operation (-c option) the archive of reference, given thanks to the -A option, is used for comparison with existing files on the filesystem. Dar will then backup only files that have changed since the archive of reference was done. If no -A option is given, the backup operation is a full backup.
With -A option if the archive of reference is a full backup some call it a differential backup, while if the archive of reference is a differential backup, some call this type of backup an incremental backup. For dar there is no difference in structure between incremental and differential backup, both are usually designated globally as "differential" backup in the documentation. .PP During merging operation (-+ option), the contents of the -A given archive will be taken, eventually with the contents of the -@ auxiliary archive if specified (see below), to form a new archive from files of this or these archives. Note that you can filter out files from the operation and set up a subset of the original archive(s). .PP During Catalogue isolation (-C option), dar will create the isolated catalogue from the one given with -A option. .PP During testing, diff or extraction, (-t, -d or -x options respectively), the table of contents (the catalogue) will be read from the archive given with -A instead of using the internal catalogue of the archive. The archive given for rescue must have been previously isolated from this same archive (else the contents will not match and dar will refuse to proceed to this operation). This acts as a backup solution to the case of corruption inside an archive's catalogue, while the best way is still to use Parchive to protect your data against media error. .RE .TP 20 -af, --alter=fixed-date Modify the -A option behavior, making it receive a as argument in place of the []/ default argument. The is used to define which files to save: files whose modification is newer or equal to , and which to consider unchanged: those older than . This option has only a meaning when creating an archive (-c option) and must be placed before -A option to have an effect.
.RS .PP must be a date in the two following possible formats: .RS .TP 30 - a number of seconds since Jan 1st, 1970 .TP 30 - a date in the following form [[[year/]month/]day-]hour:minute[:second] .RE .PP Here are some examples of date: .RS 1132429128 .PP 2005/11/19-19:38:48 Which is 38 past 7 PM and 48 seconds, the 19th of November 2005 .PP 20:20 Which is 8 PM of the current day .PP 2-00:08 Which is 8 past midnight, the second day of the current month .PP 2/2-14:59 Which is 1 minute to 3 PM, the 2nd of February in the current year .RE .PP Note that the provided date is relative to the system timezone which is overridden if the TZ environment variable is set (see tzselect(1) for more details) .RE .TP 20 -@, --aux [[]]/, --on-fly-isolate []/ specifies an auxiliary archive of reference (merging context) or the name of the on-fly isolated catalogue (creation context). This option is thus only available with -+ option (merging) and -c option (archive creation). Note that --aux and --on-fly-isolate are really aliases to the same option, this is the context of use (archive creation or merging) which leads it to behave a way or another. .RS .PP In a merging context, over -A option which is mandatory, you may give a second archive of reference thanks to the -@ option. This allows you to merge two archives into a single one. See also -$ option (encryption) -~ option (command execution) and -% (crypto block size) for other options concerning auxiliary archive of reference. They are the respective equivalent of -J, -F and -* options relative to archive given thanks to -A option. .PP In a backup context -@ option lets the user specify the archive name for an on-fly isolation. With on-fly isolation, you can also use -$ option (to define encryption algorithm and passphrase), -~ option (to execute a command once the on-fly isolated catalogue is completed) and -% option (crypto block size).
On-fly isolated catalogue is always bzip2 if possible else gzip else lzo compressed (using compression level 9) else not compressed, and it is also always a single sliced archive. Due to command-line exiguity, it is not possible to change compression algo nor slice size for the on-fly isolation. If you need a more complicated isolation, either look for a GUI over libdar, or do a normal (= not an on-fly) isolation operation (By the way it is possible to isolate an already isolated catalogue, this is equivalent to doing a copy, but you can change encryption, compression or slicing, for example), you can also use dar_xform on an isolated catalogue if you only want to change slices size (this is faster as no decompression/re-compression nor encryption/decryption is necessary). Using the merging operation on an isolated catalogue instead of isolating the isolated catalogue, leads the resulting archive to not be able to be used as a rescue for internal catalogue of the original archive. --aux-ref is a synonym to --aux. .RE .TP 20 -R, --fs-root The path points to the directory tree containing all the files that will be enrolled in the operation (backup, restoration or comparison). By default the current directory is used. All other paths used in -P or -g options on the command line are and must be relative to this path (or to current directory if -R is not present). Note that -R is useless for testing (-t option) isolation (-C option) and merging (-+ option) .TP 20 -X, --exclude The mask is a string with wildcards (like * and ? see glob(7) for details) which is applied to filenames which are not directories. If a given file matches the mask, it is excluded from the operation. By default (no -X on the command line), no file is excluded from the operation. -X may be present several times on the command line, in that case a file will not be considered for the given operation if it matches at least one -X mask. See also -ar and -am options. 
.TP 20 -I, --include The mask is applied to filenames which are not directories (see glob(7) for details on wildcard characters). If a given file matches the mask and does not match any mask given with -X, the file is selected for the operation. By default (no -I and no -X on the command line), all files are included for the operation. -I may be present several times on the command line, in that case all files that match one of the -I masks will be considered for the given operation, if they do not also match one of the -X masks. See also -ar and -am options. .TP 20 -P, --prune Do not consider file or directory sub-tree given by the path. -P may be present several times on the command line. The difference with -X is that the mask is not applied only to the filename, but also includes the path. Moreover it applies also to directories (-X does not). By default (no -P on the command-line), no sub-tree or file is excluded from the operation, and the whole directory tree (as indicated by -R option) is considered. Note that may contain wildcards like * or ? see .B glob(7) man page for more information. .TP 20 -g, --go-into Files or directories to take into account, as opposed to -P. -g may be present several times on command-line. Same thing here, the difference with -I is that the mask is applied to the path+filename and also concerns directories. By default all files under the -R directory are considered. Else, if one or more -g option is given, just those are selected (if they do not match any -P option). All paths given this way must be relative to the -R directory, which defaults to current directory. .B Warning, -g option cannot receive wildcards, these would not be interpreted. .TP 20 -[, --include-from-file Files listed in the listing file are included for the operation.
No wildcard expression is interpreted in the listing file, the null character is not allowed and the carriage return is used to separate file names (one file name per line) each line must not exceed 20479 bytes. Note that this option applies to any files and directory exactly as -g does, with an important difference however: -g option only uses relative paths to the root directory (the directory given with the -R option), while -[ can use absolute paths as well. Another difference is when the argument is a directory -g will include all the subdirectories under that directory, while when the same entry is found in a listing file given to -[ only that directory will be included, no subdirectory or subfile would be enrolled in the backup, with -[ you need to list the exact set of files you want to backup. You can thus generate a listing file with the 'find / -print > somefile' command and give 'somefile' as argument to -[ option. Note that however, dar will never save files out of the -R given root directory tree, even if some are listed in the 'somefile' file. .TP 20 -], --exclude-from-file Files listed in the listing file are excluded from the operation. If a directory is listed in the file, all its contents are excluded. This option is the opposite of -[ and acts the same way as -P option does (in particular it is compared to the whole path+filename and applies to files and directories). As for -[ option, -] listing file can contain absolute paths, but wildcards are not expanded either. .PP .B File selection in brief: .PP As seen above, -I -X -P, -g, -[ and -] options are used to select the files to operate on. -I and -X only use the name of files and do not apply to directories, while -P, -g -[ and -] use the filename *and* the path, they *do* apply to directories. .PP Since version 2.2.0 two modes of interpretation of these options exist.
The normal original method and the ordered method: .RS .TP 5 the normal method is the default and is the one that has been presented above: A directory is elected for operation if no -P or -] option excludes it. If at least one -g or -[ option is given one command line, one -g or -[ option must cover it, else it is not elected for operation. If a directory is not selected, no recursion is done in it (the directory is pruned). For non directories files, the same is true (P, -g, -[ and -] do apply) and a second test must also be satisfied: no -X option must exclude the filename, and if at least one -I option is given, one must match the given filename (using or not wildcards). .TP 5 the ordered method (when -am option is given on command-line): The ordered method takes care of the order of presence between -X and -I in one hand and of -P, -g, -[ and -] in the other hand (note that it has also the same action concerning EA selection when using -u and -U options, but that's no more file selection). In the ordered method the last argument take precedence over all the previous ones, let's take an example: .RS .TP 5 -X "*.mp?" -I "*.mp3" -I "toto*" Here dar will include all files except file of name "*.mp?" (those ending with "mpX" where X is any character), but it will however include those ending with ".mp3". It will also include files which name begin by "toto" whatever they end with. This way, "toto.mp2" will be saved (while it matches "*.mp?" it also begins by "toto") as well as "toto.txt" as well as "joe.mp3" (while it matches "*.mp?" it also ends by "mp3"). But will not be saved "joe.mp2" (because it does not begin by "toto", nor ends by "mp3", and match "*.mp?" mask). As we see the last option (-I or -X) overcomes the previous one. -P, -g, -[ and -] act together the same but as seen above they do not only act on filename, but on the whole path+filename. Note that (-g, -P, -[, -]) and (-X , -I) are independent concerning their relative order. 
You can mix -X -I -g -P -] -[ in any order, what will be important is the relative positions of -X options compared to -I options, and the relative positions of -g -[ -] and -P options between them. .RE .PP In logical terms, if is the mask generated by all previous mask on the command line, -I generates the new following mask: or . While -X generates the new following mask: and not . This is recursive each time you add a -I or -X option. Things work the same with -P, -g, -[ and -] options. .RE .B This ends the file selection explication let's continue with other options. .PP .TP 20 -u, --exclude-ea Do not consider the Extended Attributes (EA) that are matched by the given mask. By default, no EA are excluded, if the support for EA has been activated at compilation time. This option can be used multiple times. .TP 20 -U, --include-ea Do only consider the EA that match the given mask. By default, all EA are included if no -u or -U option is present and if the support for EA has been activated at compilation time. This option can be used multiple times. See also the -am and -ae options, they also apply to -U and -u options and read below the Note concerning EA. .PP .B Note concerning Extended Attributes (EA) .PP .RS Support for EA must be activated at compilation time (the configure script tries to do so if your system has all the required support for that). Thus you can get two binaries of dar (of the same version), one supporting EA and another which does not (dar -V to see whether EA support is activated). The archives they produce are the same and can be read by each other. The only difference is that the binary without EA support is not able to save or restore EAs, but is still able to test them and list their presence. .PP In the following when we will speak about Extended Attribute (EA) or EA entry, we will only consider a particular Extended Attribute key and its value. By opposition, the set of all EA associated to a file will be designated by "EA set". 
.PP Since version 2.3.x the names of EA entries include the namespace for dar to be able to consider any type of EA (not only "system" and "user" as previously). Thus the two previous options -u and -U have changed and now take an argument which is a mask applied to EA entry names written in the following form .I namespace.name where "namespace" is for example "user". Note that the mask may or may not include the dot (.) and may match an arbitrary part of the EA namespace+name, just keep in mind that masks will be applied to the "namespace.name" global string. .PP the -am flag here also enables the ordered method, for EA selection too. The ordered versus normal methods have been explained above in the file selection note, with some examples using -X and -I. Here this is the same with -U and -u, (just replace -X by -u and -I by -U, the corresponding mask will apply to Extended Attribute selection in place of file selection). Another point, independently of the -am option the -ae option can be used at restoration time only. If set, when a file is about to be overwritten, all EA will be first erased before restoring those selected for restoration in the archive (according to the -U and -u options given). If not set, the EA of the existing file will be overwritten, those extra EA that are not in the archive or are not selected for restoration in regard to the -u and -U options will be preserved. If you have not used any -u/-U option at backup time and want to restore from a set of full/differential backups the EA exactly as they were, you have to use -ae so that dar removes the existing EA before restoring the set of EA stored in the archive. Without -ae option dar will simply add EA to existing ones, thus getting a different set of EA for a given file than those recorded at the time of the backup. .PP Last point the -acase and -an options alter the case sensitivity of the -U and -u masks that follow them on the command-line/included files as they do for -I, -X, -P, -g, -[ and -] as well.
Very last point ;-), if -ac option is used during backup dar set back the atime after having read each file (see -aa/-ac options), this has as side effect to modify the ctime date of each file. But ctime change is used by dar to detect EA changes. In brief, the next time you backup a file that had to be read (thus which contents changed), its EA will be saved even if they had not changed. To avoid this side effect, don't use the -ac option if not necessary. .RE .B This ends the Extended Attribute selection explication let's continue with other options. .PP .TP 20 -4 --fsa-scope [,[, ...] Reduce the scope of Filesystem Specific Attribute (FSA) to be considered for the operation. FSA are grouped by family. Current available families are: .RS .TP 5 extX this family takes care of Linux ext2/3/4 flag attributes set by chattr(1) and read by lsattr(1). Dar only considers flags that are possible to set or clear by users (or privileged user): append-only, compressed, no_dump (Yes, dar can save files having the nodump flag set and restore then afterward with that flag set!), immutable, data-journaling, secure-deletion, no-tail-merging, undeletable, noatime-update, synchronous-directory, synchronous-update, top-of-directory-hierarchy. Note that "extx" and "ext" are aliases for this FSA family. In spite of its name, this family of attributes is not limited to ext2/3/4 filesystems. .TP 5 HFS+ this family takes care of Mac OS X HFS+ birth date of files, in addition of commonly found dates like atime (last access time), ctime (last meta data change) and mtime (last data change). .TP 5 none "none" is not a FSA family but can be used alone to ignore all FSA families. .PP By default no restriction is done and FSA of all families are considered at restoration time, but if a family has not been activated at compilation time a warning is issued for each file that cannot have its FSA restored completely (unless this family is excluded from the scope thanks to the -4 option). 
At backup time, if an FSA family has not been activated at compilation time, no warning is issued and FSA of that family are ignored. Still at backup time, you can also ignore FSA that have compilation time support by excluding them from the operation thanks to this -4 option. .PP Example of use: --fsa-scope extX,HFS+ .RE .TP 20 -am, --alter=mask set the ordered mode for mask. This affects the way -I and -X options are interpreted, as well as -g, -P, -[ and -] options, -Z and -Y options and -U and -u options. It can take any place on the command-line and can be placed only once. See the .B file selection in brief paragraph above for a detailed explanation of this option. It has also an incidence on the --backup-hook-exclude and --backup-hook-include options. .TP 20 -an, --alter=no-case set the filters in case insensitive mode. This concerns only masks specified after this option (see also -acase option below). This changes the behavior of -I, -X, -g, -P, -Z, -Y, -u and -U options. .PP Warning: case insensitivity requires interpreting filenames which depends on the locale with which dar is run (defined by the LANG environment variable). For example if you create files with LANG set to fr_FR.UTF-8 and use non plain ASCII characters in filename, there is chances that these non ASCII characters will be stored over several bytes in that filename: so called "wide characters". If then you run dar with LANG set to another value like ru_RU.koi8r, there is much chances that these wide characters do not correspond to the same letter or worse, that they do not match any valid wide character for that locale. A filename is always a sequence of bytes and always saved as such, but using --alter=no-case implies interpreting that sequence in a way that depends on the given locale (as defined by the LANG environment variable). 
As such, dar cannot know if a given file has to be read with fr_FR.UTF-8 locale or with it_IT.iso88591 or ru_RU.koi8r and so on, because this information is not stored in filenames. In consequence, if different locales are used on your system and you are doing a system wide backup, using --alter=no-case option may lead dar to detect invalid wide character, in that case it falls back to a byte by byte case sensitivity comparison (ASCII characters), which may not be what you would expect at first sight: Most of the time, an upper case wide character (stored on several bytes) does not match the equivalent lower case wide character (several bytes too), when case sensitivity comparison is performed byte by byte. .TP 20 -acase, --alter=case set back to case sensitive mode for filters. All following masks are case sensitive, up to end of parsing or up to the next -an option. This changes the behavior of -I, -X, -g, -P, -Z, -Y, -u and -U options. .TP 20 -ar, --alter=regex set the filters to be interpreted as regular expressions (man .B regex(7) ) instead of the default glob expression (man .B glob(7) ) This modifies the -I, -X, -g, -P, -Z, -Y, -u and -U options that follows up to an eventual -ag option (see just below). Note that for -P option, the given mask matches the relative path part of the files path: Let's take an example, assuming you have provided /usr/local to the -R option, the mask "^foo$" will replaced internally by "^/usr/local/foo$" while the mask "foo$" will be replaced internally by "^/usr/local/.*foo$". .TP 20 -ag, --alter=glob This option returns to glob expressions mode (which is the default) after an -ar option has been used, this applies to any -I, -X, -g, -P, -Z, -Y, -u and -U options that follow up to an eventual new -ar option (see just above). .TP 20 -i, --input is available when reading from pipe (basename is "-" for -x, -l, -t, -d or for -A when -c, -C or -+ is used). 
When reading from pipe, standard input is used, but with this option, the file (usually a named pipe) is used instead. This option is to receive output from dar_slave program (see doc/usage_notes.html for examples of use). Note that when --sequential-read is used, dar uses a single pipe and does no more rely on dar_slave, -i option can be used to tell dar which named pipe to read the archive from, instead of the standard input. .TP 20 -o, --output is available when reading from pipe (basename is "-" for -x, -l, -t, -d or for -A when -c, -C or -+ is used). When reading from pipe, standard output is used to send request to dar_slave, but with this option, the file (usually a named pipe) is used instead. When standard output is used, all messages goes to standard error (not only interactive messages). See doc/usage_notes.html for examples of use. This option is not to be used in --sequential-read mode. .TP 20 -O, --comparison-field[=] When comparing with the archive of reference (-c -A) during a differential backup, when extracting (-x) or when comparing (-d) do only considers certain fields. The available flags are: .RS .TP 15 ignore-owner all fields are considered except ownership. .BR This is useful when dar is used by a non-privileged user. It will not consider a file has changed just because of a uid or gid mismatch and at restoration dar will not even try to set the file ownership. .TP 15 mtime only inode type and last modification date is considered as well as inode specific attributes like file size for plain files. Ownership is ignored, permission is ignored. During comparison, difference on ownership or permission is ignored and at restoration time dar will not try to set the inode permission and ownership. .TP 15 inode-type Only the inode type is considered. Ownership, permission and dates are ignored. Inode specific attributes are still considered (like file size for plain files). 
Thus comparison will ignore differences for ownership, permission, and dates and at restoration dar will not try to set the ownership, permission and dates. .RE .PP When no flag is provided to this option, -O option acts as if the "ignore-owner" flag was set, which is the behavior in older releases (< 2.3.0). Note also that for backward compatibility, --ignore-owner option still exists and since version 2.3.0 is just an alias to the --comparison-field=ignore-owner option. Of course if this option is not used, all fields are used for comparison or restoration. .TP 20 -H[num], --hour[=num] if -H is used, two dates are considered equal if they differ from a integer number of hours, and that number is less than or equal to [num]. If not specified, num defaults to 1. This is used when making a differential backup, to compare last_modification date of inodes, at restoration or merging time if overwriting policy is based on file's data or EA being more recent and last, when comparing an archive with a filesystem (-d option). This is to workaround some filesystems (like Samba filesystem) that seems to change the dates of files after having gone from or to daylight saving time (winter/summer time). Note that -H option has influence on the overwriting policy (see -/ option) only if it is found before on command-line or in an included file (using -B option). .TP 20 -E, --execute the string is a .B user command-line to be launched between slices. For reading an archive (thus using -t, -d, -l or -x commands), the given string is executed before the slice is read or even asked, for writing an archive instead (thus using -c, -C or -+ commands), the given string is executed once the slice has been completed. Some substitution macros can be used in the string: .RS .TP 10 %% will be replaced by % .TP 10 %p will be replaced by the slice path .TP 10 %b will be replaced by the slice basename .TP 10 %n will be replaced by the slice number (to be read or just written). 
For reading, dar often needs the last slice, but initially it does not know its number. If it cannot be found in the current directory, the user command-line is then called with %n equal to 0. This is a convenient way to inform the user command to provide the last slice. If after executing the string the requested slice is still not present, dar asks the user (as usually) with a message on the terminal. Once the last slice is found, the user command-line is called a second time, with %n equal to the value of the last slice number. .TP 10 %N is the slice number with the leading zero as defined by --min-digits option. If this option is not used, %N is equivalent to %n. .TP 10 %e will be replaced by the slice extension (always substituted by "dar") .TP 10 %c will be replaced by the context. Actually three possible values exist: "init", "operation" and "last_slice". When reading an archive for (testing, extraction, diff, listing, or while reading the archive of reference, see below the -F option), the "init" context takes place from the beginning up to the time the catalogue is retrieved. On a multiple slice archive this correspond to the last slice request. After, that point comes the "operation" context. While creating an archive, the context is always "operation" except when the last slice has been created, in which case the context is set to "last_slice". .TP 10 %u will be replaced by the full URL of path where the slice is stored .P .RE Several -E option can be given, given commands will then be called in the order they appear on the command line and -B included files. Note that having '-E script1 -E script2' is totally equivalent to '-E "script1 ; script2"'. In other words if script1 fails, script2 fill still be executed and dar will only be notified of the exit status of the last -E option. Exit status of previous -E given commands will be ignored. If this does not match your need, consider using a single -aduc option (see below). 
More generally you can use any shell construction in the argument to -E, including parentheses, || and &&. Such files given to -E option are known as DUC files (Dar User Command). See also the environment variable DAR_DUC_PATH in the ENVIRONMENT section at the end of this document. .TP 20 -aduc, --alter=duc As described above for -E option, several -E/-F/-~ options (aka DUC commands) are combined using the shell ";" operator, which ignores the exit status of the first commands and only reports to dar the exit status of the last command, leading all commands to always being executed. --aduc option combines the different DUC commands using the shell "&&" operator, which executes the next command if and only if the previous command succeeded. In other words, dar gets notified of an error in any given DUC command but due to an error not all DUC commands may be executed. .P --aduc modifies the way the next DUC file is attached to the previous command, in other words: .RS .TP 5 dar --aduc -E script1 -E script2 ... leads libdar to call a shell with the following line "script1 && script2" .TP 5 dar -E script1 -E script2 --aduc -E script3 ... leads libdar to call a shell with the following line "script1 ; script2 && script3". In other words if you want to avoid the ";" use --aduc before any -E/-F/-~ option. .RE .TP 20 -F, --ref-execute same as -E but is applied between slices of the reference archive (-A option). --execute-ref is a synonym. .TP 20 -~, --aux-execute same as -E and -F but is applied between slices of the auxiliary archive (-@ option). .TP 20 -K, --key [[]:] .TP 20 -K, --key gnupg:[]:email[,email[...]] .RS .B In the first syntax, encrypt/decrypt the archive using the cipher with the as pass phrase. An encrypted archive can only be read if the same pass phrase is given (symmetric encryption). Available ciphers are "blowfish" (alias "bf"), "aes", "twofish", "serpent" and "camellia" for strong encryption and "scrambling" (alias "scram") for a very weak encryption. 
By default if no or no ':' is given, the aes256 cipher is assumed (default was blowfish up to 2.5.x). If your password contains a colon ':' you need to specify the cipher to use (or at least use the initial ':' which is equivalent to 'bf:'). If the is empty the pass phrase will be asked at execution time. Thus, the smallest argument that -K can receive is ':' which means aes256 cipher with the pass phrase asked at execution time. .PP Note that giving the passphrase as argument to -K (or -J or '-$' see below) may let other users learn the pass phrase (thanks to the ps, or top program for examples). It is thus wise to either use an empty pass which will make dar ask the pass phrase when needed, or use -K (or -J option) from a Dar Command File (see -B option), assuming it has the appropriate permissions to avoid other users reading it. For those paranoids that are really concerned about security of their passwords, having a password read from a DCF is not that secure, because while the file gets parsed, dar makes use of "unsecured" memory (memory that can be swapped to disk under heavy memory load conditions). It is only when the passphrase has been identified that locked memory (aka secure memory) is used to store the parsed passphrase. So, the most secure way to transmit a passphrase to dar, then to libdar, then to libgcrypt, is having dar ask the passphrase at execution time, dar then makes use of secured (locked) memory from the beginning. .PP Since archive format 9 (archive generated by release 2.5.0 and following) at reading time, it is not necessary to provide the encryption algorithm used, just the passphrase is required, dar will figure out which encryption algorithm had been used at archive creation time. You can either omit -K in which case dar will ask for the passphrase at execution time, or you can use -K in a DCF file as explained above (avoid using -K directly on command-line). 
.PP .B The second syntax starts with the word "gnupg" followed by a colon ':' . In that situation, the same set of symmetric encryption algorithms as described above is available after the colon, but the passphrase is not given by the user but randomly chosen by libdar and encrypted using the public key of the target users whose email is given in a comma separated list. This random key (see also --key-length below), once encrypted is placed at the beginning and at the end of the generated archive. At reading time only the listed users will be able to read that archive thanks to their respective private key. This feature implies that each user (the archive creator as well as the target users) have their GnuPG keyring set properly. In particular, the archive creator must have validated the public keys of the target users, and the target users must own the corresponding private key in their keyring. Example: using "--key gnupg::bob@nowhere.org,joe@somewhere.com" will generate an aes256 encrypted archive whose passphrase randomly chosen by libdar will be encrypted with the public keys of bob@nowhere.org and joe@somewhere.com. To use blowfish in place of aes256 one could use "--key gnupg:bf:bob@nowhere.org,joe@somewhere.com". Note that no check is done about the trust you have set in GPG keyring that a particular public key is owned by the physical person you expect. See also --sign option below. .PP Note that if you have set a passphrase on your private key, dar will ask it dynamically, which requires dar to be run from a terminal. No other way has been provided to transmit a private key's passphrase to libdar. In consequence if you want to use dar/libdar in scripts and make use of public key algorithm you should avoid setting a passphrase to the private key you want to use. See also GNUPGHOME in the ENVIRONMENT section at the end of this document. .PP .B Obvious but important! 
To read a gnupg encrypted archive, you need your private key (not only the passphrase to activate it, if set). Thus if you plan to make backup of your system and encrypt the backup using gnupg, you should have a copy of this private key available out of the archive (usb key, floppy, CD/DVD, ...) in order to be able to restore your backup! .RE .TP 20 -J, --ref-key [[]:] same meaning/use as -K option's first syntax, but the given key is used to decrypt the archive of reference (given with -A option). --key-ref is a synonym. Note that for archives generated using dar release 2.5.0 and above this option is no more necessary, unless you want to give the passphrase on command-line (not recommended) or in DCF file (which file would be set with restricted access permissions and/or ACL). .TP 20 -$, --aux-key [[]:] same as -J but for the auxiliary archive of reference (given with -@ option). Here too, this option is no more necessary to read archives generated by dar release 2.5.0 and above. .TP 20 -#, --crypto-block to be able to randomly access data in an archive, it is not encrypted globally but block by block. You can define the encryption block size thanks to this argument which default to 10240 bytes. Note that the syntax used for -s option is also available here (k, M, G, etc.). Note also that crypto-block is stored as a 32 bits integer thus value larger than 4GB will cause an error. Note last, that the block size given here must be provided when reading this resulting archive, using the -* option if the archive is the archive of reference (given to -A option) using -% options if the archive is the auxiliary archive of reference (given to -@ option) or using this -# option if it is the subject of the operation (listing, comparing, testing that archive). 
If the value is not the default and the given value is not correct in regard to the value given at archive creation time, the archive will not be possible to decrypt, it is thus safer to keep the default value (and not using at all the -#, -*, -% options). .TP 20 -*, --ref-crypto-block same as --crypto-block but to read the archive of reference (-A option). --crypto-block-ref is a synonym. .TP 20 -%, --aux-crypto-block same as --crypto-block but to read the auxiliary archive of reference (-@ option). .TP 20 -e, --dry-run Do not perform any action (backup, restoration or merging), displays all messages as if it was for real ("dry run" action). The --empty option is a synonym. .TP 20 -aSI, --alter=SI[-unit[s]] when using k M G T E Z Y prefixes to define a size, use the SI meaning: multiple of 10^3 (a Mega is 1,000,000). .TP 20 -abinary, --alter=binary[-unit[s]] when using k M G T E Z Y prefixes to define a size, use the historical computer science meaning: multiple of 2^10 (a Mega is 1,048,576). .PP The --alter=SI and --alter=binary options can be used several times on the command line. They affect all prefixes which follow, even those found in files included by the -B option, up to the next --alter=binary or --alter=SI occurrence. Note that if in a file included by the -B option, an --alter=binary or --alter=SI is encountered, it affects all the following prefixes, even those outside the included files. For example, when running with the parameters "-B some.dcf -s 1K", 1K may be equal to 1000 or 1024, depending on --alter=binary or --alter=SI being present in the some.dcf file. By default (before any --alter=SI/binary option is reached), binary interpretation of prefixes is done, for compatibility with older versions. .TP 20 -ac, --alter=ctime When reading a filesystem (during a backup or comparison), restores the atime of all files to what it was before the file was read. This makes it appear as if it had not been read at all. 
However, because there is no system call to let applications changing the ctime (last inode change) of a file, setting back the atime results in the ctime being changed (hence the alter=ctime). Some recent unix system allow an application to get 'furtive read mode' to the filesystem (see below). On older systems, however, for most users, having the atimes of the files changed shouldn't be a problem, since they can be changed by any other program (running by any user!) as well (like the content-index program Beagle). Ctimes on the other hand, are the only way for security software to detect if files on your system have been replaced (by so called root-kits mostly). This means, that should you run dar with -ac, security software which uses ctimes to check, will mark every file on your system as compromised after the backup. In short, this means this option should only be used by people who know what they are doing. It's the opinion of this writer that any software susceptible to atime changes is flakey or even broken (because of the afore mentioned reasons why atimes can change). But, that doesn't take away that there are programs who rely on atimes remaining the same, like Leafnode NNTP caching software. Therefore this option exists. .TP 20 -aa, --alter=atime When specifying -aa (by opposition to -ac), the atime of every read file and directory is updated, and the ctime remains the same. In other words, Dar itself does nothing with atimes and ctimes, it only let the system do its job to update atimes when files are accessed for reading. This is in accordance with what atimes and ctimes were meant to represent. This is Dar's default (since version 2.4.0), unless 'furtive read mode' (see below) is supported by your system and dar has been compiled with this support activated. .PP .B Furtive read mode is a mode in which neither atime nor ctime are modified while dar reads each file and directory. 
This provides also better performances as nothing has to be wrote back to disk. A known Unix kernel that supports this feature is Linux 2.6.8 and above (support must also be present in the standard C library of the system for dar to be able to activate this feature at compilation time). When this feature is activated, it becomes the default behavior of dar for super user ; for other users the default is -aa. If however as root user, you do not want to use "furtive read mode" (while it has been activated at compilation time), you can specify either -aa or -ac option. .TP 20 -at, --alter=tape-marks For archive creation and merging, the default behavior (since release 2.4.0) is to add escape sequences (aka tape marks) followed by inode information all along the archive. If -at is given, dar will not add this information to the archive, resulting in a slightly smaller archive and faster backup. When reading an archive, the default behavior is to ignore these escape sequences and rather rely on the catalogue located at the end of the archive. If instead --sequential-read is given on command-line (see below), dar will avoid using the catalogue at the end of the archive and will rely on these escape sequences to know the contents of the archive, which will lead to a sequential reading of the archive, operation suitable for tape media. Note that it is not recommended to disable escape sequences (aka tape marks) by using -at option except if you are more concerned by the resulting size and execution speed of your backup (in particular if you have a lot of small files) than by the possibility to recover your data in case of corrupted or partially written archive. 
Without escape sequences, dar cannot sequentially read an archive, which is the only way beside using an isolated catalogue to use an archive that has a corrupted catalogue or has no catalogue at all, thing that happens if a system crash occurred during the archive creation or due to lack of disk space to complete the archive. .TP 20 -0, --sequential-read Change dar's behavior when reading an archive. By default, the traditional way is used, which relies on the table of contents (aka "the catalogue") located at the end of the archive. With the --sequential-read option instead, dar will rely on escape sequences that are inserted all along the archive with each file's inode information. This will lead to a sequential reading of the archive, operation suitable for tape medium. However, this feature is only available for archive format starting revision "08" (i.e.: since release 2.4.0) and if -at option has not been used during archive creation or merging. This option is available for archive testing (-t), comparison (-d), restoration (-x), listing (-l) and to read the archive of reference (-A option) for isolation (-C) and archive creation (-c). The sequential reading of an archive is always much slower than the usual reading method, so you should not use this option unless you really need it. .TP 20 -9, --min-digits [,[,]] By default slice number contained in filename do not have any padded zeros, which, when sorting a directory contents alphabetically leads to read all the slices starting by '1', then by '2'. For example, slice 1, 10, 11, 12, 13, ... 2, 20, 21, 23, ... etc. While dar is absolutely not perturbed by this display problem, some users would like to have the slices sorted by order. For that reason, the --min-digits option lets you ask dar to prepend enough zeros in the slice number for it to be as wide as the argument passed to --min-digits. For example, if you provide 3 for that number, dar will store the slice number as 001, 002, 003, ... 999. 
Well, next slice will be 1000, thus it will break again the alphabetical sorting order. You are thus advised to use a number large enough to convert the number of slice you expect to use. Then, when reading your archive, you will also need to provide this same argument, else dar will fail finding the slice. In effect, when looking for slice 1 for example, dar should try opening the file "basename.1.dar", but if it fails, it should try opening the file "basename.01.dar", then "basename.001.dar", ... up to infinity. If the slice is just missing, dar would never ask you to provide it, being still looking for a slice name with an additional leading zero. The problem also arise when doing differential backup, merging or on-fly isolation, dar must know the number of zero to prepend for each of these archive. This is why the --min-digits option may receive up to three integer values, the first for the archive to create or read, the second for the archive of reference (-A option), the third for the auxiliary archive of reference (-@ option). By default, no zero is added, and it is also well working this way. But you might well set for example "--min-digits 5,5,5" in your ($HOME)/.darrc file to do it once and for all. .TP 20 --pipe-fd will read further arguments from the file-descriptor . The arguments read through this file-descriptor must follow a TLV (Type/Length/Value) list format. This option is not intended for human use, but for other programs launching dar like dar_manager. This feature has been added to overcome the command line length limit. .TP 20 -al, --alter=lax When reading an archive, dar will try to workaround data corruption of slice header, archive header and catalogue. This option is to be used as last resort solution when facing media corruption. 
It is rather and still strongly encourage to test archives before relying on them as well as using Parchive to do parity data of each slice to be able to recover data corruption in a much more effective manner and with much more chance of success. Dar also has the possibility to backup a catalogue using an isolated catalogue, but this does not face slice header corruption or even saved file's data corruption (dar will detect but will not correct such event). .TP 20 -G, --multi-thread When libdar is compiled against libthreadar, it can make use of several threads. The number of thread is not settable but depends on the number of features activated (compression, encryption, tape marks, sparse file, etc.) that require CPU intensive operations. The load-balancing type per thread used is called "pipeline". As performance gain is little (not all algorithms are adapted to parallel computing) this feature is flagged as experimental: it has not been tested as intensively as other new features and it is not encouraged for use. If you want better performance, use several dar processes each for different directory trees. You'll get several archives instead of one which isolated catalogues can be merged together (no need to merge the backups, just the isolated catalogues) and used as base for the next differential backup. Note: if you want to silent the initial warning about the fact this feature is experimental use -Q option before -G option. .TP 20 -j, --network-retry-delay When a temporary network error occurs (lack of connectivity, server unavailable, and so on), dar does not give up, it waits some time then retries the failed operation. This option is available to change the default retry time which is 3 seconds. If set to zero, libdar will not wait but rather ask the user whether to retry or abort in case of network error. 
.TP 20 -afile-auth, --alter=file-authentication With this option, When reading or writing an archive to a remote repository when no password is provided, instead of interactively asking for a password dar will first check the ~/.netrc file for credentials when relying on FTP protocol and also for SFTP protocol (libcurl allows that, which is unusual but somehow useful). If no password could be found in ~/.netrc, in second time and for SFTP only, dar will try to connect using public key authentication. Public key authentication is tried without this option, but it is useful here to avoid having password requested interactively. .TP 20 -ab, --alter=blind-to-signatures do not check whether an encrypted archive with public key that has also been signed have correct signatures. .PP .B SAVING, ISOLATION, MERGING AND REPAIRING SPECIFIC OPTIONS (to use with -c, -C or -+) .PP .TP 20 -z[[algo:]level], --compression[=[algo][:][level]] add compression within slices using gzip, bzip2, lzo or xz algorithm (if -z is not specified, no compression is performed). The compression level (an integer from 1 to 9) is optional, and is 9 by default. Be careful when using xz algorithm better specify a compression ratio less than or equal to 6 to avoid important memory requirements. A ratio of 1 means less compression and faster processing, while at the opposite a ratio of 9 gives the best compression but longest procesing time. "Algo" is optional, it specifies the compression algorithm to use and can take the following values "gzip", "bzip2", "lzo" or "xz". "gzip" algorithm is used by default (for historical reasons see --gzip below). If both algorithm and compression are given, a ':' must be placed between them. Valid usage of -z option is for example: -z, -z9, -zlzo, -zgzip, -zbzip2, -zlzo:6, -zbzip2:2, -zgzip:1, -zxz:6 and so on. 
Usage for long option is the same: --compression, --compression=9, --compression=lzo, --compression=gzip, --compression=bzip2, --compression=lzo:6, --compression=bzip2:2, --compression=gzip:1 --compression=xz:9 and so on. .PP .RS About lzo compression, the compression levels of dar and lzop program do not match. If you want to get the behavior of compression level 1 of lzop, use the lzop-1 algorithm in place of lzo with dar/libdar. If you want to get the behavior of lzop compression level 3, use the lzop-3 algorithm in place of the lzo algorithm. Lzop compression levels 2, 4, 5 and 6 are the same as level 3. last, there is no difference about compression level 7, 8 and 9 between dar and lzop. The lzop-1 and lzop-3 algorithms do not make use of any compression level (compression level is ignored with these algorithms). .RE .TP 20 --gzip[=level] Same as -z (see just above). This option is deprecated, please use --compression or -z. .TP 20 -s, --slice Size of the slices in bytes. If the number is appended by k (or K), M, G, T, P, E, Z or Y the size is in kilobytes, megabytes, gigabytes, terabytes, petabytes, exabytes, zettabytes or yottabytes respectively. Example: "20M" means 20 megabytes, by default, it is the same as giving 20971520 as argument (see also -aSI and -abinary options). If -s is not present the backup will be written to a single slice whatever the size of the backup may be (assuming your operating system can support arbitrarily large files). .TP 20 -S, --first-slice -S gives the size of the first slice which may be chosen independently of the size of following slices (either bigger or smaller). This option needs -s option and by default of -S option, the size of the first slice is the same as the one of the following slices. .TP 20 -p [], --pause[=] pauses before writing to a new slice (this requires -s). By default there is no pause, all slices are written in the same directory, up to the end of the backup or until the filesystem is full. 
In this later case, the user is informed of the lack of disk space and dar stops for user action. As soon as some disk space is available, the user can continue the backup. The optional integer that this option can receive tells dar to only pause every 'n' slice. Giving 3 for 'n' will make dar pause only after slices 3, 6, 9 and so on. If this integer is not specified, the behavior is as if '1' was given as argument which makes dar pause after each slice. .TP 20 -D, --empty-dir At backup time only, when excluding directories either explicitly using -P or -] options, or implicitly by giving a -g or -[ options (a directory is excluded if it does not match mask given with -g options or -[ options) .B dar does not store anything about these. But with -D option, dar stores them as empty directories. This can be useful, if excluding a mount point (like /proc or /dev/pts). At restoration time, dar will then recreate these directories (if necessary). This option has no meaning with -C and is ignored in that case. Independently of that, -D can also be used at restoration time, but it activates a slightly different feature (see RESTORATION SPECIFIC OPTIONS below). .TP 20 -Z, --exclude-compression Filenames covered by this mask are not compressed. It is only useful in conjunction with -z option. By default, all files are compressed (if compression is used). This option can be used several times, in that case a file that matches one of the -Z mask will not be compressed. Argument given to -Z must not be include any path, just the filename (eventually/probably using wildcards). This option used while merging or repairing allow one to change the compression of files. .TP 20 -Y, --include-compression Filenames covered by this mask (and not covered masks given to -Z option(s)) are the only to be compressed. It is only available with -z option. By default all files are compressed. 
This option can be used several times, in that case all files that match one of the -Y will be compressed, if they do not also match on of the -Z masks. The ordered method here applies too when activated (with -am option), it works exactly the same as -I and -X options, but apply to file compression, not file selection. In other word, it matches only on the file name, not on the path of files. This option used while merging or repairing allow one to change the compression of files. .TP 20 -m, --mincompr files which size is below this value will not be compressed. If -m is not specified it is equivalent to giving .B -m 100 as argument. If you want to compress all files whatever their size is you thus need to type .B -m 0 on the command line. The size unit is the byte (octet) and the same number extensions as those used with -s or -S are available here, if you want to specify the size in kilobyte, megabyte, gigabyte etc. .TP 20 -1, --sparse-file-min-size Define the minimum length of zeroed bytes to replace by "holes". By default, this feature is activated with a value of 15 bytes. To completely disable it, set the size to zero. Disabling this feature will bring some noticeable speed improvement but will probably make the archive slightly bigger (depending on the nature of the data). Sparse files are files that contain so called holes. On a filesystem, the portion of zeroed bytes is not stored on disk, thus an arbitrary large file with huge portion of zeros may only require a few bytes of disk storage. While dar cannot detect how is allocated a given file because it makes a filesystem abstraction (it does not know the implementation of any particular filesystem, where from its portability), when it finds a sequence of zeroed bytes larger than the given threshold it can assume that it is in presence of a hole. 
Doing so, it does not store the given zeroed bytes into the archive, but place a tag beside the saved data to record the size of the hole and thus where to place the next non zeroed bytes. This makes dar archive disk space requirement much smaller when a sparse files is met. At restoration time, dar will restore holes writing normal data and seeking over the hole to write down the normal data after each hole. If the underlying file system supports sparse files, this will restore the holes. Note that there is no difference for applications whether a file is sparse or not, thus dar may well transform normal files into sparse files and viceversa, only the disk requirement will change. Last point, if dar can reduce disk requirement for archive with holes as small as 15 bytes (smaller value works but the overhead cost more than what is required to store the zeroed bytes normally), it may not be the same at restoration, because filesystem allocation unit is usually several kilobytes (a page), however restored file will never be larger than it could be without holes. The only drawback of this feature is the additional CPU cycle it requires. .TP 20 -ak, --alter=keep-compressed During merging and repairing operation, keep files compressed, this has several restrictions : -z, -Z, -Y, -m are ignored, if two archives have to be merged, both must use the same compression algorithm or one of them must not use compression at all (this last restriction will probably disappear in a next version). The advantage of this option is a greater speed of execution (compression is usually CPU intensive). .TP 20 -ah, --alter=holes-recheck For merging and repairing, the sparse file detection mechanism is disabled by default. However if you want to activate it (assuming you have an old archive you want to convert the current archive format taking care of sparse files), you need to use -ah option to reactivate the sparse file detection mechanism. 
Then for merging and repairing --sparse-file-min-size can be used as described above for archive creation. In addition, you can have files stored as sparse file in the archive of reference be stored as normal files in the merged archive using -ah and passing to --sparse-file-min-size a value larger than all file sizes, for example as of today in year 2018, passing .B -ah --sparse-file-min-size 1E (1E for one exabyte) should be large enough. .TP 20 --nodump do not save files which have the 'd' flag set (see chattr(1) lsattr(1) ext2 commands). This option may not be available if the system dar has been compiled on did not provide support for ext2 flags. Note that this option does nothing with -+ option (merging) as no filesystem is used for that operation. .TP 20 -5, --exclude-by-ea[=] exclude inodes from backup that have been set with the EA given in argument. If no argument is given to that option the default EA used to exclude files from backup is "user.libdar_no_backup". To set this attribute to a given file, use the following command: "setfattr -n user.libdar_no_backup ", to remove it: "setfattr -x user.libdar_no_backup ". Last, to check the presence of this EA: "getfattr " .TP 20 -M, --no-mount-points stay in the same filesystem as the root directory (see -R option), subdirectories that are mount points for other filesystems will not be saved (or saved empty if -D option is used). This option is useless and ignored for merging operation. .TP 20 -, , --cache-directory-tagging don't save contents of directories that use the Cache Directory Tagging Standard. See http://www.brynosaurus.com/cachedir/spec.html for details. (this option is useless with -+ option) .TP 20 -/ , --overwriting-policy This option lets the user define when or how file overwriting can occur at restoration or archive merging time. 
It does no apply to slice overwriting which are driven by the -n option, it does instead apply to file during extraction and files inside archives when merging two of them. When considering overwriting, a file is said to be 'in place' while an other is known as 'new' or 'to be added'. At restoration time, the 'in place' is the one that is present in filesystem while the 'to be added' is the one from the archive. At merging time, the 'in place' is the one of the '-A' archive of reference while the 'to be added' is the one from the auxiliary '-@' archive or reference. This option does not apply to archive reparing. .RS .PP As soon as you use -/ option -n only applies only to slice overwriting and the -r, -k and -ae options are ignored (restoration specific options). .PP The given argument is composed of actions and eventually of conditional expressions. Actions do define how to solve overwriting conflict about file's data on one side and file's Attributes (Extended and Filesystem Specific) on the other side. An action is thus a couple of action for Data and for EA+FSA. Actions for Data are represented by uppercase letters, while action for EA+FSA are defined by lowercase letters. Both actions are independent of each other: .RE .RS .TP 5 P means 'Preserve'. When merging two archives, the data of the resulting archive will be taken from the 'in place' file. While when extracting, the data of the inode in filesystem will be preserved (thus no overwriting will occur for the data). .TP 5 O means 'Overwrite'. When merging two archives, the data of the resulting archive will be taken from the 'to be added' file. While when extracting, the data of the inode in filesystem will be overwritten by data from the archive. .TP 5 S means 'mark Saved and preserve'. 
When merging two archives, the data of the resulting archive will be marked as already saved in the archive of reference (making thus a differential archive, even if none of the original archive were differential archives). All data will be dropped in the resulting archive, but the last modification date [aka mtime] (used to detect change in file's data) will be taken from the 'in place' file. This action does not apply when extracting files, it is thus considered equal to "Preserve" (P) in that situation. .TP 5 T means 'mark Saved and overwrite'. When merging two archives, the data of the resulting archive will be marked as already saved (same as 'S' action): all data will be dropped in the resulting archive, however the last modification date [aka mtime] (used to detect changes in a file's data) will be taken from the 'to be added' file. This action does not apply when extracting files, it is thus considered equal to "Overwrite" (O) in that situation. .TP 5 R means 'Remove'. When merging two archives, the resulting archive will not contain any entry corresponding to the file that were in conflict. This also implies that no EA will be stored for that particular entry as the entry will no more exist in the resulting archive (as if it had never yet existed). When extracting files, this will lead to file's suppression. .TP 5 p means 'Preserve', same as 'P' (but lowercase letter) preserve the whole EA set and FSA. When merging two archives, the Attributes set of the resulting file will be the ones of the 'in place' file (whatever is the overwriting action taken for its data). While when extracting files to filesystem, the Attributes of the file in filesystem will not be changed (whatever is the overwriting action taken for its data, unless the file is removed using the 'R' policy, which would remove the inode and thus also any Attributes it had). .TP 5 o means 'Overwrite', same as 'O' (but lowercase letter) overwrite the whole EA set and FSA. 
When merging two archives, the Attributes set of the resulting file will be taken from the 'to be added' file. While when extracting files, the Attributes set of the file in the filesystem will have its Attributes erased and replaced by those of the file in the archive (still independent of what overwriting action is taken for file's data). .TP 5 s means 'mark Saved and preserve', same as 'S' (but lowercase letter) for EA and FSA instead of data. When merging two archives, the EA and FSA of the resulting file are marked as already saved in the archive of reference, thus they are dropped but the date of last inode change [aka ctime] (used to detect changes in file's EA and FSA) will be taken from the 'in place' file. This action does not apply when extracting files, it is thus considered equivalent to "Preserve" (p) in that situation. .TP 5 t means 'mark Saved and overwrite', same as 'T' (but lowercase letter) for EA and FSA instead of data. When merging two archives, the EA and FSA of the resulting file are marked as already saved in the archive of reference, thus they are dropped but the date of last inode change [aka ctime] (use to track changes in EA) will be taken from the 'to be added' file. This action does not apply when extracting files, it is thus considered an equivalent to "Overwrite" (o) in that situation. .TP 5 m means 'merge Attributes and preserve'. The resulting file in the merged archive will have Attribute entries from both the 'in place' and the 'to be added' files. If both files share a same Attribute entry (same FSA or for EA the same key for a given association) the one of the 'in place' file is kept (where from the 'preserve' notion). When extracting a file, the file in the filesystem will have its EA and FSA set enriched by the ones of the file in the archive that do not exist on filesystem, but its already existing Attributes will stay untouched. .TP 5 n means 'merge Attributes and overwrite'. 
The resulting file in the merged archive will have Attribute entries from both the 'in place' and the 'to be added' files. If both files share a same Attribute entry (same FSA or for EA the same key for a given association) the one of the 'to be added' file will be kept (where from the 'overwrite' notion). When extracting file, the file in the filesystem will have its Attributes set enriched by ones of the file in the archive with some of them possibly been overwritten. .TP 5 r means 'remove', same as 'R' but for the Attribute set (thus all EA and FSA entries) of a given file ('r' is lowercase letter here). The file of the resulting archive during merging operation will not own any EA nor any FSA, even if the 'in place' and/or the 'to be added' files did have some. For file extraction, this means that the file in the filesystem will loose all its EA set. The FSA cannot be 'removed' from a filesystem and may not always have a default value, thus this action does not modify FSA at all in case of archive extraction. But in case of merging the FSA are removed as previously described. As for all the previous tests, this Attribute operation is independent of the operation chosen for file's data (uppercase letters). .TP d means 'delete'. When a same EA or FSA entry is found both in the 'in place' and 'to be added' files, such entry will be absent in the resulting archive. In other words, when merging, the EA set and FSA will only contain EA and FSA entries specific to the 'in place' and those specific to the 'to be added' file. Entries in common will not be present. When extracting a file from an archive, the file on filesystem will have its EA set enriched by entries of the 'to be added' file that are new to the 'in place' file. The other EA entries (which are thus present in both archive and filesystem) will be removed from the set, which the other FSA will stay untouched (FSA cannot be "removed" from a filesystem, nor they always have a default value). 
.TP 5 * is valid for both EA and data. It tells that the action is not yet defined at this step of the evaluation and that further evaluation is required (see the 'chain' operator below). .TP 5 A means 'Ask for user decision'. This uppercase letter concerns Data overwriting. An application interaction let the user define the action for each file in conflict. Note, that this action if used alone may become very boring or painful. The idea is to use it in conditional statements (which are described below) to have dar ask for only non obvious cases. .TP 5 a means 'Ask for user decision'. This lowercase letter is the equivalent for EA and FSA of the 'A' action. It is intended to be used in the same conditional statements described below. .PP An action is thus a couple of letters, the first being uppercase (for file's data) the second being lowercase (for file's EA and FSA). When -/ option is not given, the action is equivalent to '-/ Oo', making dar proceed to file, EA and FSA overwriting. This is to stay as close as possible to the former default action where neither -n nor -w where specified. Note that -w option stays untouched, in consequences, in this default condition for -/ option, a confirmation will be asked to the user before dar proceed to any overwriting. The former -n option (still used to handle slice overwriting) can be replaced by its equivalent '-/ Pp' for resolving file overwriting conflict (never overwrite). Here follows some examples of actions, all these are done for any entry found in conflict during archive merging or archive extraction, we will see further how to define conditional actions. .TP 5 -/ Rr will lead dar to remove any file from filesystem that ought to be restored(!). Note the action for EA/FSA is useless, the EA and FSA will always be erased as well as data using 'R'. Thus '-/ Rp' would lead to the same result. .TP 5 -/ Po will keep data of the 'in place' file and EA and FSA set from the 'to be added' file. 
.TP 5 -/ Ss Using this option when merging an archive with itself (used both as archive of reference (-A option) and auxiliary archive of reference (-@ option) ) will provide the same action as an archive isolation of the archive of reference, but using twice more memory (so keep using the isolation operation as before! Here this is just an illustration of the possibility) .PP As seem previously -u and -U options can be used to filter which EA entry to consider and which to ignore. The question here is to explain how this filtering mechanism interacts with the different policies we just presented above. For files that are not in conflict (found only as 'in place' or as 'to be added'), only the EA entries matching the EA filter are kept. For files in conflict, the overwriting policy is evaluated first, then the filtering mechanism is applied *after* it. Thus for example, using the following [ -/ "Po" -u "*test" ], when merging two archives, only EA ending with "test" will be retained, and when a conflict takes place, this "*test" ending EA will be taken from the 'to be added' file if it has some EA of that type, its other EA entry will be ignored as well as any EA entry of the 'in place' file even those ending by "test". At restoration in using the same options, file without conflict will get restored but only EA entry ending with "test" will be restored, and for file with conflict (already present in filesystem), EA set of file in filesystem will be removed and replaced the EA entries of the file in archive that ends by "test", if some exist. .PP the situation is similar with FSA family scope and overwriting policy. Only FSA of a family present in the scope will be retained, the overwriting policy acts first then the FSA scope is applied. Note however that any FSA present on filesystem and excluded from the FSA scope are not touched. .PP Well, now let's see how to bring some more fun using conditional statements in all these actions. 
The structure to use is the following: .TP 5 {}[] This syntax let you place an action (as the ones we saw just above) inside the brackets '[' and ']' (for example [Pp]) that will take effect only if the evaluation of the is true. Stated that a such statement is a new type of action, you may have guessed that you may use it recursively: {}[{}[]). .PP Well so far it seems useless. But instead of the "if then else " paradigm common to programming languages, due to the command line context it has been chosen to instead use and implicit "OR" operator between actions. Thus you can "stack" conditional statements this way: {}[] {}[] . In this example, if is true then will be used, ELSE if is true then will be used ELSE will be used. This leads to the same possibilities as what is available with programming languages, but with a slightly more simple syntax. Seen this, the recursion of conditional syntax is more interesting. For readability, you are allowed to add any space or tab in the overwriting policy, but the resulting overwriting policy must be given as a single argument to dar, thus the use of quotes (either simple \'arg\' or double "arg") is necessary. .PP The last operator we will see is the 'chain' operator. Once an expression is evaluated, the resulting couple of action may contain an '*' (undefined action for EA or data). Further evaluation must be done. The chain operator which is represented by a semi-column ';' let one to separate several independent expressions that will be evaluated in turn up to the time the couple of action is fully defined. Once an action (for EA or for Data) is defined, it can be redefined by a subsequent evaluation in the chain, however if the action is defined it cannot be set back to undefined, thus '*' will never overwrite a previously defined action. If at the end of the policy the couple of action is not fully defined, the 'preserve' action is used ('P' or 'p' depending on which of EA or Data is left undefined). 
Here follow a example of syntax: .TP 5 -/ "{}[P*] O* ; {[*p] *o} ; Rr" The first expression will evaluate to either P* or O*. At this step, as the action is not completely defined, the second part of the chain is evaluated, It will end with either *p or *o. In any case, we have after this second statement of the chain a fully defined action for both data and EA (either Pp, Po, Op or Oo). Thus the evaluation stops here and the "Rr" policy will never be evaluated. .PP We now have one last thing to see: the available conditions (what to place between braces '{' and '}'). Conditions are defined each by a letter, eventually followed by an argument between parenthesis. The usual logical operators are available: negation (!), conjunction (&) disjunction (|). These characters must be escaped or quoted to not be interpreted by the shell when used on command-line. In particular the '!' under most shell must be quoted and escaped (-/ '{\\!R}[..]..', The escape character '\\' is not necessary inside DCF files (those given to -B option) as no shell is used to interpret these files. To these usual operators has been added a new one: the "inversion" operator, noted '~'. Like the negation, it is an unary operator but unlike the negation, it inverses the roles of 'in place' and 'to be added' for the evaluation, which is slightly different from taking the negation of the result of the evaluation. All these operators follow the usual precedence: unary operators ('!' and '~') are evaluated first, then the conjunction '&' then the disjunction '|'. To override this, you can use parenthesis '(' and ')' inside the condition. Over these logical operators, the conditions are based on atomic operator that compare the 'in place' file to the 'to be added' file. Here they follow: .TP 5 I true only if the 'in place' entry is an inode (a 'detruit' which record the fact that a file has been removed since the archive of reference is not an inode for example). 
This condition do not have any consideration toward the to be added object. Note that ~I can be used to check the nature of the 'to be added' object. .TP 5 D true only if the 'in place' entry is a directory. To know whether the 'to be added' is a directory or not, one would use the "inversion" operator: ~D .TP 5 F true only if the 'in place' entry is a plain file (true also if this plain file is a 'hard link', that's it if its inode is linked several times to the directory tree) .TP 5 H true only if the 'in place' entry is an inode linked several times to the directory tree (= hard link) it may be a plain file, a Unix socket, a pipe, char device, a block device for example. .TP 5 A same as H but the current 'in place' entry is the first link we meet pointing to that hard linked inode. .TP 5 R true if the 'in place' entry is more recent than or of same date as the 'to be added' entry. The last modification date [aka mtime] is used for this comparison. If the 'to be added' entry is not an inode (and thus has no mtime), the 'in place' is considered to be more recent than the 'to be added' entry. Same thing if the 'in place' entry is not an inode (and has no mtime available for comparison), it is here too assumed to be more recent. .TP 5 R() true if the 'in place' entry is more recent than or of the same date as the fixed given in argument. No consideration is done toward the 'to be added' element. The format is the same as the one used with -af option. If an entry has no mtime (it is not an inode for example) it is assumed an virtual mtime of zero. .TP 5 B true only if both 'in place' and 'to be added' are plain file (hard linked or not) and if the 'in place' file's data is larger or equal to the 'to be added' file's data. If one or both entry are not plain files (or hard link to plain file) and thus the file size comparison is not possible, the 'in place' entry is assumed to be 'bigger' than the 'to be added' entry. 
.TP 5 S true only if the 'in place' data is saved in the archive (not marked as unchanged nor marked as only inode metadata changed, since the archive of reference). Note that while extracting files from an archive, the 'in place' file is the one in the filesystem, which always has its data 'saved' (from libdar point of view). The 'inversion' of this atomic operator ~S may still be interesting in the context of restoration. .TP 5 Y true only if the 'in place' data is saved but dirty (plain file having its data changed at the time it was read for backup). Note, that restoring in sequential read mode, it is not possible to known whether a file is dirty (it is possible to know it once having read its data, but sequential reading does not allows then to skip forward to get the dirty state of the file and skip backward to eventually restore that file, depending on the overwriting policy result). .TP 5 X true only if the 'in place' data is a sparse file .TP 5 T true only if the 'in place' and 'to be added' entries are of same type (plain file, Unix socket, named pipe, block device, char device, symlink, directory, 'detruit' (which stands for file deleted since the archive of reference was done), and so on). Note that the number of links to inode (i.e. whether this is a hard links or not) is not taken into account. .TP 5 L true only if the 'in place' entry has delta signature associated with it. .TP 5 e true if the 'in place' entry has EA (may they be saved or just recorded as existing). .TP 5 r true if the 'in place' entry has more recent or equal dated EA to the 'to be added' entry. If 'to be added' has no EA or is even not an inode, true is returned. If 'in place' has no EA or is even not an inode, true is returned unless 'to be added' has some EA. The comparison is done on ctime dates. .TP 5 r() true if the 'in place' entry has more recent or equal dated EA to the fixed given in argument. No consideration is done toward the 'to be added' element. 
The format is the same as the one used with -af option. If an entry has no date (ctime date) (when it is not an inode for example) it is assumed an virtual ctime of value zero. .TP 5 m true only if 'in place' has more or equal number of EA entry in its set of EA than 'to be added' has. If an entry has not EA or is not even an inode, it is assumed it has zero entry. The comparison is done on this number. Note that the number of EA entry is not the size used to store these entries. For example, the EA entry "user.test" counts for 1, whatever is the length of the value associated to it. .TP 5 b true if the 'in place' entry has bigger EA set or equal size EA set than the 'to be added' entry. If an entry has no EA or is even not an inode, it is assumed that it has a zero byte length EA set. The comparison is done on this number in that case. Note that the comparison is done on the bytes used to store the whole EA set associated to a given file. .TP 5 s true if the 'in place' entry is an inode (or a hard linked inode) and has its EA saved in the archive of reference, not only marked present but unchanged since last backup. This test does not take the 'to be added' entry into account. .PP Well, you've seen that uppercase letter are kept when comparison is based on the inode or data while lowercase letter is used for atomics based on EA. Now that we have completed our tour of this feature let's see some examples: .TP 5 -/ Pp as seen previously this is what does -n option for files when no overwriting policy is defined, which avoids any overwriting for Data as well as for EA. .TP 5 -/ "{!T}[Pp] {R}[{r}[Pp]Po] {r}[Op] Oo" Space and tabs are allowed to ease readability. Here the policy stands for: If files in conflicts are not of the same type then keep Data and EA of the entry 'in place'. Else if 'in place' has a more recent data then if 'in place' has more recent EA then keep both its Data and EA, else keep only its Data and overwrite its EA. 
Else (if 'in place' has not the more recent data), if it has the more recent EA then overwrite the data but keep its EA, else overwrite both its data and EA. This policy tends to preserve the most recent data or EA, but it does not take into account the fact that EA or Data is effectively saved into the archive of just marked as unchanged since the archive of reference. .TP 5 -/ "{!T}[{~D}[Oo] Pp]" If entries are not of the same type, if the 'to be added' entry is a directory then we keep it and overwrite the 'in place' entry, else we keep the 'in place' entry. If entry are of same type, the policy does not provide any action, thus the default action is used: "Pp". You can change this default action easily using a chain operator: .TP 5 -/ "{!T}[{~D}[Oo] Pp] ; Aa" In this case instead, if entry are of the same type, the user will be asked what to. .TP 5 -/ "{!T|!I}[{R}[Pp] Oo] {S}[{~S}[{R}[P*] O*] P*] {~S}[O*] {R}[P*] O*] ; {s}[{~s}[{r}[*p] *o] *p] {~s}[*o] {r}[*p] *o]" Well this may seems a bit too complex but just see it as an illustration of what is possible to do: If both 'in place' and 'to be added' are not of the same type we keep data and EA of the most recent file (last modification date). Else, both are of the same type. If both are inode we evaluate a two expressions chain (expressions are separated by a semi-column ';') we will see in detail further. Else if they are of same type but are not inode we take the EA and data of the most recent entry (this is the last 10 chars of the string). Well, now let's see the case of inode: The first expression in the chain sets the action for data and keep the action for EA undefined. While the seconds, is the exact equivalent but instead it leaves the action for data undefined '*' and set the action for EA. 
These two expressions follow the same principle: If both entries are saved (by opposition to be marked as unchanged since the archive of reference) in the archives, the most recent EA/Data is kept, else, the one of the inode that is saved is kept, but if none is saved in the archive the most recent entry (mtime/ctime) is kept. .RE .TP 20 -^, --slice-mode perm[:user[:group]] defines the permission and ownership to use for created slices. By default, dar creates slices with read and write available for anyone letting the umask variable disable some privileges according to user's preferences. If you need some more restricted permissions, you can provide the permission as an octal value (thus beginning by a zero), like 0600 to only grant read and write access to the user. Be careful not to avoid dar writing to its own slices, if for example you provide permission such as 0400. Note also that the umask is always applied thus specifying -^ 0777 will not grant world-wide read-write access unless your umask is 0000. .TP 20 -_, --retry-on-change count[:max-byte] When a file has changed at the time it was read for backup, you can ask dar to retry saving it again. By default a file can be re-saved up to 3 times (this is the 'count' field), you can set it to zero to disable this feature. Optionally, the overall maximum amount of bytes allowed to be wasted due to retrying a changing file's backup can be given after a colon character (:), this is the 'max-byte' field. By default (no --retry-on-change option specified) a limit of 1 wasted byte is allowed which is the minimum. Specifying zero for max-byte sets no limit on the amount of wasted bytes (same as if no 'max-byte' was specified), each changing file is then saved up to 'count' times if necessary. .RS .PP A file is considered as changed when the last modification time has changed between the time the file has been opened for backup and the time it has been completely read. 
In some situations it is not possible to replace the already saved data for a file (writing archive to a pipe for example), in that situation only, a second copy of the file is added just after the first previous try which leads that previous try to become inaccessible, however it holds some place in the archive, where from the designation of "wasted bytes". You can remove all wasted bytes from an archive using the merging/filtering feature: dar -+ new_arch -A old_arch -ak. .PP Note: since release 2.5.0, in normal condition no byte is wasted when a file changed at the time it was read for backup, except when doing a backup to pipe (using '-c -' option), except if the beginning of the modified file is located in a previous slice and except if slice hashing or strong encryption is used. .RE .PP .TP 20 -ad, --alter=decremental This flag is to be used only when merging two archives. Instead of the usual merging where each file of both archives is added to the resulting archive with eventually a tie using the overwriting policy (see -/ option), here the merging builds an archive which corresponds to the decremental backup done based on two full backups. The -A backup is expected to receive the older archive while the -@ is expected to point to the more recent one. If this option is used, the eventual overwriting policy is ignored and replaced internally by -/ "{T&R&~R&(A|!H)}[S*] P* ; {(e&~e&r&~r)|(!e&!~e)}[*s] *p". Additionally, files found in the newer archive that did not exist in the older are replaced by a 'detruit' entry, which marks them to be removed at restoration time. For more information about decremental backups read the usage_notes.html file in the documentation. Note that decremental backup is not compatible with delta binary. 
.TP 20 -asecu, --alter=secu This option disables the ctime check done by default during a differential backup: If the ctime of a plain file has changed since the archive of reference was done while all other values stay unchanged (inode type, ownership, permission, last modification date), dar issues a "SECURITY WARNING", as this may be the sign of the presence of a rootkit. You should use the -asecu option to disable this type of warning globally, if you are doing a differential backup of just restored data (a differential backup with the archive used for restoration taken as reference). Effectively in that situation, as it is not possible to restore ctime, the restored data's ctime will have changed while other parameters will be unchanged for all restored files, leading dar to issue a warning for all restored files. This security check is disabled (implicitly) if dar is run with -ac option. Last, if a file has only its EA changed since the archive of reference was done (new EA, removed EA, modified EA), the security warning will show (false positive). .TP 20 -., --user-comment "" This option lets the user add an arbitrary message into the archive header. Warning! this message is always stored in clear text, even if the archive is encrypted. You can see the message inserted in an archive displaying the archive summary (dar -l -q). Some macros can be used inside the : .RS .TP 5 %c is replaced by the command line used. Note that for security, any option related to archive encryption is removed (-K, -J, -$, -#, -*, -%). The commands included from a DCF file (see -B option) are never added by this macro. As a consequence, if you do not want to see --user-comment stored in user comments you can add the --user-comment definition in an included file like ~/.darrc for example. 
.TP 5 %d this is the current date and time .TP 5 %u this is the uid under which dar has been run .TP 5 %g this is the gid under which dar has been run .TP 5 %h the hostname on which the archive has been created .TP 5 %% the % character. .RE .TP 20 -3, --hash With this option set, when creating, isolating, merging or repairing an archive, beside each generated slice an on-fly hash file of the slice is created using the specified algorithm. Available algorithms are "md5", "sha1" and "sha512". By default no hash file is generated. The hash file generated is named based on the name of the slice with the .md5, .sha1 or .sha512 extension added to it at the end. These hash files can be processed by md5sum, sha1sum and sha512sum usual commands (md5sum -c ) to verify that the slice has not been corrupted. Note that the result is different than generating the hash file using md5sum or sha1sum once the slice is created, in particular if the media is faulty: calling md5sum or sha1sum on the written slice will make you compute the hash result on a possibly already corrupted file, thus the corruption will not be seen when testing the file against the hash at a later time. Note also that the creation of a hash file is not available when producing the archive on a pipe ("dar -c -"). .TP 20 -7, --sign email[,email[,...email]] When creating, isolating, merging or repairing an archive with public key encryption (read -K option) it is also possible to sign it with one or more of your private key(s). Unlike the hash feature above, only the randomly generated key used to cipher the archive, key that is dropped at the beginning and at the end of the archive, is signed. If the archive is modified at some place, that part will not be possible to decipher, but signature verification will stay quick and valid, unless the part that has been tampered with is the key inside the archive in which case signature check will report a failure and archive will not be readable at all. 
If the signature is valid and the archive could be extracted without error, the whole archive could be assumed to be signed by the gnupg key owners, but read below the security note. See also GNUPGHOME in the ENVIRONMENT section at the end of this document. .RS .PP A summary information about the signature information is displayed while listing an archive in summary mode "dar -l -q". For any operation involving a signed archive, a short message only shows if the archive is signed and one or more signature checks failed, no message is displayed in case of successful signature check. This warning may be disabled using the --alter=blind-to-signatures command. .RE .TP 20 -<, --backup-hook-include The mask is applied to path+filename during backup operation only. If a given file matches the mask, a user command (see -= option below) will be run before proceeding to the backup and once the backup will be completed. See also -> option below. IMPORTANT: if using the short option, you need to enclose it between quotes: '-<' for the shell not to interpret the < as a redirection. .TP 20 -> --backup-hook-exclude The mask is applied to path+filename during backup operation only. If a given file matches the mask, even if it matches a mask given after -< option, no user command will be executed before and after its backup. The -< and -> options act like -g and -P, they can receive wildcard expression and thus have their behavior driven by the --alter=glob and --alter=regex expressions seen above, as well as the --alter=mask option. Last the --alter=case and --alter=no-case modify also the way case sensitivity is considered for these masks. By default, no -> or -< option, no file gets selected for backup hook. IMPORTANT: if using the short option, you need to enclose it between quotes: '->' for the shell not to interpret the > as a redirection. 
.TP 20 -=, --backup-hook-execute for files covered by the mask provided thanks to the -< and -> options, the given string is executed before the backup of that file starts and once it has completed. Several macro can be used that are substituted at run time: .RS .TP 10 %% will be replaced by a literal % .TP 10 %p will be replaced by the full path under backup .TP 10 %f will be replaced by the filename (without the path) .TP 10 %u will be replaced by the UID of the file .TP 10 %g will be replaced by the GID of the file .TP 10 %t will be replaced by a letter corresponding to the type of inode: 'f' for plain file, 'l' for symlink, 'd' for directory, 'c' for char devices, 'b' for block devices, 's' for sockets, 'p' for pipes, 'o' for doors. .TP 10 %c and most interesting, %c (c for context), will be replaced by "start" or by "end" when the command is executed before or after the backup respectively. .RE This way, one can dump a database in a directory just before it is about to be backed up, and clean it up once the backup has completed. Note that the masks seen above that drive the execution of this command can be applied to a directory or a plain file for example. When a directory is selected for this feature, the command is logically ran before starting (with the context "start") to backup any file located in that directory or in a subdirectory of it, and once all files in that directory or subdirectories have been saved, the command is ran a second time (with the context "end"). During that time, if any file do match the backup-hook masks, no command will be executed for these. It is assumed that when a directory has been asked for a backup-hook to be executed this hook (or user command) is prepare for backup all data located in that directory. The environment variable DAR_DUC_PATH also applies to these user commands (see -E above, or the ENVIRONMENT paragraph below). 
.TP 20 -ai, --alter=ignore-unknown-inode-type When dar meets an inode type it is not aware about (some time ago, it was the case for Door inode on Solaris for example, Door inodes are handled by dar since release 2.4.0), it issues a warning about its inability to handle such inode. This warning occurs even if that entry is filtered out by means of -X, -I, -P, -g, -[ or -] options, as soon as some other entry in that same directory has to be considered for backup, leading dar to read that directory contents and failing on that unknown inode type (filtering is done based on the result of directory listing). This option is to avoid dar issuing such warning in that situation. .TP 20 -8, --delta sig This option can be used for archive backup, isolation and merging. Important: read also the best practice paragraph below .RS .P Called during a backup operation it leads dar to create delta signature for each file: If the file is new or has changed, a signature is computed and stored beside the file's data, which increases the archive size. If the file is not new and has not changed (differential backup context), if a delta signature is found in the archive of reference (or isolated catalogue), this signature is copied to the resulting archive, but not the file's data. If the reference archive does not hold delta signature, a new delta signature is computed based on the current data found on filesystem for that file and then stored in the resulting archive. But in any case, without .B --delta sig the resulting archive will hold no delta signature. Note that delta signature transfer is not possible when the archive of reference is read in sequential mode, thus delta signature is disabled when the archive of reference is read in sequential mode. 
.P For isolation and merging operations, the behavior is slightly different: .B --delta sig option lets dar transfer existing delta signatures from the original archive to the isolated/merged one but does not lead dar to compute delta signatures for files that do not have one, unless one of the .B --include-delta-sig or .B --exclude-delta-sig option is specified; in that case the delta signatures are transferred/dropped and if not present calculated accordingly to these mask options. However note that it is not possible to calculate delta signature for unsaved files in the archive of reference (because the archive of reference does not hold their data) as well as for fully saved files when merging is performed keeping files compressed (see -ak option). Another restriction while merging concerns sparse files, it is not possible to calculate binary signature for file stored as sparse files, but if sparse file detection mechanism is activated at merging time, delta signature can be calculated for sparse files too even if it is missing in the reference archive. In short: if you want recalculation of delta signature while merging, do not keep file compressed (do not use -ak option) and if you want to avoid having sparse files excluded from the delta signature recalculation, activate sparse file detection (use -ah option). Delta signature transfer is not possible for on-fly isolation, you need to do normal archive isolation to obtain an isolated catalogue with delta signatures. .RE .TP 20 -8, --delta sig::[:[:[:]]] this variant of '--delta sig' option lets you specify the block len used to build delta signatures. Larger values reduce CPU load required to build delta signature, but also lead to less accuracy when computing delta binary, which means larger delta patch and more data saved when a file has changed. 
The block length is calculated following the formula: .B block_len = function(filesize)*multiplier/divisor If this calculated value is lower than "min", it is set to min. If the calculated value is greater than "max" it is set to max unless max is set to zero in which case the value is kept as is. Of course "divisor" cannot be null. The available functions are: .RS .TP 5 fixed always returns 1, in other terms, the block size is independent from the file size to build delta signature for .TP linear returns the filesize. Here, you will most of the time use 1 for multiplier and increase divisor to at least 10 for it makes sense .TP log2 returns the upper rounded power of 2 to the file size (base 2 logarithm). .TP square2 returns the approximated value of the square root of the file size. Note that for better performance and as accuracy is not important here, this function is implemented as exp2(log2(filesize)/2) where exp2 and log2 are based on the integer left and right bit shift operations. .TP square3 returns the approximated value of the cube root of filesize, implemented as exp2(log2(filesize)/3) .RE .BR .BR .RS All numerical fields can receive multiplier suffix (k, M, ...) for more details about these suffixes, see -s option description. If not specified .B "max" defaults to zero (no maximum value defined). If not specified .B "min" defaults to RS_DEFAULT_BLOCK_LEN (see below for details on this symbol). If not specified .B "divisor" defaults to 1. Using "--delta sig" without additional fields is equivalent to using .B --delta sig:fixed:RS_DEFAULT_BLOCK_LEN where "RS_DEFAULT_BLOCK_LEN" is taken from librsync and is today equal to 2048 bytes (which may well change in the future by the way if librsync maintainers decide to do so). .RE .TP 20 -{, --include-delta-sig .RS By default when .B --delta sig is provided, delta signatures are computed for all files enrolled in the backup operation (see also .B --delta-sig-min-size option). 
This option and --exclude-delta-sig allow restricting the files for which delta signature have to be calculated in that situation. The mask applies to the whole path, the same way as -P/-g options do. .P For merging or isolation operations, when .B --delta sig is used no delta signature is computed only existing ones are transfered as is without restriction. To change that behavior and thus either drop or add delta signature to files that did not have one in the archive of reference, specify an combination of .B --include-delta-sig or .B --exclude-delta-sig with .B --delta sig. This option as well as --exclude-delta-sig can be used several times on command-line but are useless/ignored without --delta sig. See also -am, -ag and -ar options. .RE .TP 20 -}, --exclude-delta-sig Files matching the given mask will never have their delta signatures calculated, may .B --delta sig option be specified or not. See also --include-delta-sig option above and --delta-sig-min-size below. .TP 20 -6, --delta-sig-min-size For archive merging, isolation and creation, when dar has to (re-)calculate delta signatures, this option modifies the minimum file size (in bytes) below which dar never calculates delta signatures. This option acts independently from .B --include-delta-sig and .B --exclude-delta-sig , however it cannot re-activate delta signature recalculation by itself while merging/isolating an archive, it requires either --exclude-delta-sig or --include-delta-sig option to be active in that situation. For archive backup instead, it does not require --exclude-delta-sig nor --include-delta-sig to act, but only need .B --delta sig option to be set. By default, this minimum size is 10 kio. The same option suffixes (k for kilo, M for mega, G for giga, T, ...) as the ones available with --slice option can be used here too. Using zero as argument gives the same result as not specifying this option at all (default size). 
.TP 20 -8, --delta no-patch In the context of differential backup, this option leads dar to never consider files for delta binary even if delta signatures are present. By default delta binary (rsync-like) operation is performed when a file has changed since the archive of reference was made *and* if a delta signature could be found in the archive of reference for that file (or in the isolated catalogue used as reference for the incremental/differential backup). If no delta signature could be found or if --delta no-patch is used, the normal behavoir is done, which consist of saving that whole file in the archive. Note that if the archive of reference is read in sequential mode, the .B --delta no patch is implicitely used as reading in sequential mode an archive does not let skipping backward to fetch the delta signature necessary to setup a delta patch. .P Binary delta options usage and best practices: .RS First it must be understood that binary delta has advantages (less storage requirement) and drawbacks: data corruption has a wider impact on the ability to restore a given file, restoration of incrementaly backed up file may ask much more than one archive to be used. To limit the impact of the first drawback, dar binary delta is done per file, not globally on the total amount of saved data. You are also strongly encoraged to protect your backups with parity data using par2 (see dar_par.dcf file in the examples section of the documentation). Adding par2 data will increase storage requirement by a little, but usually much less than the amount gained using binary delta. Last drawback, binary delta relies on checksum (contained in the delta signature) and not on the real data to build the binary delta. There is chances that two different files provide the same checksum, even if the chances are very low, probability is not null. 
The consequence is that using binary delta the risk exists that the restored data do not match the original data and this will not be noticed by the librsync library on which libdar relies for that feature. Dar adds a second level of checksum, to detect data corruption inside the archive and to check that the file the delta patch is about to be applied is the expected base file, this reduces the risk of "collision" but does not remove it completely. After these warnings, let's now see the best practices about binary delta: .P Once a full backup has been done using --delta sig, any differential backup made based on this archive will use binary diff for file having a delta signature present in the full backup. If you always make differential (not incremental) backups based on such full backup you have nothing more specific to do in regard to binary delta, dar will handle it transparently. In particular you do not need to invoke --delta sig at subsequent backup, this saves space in differential archives as well as CPU cycles. .P However, When doing incremental (not differential) backups this time, if you want to have dar using binary delta at each subsequent incremental backup, delta signatures must be present in the successive incremental backups. This is done by using --delta sig option for each new incremental backup created. .P If you were used to use isolated catalogues before release 2.6.0 you can add --delta sig option while isolating a catalogue from an archive containing delta signatures. Such isolated catalogue will be much larger than what it can be without this option but it can be used as reference for a new differential/incremental backup letting dar relying on binary delta. Isolated catalogue generated without --delta sig do not contain delta signature and cannot lead to binary delta operation when used as reference for an incremental or decremental backup. 
.P Another way of doing differential backup is to make a normal full backup without --delta sig option, and only add delta signatures at archive isolation time using .B --delta sig --include-delta-sig """*""" options. Binary delta signature will then be calculated based on the saved files. Then, using the resulting isolated catalogue as reference dar will be able to proceed to binary delta for the differential backup. If this works pretty well for differential backup (or the first incremental backup) which is based on a full backup, for incremental backup this is less adapted as a file that has not changed since the archive of reference was made does not hold any data and calculating the delta signature is not possible. The first method explained two paragraphs above is better as the incremental backup fetches the already calculated delta signature from the reference to keep it in the resulting incremental backup, so even without data, binary delta is still possible. .P Isolated catalogue using the --delta sig option, can still be used as backup of the internal catalogue they have been isolated from. However, as they hold their own delta signatures, such isolated catalogue can only have access to its own ones, not to those of the archive of reference. In particular when testing an archive (-t option), using -A option to rescue the archive internal catalogue using an isolated catalogue containing delta signatures, dar will not be able to check that there is no corruption in the delta signatures fields of the archive under test. For that type of testing either use the internal catalogue of the archive or rescue it using an isolated catalogue built without --delta sig option. .RE .TP 20 -az, --alter=zeroing-negative-dates dar/libdar saves dates as a number of seconds since the beginning of year 1970, the well known "Unix time" (plus a positive fraction for sub-second time-stamping). 
Some systems may return a negative number as the Unix time of a given file (files having dates before 1970), in that situation by default and since release 2.5.12 dar pauses and asks the user whether to assume the date as being zero. But with -az option, dar/libdar automatically assumes such negative dates to be zero and just issue a warning about the problem met. .TP 20 -\\, --ignored-as-symlink [:[:...]] When dar reaches an inode which is part of this provided colon separated list, if this inode is not a symlink this option has no effect, but if it is a symlink dar saves the file the symlink points to and not the symlink itself as dar does by default. In particular, if the pointed to inode is a directory dar recurses in that directory. You can also pass this list as argument to the DAR_IGNORED_AS_SYMLINK environment variable instead of using --ignored-as-symlink (which takes precedence over the environment variable). .TP 20 -'\\'', --modified-data-detection=any-inode-change, --modified-data-detection=mtime-and-size Before release 2.6.0, during a differential/incremental backup if any part of a file's inode metadata changed (ownership, permission, ...) even if the mtime (last modification time) and file size stood the same, dar had no choice than resaving the whole file for backup to record the metadata changes. This led to a waste of backup time and space if in fact and for example only the ownership had been modified. You can still keep this historical behavior by invoking the .B --modified-data-detection=any-inode-change option. Since release 2.6.0 a new entry status ("inode-only") has been added. Dar can now resave only metadata when the inode change does not concern the data. To know whether the data has changed or not, by default (no --modified-data-detection option given) dar looks at mtime and at file's size only. 
Specifying --modified-data-detection=mtime-and-size (which is the default behavior) can be used to revert the action of --modified-data-detection=any-inode-change for example when playing with included files (DCF files): the latest met takes precedence. .TP 20 -T, --kdf-param [:] At the difference of the listing context (see below), in the context of archive creation, merging and isolation, -T option let you define the interation count used to derive the archive key from the passphrase you provided (archive encryption context) and the hash algorithm used for that. The default value is 200,000 iterations and sha1 algorithm. -T has another older meaning when doing archive listing, but due to the lack of free character to create a new CLI option, there was no other choice than recycling an existing option not used in the context of archive creation/merging/isolation. The consequence is that the -T option must appear after the -+/-c/-C options for the operational context to be known at the time the -T option is met and its --kdf-param meaning to be taken into account. As --kdf-param is an alias to -T, this long form of this option must also be found after the use of either -c, -C or -+ option. The suffixes described for -s option are also available here (k, M, G, T, P, ...) however pay attention to the -aSI/-abinary mode which default to binary, in which case "-T 1k" is equivalent to "-T 1024". .PP .PP .B RESTORATION SPECIFIC OPTIONS (to use with -x) .TP 20 -k[{ignored|only}], --deleted[={ignore|only}] Without argument or with the "ignore" argument, this option leads dar at restoration time to not delete files that have been deleted since the backup of reference (file overwriting can still occur). By default, files that have been destroyed since the backup of reference are deleted during restoration, but a warning is issued before proceeding, except if -w is used. If -n is used, no file will be deleted (nor overwritten), thus -k is useless when using -n. 
If -/ option is used, this option without argument is ignored! With the "only" argument, this option only consider files marked as to be removed in the archive to restore, no file are restored but some file are removed. When -konly (or --deleted=only) is used, the -/ option is ignored (at the opposition of the "--no-delete=ignore" option which is ignored when the -/ is used). Of course "--no-delete=ignore" and "--no-delete=only" are mutually exclusive, because if both of them were available at the same time dar would do nothing at all. .TP 20 -r, --recent only restore files that are absent or more recent than those present in filesystem. If -/ option is used, this option is ignored! .TP 20 -f, --flat do not restore directory structure. All files will be restored in the directory given to -R, if two files of the same name have to be restored, the usual scheme for warning (-w option) and overwriting (-n option) is used. No rename scheme is planned actually. When this option is set, dar does not remove files that have been stored as deleted since last backup. (-f implicitly implies -k). .TP 20 -ae, --alter=erase_ea [DEPRECATED use -/ instead] Drop all existing EA of files present in filesystem that will have to be restored. This way, the restored files will have the exact set of EA they had at the time of the backup. If this option is not given, a file to restore will have its EA overwritten by those present in the backup and if some extra EAs are present they will remain untouched. See the .B Note concerning Extended Attributes (EA) above for a detailed explanation about this behavior. If -/ option is used, this option is ignored! .TP 20 -D, --empty-dir At restoration time, if -D is not specified (default) any file and directory is restored in regard to the filtering mechanism specified (see -I, -X, -P, -g, -[ and -] options). But if -D option is provided the restoration skips directory trees that do not contain saved files. 
This avoid having a huge empty tree with a few restored files especially when restoring a differential archive in an empty place. Note: This feature cannot work when --sequential-read is used, as it is not possible to know whether a directory contains or not some saved files at the time the directory inode is read from the archive in sequential reading mode. .TP 20 -2, --dirty-behavior { ignore | no-warn } At restoration time, if a file in the archive is flagged as "dirty" (meaning that it had changed at the time it was saved), user is asked for confirmation before restoring it. Specifying "ignore" will skip those dirty files, while "no-warn" will restore them without user confirmation. This feature is incompatible with sequential reading mode, in this mode dar cannot know whether a file is dirty before having restored it. In consequences, in --sequential-read, once a file has been restored, if it is found to be dirty it will be removed unless dirty-behavior is set to "no-warn". .TP 20 -/, --overwriting-policy Overwriting policy can be used for archive restoration to define when and how file overwriting can occur. See above the description of this option. .TP 20 -A, --ref [[]]/ The --ref option can be used with an isolated catalogue to rescue an archive that has a corruption in the catalogue part, see GENERAL OPTIONS above for more details. .PP .B TESTING AND DIFFERENCE SPECIFIC OPTIONS (to use with -t or -d) .PP .TP 20 -ado-not-compare-symlink-mtime, --alter=do-not-compare-symlink-mtime With this option set, when comparing a symlink, no message shows when symlink in archive and symlink on filesystem do only differ by their mtime. See also -O option. .PP No other specific option, but all general options are available except for example -w which is useless, as testing and comparing only read data. -A option is available as described in GENERAL OPTIONS to backup of internal catalogue of the archive (assuming you have a previously isolated catalogue available). 
.PP Doing a difference in sequential read mode is possible but hard linked inodes can only be compared to the filesystem the first time they are met, next hard links to this same inode cannot obtain the corresponding data because skipping backward in sequential read mode is forbidden. In that situation, the hard links are reported as skipped, meaning that data comparison could not be performed. .PP .B LISTING OPTIONS (to use with -l) .PP .TP 20 -T, --list-format= .RS By default, listing provides a tar-like output (the 'normal' output). You can however get a tree-like output, an XML structured output or an output focusing on slice(s) where each file's data, EA and FSA is located in. The option --tree-format is an alias to --list-format=tree (backward compatibility). Note that the files doc/dar-catalog-*.dtd define the format of the XML output listing (This file is also installed under $PREFIX/share/doc) .PP the -Tslicing option can also be used with isolated catalogue generated with dar 2.5.0 or above, as isolated catalogues now contain a copy of the slicing layout of the archive of reference. However, if the archive of reference has been resliced (using dar_xform) after the isolated catalogue has been built, the slicing information would not be correct. For that corner case, you can use the -s and -S options with -Tslicing to specify what are the new slice sizes of the archive of reference. Last, -Tslicing and --sequential-read options are not compatible except for isolated catalogues. .RE .TP 20 -as, --alter=saved list only saved files .TP 20 -alist-ea, --alter=list-ea list Extended Attributes name for each file that has some. .TP 20 -ay, --alter=byte, --alter=bytes by default files size is displayed to occupy the shortest number of characters by using the largest unit possible (KiB, MiB, GiB, and so on). With this option instead, the size is displayed with maximum precision using the exact number of bytes used for each file. 
.TP 20 -I, -X, -P, -g, -[, -] can be used to filter file to list base on their name or path. .TP 20 -aheader displays the header (when --sequential-read is used) or the trailer of the archive and then stops. This archive header/trailer is always in clear text even when the archive is ciphered. This option is here to let you access to these fields without providing the encryption key. .PP From the general options it seems only -vm and -b stay useful here. Note that -vm displays an archive summary first, where a detailed of information about the archive can be obtained. If you want to display only this summary use -q with -l option. .TP 10 displayed fields .RS .TP 10 [Data] possible values are [ ] or [Saved] or [InRef] or [DIRTY] or [Inode] or [Delta]. [ ] means that the data has not been saved because there is no change since backup of reference. [Saved] means that the data has been saved completely, and thus this archive is able to restore the file without other help. [InRef] was used in archive generated by dar version 2.3.x and before when isolating a catalogue from an archive, and means that the file was saved in the reference archive. [DIRTY] means that data is saved (like [Saved]) but has changed at the time dar was reading it for backup, leading dar to possibly store the file in a state it never had. [Inode] means only permission ownership and ctime data changed since the archive of reference was done is recorded in the archive, the data did not changed according to the --comparison-field set or not set. Last [Delta] means the file's data is saved as a binary delta (or delta patch), which is much shorter than the full data as what is done with [Saved]. It also means that you can only restore the file if it exists on filesystem in the state it had when the archive of reference was done, for the patch to be possible to apply on it. This is the case for example if you just restored this file from the archive of reference. 
.TP 10 [D] possible values are [-], [ ] or [D]. [D] means that the delta signature associated with this file is present in the archive. [ ] means that the file has no associated delta signature and thus binary diff will not be possible for it. [-] is used for non plain file inodes for which delta signature is not applicable. .TP 10 [EA] possible values are " " (empty string) or [ ] or [InRef], [Saved] or [Suppr]. It shows whether Extended Attributes are present and saved ([Saved]), are present but not saved ([ ]) which means there is no change since backup of reference, if there is no EA saved for this file (empty string) or if some EA were present in the archive of reference but none is currently available ([Suppr]). [InRef] was used when isolating a catalogue (release 2.3.x and before) from an archive and means that the file was saved in the reference archive. .TP [FSA] Each character represents a FSA family: .RS .TP 5 "L" is the first character (L/l/-) representing ext2/3/4 FSA family .TP 5 "H" is the second character (H/h/-) representing HFS+ FSA family .TP 5 "-" the third character is reserved for future FSA family and is always a dash for now. .PP Uppercase means the FSA set is saved, lowercase means the FSA is present in the archive of reference and has not changed since that time. Last a dash (-) means no FSA of that family has been saved for that file. .RE .TP 10 [compr] possible values are [....%] or [-----] or [ ] or [worse]. 
Shows if the file has been compressed ([...%]) and the compression ratio reached "(uncompressed-compressed)/uncompressed", for example [ 33%] means that the compressed data uses only 66% of the space required to store uncompressed data (33% of space saved thanks to compression), or if the file is stored without compression ([ ] see -m, -Y and -Z options) or if the file is not subject to compression because it is not a saved regular file ([-----]), or if the file takes more space compressed than its original size ([worse]), due to compression overhead. Note that 1% compression ratio brings quite no data reduction, while obviously 98% is a very performant compression (compressed file takes only 2% of the size required by the uncompressed data). .TP 10 [S] possible values are [ ] or [X]. [X] only applies to saved plain files, and tells that the file is stored using sparse file data structure: not all data is stored, long sequence of zeros are skipped. This also means that at restoration time, if the filesystem supports it, holes will be restored. To store hole information libdar uses escape sequence (special sequence of byte), but to avoid real data to be considered as such escape sequence, a special escape sequence is used when data looks like an escape sequence. So if a data contains such an escape sequence, it must be read as if it contains holes to be able to restore back the data in its original form. For that reason, in some rare circumstances (saving a dar archive inside a dar archive without compression or encryption, for example) a file without hole may be marked [X] as if it had holes and will be longer by one byte for each data sequence looking like an escape sequence. .TP 10 permission see ls man page. Note that a star (*) is prepended to the permission string if the corresponding inode is linked several times to the directory structure (hard link). 
.TP 10 user owner of the file .TP 10 group group owner of the file .TP 10 size size in byte of the file (if compression is enabled, the real size in the archive is "compression rate" time smaller). .TP 10 date the last modification date of the file. The last access time is also saved and restored, but not displayed. .TP 10 filename The name of the file. .TP 10 Extended Attributes When using -alist-ea option, for hard linked inode, the filename is followed by an integer between braces: Entries with the same number do point to the same inode. .TP 10 Slice(s) In -Tslicing mode, each file is given the range of slices it is located in. If slice size is chosen particularly small, some slices may contain no file, EA, FSA data but only tape marks or the internal catalogue, leading the aggregation of reported slices not to cover all available slices of the archive. .PP .SH EXPLICIT OPTIONAL ARGUMENTS When dar has not been compiled with GNU getopt, which is not present by default on some systems like FreeBSD, you may lack the optional arguments syntax. For example "-z" will create a parse error on command-line, or in -B configuration files. The solution is to explicitly give the argument. Here follows a list of explicit arguments to use in place of optional ones: .TP 20 -z must be replaced by -z 9 .TP 20 -w must be replaced by -w d or -w default .TP 20 -H must be replaced by -H 1 .TP 20 -0 must be replaced by -0 ref .TP 20 -5 must be replaced by -5 "" .TP 20 -p must be replaced by -p 1 .TP 20 -v must be replaced by -v all .TP 20 -k must be replaced by -k ignore .TP 20 -5 must be replaced by -5 user.libdar_no_backup .PP .B important ! When using GNU getopt(), optional arguments are available by sticking the argument to the short option: "-z" for example is available as well as "-z9". But "-z 9" is wrong, it will be read as "-z" option and "9", a command line argument (not an argument to the -z option). 
In the other side, when using a non GNU getopt this time, "-z" becomes an option that always requires an argument, and thus "-z 9" is read as "-z" option with "9" as argument, while "-z9" will be rejected as a unknown option, and "-z" alone will generate an error as no argument is provided. In consequences, you .B need a space between the option (like "-z") and its argument (like "9"), when dar does not rely on a GNU getopt() call, which also imply you to explicitly use arguments to options listed just above. .SH EXIT CODES .B dar exits with the following code: .TP 10 0 Operation successful. .TP 10 1 Syntax error on command-line or DCF included file .TP 10 2 Error due to a hardware problem or a lack of memory. .TP 10 3 Detection of a condition that should never happen, and which is considered as a bug of the application. .TP 10 4 Code issued when the user has aborted the program upon dar question from dar. This also happens when dar is not run from a terminal (for example launched from crontab) and dar has a question to the user. In that case, dar aborts the same way as if the user pressed the escape key at the question prompt. .TP 10 5 is returned when an error concerning the treated data has been detected. While saving, this is the case when a file could not be opened or read. While restoring, it is the case when a file could not be created or replaced. While comparing, it is the case when a file in the archive does not match the one in the filesystem. While testing, it is the case when a file is corrupted in the archive. .TP 10 6 an error occurred while executing user command (given with -E or -F option). Mainly because the creation of a new process is not possible (process table is full) or the user command returned an error code (exit status different from zero). .TP 10 7 an error has occurred when calling a libdar routine. 
This means the caller (dar program), did not respect the specification of the API (and this can be considered as a particular case of bug). .TP 10 8 the version of dar used is based on finite length integers (it has been compiled with the option --enable-mode=...). This code is returned when an integer overflow occurred. Use the full version (based on the so-called "infinint" class) to avoid this error. .TP 10 9 this code indicates an unknown error. The exception catching code to take care of new exceptions has probably been forgotten to be updated ... this is a minor bug you are welcome to report. .TP 10 10 you have tried to use a feature that has been disabled at compilation time. .TP 10 11 some saved files have changed while dar was reading them, this may lead the data saved for this file to not correspond to a valid state for this file. For example, if the beginning and the end of the file have been modified at the same time (while dar is reading it), only the change at the end will be saved (the beginning has already been read), the resulting state of the file as recorded by dar has never existed and may cause problems to the application using it. This is known as a "dirty" file in the archive. 
.SH SIGNALS If dar receives a signal (see kill(2) man page) it will take the default behavior which most of the time will abruptly abort the program, except for the following signals: .TP 10 SIGINT This signal is generated by the terminal when hitting CTRL-C (with the terminal's default settings), it can also be generated with the kill command .TP 10 SIGTERM This signal is generated by the system when changing of run-level in particular when doing a shutdown, it can also be generated with the kill command .TP 10 SIGHUP Depending on the system, this signal may be sent before the SIGTERM signal at shutdown time, it can also be generated with the kill command .TP 10 SIGQUIT This signal is generated by the terminal when hitting CTRL-\\ (with the terminal's default settings), it can also be generated with the kill command .TP 10 SIGUSR1 This signal can be generated by the kill command .TP 10 SIGUSR2 This signal can be generated by the kill command .PP For those previous signals, two behavior exit. For SIGHUP, SIGINT, SIGQUIT, SIGTERM and SIGUSR1, a .B delayed termination is done: the backup or isolation operation is stopped, the catalogue is appended to the archive and the archive is properly completed with the correct terminator string, this way the generated archive is usable, and can be used as reference for a differential backup at a later time. Note that if an on-fly isolation had been asked, it will *not* be performed, and no user command will be launched even if dar has been configured for (-E option). For SIGUSR2 instead a .B fast termination is done: in case of backup or isolation, the archive is not completed at all, only memory and mutex are released properly. .PP For both type of termination and other operations than backup or isolation, dar's behavior is the same: For restoration, all opened directories are closed and permissions are set back to their original values (if they had to be changed for restoration). 
For listing, comparison, testing, the program aborts immediately. .PP Another point, when using one of the previous signals, dar will return with the exit status .B 4 meaning that the user has aborted the operation. Note that answering "no" to a question from dar may also lead dar to exit this way. Last, if before the end of the program the same signal is received a second time, dar will abort immediately. .SH FILES .B $HOME/.darrc and .B /etc/darrc if present are read for configuration options. They share the same syntax as files given to the -B option. If $HOME/.darrc is not present and only in that case, /etc/darrc is consulted. You can still launch /etc/darrc from .darrc using a statement like .B -B /etc/darrc. None of these files need to be present, but if they are they are parsed AFTER any option on the command line and AFTER included files from the command line (files given to the -B option). NOTE: if $HOME is not defined $HOME/.darrc defaults to /.darrc (at the root of the filesystem). Else you can see .B conditional syntax below, and -N option above that leads dar to ignore the /etc/darrc and $HOME/.darrc files. .SH CONDITIONAL SYNTAX configuration files (-B option, $HOME/.darrc and /etc/darrc) usually contain a simple list of command-line arguments, split or not over several lines, and possibly mixed with comments (see -B option for more). But, you can also use make-like targets to ask for a particular set of commands to be used in certain conditions. A condition takes the form of a reserved word immediately followed by a colon ':'. This word + colon must stand alone on its line, possibly with spaces or tabs beside it. 
The available conditions are: .TP 20 extract: all options listed after this condition get used if previously on command line or file the -x command has been used .TP 20 create: all options listed after this condition get used if previously on command line or file (-B option) the -c command has been used .TP 20 list: (or listing:) if -l command has been used .TP 20 test: if -t command has been used .TP 20 diff: if -d command has been used .TP 20 isolate: if -C command has been used .TP 20 merge: if -+ command has been used .TP 20 repair: if -y command has been used .TP 20 reference: if -A option has been used (except when -A is used for the snapshot feature or in conjunction with -af) .TP 20 auxiliary: if -@ option has been used .TP 20 all: in any case .TP 20 default: if no -c, -d, -x, -t, -C, -l or -+ option has been used at this point of the parsing. .PP The condition stops when the next condition starts, or at End of File. The commands inserted before any condition are equivalent to those inserted after the "all:" condition. Remark : -c -d -x -t -C and -l are mutual exclusive, only one of them can be used while calling dar. .PP Here is an example of conditional syntax .RS .PP create: # upon creation exclude the .br # following files from compression .br -Z "*.mp3" -Z "*.mpg" .PP all: .br -b .br -p .PP default: .br # this will get read if not .br # command has been set yet .br -V .br # thus by default dar shows its version .PP all: .br -v .br # for any command we also ask to be verbose .br # this is added to the previous all: condition .RE .PP Last point, you may have several time the same condition (several .B all: ) for example. They will be concatenated together. .SH USER TARGETS User targets are arbitrary words found on command line, that do not start by a dash ('-'). On most system they should be placed after command and options. They are collected from command-line first, then comes the parsing of command and optional arguments. 
Their use is to extend conditional syntax described just above by having a set of options activated by the user just adding a single word on command-line. Of course user targets must not be equal to one of the reserved words of the conditional syntax (extract, create, ... all, default). A valid target is a word (thus without space) composed of lowercase or uppercase letters (case is sensitive) with eventually digits, dashes '-' or underscores '_' characters. .PP Let's see an example of use: .PP first a DCF file named 'example.dcf' that will be given on command line: .PP .RS # normal set of files considered for backup .PP create: -R / -P proc -P sys -P mnt -D # if the "home" user target is applied on command line the following command get added .PP home: -g home # if the "verbose" user target is used, we will have some more verbosity ... .PP verbose: -v -vs .RE .PP Then we could run dar in the following ways: .TP 20 dar -c test -B example.dcf in that case only the command in the "create:" section of example.dcf would be used. .TP 20 dar -c test -B example.dcf verbose here over the "create:" target the commands under the "verbose:" target (-v and -vs) would be also used .TP 20 dar -c test -B example.dcf verbose home last we use two user targets "verbose:" and "home:" in addition the the "create:" target of the usual conditional syntax. .PP Note that if the last option *may* receive an argument, the first user target that follows it will be assumed an argument to that option. To avoid this, either change the order of options on command line for the last option been an option that never or always uses an argument (for example -b never has an argument while -s always has one). Or separate the options from the user targets by the -- word. And of course you can also use the explicit argument of the last option (see EXPLICIT OPTIONAL ARGUMENT section, above). .PP Second point: It is allowed to have user targets inside a DCF file. 
Note however that targets are collected in a first phase, which leads some part of the file to be hidden (because the corresponding conditional syntax or user target is not present). Then, the remaining part of the file is then parsed and actions for each option found is taken. At that time, new user targets found are just recorded, but they do not modify the current DCF file layout, in particular, hidden part of the file stay hidden even if the corresponding user target is read in this same file. Next DCF parsing (which may be triggered by a second -B option on the command line, or by a -B option inside the current parsed DCF file) will thus be done with the additional targets found in that first DCF file, so in a way you may have user targets that activate other user targets, but they will be activated in starting the next -B file. Here follows an examples of two DCF files, first.dcf and second.dcf: .RS # cat first.dcf target3: -K toto target1: target2 -B second.dcf target3 target2: #never reached -s 10k # cat second.dcf target2: -v target3: -b .RE In that example, target1 activates both target2 and target3, but at the time of the parsing of first.dcf, neither target2 nor target3 were yet activated thus '-K toto' and '-s 10k' will never be given to dar (unless activated beside target1 before first.dcf get parsed), however when comes the time to parse second.dcf, target2 *and* target3 are activated, thus both '-v' and '-b' will be passed to dar, even if 'target3' is located after '-B second.dcf' in the file first.dcf .SH ENVIRONMENT .TP 10 DAR_DCF_PATH if set, dar looks for Dar Configuration File (DCF files, see -B option) that do not have an fully qualified path in the directories listed in DAR_DCF_PATH environment variable. This variable receives a column (:) separated list of paths and look in each of them in turn, up to the first file found under the requested name. 
.TP 10 DAR_DUC_PATH if set, dar looks for Dar User Command (DUC files, see -E, -F, -~, -= options) that do not have a fully qualified path in the directories listed in DAR_DUC_PATH. This variable receives a column (:) separated list of paths and looks in each of them in turn, up to the first file found under the requested name. .TP 10 DAR_SFTP_KNOWNHOSTS_FILE if set, dar will not use the $HOME/.ssh/known_hosts file to check sftp remote server authenticity but the file given as value for this environment variable. Note that setting this variable to an empty string completely disable host validation, which is not recommended. Dar, the command line interface program for dar archive relies on libdar for archive format management which relies on libcurl for network transfer which in turn relies on libssh2 for all that concerns sftp protocol. In the known_hosts file, libssh2 does not support recent lines like those with "ecdsa-sha2-nistp256" in second argument but only supports "ssh-rsa" lines. Check libssh2 documentations and litterature for more details about that limitation. To workaround this limitation you need to disable known hosts validation or set up a restricted known hosts file without any "ecdsa*" entry and have DAR_SFTP_KNOWNHOSTS_FILE pointing to it. .TP 10 DAR_SFTP_PUBLIC_KEYFILE by default dar will fetch the public key file in $HOME/.ssh/id_rsa.pub file. If you use the former id_dsa.pub or more recent key types you need to set this environment variable to point to the appropriated filename .TP 10 DAR_SFTP_PRIVATE_KEYFILE by default dar will fetch the public key file in $HOME/.ssh/id_rsa file. If you use the former id_dsa.pub or more recent key types you need to set this environment variable to point to the appropriated filename .TP 10 DAR_IGNORED_AS_SYMLINK receive a column separated list of absolute paths, which if they are symlinks are not saved as symlink but as the inode they point to. For more details see the --ignored-as-symlink option above. 
.TP 10 GNUPGHOME for asymetric encryption and signature, the keyring used is $HOME/.gnupg by default. You can change this default by setting GNUPGHOME to the directory containing the keyring. For example, if you are running dar as root and want to use your unprivileged account keyring use the following: .RS .PP export GNUPGHOME=~myaccount/.gnupg .PP dar -K gnupg:...@...,...@... --sign:...@... etc. .RE .SH CAPABILITIES .B dar fully supports the cap_chown capability, but by design, dar only uses this capability to restore files at their original ownership. Dar will thus not use this capability to access files and directories the caller would normally not have access to. In other words, it should be ok to set the cap_chown capability to the dar executable (setcap cap_chown+p dar). Calling dar from a process having the cap_chown in the inheritable set would lead the system to grant this capability to the dar process while other users would not be granted this capability and would not be able to modify ownership of files at restoration time. This can be used for the system account that has the role of restoring data upon user requests, without giving root privilege to this restoration process. .SH EXAMPLES You can find some more examples of use in the tutorial, mini-howto, sample scripts, and other related documentation. All these are available in dar's source package, and are also installed beside dar in the <--prefix>/share/dar directory. This documentation is also available on-line at http://dar.linux.free.fr/doc/index.html .SH SEE ALSO dar_xform(1), dar_slave(1), dar_manager(1), dar_cp(1), dar_split(1), TUTORIAL and NOTES included in the source package and also available at http://dar.linux.free.fr/doc/index.html .SH KNOWN LIMITATIONS dar saves and restores atime, mtime, birthtime but cannot restore ctime (last inode change), there does not seems to be a standard call to do that under UNIX. 
An up to date list of known limitation is at http://dar.linux.free.fr/doc/Limitations.html .SH KNOWN BUGS http://sourceforge.net/p/dar/bugs/ .SH AUTHOR .nf http://dar.linux.free.fr/ Denis Corbin France Europe dar-2.6.8/man/Makefile.in0000644000175000017520000003771113617552304012057 00000000000000# Makefile.in generated by automake 1.15 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2014 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = { \ if test -z '$(MAKELEVEL)'; then \ false; \ elif test -n '$(MAKE_HOST)'; then \ true; \ elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \ true; \ else \ false; \ fi; \ } am__make_running_with_option = \ case $${target_option-} in \ ?) 
;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) pkgdatadir = $(datadir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ pkglibdir = $(libdir)/@PACKAGE@ pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = man ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/iconv.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/nls.m4 $(top_srcdir)/m4/po.m4 \ $(top_srcdir)/m4/progtest.m4 
$(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) DIST_COMMON = $(srcdir)/Makefile.am $(am__DIST_COMMON) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! 
-r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } man1dir = $(mandir)/man1 am__installdirs = "$(DESTDIR)$(man1dir)" NROFF = nroff MANS = $(dist_man_MANS) am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) am__DIST_COMMON = $(dist_man_MANS) $(srcdir)/Makefile.in DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) ACLOCAL = @ACLOCAL@ AMTAR = @AMTAR@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CXXSTDFLAGS = @CXXSTDFLAGS@ CYGPATH_W = @CYGPATH_W@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DOXYGEN_PROG = @DOXYGEN_PROG@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPGME_CFLAGS = @GPGME_CFLAGS@ GPGME_CONFIG = @GPGME_CONFIG@ GPGME_LIBS = @GPGME_LIBS@ GREP = @GREP@ HAS_DOT = @HAS_DOT@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LD = @LD@ LDFLAGS = @LDFLAGS@ LIBCURL_CFLAGS = @LIBCURL_CFLAGS@ LIBCURL_LIBS = @LIBCURL_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBS = @LIBS@ LIBTHREADAR_CFLAGS = @LIBTHREADAR_CFLAGS@ LIBTHREADAR_LIBS = @LIBTHREADAR_LIBS@ LIBTOOL = @LIBTOOL@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ LT_SYS_LIBRARY_PATH = @LT_SYS_LIBRARY_PATH@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ 
MSGMERGE = @MSGMERGE@ NM = @NM@ NMEDIT = @NMEDIT@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYEXT = @PYEXT@ PYFLAGS = @PYFLAGS@ RANLIB = @RANLIB@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ STRIP = @STRIP@ UPX_PROG = @UPX_PROG@ USE_NLS = @USE_NLS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ bindir = @bindir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ datadir = @datadir@ datarootdir = @datarootdir@ docdir = @docdir@ dot = @dot@ doxygen = @doxygen@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ groff = @groff@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ prefix = @prefix@ program_transform_name = @program_transform_name@ psdir = @psdir@ runstatedir = @runstatedir@ 
sbindir = @sbindir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp = @tmp@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ upx = @upx@ dist_man_MANS = dar.1 dar_manager.1 dar_slave.1 dar_xform.1 dar_cp.1 dar_split.1 all: all-am .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --gnu man/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --gnu man/Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-man1: $(dist_man_MANS) @$(NORMAL_INSTALL) @list1=''; \ list2='$(dist_man_MANS)'; \ test -n "$(man1dir)" \ && test -n "`echo $$list1$$list2`" \ || exit 0; \ echo " $(MKDIR_P) '$(DESTDIR)$(man1dir)'"; \ $(MKDIR_P) "$(DESTDIR)$(man1dir)" || exit 1; \ { for i in $$list1; do echo "$$i"; done; \ if test -n "$$list2"; then \ for i in $$list2; do echo "$$i"; done \ | sed -n '/\.1[a-z]*$$/p'; \ fi; \ } | while read p; do \ if test -f $$p; then d=; else 
d="$(srcdir)/"; fi; \ echo "$$d$$p"; echo "$$p"; \ done | \ sed -e 'n;s,.*/,,;p;h;s,.*\.,,;s,^[^1][0-9a-z]*$$,1,;x' \ -e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,' | \ sed 'N;N;s,\n, ,g' | { \ list=; while read file base inst; do \ if test "$$base" = "$$inst"; then list="$$list $$file"; else \ echo " $(INSTALL_DATA) '$$file' '$(DESTDIR)$(man1dir)/$$inst'"; \ $(INSTALL_DATA) "$$file" "$(DESTDIR)$(man1dir)/$$inst" || exit $$?; \ fi; \ done; \ for i in $$list; do echo "$$i"; done | $(am__base_list) | \ while read files; do \ test -z "$$files" || { \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(man1dir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(man1dir)" || exit $$?; }; \ done; } uninstall-man1: @$(NORMAL_UNINSTALL) @list=''; test -n "$(man1dir)" || exit 0; \ files=`{ for i in $$list; do echo "$$i"; done; \ l2='$(dist_man_MANS)'; for i in $$l2; do echo "$$i"; done | \ sed -n '/\.1[a-z]*$$/p'; \ } | sed -e 's,.*/,,;h;s,.*\.,,;s,^[^1][0-9a-z]*$$,1,;x' \ -e 's,\.[0-9a-z]*$$,,;$(transform);G;s,\n,.,'`; \ dir='$(DESTDIR)$(man1dir)'; $(am__uninstall_files_from_dir) tags TAGS: ctags CTAGS: cscope cscopelist: distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(MANS) installdirs: for dir in "$(DESTDIR)$(man1dir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-am clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-am -rm -f Makefile distclean-am: clean-am distclean-generic dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-man install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-man1 install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-man uninstall-man: uninstall-man1 .MAKE: install-am install-strip .PHONY: all all-am check check-am clean clean-generic clean-libtool \ cscopelist-am ctags-am distclean distclean-generic \ distclean-libtool distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am install-man \ install-man1 install-pdf install-pdf-am install-ps \ install-ps-am install-strip installcheck installcheck-am \ installdirs maintainer-clean maintainer-clean-generic \ mostlyclean mostlyclean-generic mostlyclean-libtool pdf pdf-am \ ps ps-am tags-am uninstall uninstall-am uninstall-man \ uninstall-man1 .PRECIOUS: Makefile # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. 
.NOEXPORT: dar-2.6.8/man/dar_cp.10000644000175000017520000000310413606427316011313 00000000000000.TH DAR_CP 1 "August 18th, 2018" .UC 8 .SH NAME dar_cp \- 'cp' clone command that is able to copy data located after an I/O error .SH SYNOPSIS dar_cp .P dar_cp -h .P dar_cp -V .SH DESCRIPTION .B dar_cp is similar to the standard 'cp' command, except it does not stop copying when an I/O error is met. Instead, it skips a bit further and continues to copy the rest of the file, as much as possible, filling the gaps by zeroed bytes. .PP .B dar_cp only accepts full filename (not directory) as argument. The reason of its existence in dar package is that you need it if you use Parchive with dar and have a corruption on a CD-R or any other read-only medium. You need to copy the corrupted slice on a read-write filesystem, for Parchive be able to repair it using slice associated redundancy files. .SH OPTIONS .PP .TP 20 -h Displays help usage. .TP 20 -V Displays version information. .SH EXIT CODES .B dar_cp exists with the following codes: .TP 10 0 upon normal execution (be some corrupted parted skipped or not) .TP 10 1 syntax error on command-line .TP 10 2 could not open source or destination files .TP 10 3 any other system error met during the copy .TP 10 5 when some data could not be copied due to I/O error .SH SIGNALS Any signal sent to dar_cp will abort the program immediately, there is no way to have a proper termination before the end of the process .SH SEE ALSO dar(1), dar_xform(1), dar_manager(1), dar_slave(1), dar_split(1) .SH KNOWN BUGS http://sourceforge.net/p/dar/bugs/ .SH AUTHOR .nf http://dar.linux.free.fr/ Denis Corbin France Europe dar-2.6.8/missing0000755000175000017520000001533013042165456010627 00000000000000#! /bin/sh # Common wrapper for a few potentially missing GNU programs. scriptversion=2013-10-28.13; # UTC # Copyright (C) 1996-2014 Free Software Foundation, Inc. # Originally written by Fran,cois Pinard , 1996. 
# This program is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2, or (at your option) # any later version. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # You should have received a copy of the GNU General Public License # along with this program. If not, see . # As a special exception to the GNU General Public License, if you # distribute this file as part of a program that contains a # configuration script generated by Autoconf, you may include it under # the same distribution terms that you use for the rest of that program. if test $# -eq 0; then echo 1>&2 "Try '$0 --help' for more information" exit 1 fi case $1 in --is-lightweight) # Used by our autoconf macros to check whether the available missing # script is modern enough. exit 0 ;; --run) # Back-compat with the calling convention used by older automake. shift ;; -h|--h|--he|--hel|--help) echo "\ $0 [OPTION]... PROGRAM [ARGUMENT]... Run 'PROGRAM [ARGUMENT]...', returning a proper advice when this fails due to PROGRAM being missing or too old. Options: -h, --help display this help and exit -v, --version output version information and exit Supported PROGRAM values: aclocal autoconf autoheader autom4te automake makeinfo bison yacc flex lex help2man Version suffixes to PROGRAM as well as the prefixes 'gnu-', 'gnu', and 'g' are ignored when checking the name. Send bug reports to ." exit $? ;; -v|--v|--ve|--ver|--vers|--versi|--versio|--version) echo "missing $scriptversion (GNU Automake)" exit $? ;; -*) echo 1>&2 "$0: unknown '$1' option" echo 1>&2 "Try '$0 --help' for more information" exit 1 ;; esac # Run the given program, remember its exit status. "$@"; st=$? # If it succeeded, we are done. 
test $st -eq 0 && exit 0 # Also exit now if we it failed (or wasn't found), and '--version' was # passed; such an option is passed most likely to detect whether the # program is present and works. case $2 in --version|--help) exit $st;; esac # Exit code 63 means version mismatch. This often happens when the user # tries to use an ancient version of a tool on a file that requires a # minimum version. if test $st -eq 63; then msg="probably too old" elif test $st -eq 127; then # Program was missing. msg="missing on your system" else # Program was found and executed, but failed. Give up. exit $st fi perl_URL=http://www.perl.org/ flex_URL=http://flex.sourceforge.net/ gnu_software_URL=http://www.gnu.org/software program_details () { case $1 in aclocal|automake) echo "The '$1' program is part of the GNU Automake package:" echo "<$gnu_software_URL/automake>" echo "It also requires GNU Autoconf, GNU m4 and Perl in order to run:" echo "<$gnu_software_URL/autoconf>" echo "<$gnu_software_URL/m4/>" echo "<$perl_URL>" ;; autoconf|autom4te|autoheader) echo "The '$1' program is part of the GNU Autoconf package:" echo "<$gnu_software_URL/autoconf/>" echo "It also requires GNU m4 and Perl in order to run:" echo "<$gnu_software_URL/m4/>" echo "<$perl_URL>" ;; esac } give_advice () { # Normalize program name to check for. normalized_program=`echo "$1" | sed ' s/^gnu-//; t s/^gnu//; t s/^g//; t'` printf '%s\n' "'$1' is $msg." configure_deps="'configure.ac' or m4 files included by 'configure.ac'" case $normalized_program in autoconf*) echo "You should only need it if you modified 'configure.ac'," echo "or m4 files included by it." program_details 'autoconf' ;; autoheader*) echo "You should only need it if you modified 'acconfig.h' or" echo "$configure_deps." program_details 'autoheader' ;; automake*) echo "You should only need it if you modified 'Makefile.am' or" echo "$configure_deps." 
program_details 'automake' ;; aclocal*) echo "You should only need it if you modified 'acinclude.m4' or" echo "$configure_deps." program_details 'aclocal' ;; autom4te*) echo "You might have modified some maintainer files that require" echo "the 'autom4te' program to be rebuilt." program_details 'autom4te' ;; bison*|yacc*) echo "You should only need it if you modified a '.y' file." echo "You may want to install the GNU Bison package:" echo "<$gnu_software_URL/bison/>" ;; lex*|flex*) echo "You should only need it if you modified a '.l' file." echo "You may want to install the Fast Lexical Analyzer package:" echo "<$flex_URL>" ;; help2man*) echo "You should only need it if you modified a dependency" \ "of a man page." echo "You may want to install the GNU Help2man package:" echo "<$gnu_software_URL/help2man/>" ;; makeinfo*) echo "You should only need it if you modified a '.texi' file, or" echo "any other file indirectly affecting the aspect of the manual." echo "You might want to install the Texinfo package:" echo "<$gnu_software_URL/texinfo/>" echo "The spurious makeinfo call might also be the consequence of" echo "using a buggy 'make' (AIX, DU, IRIX), in which case you might" echo "want to install GNU make:" echo "<$gnu_software_URL/make/>" ;; *) echo "You might have modified some files without having the proper" echo "tools for further handling them. Check the 'README' file, it" echo "often tells you about the needed prerequisites for installing" echo "this package. You may also peek at any GNU archive site, in" echo "case some other package contains this missing '$1' program." ;; esac } give_advice "$1" | sed -e '1s/^/WARNING: /' \ -e '2,$s/^/ /' >&2 # Propagate the correct exit status (expected to be 127 for a program # not found, 63 for a program that failed due to version mismatch). 
exit $st # Local variables: # eval: (add-hook 'write-file-hooks 'time-stamp) # time-stamp-start: "scriptversion=" # time-stamp-format: "%:y-%02m-%02d.%02H" # time-stamp-time-zone: "UTC" # time-stamp-end: "; # UTC" # End: dar-2.6.8/config.h.in0000644000175000017520000003560713617552303011263 00000000000000/* config.h.in. Generated from configure.ac by autoheader. */ /* backtrace() call supported */ #undef BACKTRACE_AVAILABLE /* header and linking is available to have strong encryption works */ #undef CRYPTO_AVAILABLE /* dar and dar_suite version, definition in configure.ac */ #undef DAR_VERSION /* if defined, activates support for Extended Attributes */ #undef EA_SUPPORT /* Define to 1 if translation of program messages to the user's native language is requested. */ #undef ENABLE_NLS /* furtive read mode is available */ #undef FURTIVE_READ_MODE_AVAILABLE /* minimum version expected of GPGME */ #undef GPGME_MIN_VERSION /* GPGME is available to support public key based ciphering */ #undef GPGME_SUPPORT /* Define to 1 if you have the header file. */ #undef HAVE_ARPA_INET_H /* Define to 1 if you have the header file. */ #undef HAVE_ATTR_XATTR_H /* bzlib.h header file is available */ #undef HAVE_BZLIB_H /* whether the system has support for POSIX.1e capabilities */ #undef HAVE_CAPABILITIES /* Define to 1 if you have the Mac OS X function CFLocaleCopyCurrent in the CoreFoundation framework. */ #undef HAVE_CFLOCALECOPYCURRENT /* Define to 1 if you have the Mac OS X function CFPreferencesCopyAppValue in the CoreFoundation framework. */ #undef HAVE_CFPREFERENCESCOPYAPPVALUE /* Define to 1 if you have the `ctime_r' function. */ #undef HAVE_CTIME_R /* Define to 1 if you have the header file. */ #undef HAVE_CTYPE_H /* libcurl.h header file is available */ #undef HAVE_CURL_CURL_H /* Define if the GNU dcgettext() function is already present or preinstalled. */ #undef HAVE_DCGETTEXT /* Define to 1 if you have the declaration of `sys_siglist', and to 0 if you don't. 
*/ #undef HAVE_DECL_SYS_SIGLIST /* Define to 1 if you have the header file, and it defines `DIR'. */ #undef HAVE_DIRENT_H /* Define to 1 if you have the header file. */ #undef HAVE_DLFCN_H /* whether the system has the necessary routine to handle Door files */ #undef HAVE_DOOR /* Define to 1 if you have the header file. */ #undef HAVE_ERRNO_H /* Define to 1 if you have the header file. */ #undef HAVE_EXECINFO_H /* Define to 1 if you have the header file. */ #undef HAVE_FCNTL_H /* whether the system provides fdatasync() system call */ #undef HAVE_FDATASYNC /* Define to 1 if you have the `fdopendir' function. */ #undef HAVE_FDOPENDIR /* Define to 1 if your system has a working POSIX `fnmatch' function. */ #undef HAVE_FNMATCH /* Define to 1 if you have the header file. */ #undef HAVE_FNMATCH_H /* Define to 1 if you have the `fork' function. */ #undef HAVE_FORK /* gcrypt.h header file is available */ #undef HAVE_GCRYPT_H /* Define to 1 if you have the `getgrnam_r' function. */ #undef HAVE_GETGRNAM_R /* Define to 1 if you have the header file. */ #undef HAVE_GETOPT_H /* a getopt() call is declared in */ #undef HAVE_GETOPT_IN_UNISTD_H /* whether getopt_long() is available */ #undef HAVE_GETOPT_LONG /* a getopt_long() call is declared in */ #undef HAVE_GETOPT_LONG_IN_UNISTD_H /* Define to 1 if you have the `getpwnam_r' function. */ #undef HAVE_GETPWNAM_R /* Define if the GNU gettext() function is already present or preinstalled. */ #undef HAVE_GETTEXT /* Define to 1 if you have the header file. */ #undef HAVE_GPGME_H /* Define to 1 if you have the header file. */ #undef HAVE_GRP_H /* Define if you have the iconv() function and it works. */ #undef HAVE_ICONV /* inttypes.h header availability */ #undef HAVE_INTTYPES_H /* Define to 1 if you have the `lchown' function. */ #undef HAVE_LCHOWN /* Define to 1 if you have the `attr' library (-lattr). */ #undef HAVE_LIBATTR /* Define to 1 if you have the `bz2' library (-lbz2). 
*/ #undef HAVE_LIBBZ2 /* Define to 1 if you have the `c' library (-lc). */ #undef HAVE_LIBC /* Define to 1 if you have the `cap' library (-lcap). */ #undef HAVE_LIBCAP /* Libcurl library availability */ #undef HAVE_LIBCURL /* Define to 1 if you have the `dl' library (-ldl). */ #undef HAVE_LIBDL /* Define to 1 if you have the `execinfo' library (-lexecinfo). */ #undef HAVE_LIBEXECINFO /* Define to 1 if you have the `gcrypt' library (-lgcrypt). */ #undef HAVE_LIBGCRYPT /* Define to 1 if you have the `gnugetopt' library (-lgnugetopt). */ #undef HAVE_LIBGNUGETOPT /* Define to 1 if you have the `gpgme' library (-lgpgme). */ #undef HAVE_LIBGPGME /* Define to 1 if you have the `gpg-error' library (-lgpg-error). */ #undef HAVE_LIBGPG_ERROR /* Define to 1 if you have the header file. */ #undef HAVE_LIBINTL_H /* Define to 1 if you have the `lzma' library (-llzma). */ #undef HAVE_LIBLZMA /* Define to 1 if you have the `lzo2' library (-llzo2). */ #undef HAVE_LIBLZO2 /* Define to 1 if you have the `nsl' library (-lnsl). */ #undef HAVE_LIBNSL /* Define to 1 if you have the `pthread' library (-lpthread). */ #undef HAVE_LIBPTHREAD /* Define to 1 if you have the `rsync' library (-lrsync). */ #undef HAVE_LIBRSYNC /* librsync.h header file is available */ #undef HAVE_LIBRSYNC_H /* Define to 1 if you have the `socket' library (-lsocket). */ #undef HAVE_LIBSOCKET /* Define to 1 if you have the `threadar' library (-lthreadar). */ #undef HAVE_LIBTHREADAR /* libthreadar.h header file availability */ #undef HAVE_LIBTHREADAR_LIBTHREADAR_HPP /* Define to 1 if you have the `z' library (-lz). */ #undef HAVE_LIBZ /* Define to 1 if you have the header file. */ #undef HAVE_LIMITS_H /* Define to 1 if you have the header file. */ #undef HAVE_LINUX_CAPABILITY_H /* Define to 1 if you have the `localtime_r' function. */ #undef HAVE_LOCALTIME_R /* Define to 1 if `lstat' has the bug that it succeeds when given the zero-length file name argument. 
*/ #undef HAVE_LSTAT_EMPTY_STRING_BUG /* if lutimes() system call is available */ #undef HAVE_LUTIMES /* lzma.h header file is available */ #undef HAVE_LZMA_H /* lzo/lzo1x.h header file is available */ #undef HAVE_LZO_LZO1X_H /* Define to 1 if your system has a GNU libc compatible `malloc' function, and to 0 otherwise. */ #undef HAVE_MALLOC /* Define to 1 if you have the header file. */ #undef HAVE_MEMORY_H /* Define to 1 if you have the `mkdir' function. */ #undef HAVE_MKDIR /* Define to 1 if you have the header file, and it defines `DIR'. */ #undef HAVE_NDIR_H /* Define to 1 if you have the header file. */ #undef HAVE_NETINET_IN_H /* the optreset external variable exists to reset getopt standard call */ #undef HAVE_OPTRESET /* whether the system has support for posix_fadvise() */ #undef HAVE_POSIX_FADVISE /* Define to 1 if you have the header file. */ #undef HAVE_PTHREAD_H /* Define to 1 if you have the header file. */ #undef HAVE_PWD_H /* Define to 1 if you have the `readdir_r' function. */ #undef HAVE_READDIR_R /* Define to 1 if you have the `regcomp' function. */ #undef HAVE_REGCOMP /* Define to 1 if you have the header file. */ #undef HAVE_REGEX_H /* Define to 1 if you have the `rmdir' function. */ #undef HAVE_RMDIR /* Define to 1 if you have the header file. */ #undef HAVE_SIGNAL_H /* Define to 1 if `stat' has the bug that it succeeds when given the zero-length file name argument. */ #undef HAVE_STAT_EMPTY_STRING_BUG /* Define to 1 if you have the header file. */ #undef HAVE_STDDEF_H /* Define to 1 if you have the header file. */ #undef HAVE_STDINT_H /* Define to 1 if you have the header file. */ #undef HAVE_STDIO_H /* Define to 1 if you have the header file. */ #undef HAVE_STDLIB_H /* Define to 1 if you have the `strerror_r' function. */ #undef HAVE_STRERROR_R /* strerror_r() returns a char * */ #undef HAVE_STRERROR_R_CHAR_PTR /* Define to 1 if you have the `strerr-or' function. */ #undef HAVE_STRERR_OR /* Define to 1 if you have the header file. 
*/ #undef HAVE_STRINGS_H /* Define to 1 if you have the header file. */ #undef HAVE_STRING_H /* Define to 1 if `st_rdev' is a member of `struct stat'. */ #undef HAVE_STRUCT_STAT_ST_RDEV /* whether the system provides syncfs() system call */ #undef HAVE_SYNCFS /* Define to 1 if you have the header file. */ #undef HAVE_SYS_CAPABILITY_H /* Define to 1 if you have the header file, and it defines `DIR'. */ #undef HAVE_SYS_DIR_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_IOCTL_H /* Define to 1 if you have the header file, and it defines `DIR'. */ #undef HAVE_SYS_NDIR_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_SOCKET_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_STAT_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_TIME_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_TYPES_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_UN_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_UTSNAME_H /* Define to 1 if you have that is POSIX.1 compatible. */ #undef HAVE_SYS_WAIT_H /* Define to 1 if you have the header file. */ #undef HAVE_SYS_XATTR_H /* Define to 1 if you have the header file. */ #undef HAVE_TERMIOS_H /* Define to 1 if you have the header file. */ #undef HAVE_TIME_H /* Define to 1 if you have the header file. */ #undef HAVE_UNISTD_H /* Define to 1 if you have the `utime' function. */ #undef HAVE_UTIME /* Define to 1 if you have the header file. */ #undef HAVE_UTIMES_H /* Define to 1 if you have the header file. */ #undef HAVE_UTIME_H /* Define to 1 if `utime(file, NULL)' sets file's timestamp to the present. */ #undef HAVE_UTIME_NULL /* Define to 1 if you have the `vfork' function. */ #undef HAVE_VFORK /* Define to 1 if you have the header file. */ #undef HAVE_VFORK_H /* Define to 1 if you have the header file. */ #undef HAVE_WCHAR_H /* Define to 1 if you have the header file. */ #undef HAVE_WCTYPE_H /* Define to 1 if `fork' works. 
*/ #undef HAVE_WORKING_FORK /* Define to 1 if `vfork' works. */ #undef HAVE_WORKING_VFORK /* zlib.h header file is available */ #undef HAVE_ZLIB_H /* Define as const if the declaration of iconv() needs const. */ #undef ICONV_CONST /* header and linking is available to have libbz2 functions */ #undef LIBBZ2_AVAILABLE /* libcurl is usable */ #undef LIBCURL_AVAILABLE /* if defined, activates the support for HFS+ create time FSA */ #undef LIBDAR_BIRTHTIME /* if defined, builds a very slow executable */ #undef LIBDAR_DEBUG_MEMORY /* activation of speed optimization for large directories */ #undef LIBDAR_FAST_DIR /* support for timestamps microsecond read accuracy */ #undef LIBDAR_MICROSECOND_READ_ACCURACY /* support for timestamps microsecond write accuracy */ #undef LIBDAR_MICROSECOND_WRITE_ACCURACY /* if defined, activates the ext2/3 nodump flag feature */ #undef LIBDAR_NODUMP_FEATURE /* if defined, informs the code that no optimization has been used for compilation */ #undef LIBDAR_NO_OPTIMIZATION /* header and linking is available to have liblzma functions */ #undef LIBLZMA_AVAILABLE /* header and linking is available to have lzo functions */ #undef LIBLZO2_AVAILABLE /* librsync is usable */ #undef LIBRSYNC_AVAILABLE /* when libthreadar could be found and linked against */ #undef LIBTHREADAR_AVAILABLE /* header and linking is available to have libz functions */ #undef LIBZ_AVAILABLE /* Define to 1 if `lstat' dereferences a symlink specified with a trailing slash. */ #undef LSTAT_FOLLOWS_SLASHED_SYMLINK /* Define to the sub-directory where libtool stores uninstalled libraries. */ #undef LT_OBJDIR /* Define to 1 if `major', `minor', and `makedev' are declared in . */ #undef MAJOR_IN_MKDEV /* Define to 1 if `major', `minor', and `makedev' are declared in . 
*/ #undef MAJOR_IN_SYSMACROS /* libgcrypt minimum version */ #undef MIN_VERSION_GCRYPT /* ligcrypt minimum version without hash bug */ #undef MIN_VERSION_GCRYPT_HASH_BUG /* Some *_r() stdlib call are missing to permit complete thread-safe support by libdar */ #undef MISSING_REENTRANT_LIBCALL /* POSIX mutex (pthread_mutex_t) is available */ #undef MUTEX_WORKS /* if defined, activates support for Mac OS X Extended Attributes */ #undef OSX_EA_SUPPORT /* manually set CPU's registers' size */ #undef OS_BITS /* Name of package */ #undef PACKAGE /* Define to the address where bug reports for this package should be sent. */ #undef PACKAGE_BUGREPORT /* Define to the full name of this package. */ #undef PACKAGE_NAME /* Define to the full name and version of this package. */ #undef PACKAGE_STRING /* Define to the one symbol short name of this package. */ #undef PACKAGE_TARNAME /* Define to the home page for this package. */ #undef PACKAGE_URL /* Define to the version of this package. */ #undef PACKAGE_VERSION /* Define as the return type of signal handlers (`int' or `void'). */ #undef RETSIGTYPE /* The size of `off_t', as computed by sizeof. */ #undef SIZEOF_OFF_T /* The size of `size_t', as computed by sizeof. */ #undef SIZEOF_SIZE_T /* The size of `time_t', as computed by sizeof. */ #undef SIZEOF_TIME_T /* Define to 1 if you have the ANSI C header files. */ #undef STDC_HEADERS /* Define to 1 if you can safely include both and . */ #undef TIME_WITH_SYS_TIME /* Version number of package */ #undef VERSION /* activate makedev(), major() and minor() when _XOPEN_SOURCE is set */ #undef _BSD_SOURCE /* Define on Darwin to activate all library features */ #undef _DARWIN_C_SOURCE /* Enable large inode numbers on Mac OS X 10.5. */ #ifndef _DARWIN_USE_64_BIT_INODE # define _DARWIN_USE_64_BIT_INODE 1 #endif /* disabling warning about _BSD_SOURCE to be deprecated */ #undef _DEFAULT_SOURCE /* Number of bits in a file offset, on hosts where this is settable. 
*/ #undef _FILE_OFFSET_BITS /* Define on Linux to activate all library features */ #undef _GNU_SOURCE /* Define for large files, on AIX-style hosts. */ #undef _LARGE_FILES /* Define on NetBSD to activate all library features */ #undef _NETBSD_SOURCE /* activates POSIX.1-2008 symbols in order to allow microsecond time setting, as well as ctime_r() call */ #undef _XOPEN_SOURCE /* Define on FreeBSD to activate all library features */ #undef __BSD_VISIBLE /* Define to empty if `const' does not conform to ANSI C. */ #undef const /* Define to `__inline__' or `__inline' if that's what the C compiler calls it, or to nothing if 'inline' is not supported under any name. */ #ifndef __cplusplus #undef inline #endif /* Define to rpl_malloc if the replacement function should be used. */ #undef malloc /* Define to `long int' if does not define. */ #undef off_t /* Define to `int' if does not define. */ #undef pid_t /* Define to `unsigned int' if does not define. */ #undef size_t /* Define as `fork' if `vfork' does not work. */ #undef vfork dar-2.6.8/AUTHORS0000644000175000017520000001007113606627706010304 00000000000000 D i s k A R c h i v e - D A R --------- ----- - Original Design & Development: Denis Corbin Several patches are from Wesley Leggette, Dave Vasilevsky, Nick Alcock, Brian May, Martin Jost, Jurgen Menden, Todd Vierling Omer Enbar, David Rose, Alex Kohlmeyer, Dietrich Rothe, Moritz Franosch, John Little, Chris Martin, Michael Roitzsch, Andrea Palazzi, Dwayne C. Litzenberger, Erik Wasser, Sonni Norlov, David Fries, Jan-Pascal van Best. Translations of program messages are from: Peter Landgren for Swedish Markus Kamp for German Denis Corbin for French http://dar.linux.free.fr/ (main site) http://dar.sourceforge.net/ (mirror site) https://github.com/Edrusb/DAR (source code repository) No answer to support requests will be done out of mailing-list or other public area: your questions and their answers may be of interest to others. 
For all and to all, if you need support thanks to read the link below: http://dar.linux.free.fr/doc/index.html#ask_for_support Sharing must be both directions. Else for non support requests only, you are welcome to send an email to Denis at dar.linux@free.fr paying attention to add the following string (the quotes are not necessary) "[EARTH IS BEAUTIFUL]" in the subject of your email, to be able to pass anti-spam filter. Here follows an extract of Denis Corbin's resume: 1990-1992 Classes Prepas (math. sup. math. spe. M) 1992-1995 ENSEIRB - Ecole Nationale Superieur d'Electronique Informatique et Radiocommunications de Bordeaux Promo I-1995. 1995 3 months Erasmus project in Bologna (Italy). 1995-1996 Military Service: 28e Regiment Transmission Armee de Terre Formation Equipex-RITA, operator on the RITTER network. Military training PMTE (Preparation Militaire Terre Encadrement). 1997-2000 Software developer, Team leader, Project leader for Netcomsystems (which was renamed Spirent Communications in 1999) Designed software for the Smartbits network tester. Denis worked in cooperation with Californian team based in Calabasas. 2000-2002 Network Design, architecture and network support for DCN of Alcatel Submarine Networks. 2002-2003 DNS maintainer and Firewall admin for the GPRS network for Bouygues Telecom. 2003-2004 Network Design and support for SFR's wap and SMS platform, Managed change of ISP connectivity (activating BGP dual homing) for SFR with no loss of service. Nov. 2003 Cisco Certified CCNA 2004-2005 Validation responsible of the hosting infrastructure that provides data to UMTS mobile phones for SFR. Dec. 2004 Cisco Certified CCNP 2005-2011 Network and security engineer at Telindus France. 
July 2008 Cisco Certified Internetwork Expert, CCIE #21568 R&S June 2009 Checkpoint Certified Security Expert (CCSE) 2011-2015 Pre-sales engineer at Telindus for WAN optimization (Riverbed Solution), LAN Campus and Datacenter design and architecture (Cisco Nexus and Catalysts solutions). Nov. 2011 Riverbed certified RCSA Apr. 2012 Cisco Certified Sales Expert Oct. 2014 Network Solution Architect at HP (today at Hewlett Packard Entreprise) Feb. 2015 HP Certified ASE Feb. 2016 HPE Certified Master ASE Nov. 2016 System Engineer at Aruba a Hewlett Packard Enterprise company Jul. 2018 Recertified CCIE (for 10 years now) Sep. 2018 Certified Aruba ACSPv1 Dec. 2018 System Engineer for the "Composable Fabric BU" at HPE (aka Plexxi) dar-2.6.8/configure0000755000175000017520000332655713617552303011161 00000000000000#! /bin/sh # Guess values for system-dependent variables and create Makefiles. # Generated by GNU Autoconf 2.69 for DAR 2.6.8. # # Report bugs to . # # # Copyright (C) 1992-1996, 1998-2012 Free Software Foundation, Inc. # # # This configure script is free software; the Free Software Foundation # gives unlimited permission to copy, distribute and modify it. ## -------------------- ## ## M4sh Initialization. ## ## -------------------- ## # Be more Bourne compatible DUALCASE=1; export DUALCASE # for MKS sh if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : emulate sh NULLCMD=: # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which # is contrary to our usage. Disable this feature. alias -g '${1+"$@"}'='"$@"' setopt NO_GLOB_SUBST else case `(set -o) 2>/dev/null` in #( *posix*) : set -o posix ;; #( *) : ;; esac fi as_nl=' ' export as_nl # Printing a long string crashes Solaris 7 /usr/bin/printf. 
as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo # Prefer a ksh shell builtin over an external printf program on Solaris, # but without wasting forks for bash or zsh. if test -z "$BASH_VERSION$ZSH_VERSION" \ && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then as_echo='print -r --' as_echo_n='print -rn --' elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then as_echo='printf %s\n' as_echo_n='printf %s' else if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' as_echo_n='/usr/ucb/echo -n' else as_echo_body='eval expr "X$1" : "X\\(.*\\)"' as_echo_n_body='eval arg=$1; case $arg in #( *"$as_nl"*) expr "X$arg" : "X\\(.*\\)$as_nl"; arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; esac; expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" ' export as_echo_n_body as_echo_n='sh -c $as_echo_n_body as_echo' fi export as_echo_body as_echo='sh -c $as_echo_body as_echo' fi # The user is always right. if test "${PATH_SEPARATOR+set}" != set; then PATH_SEPARATOR=: (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || PATH_SEPARATOR=';' } fi # IFS # We need space, tab and new line, in precisely that order. Quoting is # there to prevent editors from complaining about space-tab. # (If _AS_PATH_WALK were called with IFS unset, it would disable word # splitting by setting IFS to empty value.) IFS=" "" $as_nl" # Find who we are. Look in the path if we contain no directory separator. as_myself= case $0 in #(( *[\\/]* ) as_myself=$0 ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break done IFS=$as_save_IFS ;; esac # We did not find ourselves, most probably we were run as `sh COMMAND' # in which case we are not to be found in the path. if test "x$as_myself" = x; then as_myself=$0 fi if test ! -f "$as_myself"; then $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 exit 1 fi # Unset variables that we do not need and which cause bugs (e.g. in # pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" # suppresses any "Segmentation fault" message there. '((' could # trigger a bug in pdksh 5.2.14. for as_var in BASH_ENV ENV MAIL MAILPATH do eval test x\${$as_var+set} = xset \ && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : done PS1='$ ' PS2='> ' PS4='+ ' # NLS nuisances. LC_ALL=C export LC_ALL LANGUAGE=C export LANGUAGE # CDPATH. (unset CDPATH) >/dev/null 2>&1 && unset CDPATH # Use a proper internal environment variable to ensure we don't fall # into an infinite loop, continuously re-executing ourselves. if test x"${_as_can_reexec}" != xno && test "x$CONFIG_SHELL" != x; then _as_can_reexec=no; export _as_can_reexec; # We cannot yet assume a decent shell, so we have to provide a # neutralization value for shells without unset; and this also # works around shells that cannot unset nonexistent variables. # Preserve -v and -x to the replacement shell. BASH_ENV=/dev/null ENV=/dev/null (unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV case $- in # (((( *v*x* | *x*v* ) as_opts=-vx ;; *v* ) as_opts=-v ;; *x* ) as_opts=-x ;; * ) as_opts= ;; esac exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} # Admittedly, this is quite paranoid, since all the known shells bail # out after a failed `exec'. $as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2 as_fn_exit 255 fi # We don't want this to propagate to other subprocesses. 
{ _as_can_reexec=; unset _as_can_reexec;} if test "x$CONFIG_SHELL" = x; then as_bourne_compatible="if test -n \"\${ZSH_VERSION+set}\" && (emulate sh) >/dev/null 2>&1; then : emulate sh NULLCMD=: # Pre-4.2 versions of Zsh do word splitting on \${1+\"\$@\"}, which # is contrary to our usage. Disable this feature. alias -g '\${1+\"\$@\"}'='\"\$@\"' setopt NO_GLOB_SUBST else case \`(set -o) 2>/dev/null\` in #( *posix*) : set -o posix ;; #( *) : ;; esac fi " as_required="as_fn_return () { (exit \$1); } as_fn_success () { as_fn_return 0; } as_fn_failure () { as_fn_return 1; } as_fn_ret_success () { return 0; } as_fn_ret_failure () { return 1; } exitcode=0 as_fn_success || { exitcode=1; echo as_fn_success failed.; } as_fn_failure && { exitcode=1; echo as_fn_failure succeeded.; } as_fn_ret_success || { exitcode=1; echo as_fn_ret_success failed.; } as_fn_ret_failure && { exitcode=1; echo as_fn_ret_failure succeeded.; } if ( set x; as_fn_ret_success y && test x = \"\$1\" ); then : else exitcode=1; echo positional parameters were not saved. 
fi test x\$exitcode = x0 || exit 1 test -x / || exit 1" as_suggested=" as_lineno_1=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_1a=\$LINENO as_lineno_2=";as_suggested=$as_suggested$LINENO;as_suggested=$as_suggested" as_lineno_2a=\$LINENO eval 'test \"x\$as_lineno_1'\$as_run'\" != \"x\$as_lineno_2'\$as_run'\" && test \"x\`expr \$as_lineno_1'\$as_run' + 1\`\" = \"x\$as_lineno_2'\$as_run'\"' || exit 1 test \$(( 1 + 1 )) = 2 || exit 1 test -n \"\${ZSH_VERSION+set}\${BASH_VERSION+set}\" || ( ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' ECHO=\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO ECHO=\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO\$ECHO PATH=/empty FPATH=/empty; export PATH FPATH test \"X\`printf %s \$ECHO\`\" = \"X\$ECHO\" \\ || test \"X\`print -r -- \$ECHO\`\" = \"X\$ECHO\" ) || exit 1" if (eval "$as_required") 2>/dev/null; then : as_have_required=yes else as_have_required=no fi if test x$as_have_required = xyes && (eval "$as_suggested") 2>/dev/null; then : else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR as_found=false for as_dir in /bin$PATH_SEPARATOR/usr/bin$PATH_SEPARATOR$PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. as_found=: case $as_dir in #( /*) for as_base in sh bash ksh sh5; do # Try only shells that exist, to save several forks. 
as_shell=$as_dir/$as_base if { test -f "$as_shell" || test -f "$as_shell.exe"; } && { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$as_shell"; } 2>/dev/null; then : CONFIG_SHELL=$as_shell as_have_required=yes if { $as_echo "$as_bourne_compatible""$as_suggested" | as_run=a "$as_shell"; } 2>/dev/null; then : break 2 fi fi done;; esac as_found=false done $as_found || { if { test -f "$SHELL" || test -f "$SHELL.exe"; } && { $as_echo "$as_bourne_compatible""$as_required" | as_run=a "$SHELL"; } 2>/dev/null; then : CONFIG_SHELL=$SHELL as_have_required=yes fi; } IFS=$as_save_IFS if test "x$CONFIG_SHELL" != x; then : export CONFIG_SHELL # We cannot yet assume a decent shell, so we have to provide a # neutralization value for shells without unset; and this also # works around shells that cannot unset nonexistent variables. # Preserve -v and -x to the replacement shell. BASH_ENV=/dev/null ENV=/dev/null (unset BASH_ENV) >/dev/null 2>&1 && unset BASH_ENV ENV case $- in # (((( *v*x* | *x*v* ) as_opts=-vx ;; *v* ) as_opts=-v ;; *x* ) as_opts=-x ;; * ) as_opts= ;; esac exec $CONFIG_SHELL $as_opts "$as_myself" ${1+"$@"} # Admittedly, this is quite paranoid, since all the known shells bail # out after a failed `exec'. $as_echo "$0: could not re-execute with $CONFIG_SHELL" >&2 exit 255 fi if test x$as_have_required = xno; then : $as_echo "$0: This script requires a shell more modern than all" $as_echo "$0: the shells that I found on your system." if test x${ZSH_VERSION+set} = xset ; then $as_echo "$0: In particular, zsh $ZSH_VERSION has bugs and should" $as_echo "$0: be upgraded to zsh 4.3.4 or later." else $as_echo "$0: Please tell bug-autoconf@gnu.org and $0: [http://sourceforge.net/tracker/?group_id=65612&atid=511612] $0: about your system, including any error possibly output $0: before this message. Then install a modern shell, or $0: manually run the script under such a shell if you do $0: have one." 
fi exit 1 fi fi fi SHELL=${CONFIG_SHELL-/bin/sh} export SHELL # Unset more variables known to interfere with behavior of common tools. CLICOLOR_FORCE= GREP_OPTIONS= unset CLICOLOR_FORCE GREP_OPTIONS ## --------------------- ## ## M4sh Shell Functions. ## ## --------------------- ## # as_fn_unset VAR # --------------- # Portably unset VAR. as_fn_unset () { { eval $1=; unset $1;} } as_unset=as_fn_unset # as_fn_set_status STATUS # ----------------------- # Set $? to STATUS, without forking. as_fn_set_status () { return $1 } # as_fn_set_status # as_fn_exit STATUS # ----------------- # Exit the shell with STATUS, even in a "trap 0" or "set -e" context. as_fn_exit () { set +e as_fn_set_status $1 exit $1 } # as_fn_exit # as_fn_mkdir_p # ------------- # Create "$as_dir" as a directory, including parents if necessary. as_fn_mkdir_p () { case $as_dir in #( -*) as_dir=./$as_dir;; esac test -d "$as_dir" || eval $as_mkdir_p || { as_dirs= while :; do case $as_dir in #( *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( *) as_qdir=$as_dir;; esac as_dirs="'$as_qdir' $as_dirs" as_dir=`$as_dirname -- "$as_dir" || $as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_dir" : 'X\(//\)[^/]' \| \ X"$as_dir" : 'X\(//\)$' \| \ X"$as_dir" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$as_dir" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` test -d "$as_dir" && break done test -z "$as_dirs" || eval "mkdir $as_dirs" } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" } # as_fn_mkdir_p # as_fn_executable_p FILE # ----------------------- # Test if FILE is an executable regular file. as_fn_executable_p () { test -f "$1" && test -x "$1" } # as_fn_executable_p # as_fn_append VAR VALUE # ---------------------- # Append the text in VALUE to the end of the definition contained in VAR. 
Take # advantage of any shell optimizations that allow amortized linear growth over # repeated appends, instead of the typical quadratic growth present in naive # implementations. if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : eval 'as_fn_append () { eval $1+=\$2 }' else as_fn_append () { eval $1=\$$1\$2 } fi # as_fn_append # as_fn_arith ARG... # ------------------ # Perform arithmetic evaluation on the ARGs, and store the result in the # global $as_val. Take advantage of shells that can avoid forks. The arguments # must be portable across $(()) and expr. if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : eval 'as_fn_arith () { as_val=$(( $* )) }' else as_fn_arith () { as_val=`expr "$@" || test $? -eq 1` } fi # as_fn_arith # as_fn_error STATUS ERROR [LINENO LOG_FD] # ---------------------------------------- # Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are # provided, also output the error to LOG_FD, referencing LINENO. Then exit the # script with STATUS, using 1 if that was 0. as_fn_error () { as_status=$1; test $as_status -eq 0 && as_status=1 if test "$4"; then as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 fi $as_echo "$as_me: error: $2" >&2 as_fn_exit $as_status } # as_fn_error if expr a : '\(a\)' >/dev/null 2>&1 && test "X`expr 00001 : '.*\(...\)'`" = X001; then as_expr=expr else as_expr=false fi if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then as_basename=basename else as_basename=false fi if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then as_dirname=dirname else as_dirname=false fi as_me=`$as_basename -- "$0" || $as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ X"$0" : 'X\(//\)$' \| \ X"$0" : 'X\(/\)' \| . 
2>/dev/null || $as_echo X/"$0" | sed '/^.*\/\([^/][^/]*\)\/*$/{ s//\1/ q } /^X\/\(\/\/\)$/{ s//\1/ q } /^X\/\(\/\).*/{ s//\1/ q } s/.*/./; q'` # Avoid depending upon Character Ranges. as_cr_letters='abcdefghijklmnopqrstuvwxyz' as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' as_cr_Letters=$as_cr_letters$as_cr_LETTERS as_cr_digits='0123456789' as_cr_alnum=$as_cr_Letters$as_cr_digits as_lineno_1=$LINENO as_lineno_1a=$LINENO as_lineno_2=$LINENO as_lineno_2a=$LINENO eval 'test "x$as_lineno_1'$as_run'" != "x$as_lineno_2'$as_run'" && test "x`expr $as_lineno_1'$as_run' + 1`" = "x$as_lineno_2'$as_run'"' || { # Blame Lee E. McMahon (1931-1989) for sed's syntax. :-) sed -n ' p /[$]LINENO/= ' <$as_myself | sed ' s/[$]LINENO.*/&-/ t lineno b :lineno N :loop s/[$]LINENO\([^'$as_cr_alnum'_].*\n\)\(.*\)/\2\1\2/ t loop s/-\n.*// ' >$as_me.lineno && chmod +x "$as_me.lineno" || { $as_echo "$as_me: error: cannot create $as_me.lineno; rerun with a POSIX shell" >&2; as_fn_exit 1; } # If we had to re-execute with $CONFIG_SHELL, we're ensured to have # already done that, so ensure we don't try to do so again and fall # in an infinite loop. This has already happened in practice. _as_can_reexec=no; export _as_can_reexec # Don't try to exec as it changes $[0], causing all sort of problems # (the dirname of $[0] is not the place where we might find the # original and so on. Autoconf is especially sensitive to this). . "./$as_me.lineno" # Exit status is that of the last command. exit } ECHO_C= ECHO_N= ECHO_T= case `echo -n x` in #((((( -n*) case `echo 'xy\c'` in *c*) ECHO_T=' ';; # ECHO_T is single tab character. xy) ECHO_C='\c';; *) echo `echo ksh88 bug on AIX 6.1` > /dev/null ECHO_T=' ';; esac;; *) ECHO_N='-n';; esac rm -f conf$$ conf$$.exe conf$$.file if test -d conf$$.dir; then rm -f conf$$.dir/conf$$.file else rm -f conf$$.dir mkdir conf$$.dir 2>/dev/null fi if (echo >conf$$.file) 2>/dev/null; then if ln -s conf$$.file conf$$ 2>/dev/null; then as_ln_s='ln -s' # ... 
but there are two gotchas: # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. # In both cases, we have to default to `cp -pR'. ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || as_ln_s='cp -pR' elif ln conf$$.file conf$$ 2>/dev/null; then as_ln_s=ln else as_ln_s='cp -pR' fi else as_ln_s='cp -pR' fi rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file rmdir conf$$.dir 2>/dev/null if mkdir -p . 2>/dev/null; then as_mkdir_p='mkdir -p "$as_dir"' else test -d ./-p && rmdir ./-p as_mkdir_p=false fi as_test_x='test -x' as_executable_p=as_fn_executable_p # Sed expression to map a string onto a valid CPP name. as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" # Sed expression to map a string onto a valid variable name. as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" SHELL=${CONFIG_SHELL-/bin/sh} test -n "$DJDIR" || exec 7<&0 &1 # Name of the host. # hostname on some systems (SVR3.2, old GNU/Linux) returns a bogus exit status, # so uname gets run too. ac_hostname=`(hostname || uname -n) 2>/dev/null | sed 1q` # # Initializations. # ac_default_prefix=/usr/local ac_clean_files= ac_config_libobj_dir=. LIBOBJS= cross_compiling=no subdirs= MFLAGS= MAKEFLAGS= # Identity of this package. PACKAGE_NAME='DAR' PACKAGE_TARNAME='dar' PACKAGE_VERSION='2.6.8' PACKAGE_STRING='DAR 2.6.8' PACKAGE_BUGREPORT='http://sourceforge.net/tracker/?group_id=65612&atid=511612' PACKAGE_URL='' ac_unique_file="src/libdar/catalogue.cpp" gt_needs= # Factoring default headers for most tests. 
ac_includes_default="\ #include #ifdef HAVE_SYS_TYPES_H # include #endif #ifdef HAVE_SYS_STAT_H # include #endif #ifdef STDC_HEADERS # include # include #else # ifdef HAVE_STDLIB_H # include # endif #endif #ifdef HAVE_STRING_H # if !defined STDC_HEADERS && defined HAVE_MEMORY_H # include # endif # include #endif #ifdef HAVE_STRINGS_H # include #endif #ifdef HAVE_INTTYPES_H # include #endif #ifdef HAVE_STDINT_H # include #endif #ifdef HAVE_UNISTD_H # include #endif" ac_header_list= ac_default_prefix=/usr/local ac_subst_vars='am__EXEEXT_FALSE am__EXEEXT_TRUE LTLIBOBJS HAS_DOT DOXYGEN_PROG UPX_PROG CXXSTDFLAGS PYTHON_BINDING_FALSE PYTHON_BINDING_TRUE WITH_LIBTHREADAR_FALSE WITH_LIBTHREADAR_TRUE BSD_SED_FALSE BSD_SED_TRUE PROFILING_FALSE PROFILING_TRUE USE_GROFF_FALSE USE_GROFF_TRUE USE_DOXYGEN_FALSE USE_DOXYGEN_TRUE USE_UPX_FALSE USE_UPX_TRUE BUILD_MODE64_FALSE BUILD_MODE64_TRUE BUILD_MODE32_FALSE BUILD_MODE32_TRUE DEBUG_STATIC_FALSE DEBUG_STATIC_TRUE BUILD_DAR_STATIC_FALSE BUILD_DAR_STATIC_TRUE MAKE_ALL_DIR_FALSE MAKE_ALL_DIR_TRUE PYFLAGS PYEXT LIBTHREADAR_LIBS LIBTHREADAR_CFLAGS GPGME_LIBS GPGME_CFLAGS GPGME_CONFIG upx groff tmp dot doxygen LIBCURL_LIBS LIBCURL_CFLAGS PKG_CONFIG_LIBDIR PKG_CONFIG_PATH PKG_CONFIG LIBOBJS CPP LT_SYS_LIBRARY_PATH OTOOL64 OTOOL LIPO NMEDIT DSYMUTIL MANIFEST_TOOL RANLIB ac_ct_AR AR DLLTOOL OBJDUMP LN_S NM ac_ct_DUMPBIN DUMPBIN LD FGREP LIBTOOL pkgconfigdir POSUB LTLIBINTL LIBINTL INTLLIBS LTLIBICONV LIBICONV INTL_MACOSX_LIBS EGREP GREP CXXCPP am__fastdepCXX_FALSE am__fastdepCXX_TRUE CXXDEPMODE ac_ct_CXX CXXFLAGS CXX host_os host_vendor host_cpu host build_os build_vendor build_cpu build am__fastdepCC_FALSE am__fastdepCC_TRUE CCDEPMODE am__nodep AMDEPBACKSLASH AMDEP_FALSE AMDEP_TRUE am__quote am__include DEPDIR OBJEXT EXEEXT ac_ct_CC CPPFLAGS LDFLAGS CFLAGS CC XGETTEXT_EXTRA_OPTIONS MSGMERGE XGETTEXT_015 XGETTEXT GMSGFMT_015 MSGFMT_015 GMSGFMT MSGFMT GETTEXT_MACRO_VERSION USE_NLS SED AM_BACKSLASH AM_DEFAULT_VERBOSITY AM_DEFAULT_V AM_V 
am__untar am__tar AMTAR am__leading_dot SET_MAKE AWK mkdir_p MKDIR_P INSTALL_STRIP_PROGRAM STRIP install_sh MAKEINFO AUTOHEADER AUTOMAKE AUTOCONF ACLOCAL VERSION PACKAGE CYGPATH_W am__isrc INSTALL_DATA INSTALL_SCRIPT INSTALL_PROGRAM target_alias host_alias build_alias LIBS ECHO_T ECHO_N ECHO_C DEFS mandir localedir libdir psdir pdfdir dvidir htmldir infodir docdir oldincludedir includedir runstatedir localstatedir sharedstatedir sysconfdir datadir datarootdir libexecdir sbindir bindir program_transform_name prefix exec_prefix PACKAGE_URL PACKAGE_BUGREPORT PACKAGE_STRING PACKAGE_VERSION PACKAGE_TARNAME PACKAGE_NAME PATH_SEPARATOR SHELL' ac_subst_files='' ac_user_opts=' enable_option_checking enable_silent_rules enable_nls enable_dependency_tracking with_gnu_ld enable_rpath with_libiconv_prefix with_libintl_prefix with_pkgconfigdir enable_shared enable_static with_pic enable_fast_install with_aix_soname with_sysroot enable_libtool_lock enable_libdl_linking enable_largefile enable_libz_linking enable_libbz2_linking enable_liblzo2_linking enable_libxz_linking enable_libgcrypt_linking enable_ea_support enable_nodump_flag enable_birthtime enable_gnugetopt enable_librsync_linking enable_libcurl_linking enable_fadvise enable_examples enable_os_bits enable_mode enable_furtive_read enable_debug enable_pedantic enable_build_html enable_upx enable_fast_dir enable_gpgme_linking with_gpgme_prefix enable_thread_safe enable_execinfo enable_profiling enable_debug_memory enable_dar_static enable_threadar enable_python_binding ' ac_precious_vars='build_alias host_alias target_alias CC CFLAGS LDFLAGS LIBS CPPFLAGS CXX CXXFLAGS CCC CXXCPP LT_SYS_LIBRARY_PATH CPP PKG_CONFIG PKG_CONFIG_PATH PKG_CONFIG_LIBDIR LIBCURL_CFLAGS LIBCURL_LIBS LIBTHREADAR_CFLAGS LIBTHREADAR_LIBS' # Initialize some variables set by options. ac_init_help= ac_init_version=false ac_unrecognized_opts= ac_unrecognized_sep= # The variables have the same names as the options, with # dashes changed to underlines. 
cache_file=/dev/null exec_prefix=NONE no_create= no_recursion= prefix=NONE program_prefix=NONE program_suffix=NONE program_transform_name=s,x,x, silent= site= srcdir= verbose= x_includes=NONE x_libraries=NONE # Installation directory options. # These are left unexpanded so users can "make install exec_prefix=/foo" # and all the variables that are supposed to be based on exec_prefix # by default will actually change. # Use braces instead of parens because sh, perl, etc. also accept them. # (The list follows the same order as the GNU Coding Standards.) bindir='${exec_prefix}/bin' sbindir='${exec_prefix}/sbin' libexecdir='${exec_prefix}/libexec' datarootdir='${prefix}/share' datadir='${datarootdir}' sysconfdir='${prefix}/etc' sharedstatedir='${prefix}/com' localstatedir='${prefix}/var' runstatedir='${localstatedir}/run' includedir='${prefix}/include' oldincludedir='/usr/include' docdir='${datarootdir}/doc/${PACKAGE_TARNAME}' infodir='${datarootdir}/info' htmldir='${docdir}' dvidir='${docdir}' pdfdir='${docdir}' psdir='${docdir}' libdir='${exec_prefix}/lib' localedir='${datarootdir}/locale' mandir='${datarootdir}/man' ac_prev= ac_dashdash= for ac_option do # If the previous option needs an argument, assign it. if test -n "$ac_prev"; then eval $ac_prev=\$ac_option ac_prev= continue fi case $ac_option in *=?*) ac_optarg=`expr "X$ac_option" : '[^=]*=\(.*\)'` ;; *=) ac_optarg= ;; *) ac_optarg=yes ;; esac # Accept the important Cygnus configure options, so we can diagnose typos. 
case $ac_dashdash$ac_option in --) ac_dashdash=yes ;; -bindir | --bindir | --bindi | --bind | --bin | --bi) ac_prev=bindir ;; -bindir=* | --bindir=* | --bindi=* | --bind=* | --bin=* | --bi=*) bindir=$ac_optarg ;; -build | --build | --buil | --bui | --bu) ac_prev=build_alias ;; -build=* | --build=* | --buil=* | --bui=* | --bu=*) build_alias=$ac_optarg ;; -cache-file | --cache-file | --cache-fil | --cache-fi \ | --cache-f | --cache- | --cache | --cach | --cac | --ca | --c) ac_prev=cache_file ;; -cache-file=* | --cache-file=* | --cache-fil=* | --cache-fi=* \ | --cache-f=* | --cache-=* | --cache=* | --cach=* | --cac=* | --ca=* | --c=*) cache_file=$ac_optarg ;; --config-cache | -C) cache_file=config.cache ;; -datadir | --datadir | --datadi | --datad) ac_prev=datadir ;; -datadir=* | --datadir=* | --datadi=* | --datad=*) datadir=$ac_optarg ;; -datarootdir | --datarootdir | --datarootdi | --datarootd | --dataroot \ | --dataroo | --dataro | --datar) ac_prev=datarootdir ;; -datarootdir=* | --datarootdir=* | --datarootdi=* | --datarootd=* \ | --dataroot=* | --dataroo=* | --dataro=* | --datar=*) datarootdir=$ac_optarg ;; -disable-* | --disable-*) ac_useropt=`expr "x$ac_option" : 'x-*disable-\(.*\)'` # Reject names that are not valid shell variable names. expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && as_fn_error $? 
"invalid feature name: $ac_useropt" ac_useropt_orig=$ac_useropt ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` case $ac_user_opts in *" "enable_$ac_useropt" "*) ;; *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--disable-$ac_useropt_orig" ac_unrecognized_sep=', ';; esac eval enable_$ac_useropt=no ;; -docdir | --docdir | --docdi | --doc | --do) ac_prev=docdir ;; -docdir=* | --docdir=* | --docdi=* | --doc=* | --do=*) docdir=$ac_optarg ;; -dvidir | --dvidir | --dvidi | --dvid | --dvi | --dv) ac_prev=dvidir ;; -dvidir=* | --dvidir=* | --dvidi=* | --dvid=* | --dvi=* | --dv=*) dvidir=$ac_optarg ;; -enable-* | --enable-*) ac_useropt=`expr "x$ac_option" : 'x-*enable-\([^=]*\)'` # Reject names that are not valid shell variable names. expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && as_fn_error $? "invalid feature name: $ac_useropt" ac_useropt_orig=$ac_useropt ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` case $ac_user_opts in *" "enable_$ac_useropt" "*) ;; *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--enable-$ac_useropt_orig" ac_unrecognized_sep=', ';; esac eval enable_$ac_useropt=\$ac_optarg ;; -exec-prefix | --exec_prefix | --exec-prefix | --exec-prefi \ | --exec-pref | --exec-pre | --exec-pr | --exec-p | --exec- \ | --exec | --exe | --ex) ac_prev=exec_prefix ;; -exec-prefix=* | --exec_prefix=* | --exec-prefix=* | --exec-prefi=* \ | --exec-pref=* | --exec-pre=* | --exec-pr=* | --exec-p=* | --exec-=* \ | --exec=* | --exe=* | --ex=*) exec_prefix=$ac_optarg ;; -gas | --gas | --ga | --g) # Obsolete; use --with-gas. 
with_gas=yes ;; -help | --help | --hel | --he | -h) ac_init_help=long ;; -help=r* | --help=r* | --hel=r* | --he=r* | -hr*) ac_init_help=recursive ;; -help=s* | --help=s* | --hel=s* | --he=s* | -hs*) ac_init_help=short ;; -host | --host | --hos | --ho) ac_prev=host_alias ;; -host=* | --host=* | --hos=* | --ho=*) host_alias=$ac_optarg ;; -htmldir | --htmldir | --htmldi | --htmld | --html | --htm | --ht) ac_prev=htmldir ;; -htmldir=* | --htmldir=* | --htmldi=* | --htmld=* | --html=* | --htm=* \ | --ht=*) htmldir=$ac_optarg ;; -includedir | --includedir | --includedi | --included | --include \ | --includ | --inclu | --incl | --inc) ac_prev=includedir ;; -includedir=* | --includedir=* | --includedi=* | --included=* | --include=* \ | --includ=* | --inclu=* | --incl=* | --inc=*) includedir=$ac_optarg ;; -infodir | --infodir | --infodi | --infod | --info | --inf) ac_prev=infodir ;; -infodir=* | --infodir=* | --infodi=* | --infod=* | --info=* | --inf=*) infodir=$ac_optarg ;; -libdir | --libdir | --libdi | --libd) ac_prev=libdir ;; -libdir=* | --libdir=* | --libdi=* | --libd=*) libdir=$ac_optarg ;; -libexecdir | --libexecdir | --libexecdi | --libexecd | --libexec \ | --libexe | --libex | --libe) ac_prev=libexecdir ;; -libexecdir=* | --libexecdir=* | --libexecdi=* | --libexecd=* | --libexec=* \ | --libexe=* | --libex=* | --libe=*) libexecdir=$ac_optarg ;; -localedir | --localedir | --localedi | --localed | --locale) ac_prev=localedir ;; -localedir=* | --localedir=* | --localedi=* | --localed=* | --locale=*) localedir=$ac_optarg ;; -localstatedir | --localstatedir | --localstatedi | --localstated \ | --localstate | --localstat | --localsta | --localst | --locals) ac_prev=localstatedir ;; -localstatedir=* | --localstatedir=* | --localstatedi=* | --localstated=* \ | --localstate=* | --localstat=* | --localsta=* | --localst=* | --locals=*) localstatedir=$ac_optarg ;; -mandir | --mandir | --mandi | --mand | --man | --ma | --m) ac_prev=mandir ;; -mandir=* | --mandir=* | --mandi=* | 
--mand=* | --man=* | --ma=* | --m=*) mandir=$ac_optarg ;; -nfp | --nfp | --nf) # Obsolete; use --without-fp. with_fp=no ;; -no-create | --no-create | --no-creat | --no-crea | --no-cre \ | --no-cr | --no-c | -n) no_create=yes ;; -no-recursion | --no-recursion | --no-recursio | --no-recursi \ | --no-recurs | --no-recur | --no-recu | --no-rec | --no-re | --no-r) no_recursion=yes ;; -oldincludedir | --oldincludedir | --oldincludedi | --oldincluded \ | --oldinclude | --oldinclud | --oldinclu | --oldincl | --oldinc \ | --oldin | --oldi | --old | --ol | --o) ac_prev=oldincludedir ;; -oldincludedir=* | --oldincludedir=* | --oldincludedi=* | --oldincluded=* \ | --oldinclude=* | --oldinclud=* | --oldinclu=* | --oldincl=* | --oldinc=* \ | --oldin=* | --oldi=* | --old=* | --ol=* | --o=*) oldincludedir=$ac_optarg ;; -prefix | --prefix | --prefi | --pref | --pre | --pr | --p) ac_prev=prefix ;; -prefix=* | --prefix=* | --prefi=* | --pref=* | --pre=* | --pr=* | --p=*) prefix=$ac_optarg ;; -program-prefix | --program-prefix | --program-prefi | --program-pref \ | --program-pre | --program-pr | --program-p) ac_prev=program_prefix ;; -program-prefix=* | --program-prefix=* | --program-prefi=* \ | --program-pref=* | --program-pre=* | --program-pr=* | --program-p=*) program_prefix=$ac_optarg ;; -program-suffix | --program-suffix | --program-suffi | --program-suff \ | --program-suf | --program-su | --program-s) ac_prev=program_suffix ;; -program-suffix=* | --program-suffix=* | --program-suffi=* \ | --program-suff=* | --program-suf=* | --program-su=* | --program-s=*) program_suffix=$ac_optarg ;; -program-transform-name | --program-transform-name \ | --program-transform-nam | --program-transform-na \ | --program-transform-n | --program-transform- \ | --program-transform | --program-transfor \ | --program-transfo | --program-transf \ | --program-trans | --program-tran \ | --progr-tra | --program-tr | --program-t) ac_prev=program_transform_name ;; -program-transform-name=* | 
--program-transform-name=* \ | --program-transform-nam=* | --program-transform-na=* \ | --program-transform-n=* | --program-transform-=* \ | --program-transform=* | --program-transfor=* \ | --program-transfo=* | --program-transf=* \ | --program-trans=* | --program-tran=* \ | --progr-tra=* | --program-tr=* | --program-t=*) program_transform_name=$ac_optarg ;; -pdfdir | --pdfdir | --pdfdi | --pdfd | --pdf | --pd) ac_prev=pdfdir ;; -pdfdir=* | --pdfdir=* | --pdfdi=* | --pdfd=* | --pdf=* | --pd=*) pdfdir=$ac_optarg ;; -psdir | --psdir | --psdi | --psd | --ps) ac_prev=psdir ;; -psdir=* | --psdir=* | --psdi=* | --psd=* | --ps=*) psdir=$ac_optarg ;; -q | -quiet | --quiet | --quie | --qui | --qu | --q \ | -silent | --silent | --silen | --sile | --sil) silent=yes ;; -runstatedir | --runstatedir | --runstatedi | --runstated \ | --runstate | --runstat | --runsta | --runst | --runs \ | --run | --ru | --r) ac_prev=runstatedir ;; -runstatedir=* | --runstatedir=* | --runstatedi=* | --runstated=* \ | --runstate=* | --runstat=* | --runsta=* | --runst=* | --runs=* \ | --run=* | --ru=* | --r=*) runstatedir=$ac_optarg ;; -sbindir | --sbindir | --sbindi | --sbind | --sbin | --sbi | --sb) ac_prev=sbindir ;; -sbindir=* | --sbindir=* | --sbindi=* | --sbind=* | --sbin=* \ | --sbi=* | --sb=*) sbindir=$ac_optarg ;; -sharedstatedir | --sharedstatedir | --sharedstatedi \ | --sharedstated | --sharedstate | --sharedstat | --sharedsta \ | --sharedst | --shareds | --shared | --share | --shar \ | --sha | --sh) ac_prev=sharedstatedir ;; -sharedstatedir=* | --sharedstatedir=* | --sharedstatedi=* \ | --sharedstated=* | --sharedstate=* | --sharedstat=* | --sharedsta=* \ | --sharedst=* | --shareds=* | --shared=* | --share=* | --shar=* \ | --sha=* | --sh=*) sharedstatedir=$ac_optarg ;; -site | --site | --sit) ac_prev=site ;; -site=* | --site=* | --sit=*) site=$ac_optarg ;; -srcdir | --srcdir | --srcdi | --srcd | --src | --sr) ac_prev=srcdir ;; -srcdir=* | --srcdir=* | --srcdi=* | --srcd=* | --src=* | 
--sr=*) srcdir=$ac_optarg ;; -sysconfdir | --sysconfdir | --sysconfdi | --sysconfd | --sysconf \ | --syscon | --sysco | --sysc | --sys | --sy) ac_prev=sysconfdir ;; -sysconfdir=* | --sysconfdir=* | --sysconfdi=* | --sysconfd=* | --sysconf=* \ | --syscon=* | --sysco=* | --sysc=* | --sys=* | --sy=*) sysconfdir=$ac_optarg ;; -target | --target | --targe | --targ | --tar | --ta | --t) ac_prev=target_alias ;; -target=* | --target=* | --targe=* | --targ=* | --tar=* | --ta=* | --t=*) target_alias=$ac_optarg ;; -v | -verbose | --verbose | --verbos | --verbo | --verb) verbose=yes ;; -version | --version | --versio | --versi | --vers | -V) ac_init_version=: ;; -with-* | --with-*) ac_useropt=`expr "x$ac_option" : 'x-*with-\([^=]*\)'` # Reject names that are not valid shell variable names. expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && as_fn_error $? "invalid package name: $ac_useropt" ac_useropt_orig=$ac_useropt ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` case $ac_user_opts in *" "with_$ac_useropt" "*) ;; *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--with-$ac_useropt_orig" ac_unrecognized_sep=', ';; esac eval with_$ac_useropt=\$ac_optarg ;; -without-* | --without-*) ac_useropt=`expr "x$ac_option" : 'x-*without-\(.*\)'` # Reject names that are not valid shell variable names. expr "x$ac_useropt" : ".*[^-+._$as_cr_alnum]" >/dev/null && as_fn_error $? "invalid package name: $ac_useropt" ac_useropt_orig=$ac_useropt ac_useropt=`$as_echo "$ac_useropt" | sed 's/[-+.]/_/g'` case $ac_user_opts in *" "with_$ac_useropt" "*) ;; *) ac_unrecognized_opts="$ac_unrecognized_opts$ac_unrecognized_sep--without-$ac_useropt_orig" ac_unrecognized_sep=', ';; esac eval with_$ac_useropt=no ;; --x) # Obsolete; use --with-x. 
with_x=yes ;; -x-includes | --x-includes | --x-include | --x-includ | --x-inclu \ | --x-incl | --x-inc | --x-in | --x-i) ac_prev=x_includes ;; -x-includes=* | --x-includes=* | --x-include=* | --x-includ=* | --x-inclu=* \ | --x-incl=* | --x-inc=* | --x-in=* | --x-i=*) x_includes=$ac_optarg ;; -x-libraries | --x-libraries | --x-librarie | --x-librari \ | --x-librar | --x-libra | --x-libr | --x-lib | --x-li | --x-l) ac_prev=x_libraries ;; -x-libraries=* | --x-libraries=* | --x-librarie=* | --x-librari=* \ | --x-librar=* | --x-libra=* | --x-libr=* | --x-lib=* | --x-li=* | --x-l=*) x_libraries=$ac_optarg ;; -*) as_fn_error $? "unrecognized option: \`$ac_option' Try \`$0 --help' for more information" ;; *=*) ac_envvar=`expr "x$ac_option" : 'x\([^=]*\)='` # Reject names that are not valid shell variable names. case $ac_envvar in #( '' | [0-9]* | *[!_$as_cr_alnum]* ) as_fn_error $? "invalid variable name: \`$ac_envvar'" ;; esac eval $ac_envvar=\$ac_optarg export $ac_envvar ;; *) # FIXME: should be removed in autoconf 3.0. $as_echo "$as_me: WARNING: you should use --build, --host, --target" >&2 expr "x$ac_option" : ".*[^-._$as_cr_alnum]" >/dev/null && $as_echo "$as_me: WARNING: invalid host type: $ac_option" >&2 : "${build_alias=$ac_option} ${host_alias=$ac_option} ${target_alias=$ac_option}" ;; esac done if test -n "$ac_prev"; then ac_option=--`echo $ac_prev | sed 's/_/-/g'` as_fn_error $? "missing argument to $ac_option" fi if test -n "$ac_unrecognized_opts"; then case $enable_option_checking in no) ;; fatal) as_fn_error $? "unrecognized options: $ac_unrecognized_opts" ;; *) $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2 ;; esac fi # Check all directory arguments for consistency. 
for ac_var in exec_prefix prefix bindir sbindir libexecdir datarootdir \ datadir sysconfdir sharedstatedir localstatedir includedir \ oldincludedir docdir infodir htmldir dvidir pdfdir psdir \ libdir localedir mandir runstatedir do eval ac_val=\$$ac_var # Remove trailing slashes. case $ac_val in */ ) ac_val=`expr "X$ac_val" : 'X\(.*[^/]\)' \| "X$ac_val" : 'X\(.*\)'` eval $ac_var=\$ac_val;; esac # Be sure to have absolute directory names. case $ac_val in [\\/$]* | ?:[\\/]* ) continue;; NONE | '' ) case $ac_var in *prefix ) continue;; esac;; esac as_fn_error $? "expected an absolute directory name for --$ac_var: $ac_val" done # There might be people who depend on the old broken behavior: `$host' # used to hold the argument of --host etc. # FIXME: To remove some day. build=$build_alias host=$host_alias target=$target_alias # FIXME: To remove some day. if test "x$host_alias" != x; then if test "x$build_alias" = x; then cross_compiling=maybe elif test "x$build_alias" != "x$host_alias"; then cross_compiling=yes fi fi ac_tool_prefix= test -n "$host_alias" && ac_tool_prefix=$host_alias- test "$silent" = yes && exec 6>/dev/null ac_pwd=`pwd` && test -n "$ac_pwd" && ac_ls_di=`ls -di .` && ac_pwd_ls_di=`cd "$ac_pwd" && ls -di .` || as_fn_error $? "working directory cannot be determined" test "X$ac_ls_di" = "X$ac_pwd_ls_di" || as_fn_error $? "pwd does not report name of working directory" # Find the source files, if location was not specified. if test -z "$srcdir"; then ac_srcdir_defaulted=yes # Try the directory containing this script, then the parent directory. ac_confdir=`$as_dirname -- "$as_myself" || $as_expr X"$as_myself" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_myself" : 'X\(//\)[^/]' \| \ X"$as_myself" : 'X\(//\)$' \| \ X"$as_myself" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$as_myself" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` srcdir=$ac_confdir if test ! 
-r "$srcdir/$ac_unique_file"; then srcdir=.. fi else ac_srcdir_defaulted=no fi if test ! -r "$srcdir/$ac_unique_file"; then test "$ac_srcdir_defaulted" = yes && srcdir="$ac_confdir or .." as_fn_error $? "cannot find sources ($ac_unique_file) in $srcdir" fi ac_msg="sources are in $srcdir, but \`cd $srcdir' does not work" ac_abs_confdir=`( cd "$srcdir" && test -r "./$ac_unique_file" || as_fn_error $? "$ac_msg" pwd)` # When building in place, set srcdir=. if test "$ac_abs_confdir" = "$ac_pwd"; then srcdir=. fi # Remove unnecessary trailing slashes from srcdir. # Double slashes in file names in object file debugging info # mess up M-x gdb in Emacs. case $srcdir in */) srcdir=`expr "X$srcdir" : 'X\(.*[^/]\)' \| "X$srcdir" : 'X\(.*\)'`;; esac for ac_var in $ac_precious_vars; do eval ac_env_${ac_var}_set=\${${ac_var}+set} eval ac_env_${ac_var}_value=\$${ac_var} eval ac_cv_env_${ac_var}_set=\${${ac_var}+set} eval ac_cv_env_${ac_var}_value=\$${ac_var} done # # Report the --help message. # if test "$ac_init_help" = "long"; then # Omit some internal or obsolete options to make the list less imposing. # This message is too long to be a string in the A/UX 3.1 sh. cat <<_ACEOF \`configure' configures DAR 2.6.8 to adapt to many kinds of systems. Usage: $0 [OPTION]... [VAR=VALUE]... To assign environment variables (e.g., CC, CFLAGS...), specify them as VAR=VALUE. See below for descriptions of some of the useful variables. Defaults for the options are specified in brackets. Configuration: -h, --help display this help and exit --help=short display options specific to this package --help=recursive display the short help of all the included packages -V, --version display version information and exit -q, --quiet, --silent do not print \`checking ...' 
messages --cache-file=FILE cache test results in FILE [disabled] -C, --config-cache alias for \`--cache-file=config.cache' -n, --no-create do not create output files --srcdir=DIR find the sources in DIR [configure dir or \`..'] Installation directories: --prefix=PREFIX install architecture-independent files in PREFIX [$ac_default_prefix] --exec-prefix=EPREFIX install architecture-dependent files in EPREFIX [PREFIX] By default, \`make install' will install all the files in \`$ac_default_prefix/bin', \`$ac_default_prefix/lib' etc. You can specify an installation prefix other than \`$ac_default_prefix' using \`--prefix', for instance \`--prefix=\$HOME'. For better control, use the options below. Fine tuning of the installation directories: --bindir=DIR user executables [EPREFIX/bin] --sbindir=DIR system admin executables [EPREFIX/sbin] --libexecdir=DIR program executables [EPREFIX/libexec] --sysconfdir=DIR read-only single-machine data [PREFIX/etc] --sharedstatedir=DIR modifiable architecture-independent data [PREFIX/com] --localstatedir=DIR modifiable single-machine data [PREFIX/var] --runstatedir=DIR modifiable per-process data [LOCALSTATEDIR/run] --libdir=DIR object code libraries [EPREFIX/lib] --includedir=DIR C header files [PREFIX/include] --oldincludedir=DIR C header files for non-gcc [/usr/include] --datarootdir=DIR read-only arch.-independent data root [PREFIX/share] --datadir=DIR read-only architecture-independent data [DATAROOTDIR] --infodir=DIR info documentation [DATAROOTDIR/info] --localedir=DIR locale-dependent data [DATAROOTDIR/locale] --mandir=DIR man documentation [DATAROOTDIR/man] --docdir=DIR documentation root [DATAROOTDIR/doc/dar] --htmldir=DIR html documentation [DOCDIR] --dvidir=DIR dvi documentation [DOCDIR] --pdfdir=DIR pdf documentation [DOCDIR] --psdir=DIR ps documentation [DOCDIR] _ACEOF cat <<\_ACEOF Program names: --program-prefix=PREFIX prepend PREFIX to installed program names --program-suffix=SUFFIX append SUFFIX to installed program 
names --program-transform-name=PROGRAM run sed PROGRAM on installed program names System types: --build=BUILD configure for building on BUILD [guessed] --host=HOST cross-compile to build programs to run on HOST [BUILD] _ACEOF fi if test -n "$ac_init_help"; then case $ac_init_help in short | recursive ) echo "Configuration of DAR 2.6.8:";; esac cat <<\_ACEOF Optional Features: --disable-option-checking ignore unrecognized --enable/--with options --disable-FEATURE do not include FEATURE (same as --enable-FEATURE=no) --enable-FEATURE[=ARG] include FEATURE [ARG=yes] --enable-silent-rules less verbose build output (undo: "make V=1") --disable-silent-rules verbose build output (undo: "make V=0") --disable-nls do not use Native Language Support --enable-dependency-tracking do not reject slow dependency extractors --disable-dependency-tracking speeds up one-time build --disable-rpath do not hardcode runtime library paths --enable-shared[=PKGS] build shared libraries [default=yes] --enable-static[=PKGS] build static libraries [default=yes] --enable-fast-install[=PKGS] optimize for fast installation [default=yes] --disable-libtool-lock avoid locking (might break parallel builds) --disable-libdl-linking ignore any libdl and avoid linking against it --disable-largefile omit support for large files --disable-libz-linking disable linking with libz and disable libz compression support --disable-libbz2-linking disable linking with libbz2 and disables libbz2 compression support --disable-liblzo2-linking disable linking with liblzo2 and disables lzo compression support --disable-libxz-linking disable linking with libxz/liblzma and disable libxz compression support --disable-libgcrypt-linking disable linking with libgcrypt which disables strong encryption support --disable-ea-support disable Extended Attributes support --disable-nodump-flag disable the ext2/3/4 Filesystem Specific Attribute support, in particular the --nodump feature --disable-birthtime disable the HFS+ Filesystem 
Specific Attribute support --disable-gnugetopt avoid linking with libgnugetopt --disable-librsync-linking disable linking with librsync and disable delta compression support --disable-libcurl-linking ignore libcurl and avoid linking against it --disable-fadvise avoid using fadvise(2) system call --enable-examples build example and testing programs --enable-os-bits=arg arg is 32 or 64. If for some reason, one wants to overcome detected system value --enable-mode=arg where arg is either 32 or infinint. Makes dar internally use 32 bits or limitless integers in place of 64 bits integers (which is the case if this option is not given) --disable-furtive-read Ignore furtive read mode availability on systems that support it --enable-debug build targets with debugging option and no optimization --enable-pedantic enable pedantic syntactical check at compilation, use only for debugging purposes ! --disable-build-html don't build programming documentation (in particular libdar API documentation) and html man page --disable-upx by default configure looks for UPX and if available make executables compressed at installation time, you can disable this feature --disable-fast-dir disable optimization for large directories, doing so has a little positive impact on memory requirement but a huge drawback on execution time --disable-gpgme-linking disable linking with gpgme which disables asymmetric crypto algorithms --disable-thread-safe libdar is thread safe if POSIX mutex are available, you can manually disable the use of POSIX mutex, the resulting libdar library will not be thread-safe anymore --disable-execinfo disable reporting stack information on self diagnostic bugs even --enable-profiling enable executable profiling --enable-debug-memory log memory allocations and releases to /tmp/dar_debug_mem_allocation.txt this debugging option leads to a slow executable --disable-dar-static avoids building dar_static, a dar statically linked version --disable-threadar avoid linking with 
libthreadar if available to prevent the use of several threads inside libdar --disable-python-binding ignore python binding even if it is possible to build it Optional Packages: --with-PACKAGE[=ARG] use PACKAGE [ARG=yes] --without-PACKAGE do not use PACKAGE (same as --with-PACKAGE=no) --with-gnu-ld assume the C compiler uses GNU ld [default=no] --with-libiconv-prefix[=DIR] search for libiconv in DIR/include and DIR/lib --without-libiconv-prefix don't search for libiconv in includedir and libdir --with-libintl-prefix[=DIR] search for libintl in DIR/include and DIR/lib --without-libintl-prefix don't search for libintl in includedir and libdir --with-pkgconfigdir=DIR defines an alternative directory to install pkgconfig files, default is '${libdir}/pkgconfig' --with-pic[=PKGS] try to use only PIC/non-PIC objects [default=use both] --with-aix-soname=aix|svr4|both shared library versioning (aka "SONAME") variant to provide on AIX, [default=aix]. --with-gnu-ld assume the C compiler uses GNU ld [default=no] --with-sysroot[=DIR] Search for dependent libraries within DIR (or the compiler's sysroot if not specified). --with-gpgme-prefix=PFX prefix where GPGME is installed (optional) Some influential environment variables: CC C compiler command CFLAGS C compiler flags LDFLAGS linker flags, e.g. -L if you have libraries in a nonstandard directory LIBS libraries to pass to the linker, e.g. -l CPPFLAGS (Objective) C/C++ preprocessor flags, e.g. -I if you have headers in a nonstandard directory CXX C++ compiler command CXXFLAGS C++ compiler flags CXXCPP C++ preprocessor LT_SYS_LIBRARY_PATH User-defined run-time library search path. 
CPP C preprocessor PKG_CONFIG path to pkg-config utility PKG_CONFIG_PATH directories to add to pkg-config's search path PKG_CONFIG_LIBDIR path overriding pkg-config's built-in search path LIBCURL_CFLAGS C compiler flags for LIBCURL, overriding pkg-config LIBCURL_LIBS linker flags for LIBCURL, overriding pkg-config LIBTHREADAR_CFLAGS C compiler flags for LIBTHREADAR, overriding pkg-config LIBTHREADAR_LIBS linker flags for LIBTHREADAR, overriding pkg-config Use these variables to override the choices made by `configure' or to help it to find libraries and programs with nonstandard names/locations. Report bugs to . _ACEOF ac_status=$? fi if test "$ac_init_help" = "recursive"; then # If there are subdirs, report their specific --help. for ac_dir in : $ac_subdirs_all; do test "x$ac_dir" = x: && continue test -d "$ac_dir" || { cd "$srcdir" && ac_pwd=`pwd` && srcdir=. && test -d "$ac_dir"; } || continue ac_builddir=. case "$ac_dir" in .) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; *) ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` # A ".." for each directory in $ac_dir_suffix. ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` case $ac_top_builddir_sub in "") ac_top_builddir_sub=. ac_top_build_prefix= ;; *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; esac ;; esac ac_abs_top_builddir=$ac_pwd ac_abs_builddir=$ac_pwd$ac_dir_suffix # for backward compatibility: ac_top_builddir=$ac_top_build_prefix case $srcdir in .) # We are building in place. ac_srcdir=. ac_top_srcdir=$ac_top_builddir_sub ac_abs_top_srcdir=$ac_pwd ;; [\\/]* | ?:[\\/]* ) # Absolute name. ac_srcdir=$srcdir$ac_dir_suffix; ac_top_srcdir=$srcdir ac_abs_top_srcdir=$srcdir ;; *) # Relative name. ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix ac_top_srcdir=$ac_top_build_prefix$srcdir ac_abs_top_srcdir=$ac_pwd/$srcdir ;; esac ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix cd "$ac_dir" || { ac_status=$?; continue; } # Check for guested configure. 
if test -f "$ac_srcdir/configure.gnu"; then echo && $SHELL "$ac_srcdir/configure.gnu" --help=recursive elif test -f "$ac_srcdir/configure"; then echo && $SHELL "$ac_srcdir/configure" --help=recursive else $as_echo "$as_me: WARNING: no configuration information is in $ac_dir" >&2 fi || ac_status=$? cd "$ac_pwd" || { ac_status=$?; break; } done fi test -n "$ac_init_help" && exit $ac_status if $ac_init_version; then cat <<\_ACEOF DAR configure 2.6.8 generated by GNU Autoconf 2.69 Copyright (C) 2012 Free Software Foundation, Inc. This configure script is free software; the Free Software Foundation gives unlimited permission to copy, distribute and modify it. _ACEOF exit fi ## ------------------------ ## ## Autoconf initialization. ## ## ------------------------ ## # ac_fn_c_try_compile LINENO # -------------------------- # Try to compile conftest.$ac_ext, and return whether this succeeded. ac_fn_c_try_compile () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack rm -f conftest.$ac_objext if { { ac_try="$ac_compile" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_compile") 2>conftest.err ac_status=$? if test -s conftest.err; then grep -v '^ *+' conftest.err >conftest.er1 cat conftest.er1 >&5 mv -f conftest.er1 conftest.err fi $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && { test -z "$ac_c_werror_flag" || test ! -s conftest.err } && test -s conftest.$ac_objext; then : ac_retval=0 else $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=1 fi eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno as_fn_set_status $ac_retval } # ac_fn_c_try_compile # ac_fn_cxx_try_compile LINENO # ---------------------------- # Try to compile conftest.$ac_ext, and return whether this succeeded. 
ac_fn_cxx_try_compile () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack rm -f conftest.$ac_objext if { { ac_try="$ac_compile" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_compile") 2>conftest.err ac_status=$? if test -s conftest.err; then grep -v '^ *+' conftest.err >conftest.er1 cat conftest.er1 >&5 mv -f conftest.er1 conftest.err fi $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && { test -z "$ac_cxx_werror_flag" || test ! -s conftest.err } && test -s conftest.$ac_objext; then : ac_retval=0 else $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=1 fi eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno as_fn_set_status $ac_retval } # ac_fn_cxx_try_compile # ac_fn_cxx_try_cpp LINENO # ------------------------ # Try to preprocess conftest.$ac_ext, and return whether this succeeded. ac_fn_cxx_try_cpp () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack if { { ac_try="$ac_cpp conftest.$ac_ext" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_cpp conftest.$ac_ext") 2>conftest.err ac_status=$? if test -s conftest.err; then grep -v '^ *+' conftest.err >conftest.er1 cat conftest.er1 >&5 mv -f conftest.er1 conftest.err fi $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } > conftest.i && { test -z "$ac_cxx_preproc_warn_flag$ac_cxx_werror_flag" || test ! 
-s conftest.err }; then : ac_retval=0 else $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=1 fi eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno as_fn_set_status $ac_retval } # ac_fn_cxx_try_cpp # ac_fn_cxx_try_link LINENO # ------------------------- # Try to link conftest.$ac_ext, and return whether this succeeded. ac_fn_cxx_try_link () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack rm -f conftest.$ac_objext conftest$ac_exeext if { { ac_try="$ac_link" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_link") 2>conftest.err ac_status=$? if test -s conftest.err; then grep -v '^ *+' conftest.err >conftest.er1 cat conftest.er1 >&5 mv -f conftest.er1 conftest.err fi $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && { test -z "$ac_cxx_werror_flag" || test ! -s conftest.err } && test -s conftest$ac_exeext && { test "$cross_compiling" = yes || test -x conftest$ac_exeext }; then : ac_retval=0 else $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=1 fi # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would # interfere with the next link command; also delete a directory that is # left behind by Apple's compiler. We do this before executing the actions. rm -rf conftest.dSYM conftest_ipa8_conftest.oo eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno as_fn_set_status $ac_retval } # ac_fn_cxx_try_link # ac_fn_cxx_try_run LINENO # ------------------------ # Try to link conftest.$ac_ext, and return whether this succeeded. Assumes # that executables *can* be run. 
ac_fn_cxx_try_run () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack if { { ac_try="$ac_link" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_link") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && { ac_try='./conftest$ac_exeext' { { case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_try") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; }; then : ac_retval=0 else $as_echo "$as_me: program exited with status $ac_status" >&5 $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=$ac_status fi rm -rf conftest.dSYM conftest_ipa8_conftest.oo eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno as_fn_set_status $ac_retval } # ac_fn_cxx_try_run # ac_fn_c_try_link LINENO # ----------------------- # Try to link conftest.$ac_ext, and return whether this succeeded. ac_fn_c_try_link () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack rm -f conftest.$ac_objext conftest$ac_exeext if { { ac_try="$ac_link" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_link") 2>conftest.err ac_status=$? if test -s conftest.err; then grep -v '^ *+' conftest.err >conftest.er1 cat conftest.er1 >&5 mv -f conftest.er1 conftest.err fi $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && { test -z "$ac_c_werror_flag" || test ! 
-s conftest.err } && test -s conftest$ac_exeext && { test "$cross_compiling" = yes || test -x conftest$ac_exeext }; then : ac_retval=0 else $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=1 fi # Delete the IPA/IPO (Inter Procedural Analysis/Optimization) information # created by the PGI compiler (conftest_ipa8_conftest.oo), as it would # interfere with the next link command; also delete a directory that is # left behind by Apple's compiler. We do this before executing the actions. rm -rf conftest.dSYM conftest_ipa8_conftest.oo eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno as_fn_set_status $ac_retval } # ac_fn_c_try_link # ac_fn_c_check_header_compile LINENO HEADER VAR INCLUDES # ------------------------------------------------------- # Tests whether HEADER exists and can be compiled using the include files in # INCLUDES, setting the cache variable VAR accordingly. ac_fn_c_check_header_compile () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 $as_echo_n "checking for $2... " >&6; } if eval \${$3+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $4 #include <$2> _ACEOF if ac_fn_c_try_compile "$LINENO"; then : eval "$3=yes" else eval "$3=no" fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi eval ac_res=\$$3 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno } # ac_fn_c_check_header_compile # ac_fn_c_try_cpp LINENO # ---------------------- # Try to preprocess conftest.$ac_ext, and return whether this succeeded. 
ac_fn_c_try_cpp () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack if { { ac_try="$ac_cpp conftest.$ac_ext" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_cpp conftest.$ac_ext") 2>conftest.err ac_status=$? if test -s conftest.err; then grep -v '^ *+' conftest.err >conftest.er1 cat conftest.er1 >&5 mv -f conftest.er1 conftest.err fi $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } > conftest.i && { test -z "$ac_c_preproc_warn_flag$ac_c_werror_flag" || test ! -s conftest.err }; then : ac_retval=0 else $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=1 fi eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno as_fn_set_status $ac_retval } # ac_fn_c_try_cpp # ac_fn_c_try_run LINENO # ---------------------- # Try to link conftest.$ac_ext, and return whether this succeeded. Assumes # that executables *can* be run. ac_fn_c_try_run () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack if { { ac_try="$ac_link" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_link") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && { ac_try='./conftest$ac_exeext' { { case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_try") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 test $ac_status = 0; }; }; then : ac_retval=0 else $as_echo "$as_me: program exited with status $ac_status" >&5 $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 ac_retval=$ac_status fi rm -rf conftest.dSYM conftest_ipa8_conftest.oo eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno as_fn_set_status $ac_retval } # ac_fn_c_try_run # ac_fn_c_check_func LINENO FUNC VAR # ---------------------------------- # Tests whether FUNC exists, setting the cache variable VAR accordingly ac_fn_c_check_func () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 $as_echo_n "checking for $2... " >&6; } if eval \${$3+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Define $2 to an innocuous variant, in case declares $2. For example, HP-UX 11i declares gettimeofday. */ #define $2 innocuous_$2 /* System header to define __stub macros and hopefully few prototypes, which can conflict with char $2 (); below. Prefer to if __STDC__ is defined, since exists even on freestanding compilers. */ #ifdef __STDC__ # include #else # include #endif #undef $2 /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char $2 (); /* The GNU C library defines this for functions which it implements to always fail with ENOSYS. Some functions are actually named something starting with __ and the normal name is an alias. 
*/ #if defined __stub_$2 || defined __stub___$2 choke me #endif int main () { return $2 (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : eval "$3=yes" else eval "$3=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi eval ac_res=\$$3 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno } # ac_fn_c_check_func # ac_fn_cxx_check_header_mongrel LINENO HEADER VAR INCLUDES # --------------------------------------------------------- # Tests whether HEADER exists, giving a warning if it cannot be compiled using # the include files in INCLUDES and setting the cache variable VAR # accordingly. ac_fn_cxx_check_header_mongrel () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack if eval \${$3+:} false; then : { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 $as_echo_n "checking for $2... " >&6; } if eval \${$3+:} false; then : $as_echo_n "(cached) " >&6 fi eval ac_res=\$$3 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } else # Is the header compilable? { $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 usability" >&5 $as_echo_n "checking $2 usability... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $4 #include <$2> _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_header_compiler=yes else ac_header_compiler=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_compiler" >&5 $as_echo "$ac_header_compiler" >&6; } # Is the header present? { $as_echo "$as_me:${as_lineno-$LINENO}: checking $2 presence" >&5 $as_echo_n "checking $2 presence... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include <$2> _ACEOF if ac_fn_cxx_try_cpp "$LINENO"; then : ac_header_preproc=yes else ac_header_preproc=no fi rm -f conftest.err conftest.i conftest.$ac_ext { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_header_preproc" >&5 $as_echo "$ac_header_preproc" >&6; } # So? What about this header? case $ac_header_compiler:$ac_header_preproc:$ac_cxx_preproc_warn_flag in #(( yes:no: ) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&5 $as_echo "$as_me: WARNING: $2: accepted by the compiler, rejected by the preprocessor!" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 $as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} ;; no:yes:* ) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: present but cannot be compiled" >&5 $as_echo "$as_me: WARNING: $2: present but cannot be compiled" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: check for missing prerequisite headers?" >&5 $as_echo "$as_me: WARNING: $2: check for missing prerequisite headers?" 
>&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: see the Autoconf documentation" >&5 $as_echo "$as_me: WARNING: $2: see the Autoconf documentation" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&5 $as_echo "$as_me: WARNING: $2: section \"Present But Cannot Be Compiled\"" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $2: proceeding with the compiler's result" >&5 $as_echo "$as_me: WARNING: $2: proceeding with the compiler's result" >&2;} ( $as_echo "## ------------------------------------------------------------------------- ## ## Report this to http://sourceforge.net/tracker/?group_id=65612&atid=511612 ## ## ------------------------------------------------------------------------- ##" ) | sed "s/^/$as_me: WARNING: /" >&2 ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 $as_echo_n "checking for $2... " >&6; } if eval \${$3+:} false; then : $as_echo_n "(cached) " >&6 else eval "$3=\$ac_header_compiler" fi eval ac_res=\$$3 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } fi eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno } # ac_fn_cxx_check_header_mongrel # ac_fn_cxx_check_type LINENO TYPE VAR INCLUDES # --------------------------------------------- # Tests whether TYPE exists after having included INCLUDES, setting cache # variable VAR accordingly. ac_fn_cxx_check_type () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 $as_echo_n "checking for $2... " >&6; } if eval \${$3+:} false; then : $as_echo_n "(cached) " >&6 else eval "$3=no" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $4 int main () { if (sizeof ($2)) return 0; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ $4 int main () { if (sizeof (($2))) return 0; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : else eval "$3=yes" fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi eval ac_res=\$$3 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno } # ac_fn_cxx_check_type # ac_fn_cxx_check_member LINENO AGGR MEMBER VAR INCLUDES # ------------------------------------------------------ # Tries to find if the field MEMBER exists in type AGGR, after including # INCLUDES, setting cache variable VAR accordingly. ac_fn_cxx_check_member () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2.$3" >&5 $as_echo_n "checking for $2.$3... " >&6; } if eval \${$4+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $5 int main () { static $2 ac_aggr; if (ac_aggr.$3) return 0; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : eval "$4=yes" else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $5 int main () { static $2 ac_aggr; if (sizeof ac_aggr.$3) return 0; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : eval "$4=yes" else eval "$4=no" fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi eval ac_res=\$$4 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno } # ac_fn_cxx_check_member # ac_fn_cxx_check_decl LINENO SYMBOL VAR INCLUDES # ----------------------------------------------- # Tests whether SYMBOL is declared in INCLUDES, setting cache variable VAR # accordingly. 
ac_fn_cxx_check_decl () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack as_decl_name=`echo $2|sed 's/ *(.*//'` as_decl_use=`echo $2|sed -e 's/(/((/' -e 's/)/) 0&/' -e 's/,/) 0& (/g'` { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $as_decl_name is declared" >&5 $as_echo_n "checking whether $as_decl_name is declared... " >&6; } if eval \${$3+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $4 int main () { #ifndef $as_decl_name #ifdef __cplusplus (void) $as_decl_use; #else (void) $as_decl_name; #endif #endif ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : eval "$3=yes" else eval "$3=no" fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi eval ac_res=\$$3 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno } # ac_fn_cxx_check_decl # ac_fn_cxx_compute_int LINENO EXPR VAR INCLUDES # ---------------------------------------------- # Tries to find the compile-time value of EXPR in a program that includes # INCLUDES, setting VAR accordingly. Returns whether the value could be # computed ac_fn_cxx_compute_int () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack if test "$cross_compiling" = yes; then # Depending upon the size, compute the lo and hi bounds. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $4 int main () { static int test_array [1 - 2 * !(($2) >= 0)]; test_array [0] = 0; return test_array [0]; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_lo=0 ac_mid=0 while :; do cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ $4 int main () { static int test_array [1 - 2 * !(($2) <= $ac_mid)]; test_array [0] = 0; return test_array [0]; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_hi=$ac_mid; break else as_fn_arith $ac_mid + 1 && ac_lo=$as_val if test $ac_lo -le $ac_mid; then ac_lo= ac_hi= break fi as_fn_arith 2 '*' $ac_mid + 1 && ac_mid=$as_val fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext done else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $4 int main () { static int test_array [1 - 2 * !(($2) < 0)]; test_array [0] = 0; return test_array [0]; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_hi=-1 ac_mid=-1 while :; do cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $4 int main () { static int test_array [1 - 2 * !(($2) >= $ac_mid)]; test_array [0] = 0; return test_array [0]; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_lo=$ac_mid; break else as_fn_arith '(' $ac_mid ')' - 1 && ac_hi=$as_val if test $ac_mid -le $ac_hi; then ac_lo= ac_hi= break fi as_fn_arith 2 '*' $ac_mid && ac_mid=$as_val fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext done else ac_lo= ac_hi= fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext # Binary search between lo and hi bounds. while test "x$ac_lo" != "x$ac_hi"; do as_fn_arith '(' $ac_hi - $ac_lo ')' / 2 + $ac_lo && ac_mid=$as_val cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $4 int main () { static int test_array [1 - 2 * !(($2) <= $ac_mid)]; test_array [0] = 0; return test_array [0]; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_hi=$ac_mid else as_fn_arith '(' $ac_mid ')' + 1 && ac_lo=$as_val fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext done case $ac_lo in #(( ?*) eval "$3=\$ac_lo"; ac_retval=0 ;; '') ac_retval=1 ;; esac else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ $4 static long int longval () { return $2; } static unsigned long int ulongval () { return $2; } #include #include int main () { FILE *f = fopen ("conftest.val", "w"); if (! f) return 1; if (($2) < 0) { long int i = longval (); if (i != ($2)) return 1; fprintf (f, "%ld", i); } else { unsigned long int i = ulongval (); if (i != ($2)) return 1; fprintf (f, "%lu", i); } /* Do not output a trailing newline, as this causes \r\n confusion on some platforms. */ return ferror (f) || fclose (f) != 0; ; return 0; } _ACEOF if ac_fn_cxx_try_run "$LINENO"; then : echo >>conftest.val; read $3 &5 $as_echo_n "checking for $2... " >&6; } if eval \${$3+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Define $2 to an innocuous variant, in case declares $2. For example, HP-UX 11i declares gettimeofday. */ #define $2 innocuous_$2 /* System header to define __stub macros and hopefully few prototypes, which can conflict with char $2 (); below. Prefer to if __STDC__ is defined, since exists even on freestanding compilers. */ #ifdef __STDC__ # include #else # include #endif #undef $2 /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char $2 (); /* The GNU C library defines this for functions which it implements to always fail with ENOSYS. Some functions are actually named something starting with __ and the normal name is an alias. 
*/ #if defined __stub_$2 || defined __stub___$2 choke me #endif int main () { return $2 (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : eval "$3=yes" else eval "$3=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi eval ac_res=\$$3 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno } # ac_fn_cxx_check_func # ac_fn_cxx_check_header_compile LINENO HEADER VAR INCLUDES # --------------------------------------------------------- # Tests whether HEADER exists and can be compiled using the include files in # INCLUDES, setting the cache variable VAR accordingly. ac_fn_cxx_check_header_compile () { as_lineno=${as_lineno-"$1"} as_lineno_stack=as_lineno_stack=$as_lineno_stack { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $2" >&5 $as_echo_n "checking for $2... " >&6; } if eval \${$3+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $4 #include <$2> _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : eval "$3=yes" else eval "$3=no" fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi eval ac_res=\$$3 { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } eval $as_lineno_stack; ${as_lineno_stack:+:} unset as_lineno } # ac_fn_cxx_check_header_compile cat >config.log <<_ACEOF This file contains any messages produced by compilers while running configure, to aid debugging if configure makes a mistake. It was created by DAR $as_me 2.6.8, which was generated by GNU Autoconf 2.69. Invocation command line was $ $0 $@ _ACEOF exec 5>>config.log { cat <<_ASUNAME ## --------- ## ## Platform. 
## ## --------- ## hostname = `(hostname || uname -n) 2>/dev/null | sed 1q` uname -m = `(uname -m) 2>/dev/null || echo unknown` uname -r = `(uname -r) 2>/dev/null || echo unknown` uname -s = `(uname -s) 2>/dev/null || echo unknown` uname -v = `(uname -v) 2>/dev/null || echo unknown` /usr/bin/uname -p = `(/usr/bin/uname -p) 2>/dev/null || echo unknown` /bin/uname -X = `(/bin/uname -X) 2>/dev/null || echo unknown` /bin/arch = `(/bin/arch) 2>/dev/null || echo unknown` /usr/bin/arch -k = `(/usr/bin/arch -k) 2>/dev/null || echo unknown` /usr/convex/getsysinfo = `(/usr/convex/getsysinfo) 2>/dev/null || echo unknown` /usr/bin/hostinfo = `(/usr/bin/hostinfo) 2>/dev/null || echo unknown` /bin/machine = `(/bin/machine) 2>/dev/null || echo unknown` /usr/bin/oslevel = `(/usr/bin/oslevel) 2>/dev/null || echo unknown` /bin/universe = `(/bin/universe) 2>/dev/null || echo unknown` _ASUNAME as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. $as_echo "PATH: $as_dir" done IFS=$as_save_IFS } >&5 cat >&5 <<_ACEOF ## ----------- ## ## Core tests. ## ## ----------- ## _ACEOF # Keep a trace of the command line. # Strip out --no-create and --no-recursion so they do not pile up. # Strip out --silent because we don't want to record it for future runs. # Also quote any args containing shell meta-characters. # Make two passes to allow for proper duplicate-argument suppression. 
ac_configure_args= ac_configure_args0= ac_configure_args1= ac_must_keep_next=false for ac_pass in 1 2 do for ac_arg do case $ac_arg in -no-create | --no-c* | -n | -no-recursion | --no-r*) continue ;; -q | -quiet | --quiet | --quie | --qui | --qu | --q \ | -silent | --silent | --silen | --sile | --sil) continue ;; *\'*) ac_arg=`$as_echo "$ac_arg" | sed "s/'/'\\\\\\\\''/g"` ;; esac case $ac_pass in 1) as_fn_append ac_configure_args0 " '$ac_arg'" ;; 2) as_fn_append ac_configure_args1 " '$ac_arg'" if test $ac_must_keep_next = true; then ac_must_keep_next=false # Got value, back to normal. else case $ac_arg in *=* | --config-cache | -C | -disable-* | --disable-* \ | -enable-* | --enable-* | -gas | --g* | -nfp | --nf* \ | -q | -quiet | --q* | -silent | --sil* | -v | -verb* \ | -with-* | --with-* | -without-* | --without-* | --x) case "$ac_configure_args0 " in "$ac_configure_args1"*" '$ac_arg' "* ) continue ;; esac ;; -* ) ac_must_keep_next=true ;; esac fi as_fn_append ac_configure_args " '$ac_arg'" ;; esac done done { ac_configure_args0=; unset ac_configure_args0;} { ac_configure_args1=; unset ac_configure_args1;} # When interrupted or exit'd, cleanup temporary files, and complete # config.log. We remove comments because anyway the quotes in there # would cause problems or look ugly. # WARNING: Use '\'' to represent an apostrophe within the trap. # WARNING: Do not start the trap code with a newline, due to a FreeBSD 4.0 bug. trap 'exit_status=$? # Save into config.log some information that might help in debugging. { echo $as_echo "## ---------------- ## ## Cache variables. 
## ## ---------------- ##" echo # The following way of writing the cache mishandles newlines in values, ( for ac_var in `(set) 2>&1 | sed -n '\''s/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'\''`; do eval ac_val=\$$ac_var case $ac_val in #( *${as_nl}*) case $ac_var in #( *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 $as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; esac case $ac_var in #( _ | IFS | as_nl) ;; #( BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( *) { eval $ac_var=; unset $ac_var;} ;; esac ;; esac done (set) 2>&1 | case $as_nl`(ac_space='\'' '\''; set) 2>&1` in #( *${as_nl}ac_space=\ *) sed -n \ "s/'\''/'\''\\\\'\'''\''/g; s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\''\\2'\''/p" ;; #( *) sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" ;; esac | sort ) echo $as_echo "## ----------------- ## ## Output variables. ## ## ----------------- ##" echo for ac_var in $ac_subst_vars do eval ac_val=\$$ac_var case $ac_val in *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; esac $as_echo "$ac_var='\''$ac_val'\''" done | sort echo if test -n "$ac_subst_files"; then $as_echo "## ------------------- ## ## File substitutions. ## ## ------------------- ##" echo for ac_var in $ac_subst_files do eval ac_val=\$$ac_var case $ac_val in *\'\''*) ac_val=`$as_echo "$ac_val" | sed "s/'\''/'\''\\\\\\\\'\'''\''/g"`;; esac $as_echo "$ac_var='\''$ac_val'\''" done | sort echo fi if test -s confdefs.h; then $as_echo "## ----------- ## ## confdefs.h. 
## ## ----------- ##" echo cat confdefs.h echo fi test "$ac_signal" != 0 && $as_echo "$as_me: caught signal $ac_signal" $as_echo "$as_me: exit $exit_status" } >&5 rm -f core *.core core.conftest.* && rm -f -r conftest* confdefs* conf$$* $ac_clean_files && exit $exit_status ' 0 for ac_signal in 1 2 13 15; do trap 'ac_signal='$ac_signal'; as_fn_exit 1' $ac_signal done ac_signal=0 # confdefs.h avoids OS command line length limits that DEFS can exceed. rm -f -r conftest* confdefs.h $as_echo "/* confdefs.h */" > confdefs.h # Predefined preprocessor variables. cat >>confdefs.h <<_ACEOF #define PACKAGE_NAME "$PACKAGE_NAME" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_TARNAME "$PACKAGE_TARNAME" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_VERSION "$PACKAGE_VERSION" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_STRING "$PACKAGE_STRING" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_BUGREPORT "$PACKAGE_BUGREPORT" _ACEOF cat >>confdefs.h <<_ACEOF #define PACKAGE_URL "$PACKAGE_URL" _ACEOF # Let the site file select an alternate cache file if it wants to. # Prefer an explicitly selected file to automatically selected ones. ac_site_file1=NONE ac_site_file2=NONE if test -n "$CONFIG_SITE"; then # We do not want a PATH search for config.site. case $CONFIG_SITE in #(( -*) ac_site_file1=./$CONFIG_SITE;; */*) ac_site_file1=$CONFIG_SITE;; *) ac_site_file1=./$CONFIG_SITE;; esac elif test "x$prefix" != xNONE; then ac_site_file1=$prefix/share/config.site ac_site_file2=$prefix/etc/config.site else ac_site_file1=$ac_default_prefix/share/config.site ac_site_file2=$ac_default_prefix/etc/config.site fi for ac_site_file in "$ac_site_file1" "$ac_site_file2" do test "x$ac_site_file" = xNONE && continue if test /dev/null != "$ac_site_file" && test -r "$ac_site_file"; then { $as_echo "$as_me:${as_lineno-$LINENO}: loading site script $ac_site_file" >&5 $as_echo "$as_me: loading site script $ac_site_file" >&6;} sed 's/^/| /' "$ac_site_file" >&5 . 
"$ac_site_file" \ || { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "failed to load site script $ac_site_file See \`config.log' for more details" "$LINENO" 5; } fi done if test -r "$cache_file"; then # Some versions of bash will fail to source /dev/null (special files # actually), so we avoid doing that. DJGPP emulates it as a regular file. if test /dev/null != "$cache_file" && test -f "$cache_file"; then { $as_echo "$as_me:${as_lineno-$LINENO}: loading cache $cache_file" >&5 $as_echo "$as_me: loading cache $cache_file" >&6;} case $cache_file in [\\/]* | ?:[\\/]* ) . "$cache_file";; *) . "./$cache_file";; esac fi else { $as_echo "$as_me:${as_lineno-$LINENO}: creating cache $cache_file" >&5 $as_echo "$as_me: creating cache $cache_file" >&6;} >$cache_file fi gt_needs="$gt_needs " as_fn_append ac_header_list " utime.h" # Check that the precious variables saved in the cache have kept the same # value. ac_cache_corrupted=false for ac_var in $ac_precious_vars; do eval ac_old_set=\$ac_cv_env_${ac_var}_set eval ac_new_set=\$ac_env_${ac_var}_set eval ac_old_val=\$ac_cv_env_${ac_var}_value eval ac_new_val=\$ac_env_${ac_var}_value case $ac_old_set,$ac_new_set in set,) { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&5 $as_echo "$as_me: error: \`$ac_var' was set to \`$ac_old_val' in the previous run" >&2;} ac_cache_corrupted=: ;; ,set) { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' was not set in the previous run" >&5 $as_echo "$as_me: error: \`$ac_var' was not set in the previous run" >&2;} ac_cache_corrupted=: ;; ,);; *) if test "x$ac_old_val" != "x$ac_new_val"; then # differences in whitespace do not lead to failure. 
ac_old_val_w=`echo x $ac_old_val` ac_new_val_w=`echo x $ac_new_val` if test "$ac_old_val_w" != "$ac_new_val_w"; then { $as_echo "$as_me:${as_lineno-$LINENO}: error: \`$ac_var' has changed since the previous run:" >&5 $as_echo "$as_me: error: \`$ac_var' has changed since the previous run:" >&2;} ac_cache_corrupted=: else { $as_echo "$as_me:${as_lineno-$LINENO}: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&5 $as_echo "$as_me: warning: ignoring whitespace changes in \`$ac_var' since the previous run:" >&2;} eval $ac_var=\$ac_old_val fi { $as_echo "$as_me:${as_lineno-$LINENO}: former value: \`$ac_old_val'" >&5 $as_echo "$as_me: former value: \`$ac_old_val'" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: current value: \`$ac_new_val'" >&5 $as_echo "$as_me: current value: \`$ac_new_val'" >&2;} fi;; esac # Pass precious variables to config.status. if test "$ac_new_set" = set; then case $ac_new_val in *\'*) ac_arg=$ac_var=`$as_echo "$ac_new_val" | sed "s/'/'\\\\\\\\''/g"` ;; *) ac_arg=$ac_var=$ac_new_val ;; esac case " $ac_configure_args " in *" '$ac_arg' "*) ;; # Avoid dups. Use of quotes ensures accuracy. *) as_fn_append ac_configure_args " '$ac_arg'" ;; esac fi done if $ac_cache_corrupted; then { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} { $as_echo "$as_me:${as_lineno-$LINENO}: error: changes in the environment can compromise the build" >&5 $as_echo "$as_me: error: changes in the environment can compromise the build" >&2;} as_fn_error $? "run \`make distclean' and/or \`rm $cache_file' and start over" "$LINENO" 5 fi ## -------------------- ## ## Main body of script. 
## ## -------------------- ## ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu ac_config_headers="$ac_config_headers config.h" ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu cat >>confdefs.h <<_ACEOF #define DAR_VERSION "2.6.8" _ACEOF am__api_version='1.15' ac_aux_dir= for ac_dir in "$srcdir" "$srcdir/.." "$srcdir/../.."; do if test -f "$ac_dir/install-sh"; then ac_aux_dir=$ac_dir ac_install_sh="$ac_aux_dir/install-sh -c" break elif test -f "$ac_dir/install.sh"; then ac_aux_dir=$ac_dir ac_install_sh="$ac_aux_dir/install.sh -c" break elif test -f "$ac_dir/shtool"; then ac_aux_dir=$ac_dir ac_install_sh="$ac_aux_dir/shtool install -c" break fi done if test -z "$ac_aux_dir"; then as_fn_error $? "cannot find install-sh, install.sh, or shtool in \"$srcdir\" \"$srcdir/..\" \"$srcdir/../..\"" "$LINENO" 5 fi # These three variables are undocumented and unsupported, # and are intended to be withdrawn in a future Autoconf release. # They can cause serious problems if a builder's source tree is in a directory # whose full name contains unusual characters. ac_config_guess="$SHELL $ac_aux_dir/config.guess" # Please don't use this var. ac_config_sub="$SHELL $ac_aux_dir/config.sub" # Please don't use this var. ac_configure="$SHELL $ac_aux_dir/configure" # Please don't use this var. # Find a good install program. We prefer a C program (faster), # so one script is as good as another. 
But avoid the broken or # incompatible versions: # SysV /etc/install, /usr/sbin/install # SunOS /usr/etc/install # IRIX /sbin/install # AIX /bin/install # AmigaOS /C/install, which installs bootblocks on floppy discs # AIX 4 /usr/bin/installbsd, which doesn't work without a -g flag # AFS /usr/afsws/bin/install, which mishandles nonexistent args # SVR4 /usr/ucb/install, which tries to use the nonexistent group "staff" # OS/2's system install, which has a completely different semantic # ./install, which can be erroneously created by make from ./install.sh. # Reject install programs that cannot install multiple files. { $as_echo "$as_me:${as_lineno-$LINENO}: checking for a BSD-compatible install" >&5 $as_echo_n "checking for a BSD-compatible install... " >&6; } if test -z "$INSTALL"; then if ${ac_cv_path_install+:} false; then : $as_echo_n "(cached) " >&6 else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. # Account for people who put trailing slashes in PATH elements. case $as_dir/ in #(( ./ | .// | /[cC]/* | \ /etc/* | /usr/sbin/* | /usr/etc/* | /sbin/* | /usr/afsws/bin/* | \ ?:[\\/]os2[\\/]install[\\/]* | ?:[\\/]OS2[\\/]INSTALL[\\/]* | \ /usr/ucb/* ) ;; *) # OSF1 and SCO ODT 3.0 have their own names for install. # Don't use installbsd from OSF since it installs stuff as root # by default. for ac_prog in ginstall scoinst install; do for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_prog$ac_exec_ext"; then if test $ac_prog = install && grep dspmsg "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then # AIX install. It has an incompatible calling convention. : elif test $ac_prog = install && grep pwplus "$as_dir/$ac_prog$ac_exec_ext" >/dev/null 2>&1; then # program-specific install script used by HP pwplus--don't use. 
: else rm -rf conftest.one conftest.two conftest.dir echo one > conftest.one echo two > conftest.two mkdir conftest.dir if "$as_dir/$ac_prog$ac_exec_ext" -c conftest.one conftest.two "`pwd`/conftest.dir" && test -s conftest.one && test -s conftest.two && test -s conftest.dir/conftest.one && test -s conftest.dir/conftest.two then ac_cv_path_install="$as_dir/$ac_prog$ac_exec_ext -c" break 3 fi fi fi done done ;; esac done IFS=$as_save_IFS rm -rf conftest.one conftest.two conftest.dir fi if test "${ac_cv_path_install+set}" = set; then INSTALL=$ac_cv_path_install else # As a last resort, use the slow shell script. Don't cache a # value for INSTALL within a source directory, because that will # break other packages using the cache if that directory is # removed, or if the value is a relative name. INSTALL=$ac_install_sh fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $INSTALL" >&5 $as_echo "$INSTALL" >&6; } # Use test -z because SunOS4 sh mishandles braces in ${var-val}. # It thinks the first close brace ends the variable substitution. test -z "$INSTALL_PROGRAM" && INSTALL_PROGRAM='${INSTALL}' test -z "$INSTALL_SCRIPT" && INSTALL_SCRIPT='${INSTALL}' test -z "$INSTALL_DATA" && INSTALL_DATA='${INSTALL} -m 644' { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether build environment is sane" >&5 $as_echo_n "checking whether build environment is sane... " >&6; } # Reject unsafe characters in $srcdir or the absolute working directory # name. Accept space and tab only in the latter. am_lf=' ' case `pwd` in *[\\\"\#\$\&\'\`$am_lf]*) as_fn_error $? "unsafe absolute working directory name" "$LINENO" 5;; esac case $srcdir in *[\\\"\#\$\&\'\`$am_lf\ \ ]*) as_fn_error $? "unsafe srcdir value: '$srcdir'" "$LINENO" 5;; esac # Do 'set' in a subshell so we don't clobber the current shell's # arguments. 
Must try -L first in case configure is actually a # symlink; some systems play weird games with the mod time of symlinks # (eg FreeBSD returns the mod time of the symlink's containing # directory). if ( am_has_slept=no for am_try in 1 2; do echo "timestamp, slept: $am_has_slept" > conftest.file set X `ls -Lt "$srcdir/configure" conftest.file 2> /dev/null` if test "$*" = "X"; then # -L didn't work. set X `ls -t "$srcdir/configure" conftest.file` fi if test "$*" != "X $srcdir/configure conftest.file" \ && test "$*" != "X conftest.file $srcdir/configure"; then # If neither matched, then we have a broken ls. This can happen # if, for instance, CONFIG_SHELL is bash and it inherits a # broken ls alias from the environment. This has actually # happened. Such a system could not be considered "sane". as_fn_error $? "ls -t appears to fail. Make sure there is not a broken alias in your environment" "$LINENO" 5 fi if test "$2" = conftest.file || test $am_try -eq 2; then break fi # Just in case. sleep 1 am_has_slept=yes done test "$2" = conftest.file ) then # Ok. : else as_fn_error $? "newly created file is older than distributed files! Check your system clock" "$LINENO" 5 fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } # If we didn't sleep, we still need to ensure time stamps of config.status and # generated files are strictly newer. am_sleep_pid= if grep 'slept: no' conftest.file >/dev/null 2>&1; then ( sleep 1 ) & am_sleep_pid=$! fi rm -f conftest.file test "$program_prefix" != NONE && program_transform_name="s&^&$program_prefix&;$program_transform_name" # Use a double $ so make ignores it. test "$program_suffix" != NONE && program_transform_name="s&\$&$program_suffix&;$program_transform_name" # Double any \ or $. # By default was `s,x,x', remove it if useless. ac_script='s/[\\$]/&&/g;s/;s,x,x,$//' program_transform_name=`$as_echo "$program_transform_name" | sed "$ac_script"` # Expand $ac_aux_dir to an absolute path. 
am_aux_dir=`cd "$ac_aux_dir" && pwd` if test x"${MISSING+set}" != xset; then case $am_aux_dir in *\ * | *\ *) MISSING="\${SHELL} \"$am_aux_dir/missing\"" ;; *) MISSING="\${SHELL} $am_aux_dir/missing" ;; esac fi # Use eval to expand $SHELL if eval "$MISSING --is-lightweight"; then am_missing_run="$MISSING " else am_missing_run= { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: 'missing' script is too old or missing" >&5 $as_echo "$as_me: WARNING: 'missing' script is too old or missing" >&2;} fi if test x"${install_sh+set}" != xset; then case $am_aux_dir in *\ * | *\ *) install_sh="\${SHELL} '$am_aux_dir/install-sh'" ;; *) install_sh="\${SHELL} $am_aux_dir/install-sh" esac fi # Installed binaries are usually stripped using 'strip' when the user # run "make install-strip". However 'strip' might not be the right # tool to use in cross-compilation environments, therefore Automake # will honor the 'STRIP' environment variable to overrule this program. if test "$cross_compiling" != no; then if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args. set dummy ${ac_tool_prefix}strip; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_STRIP+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$STRIP"; then ac_cv_prog_STRIP="$STRIP" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_STRIP="${ac_tool_prefix}strip" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi STRIP=$ac_cv_prog_STRIP if test -n "$STRIP"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5 $as_echo "$STRIP" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_STRIP"; then ac_ct_STRIP=$STRIP # Extract the first word of "strip", so it can be a program name with args. set dummy strip; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_STRIP+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_STRIP"; then ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_STRIP="strip" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP if test -n "$ac_ct_STRIP"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5 $as_echo "$ac_ct_STRIP" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_STRIP" = x; then STRIP=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac STRIP=$ac_ct_STRIP fi else STRIP="$ac_cv_prog_STRIP" fi fi INSTALL_STRIP_PROGRAM="\$(install_sh) -c -s" { $as_echo "$as_me:${as_lineno-$LINENO}: checking for a thread-safe mkdir -p" >&5 $as_echo_n "checking for a thread-safe mkdir -p... " >&6; } if test -z "$MKDIR_P"; then if ${ac_cv_path_mkdir+:} false; then : $as_echo_n "(cached) " >&6 else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH$PATH_SEPARATOR/opt/sfw/bin do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_prog in mkdir gmkdir; do for ac_exec_ext in '' $ac_executable_extensions; do as_fn_executable_p "$as_dir/$ac_prog$ac_exec_ext" || continue case `"$as_dir/$ac_prog$ac_exec_ext" --version 2>&1` in #( 'mkdir (GNU coreutils) '* | \ 'mkdir (coreutils) '* | \ 'mkdir (fileutils) '4.1*) ac_cv_path_mkdir=$as_dir/$ac_prog$ac_exec_ext break 3;; esac done done done IFS=$as_save_IFS fi test -d ./--version && rmdir ./--version if test "${ac_cv_path_mkdir+set}" = set; then MKDIR_P="$ac_cv_path_mkdir -p" else # As a last resort, use the slow shell script. 
Don't cache a # value for MKDIR_P within a source directory, because that will # break other packages using the cache if that directory is # removed, or if the value is a relative name. MKDIR_P="$ac_install_sh -d" fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MKDIR_P" >&5 $as_echo "$MKDIR_P" >&6; } for ac_prog in gawk mawk nawk awk do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_AWK+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$AWK"; then ac_cv_prog_AWK="$AWK" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_AWK="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi AWK=$ac_cv_prog_AWK if test -n "$AWK"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AWK" >&5 $as_echo "$AWK" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$AWK" && break done { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ${MAKE-make} sets \$(MAKE)" >&5 $as_echo_n "checking whether ${MAKE-make} sets \$(MAKE)... " >&6; } set x ${MAKE-make} ac_make=`$as_echo "$2" | sed 's/+/p/g; s/[^a-zA-Z0-9_]/_/g'` if eval \${ac_cv_prog_make_${ac_make}_set+:} false; then : $as_echo_n "(cached) " >&6 else cat >conftest.make <<\_ACEOF SHELL = /bin/sh all: @echo '@@@%%%=$(MAKE)=@@@%%%' _ACEOF # GNU make sometimes prints "make[1]: Entering ...", which would confuse us. 
case `${MAKE-make} -f conftest.make 2>/dev/null` in *@@@%%%=?*=@@@%%%*) eval ac_cv_prog_make_${ac_make}_set=yes;; *) eval ac_cv_prog_make_${ac_make}_set=no;; esac rm -f conftest.make fi if eval test \$ac_cv_prog_make_${ac_make}_set = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } SET_MAKE= else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } SET_MAKE="MAKE=${MAKE-make}" fi rm -rf .tst 2>/dev/null mkdir .tst 2>/dev/null if test -d .tst; then am__leading_dot=. else am__leading_dot=_ fi rmdir .tst 2>/dev/null # Check whether --enable-silent-rules was given. if test "${enable_silent_rules+set}" = set; then : enableval=$enable_silent_rules; fi case $enable_silent_rules in # ((( yes) AM_DEFAULT_VERBOSITY=0;; no) AM_DEFAULT_VERBOSITY=1;; *) AM_DEFAULT_VERBOSITY=1;; esac am_make=${MAKE-make} { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $am_make supports nested variables" >&5 $as_echo_n "checking whether $am_make supports nested variables... " >&6; } if ${am_cv_make_support_nested_variables+:} false; then : $as_echo_n "(cached) " >&6 else if $as_echo 'TRUE=$(BAR$(V)) BAR0=false BAR1=true V=1 am__doit: @$(TRUE) .PHONY: am__doit' | $am_make -f - >/dev/null 2>&1; then am_cv_make_support_nested_variables=yes else am_cv_make_support_nested_variables=no fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_make_support_nested_variables" >&5 $as_echo "$am_cv_make_support_nested_variables" >&6; } if test $am_cv_make_support_nested_variables = yes; then AM_V='$(V)' AM_DEFAULT_V='$(AM_DEFAULT_VERBOSITY)' else AM_V=$AM_DEFAULT_VERBOSITY AM_DEFAULT_V=$AM_DEFAULT_VERBOSITY fi AM_BACKSLASH='\' if test "`cd $srcdir && pwd`" != "`pwd`"; then # Use -I$(srcdir) only when $(srcdir) != ., so that make's output # is not polluted with repeated "-I." am__isrc=' -I$(srcdir)' # test to see if srcdir already configured if test -f $srcdir/config.status; then as_fn_error $? 
"source directory already configured; run \"make distclean\" there first" "$LINENO" 5 fi fi # test whether we have cygpath if test -z "$CYGPATH_W"; then if (cygpath --version) >/dev/null 2>/dev/null; then CYGPATH_W='cygpath -w' else CYGPATH_W=echo fi fi # Define the identity of the package. PACKAGE='dar' VERSION='2.6.8' cat >>confdefs.h <<_ACEOF #define PACKAGE "$PACKAGE" _ACEOF cat >>confdefs.h <<_ACEOF #define VERSION "$VERSION" _ACEOF # Some tools Automake needs. ACLOCAL=${ACLOCAL-"${am_missing_run}aclocal-${am__api_version}"} AUTOCONF=${AUTOCONF-"${am_missing_run}autoconf"} AUTOMAKE=${AUTOMAKE-"${am_missing_run}automake-${am__api_version}"} AUTOHEADER=${AUTOHEADER-"${am_missing_run}autoheader"} MAKEINFO=${MAKEINFO-"${am_missing_run}makeinfo"} # For better backward compatibility. To be removed once Automake 1.9.x # dies out for good. For more background, see: # # mkdir_p='$(MKDIR_P)' # We need awk for the "check" target (and possibly the TAP driver). The # system "awk" is bad on some platforms. # Always define AMTAR for backward compatibility. Yes, it's still used # in the wild :-( We should find a proper way to deprecate it ... AMTAR='$${TAR-tar}' # We'll loop over all known methods to create a tar archive until one works. _am_tools='gnutar pax cpio none' am__tar='$${TAR-tar} chof - "$$tardir"' am__untar='$${TAR-tar} xf -' # POSIX will say in a future version that running "rm -f" with no argument # is OK; and we want to be able to make that assumption in our Makefile # recipes. So use an aggressive probe to check that the usage we want is # actually supported "in the wild" to an acceptable degree. # See automake bug#10828. # To make any issue more visible, cause the running configure to be aborted # by default if the 'rm' program in use doesn't match our expectations; the # user can still override this though. if rm -f && rm -fr && rm -rf; then : OK; else cat >&2 <<'END' Oops! 
Your 'rm' program seems unable to run without file operands specified on the command line, even when the '-f' option is present. This is contrary to the behaviour of most rm programs out there, and not conforming with the upcoming POSIX standard: Please tell bug-automake@gnu.org about your system, including the value of your $PATH and any error possibly output before this message. This can help us improve future automake versions. END if test x"$ACCEPT_INFERIOR_RM_PROGRAM" = x"yes"; then echo 'Configuration will proceed anyway, since you have set the' >&2 echo 'ACCEPT_INFERIOR_RM_PROGRAM variable to "yes"' >&2 echo >&2 else cat >&2 <<'END' Aborting the configuration process, to ensure you take notice of the issue. You can download and install GNU coreutils to get an 'rm' implementation that behaves properly: . If you want to complete the configuration process using your problematic 'rm' anyway, export the environment variable ACCEPT_INFERIOR_RM_PROGRAM to "yes", and re-run configure. END as_fn_error $? "Your 'rm' program is bad, sorry." "$LINENO" 5 fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for a sed that does not truncate output" >&5 $as_echo_n "checking for a sed that does not truncate output... " >&6; } if ${ac_cv_path_SED+:} false; then : $as_echo_n "(cached) " >&6 else ac_script=s/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb/ for ac_i in 1 2 3 4 5 6 7; do ac_script="$ac_script$as_nl$ac_script" done echo "$ac_script" 2>/dev/null | sed 99q >conftest.sed { ac_script=; unset ac_script;} if test -z "$SED"; then ac_path_SED_found=false # Loop through the user's path and test for each of PROGNAME-LIST as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_prog in sed gsed; do for ac_exec_ext in '' $ac_executable_extensions; do ac_path_SED="$as_dir/$ac_prog$ac_exec_ext" as_fn_executable_p "$ac_path_SED" || continue # Check for GNU ac_path_SED and select it if it is found. 
# Check for GNU $ac_path_SED case `"$ac_path_SED" --version 2>&1` in *GNU*) ac_cv_path_SED="$ac_path_SED" ac_path_SED_found=:;; *) ac_count=0 $as_echo_n 0123456789 >"conftest.in" while : do cat "conftest.in" "conftest.in" >"conftest.tmp" mv "conftest.tmp" "conftest.in" cp "conftest.in" "conftest.nl" $as_echo '' >> "conftest.nl" "$ac_path_SED" -f conftest.sed < "conftest.nl" >"conftest.out" 2>/dev/null || break diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break as_fn_arith $ac_count + 1 && ac_count=$as_val if test $ac_count -gt ${ac_path_SED_max-0}; then # Best one so far, save it but keep looking for a better one ac_cv_path_SED="$ac_path_SED" ac_path_SED_max=$ac_count fi # 10*(2^10) chars as input seems more than enough test $ac_count -gt 10 && break done rm -f conftest.in conftest.tmp conftest.nl conftest.out;; esac $ac_path_SED_found && break 3 done done done IFS=$as_save_IFS if test -z "$ac_cv_path_SED"; then as_fn_error $? "no acceptable sed could be found in \$PATH" "$LINENO" 5 fi else ac_cv_path_SED=$SED fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_SED" >&5 $as_echo "$ac_cv_path_SED" >&6; } SED="$ac_cv_path_SED" rm -f conftest.sed { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether NLS is requested" >&5 $as_echo_n "checking whether NLS is requested... " >&6; } # Check whether --enable-nls was given. if test "${enable_nls+set}" = set; then : enableval=$enable_nls; USE_NLS=$enableval else USE_NLS=yes fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $USE_NLS" >&5 $as_echo "$USE_NLS" >&6; } GETTEXT_MACRO_VERSION=0.19 # Prepare PATH_SEPARATOR. # The user is always right. if test "${PATH_SEPARATOR+set}" != set; then # Determine PATH_SEPARATOR by trying to find /bin/sh in a PATH which # contains only /bin. Note that ksh looks also at the FPATH variable, # so we have to set that as well for the test. 
PATH_SEPARATOR=: (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 \ && { (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 \ || PATH_SEPARATOR=';' } fi # Find out how to test for executable files. Don't use a zero-byte file, # as systems may use methods other than mode bits to determine executability. cat >conf$$.file <<_ASEOF #! /bin/sh exit 0 _ASEOF chmod +x conf$$.file if test -x conf$$.file >/dev/null 2>&1; then ac_executable_p="test -x" else ac_executable_p="test -f" fi rm -f conf$$.file # Extract the first word of "msgfmt", so it can be a program name with args. set dummy msgfmt; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_MSGFMT+:} false; then : $as_echo_n "(cached) " >&6 else case "$MSGFMT" in [\\/]* | ?:[\\/]*) ac_cv_path_MSGFMT="$MSGFMT" # Let the user override the test with a path. ;; *) ac_save_IFS="$IFS"; IFS=$PATH_SEPARATOR for ac_dir in $PATH; do IFS="$ac_save_IFS" test -z "$ac_dir" && ac_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $ac_executable_p "$ac_dir/$ac_word$ac_exec_ext"; then echo "$as_me: trying $ac_dir/$ac_word..." >&5 if $ac_dir/$ac_word --statistics /dev/null >&5 2>&1 && (if $ac_dir/$ac_word --statistics /dev/null 2>&1 >/dev/null | grep usage >/dev/null; then exit 1; else exit 0; fi); then ac_cv_path_MSGFMT="$ac_dir/$ac_word$ac_exec_ext" break 2 fi fi done done IFS="$ac_save_IFS" test -z "$ac_cv_path_MSGFMT" && ac_cv_path_MSGFMT=":" ;; esac fi MSGFMT="$ac_cv_path_MSGFMT" if test "$MSGFMT" != ":"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MSGFMT" >&5 $as_echo "$MSGFMT" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi # Extract the first word of "gmsgfmt", so it can be a program name with args. set dummy gmsgfmt; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... 
" >&6; } if ${ac_cv_path_GMSGFMT+:} false; then : $as_echo_n "(cached) " >&6 else case $GMSGFMT in [\\/]* | ?:[\\/]*) ac_cv_path_GMSGFMT="$GMSGFMT" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_GMSGFMT="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS test -z "$ac_cv_path_GMSGFMT" && ac_cv_path_GMSGFMT="$MSGFMT" ;; esac fi GMSGFMT=$ac_cv_path_GMSGFMT if test -n "$GMSGFMT"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $GMSGFMT" >&5 $as_echo "$GMSGFMT" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi case `$MSGFMT --version | sed 1q | sed -e 's,^[^0-9]*,,'` in '' | 0.[0-9] | 0.[0-9].* | 0.1[0-4] | 0.1[0-4].*) MSGFMT_015=: ;; *) MSGFMT_015=$MSGFMT ;; esac case `$GMSGFMT --version | sed 1q | sed -e 's,^[^0-9]*,,'` in '' | 0.[0-9] | 0.[0-9].* | 0.1[0-4] | 0.1[0-4].*) GMSGFMT_015=: ;; *) GMSGFMT_015=$GMSGFMT ;; esac # Prepare PATH_SEPARATOR. # The user is always right. if test "${PATH_SEPARATOR+set}" != set; then # Determine PATH_SEPARATOR by trying to find /bin/sh in a PATH which # contains only /bin. Note that ksh looks also at the FPATH variable, # so we have to set that as well for the test. PATH_SEPARATOR=: (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 \ && { (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 \ || PATH_SEPARATOR=';' } fi # Find out how to test for executable files. Don't use a zero-byte file, # as systems may use methods other than mode bits to determine executability. cat >conf$$.file <<_ASEOF #! 
/bin/sh exit 0 _ASEOF chmod +x conf$$.file if test -x conf$$.file >/dev/null 2>&1; then ac_executable_p="test -x" else ac_executable_p="test -f" fi rm -f conf$$.file # Extract the first word of "xgettext", so it can be a program name with args. set dummy xgettext; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_XGETTEXT+:} false; then : $as_echo_n "(cached) " >&6 else case "$XGETTEXT" in [\\/]* | ?:[\\/]*) ac_cv_path_XGETTEXT="$XGETTEXT" # Let the user override the test with a path. ;; *) ac_save_IFS="$IFS"; IFS=$PATH_SEPARATOR for ac_dir in $PATH; do IFS="$ac_save_IFS" test -z "$ac_dir" && ac_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $ac_executable_p "$ac_dir/$ac_word$ac_exec_ext"; then echo "$as_me: trying $ac_dir/$ac_word..." >&5 if $ac_dir/$ac_word --omit-header --copyright-holder= --msgid-bugs-address= /dev/null >&5 2>&1 && (if $ac_dir/$ac_word --omit-header --copyright-holder= --msgid-bugs-address= /dev/null 2>&1 >/dev/null | grep usage >/dev/null; then exit 1; else exit 0; fi); then ac_cv_path_XGETTEXT="$ac_dir/$ac_word$ac_exec_ext" break 2 fi fi done done IFS="$ac_save_IFS" test -z "$ac_cv_path_XGETTEXT" && ac_cv_path_XGETTEXT=":" ;; esac fi XGETTEXT="$ac_cv_path_XGETTEXT" if test "$XGETTEXT" != ":"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $XGETTEXT" >&5 $as_echo "$XGETTEXT" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi rm -f messages.po case `$XGETTEXT --version | sed 1q | sed -e 's,^[^0-9]*,,'` in '' | 0.[0-9] | 0.[0-9].* | 0.1[0-4] | 0.1[0-4].*) XGETTEXT_015=: ;; *) XGETTEXT_015=$XGETTEXT ;; esac # Prepare PATH_SEPARATOR. # The user is always right. if test "${PATH_SEPARATOR+set}" != set; then # Determine PATH_SEPARATOR by trying to find /bin/sh in a PATH which # contains only /bin. Note that ksh looks also at the FPATH variable, # so we have to set that as well for the test. 
PATH_SEPARATOR=: (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 \ && { (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 \ || PATH_SEPARATOR=';' } fi # Find out how to test for executable files. Don't use a zero-byte file, # as systems may use methods other than mode bits to determine executability. cat >conf$$.file <<_ASEOF #! /bin/sh exit 0 _ASEOF chmod +x conf$$.file if test -x conf$$.file >/dev/null 2>&1; then ac_executable_p="test -x" else ac_executable_p="test -f" fi rm -f conf$$.file # Extract the first word of "msgmerge", so it can be a program name with args. set dummy msgmerge; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_MSGMERGE+:} false; then : $as_echo_n "(cached) " >&6 else case "$MSGMERGE" in [\\/]* | ?:[\\/]*) ac_cv_path_MSGMERGE="$MSGMERGE" # Let the user override the test with a path. ;; *) ac_save_IFS="$IFS"; IFS=$PATH_SEPARATOR for ac_dir in $PATH; do IFS="$ac_save_IFS" test -z "$ac_dir" && ac_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if $ac_executable_p "$ac_dir/$ac_word$ac_exec_ext"; then echo "$as_me: trying $ac_dir/$ac_word..." 
>&5 if $ac_dir/$ac_word --update -q /dev/null /dev/null >&5 2>&1; then ac_cv_path_MSGMERGE="$ac_dir/$ac_word$ac_exec_ext" break 2 fi fi done done IFS="$ac_save_IFS" test -z "$ac_cv_path_MSGMERGE" && ac_cv_path_MSGMERGE=":" ;; esac fi MSGMERGE="$ac_cv_path_MSGMERGE" if test "$MSGMERGE" != ":"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MSGMERGE" >&5 $as_echo "$MSGMERGE" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$localedir" || localedir='${datadir}/locale' test -n "${XGETTEXT_EXTRA_OPTIONS+set}" || XGETTEXT_EXTRA_OPTIONS= ac_config_commands="$ac_config_commands po-directories" if test "X$prefix" = "XNONE"; then acl_final_prefix="$ac_default_prefix" else acl_final_prefix="$prefix" fi if test "X$exec_prefix" = "XNONE"; then acl_final_exec_prefix='${prefix}' else acl_final_exec_prefix="$exec_prefix" fi acl_save_prefix="$prefix" prefix="$acl_final_prefix" eval acl_final_exec_prefix=\"$acl_final_exec_prefix\" prefix="$acl_save_prefix" DEPDIR="${am__leading_dot}deps" ac_config_commands="$ac_config_commands depfiles" am_make=${MAKE-make} cat > confinc << 'END' am__doit: @echo this is the am__doit target .PHONY: am__doit END # If we don't find an include directive, just comment out the code. { $as_echo "$as_me:${as_lineno-$LINENO}: checking for style of include used by $am_make" >&5 $as_echo_n "checking for style of include used by $am_make... " >&6; } am__include="#" am__quote= _am_result=none # First try GNU make style include. echo "include confinc" > confmf # Ignore all kinds of additional output from 'make'. case `$am_make -s -f confmf 2> /dev/null` in #( *the\ am__doit\ target*) am__include=include am__quote= _am_result=GNU ;; esac # Now try BSD make style include. 
if test "$am__include" = "#"; then echo '.include "confinc"' > confmf case `$am_make -s -f confmf 2> /dev/null` in #( *the\ am__doit\ target*) am__include=.include am__quote="\"" _am_result=BSD ;; esac fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $_am_result" >&5 $as_echo "$_am_result" >&6; } rm -f confinc confmf # Check whether --enable-dependency-tracking was given. if test "${enable_dependency_tracking+set}" = set; then : enableval=$enable_dependency_tracking; fi if test "x$enable_dependency_tracking" != xno; then am_depcomp="$ac_aux_dir/depcomp" AMDEPBACKSLASH='\' am__nodep='_no' fi if test "x$enable_dependency_tracking" != xno; then AMDEP_TRUE= AMDEP_FALSE='#' else AMDEP_TRUE='#' AMDEP_FALSE= fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args. set dummy ${ac_tool_prefix}gcc; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_CC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CC="${ac_tool_prefix}gcc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 $as_echo "$CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_CC"; then ac_ct_CC=$CC # Extract the first word of "gcc", so it can be a program name with args. set dummy gcc; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_CC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_CC"; then ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_CC="gcc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_CC=$ac_cv_prog_ac_ct_CC if test -n "$ac_ct_CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 $as_echo "$ac_ct_CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_CC" = x; then CC="" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac CC=$ac_ct_CC fi else CC="$ac_cv_prog_CC" fi if test -z "$CC"; then if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args. set dummy ${ac_tool_prefix}cc; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_CC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CC="${ac_tool_prefix}cc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 $as_echo "$CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi fi if test -z "$CC"; then # Extract the first word of "cc", so it can be a program name with args. 
set dummy cc; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_CC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else ac_prog_rejected=no as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then ac_prog_rejected=yes continue fi ac_cv_prog_CC="cc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS if test $ac_prog_rejected = yes; then # We found a bogon in the path, so make sure we never use it. set dummy $ac_cv_prog_CC shift if test $# != 0; then # We chose a different compiler from the bogus one. # However, it has the same basename, so the bogon will be chosen # first if we set CC to just the basename; use the full file name. shift ac_cv_prog_CC="$as_dir/$ac_word${1+' '}$@" fi fi fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 $as_echo "$CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$CC"; then if test -n "$ac_tool_prefix"; then for ac_prog in cl.exe do # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. set dummy $ac_tool_prefix$ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_CC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CC="$ac_tool_prefix$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 $as_echo "$CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$CC" && break done fi if test -z "$CC"; then ac_ct_CC=$CC for ac_prog in cl.exe do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_CC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_CC"; then ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_CC="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_CC=$ac_cv_prog_ac_ct_CC if test -n "$ac_ct_CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 $as_echo "$ac_ct_CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$ac_ct_CC" && break done if test "x$ac_ct_CC" = x; then CC="" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac CC=$ac_ct_CC fi fi fi test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "no acceptable C compiler found in \$PATH See \`config.log' for more details" "$LINENO" 5; } # Provide some information about the compiler. $as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5 set X $ac_compile ac_compiler=$2 for ac_option in --version -v -V -qversion; do { { ac_try="$ac_compiler $ac_option >&5" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_compiler $ac_option >&5") 2>conftest.err ac_status=$? if test -s conftest.err; then sed '10a\ ... rest of stderr output deleted ... 10q' conftest.err >conftest.er1 cat conftest.er1 >&5 fi rm -f conftest.er1 conftest.err $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } done cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ int main () { ; return 0; } _ACEOF ac_clean_files_save=$ac_clean_files ac_clean_files="$ac_clean_files a.out a.out.dSYM a.exe b.out" # Try to create an executable without -o first, disregard a.out. # It will help us diagnose broken compilers, and finding out an intuition # of exeext. { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler works" >&5 $as_echo_n "checking whether the C compiler works... " >&6; } ac_link_default=`$as_echo "$ac_link" | sed 's/ -o *conftest[^ ]*//'` # The possible output files: ac_files="a.out conftest.exe conftest a.exe a_out.exe b.out conftest.*" ac_rmfiles= for ac_file in $ac_files do case $ac_file in *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; * ) ac_rmfiles="$ac_rmfiles $ac_file";; esac done rm -f $ac_rmfiles if { { ac_try="$ac_link_default" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_link_default") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then : # Autoconf-2.13 could set the ac_cv_exeext variable to `no'. # So ignore a value of `no', otherwise this would lead to `EXEEXT = no' # in a Makefile. We should not override ac_cv_exeext if it was cached, # so that the user can short-circuit this test for compilers unknown to # Autoconf. for ac_file in $ac_files '' do test -f "$ac_file" || continue case $ac_file in *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; [ab].out ) # We found the default executable, but exeext='' is most # certainly right. 
break;; *.* ) if test "${ac_cv_exeext+set}" = set && test "$ac_cv_exeext" != no; then :; else ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` fi # We set ac_cv_exeext here because the later test for it is not # safe: cross compilers may not add the suffix if given an `-o' # argument, so we may need to know it at that point already. # Even if this section looks crufty: it has the advantage of # actually working. break;; * ) break;; esac done test "$ac_cv_exeext" = no && ac_cv_exeext= else ac_file='' fi if test -z "$ac_file"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error 77 "C compiler cannot create executables See \`config.log' for more details" "$LINENO" 5; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler default output file name" >&5 $as_echo_n "checking for C compiler default output file name... " >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_file" >&5 $as_echo "$ac_file" >&6; } ac_exeext=$ac_cv_exeext rm -f -r a.out a.out.dSYM a.exe conftest$ac_cv_exeext b.out ac_clean_files=$ac_clean_files_save { $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of executables" >&5 $as_echo_n "checking for suffix of executables... " >&6; } if { { ac_try="$ac_link" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_link") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then : # If both `conftest.exe' and `conftest' are `present' (well, observable) # catch `conftest.exe'. 
For instance with Cygwin, `ls conftest' will # work properly (i.e., refer to `conftest.exe'), while it won't with # `rm'. for ac_file in conftest.exe conftest conftest.*; do test -f "$ac_file" || continue case $ac_file in *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM | *.o | *.obj ) ;; *.* ) ac_cv_exeext=`expr "$ac_file" : '[^.]*\(\..*\)'` break;; * ) break;; esac done else { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "cannot compute suffix of executables: cannot compile and link See \`config.log' for more details" "$LINENO" 5; } fi rm -f conftest conftest$ac_cv_exeext { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_exeext" >&5 $as_echo "$ac_cv_exeext" >&6; } rm -f conftest.$ac_ext EXEEXT=$ac_cv_exeext ac_exeext=$EXEEXT cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include int main () { FILE *f = fopen ("conftest.out", "w"); return ferror (f) || fclose (f) != 0; ; return 0; } _ACEOF ac_clean_files="$ac_clean_files conftest.out" # Check that the compiler produces executables we can run. If not, either # the compiler is broken, or we cross compile. { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are cross compiling" >&5 $as_echo_n "checking whether we are cross compiling... " >&6; } if test "$cross_compiling" != yes; then { { ac_try="$ac_link" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_link") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 test $ac_status = 0; } if { ac_try='./conftest$ac_cv_exeext' { { case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_try") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; }; then cross_compiling=no else if test "$cross_compiling" = maybe; then cross_compiling=yes else { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "cannot run C compiled programs. If you meant to cross compile, use \`--host'. See \`config.log' for more details" "$LINENO" 5; } fi fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $cross_compiling" >&5 $as_echo "$cross_compiling" >&6; } rm -f conftest.$ac_ext conftest$ac_cv_exeext conftest.out ac_clean_files=$ac_clean_files_save { $as_echo "$as_me:${as_lineno-$LINENO}: checking for suffix of object files" >&5 $as_echo_n "checking for suffix of object files... " >&6; } if ${ac_cv_objext+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF rm -f conftest.o conftest.obj if { { ac_try="$ac_compile" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_compile") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 test $ac_status = 0; }; then : for ac_file in conftest.o conftest.obj conftest.*; do test -f "$ac_file" || continue; case $ac_file in *.$ac_ext | *.xcoff | *.tds | *.d | *.pdb | *.xSYM | *.bb | *.bbg | *.map | *.inf | *.dSYM ) ;; *) ac_cv_objext=`expr "$ac_file" : '.*\.\(.*\)'` break;; esac done else $as_echo "$as_me: failed program was:" >&5 sed 's/^/| /' conftest.$ac_ext >&5 { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "cannot compute suffix of object files: cannot compile See \`config.log' for more details" "$LINENO" 5; } fi rm -f conftest.$ac_cv_objext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_objext" >&5 $as_echo "$ac_cv_objext" >&6; } OBJEXT=$ac_cv_objext ac_objext=$OBJEXT { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C compiler" >&5 $as_echo_n "checking whether we are using the GNU C compiler... " >&6; } if ${ac_cv_c_compiler_gnu+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { #ifndef __GNUC__ choke me #endif ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_compiler_gnu=yes else ac_compiler_gnu=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_cv_c_compiler_gnu=$ac_compiler_gnu fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5 $as_echo "$ac_cv_c_compiler_gnu" >&6; } if test $ac_compiler_gnu = yes; then GCC=yes else GCC= fi ac_test_CFLAGS=${CFLAGS+set} ac_save_CFLAGS=$CFLAGS { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5 $as_echo_n "checking whether $CC accepts -g... " >&6; } if ${ac_cv_prog_cc_g+:} false; then : $as_echo_n "(cached) " >&6 else ac_save_c_werror_flag=$ac_c_werror_flag ac_c_werror_flag=yes ac_cv_prog_cc_g=no CFLAGS="-g" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_prog_cc_g=yes else CFLAGS="" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : else ac_c_werror_flag=$ac_save_c_werror_flag CFLAGS="-g" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_prog_cc_g=yes fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_c_werror_flag=$ac_save_c_werror_flag fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5 $as_echo "$ac_cv_prog_cc_g" >&6; } if test "$ac_test_CFLAGS" = set; then CFLAGS=$ac_save_CFLAGS elif test $ac_cv_prog_cc_g = yes; then if test "$GCC" = yes; then CFLAGS="-g -O2" else CFLAGS="-g" fi else if test "$GCC" = yes; then CFLAGS="-O2" else CFLAGS= fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5 $as_echo_n "checking for $CC option to accept ISO C89... " >&6; } if ${ac_cv_prog_cc_c89+:} false; then : $as_echo_n "(cached) " >&6 else ac_cv_prog_cc_c89=no ac_save_CC=$CC cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include struct stat; /* Most of the following tests are stolen from RCS 5.7's src/conf.sh. */ struct buf { int x; }; FILE * (*rcsopen) (struct buf *, struct stat *, int); static char *e (p, i) char **p; int i; { return p[i]; } static char *f (char * (*g) (char **, int), char **p, ...) { char *s; va_list v; va_start (v,p); s = g (p, va_arg (v,int)); va_end (v); return s; } /* OSF 4.0 Compaq cc is some sort of almost-ANSI by default. It has function prototypes and stuff, but not '\xHH' hex character constants. These don't provoke an error unfortunately, instead are silently treated as 'x'. 
The following induces an error, until -std is added to get proper ANSI mode. Curiously '\x00'!='x' always comes out true, for an array size at least. It's necessary to write '\x00'==0 to get something that's true only with -std. */ int osf4_cc_array ['\x00' == 0 ? 1 : -1]; /* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters inside strings and character constants. */ #define FOO(x) 'x' int xlc6_cc_array[FOO(a) == 'x' ? 1 : -1]; int test (int i, double x); struct s1 {int (*f) (int a);}; struct s2 {int (*f) (double a);}; int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int); int argc; char **argv; int main () { return f (e, argv, 0) != argv[0] || f (e, argv, 1) != argv[1]; ; return 0; } _ACEOF for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std \ -Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__" do CC="$ac_save_CC $ac_arg" if ac_fn_c_try_compile "$LINENO"; then : ac_cv_prog_cc_c89=$ac_arg fi rm -f core conftest.err conftest.$ac_objext test "x$ac_cv_prog_cc_c89" != "xno" && break done rm -f conftest.$ac_ext CC=$ac_save_CC fi # AC_CACHE_VAL case "x$ac_cv_prog_cc_c89" in x) { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5 $as_echo "none needed" >&6; } ;; xno) { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5 $as_echo "unsupported" >&6; } ;; *) CC="$CC $ac_cv_prog_cc_c89" { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5 $as_echo "$ac_cv_prog_cc_c89" >&6; } ;; esac if test "x$ac_cv_prog_cc_c89" != xno; then : fi ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu { 
$as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC understands -c and -o together" >&5 $as_echo_n "checking whether $CC understands -c and -o together... " >&6; } if ${am_cv_prog_cc_c_o+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF # Make sure it works both with $CC and with simple cc. # Following AC_PROG_CC_C_O, we do the test twice because some # compilers refuse to overwrite an existing .o file with -o, # though they will create one. am_cv_prog_cc_c_o=yes for am_i in 1 2; do if { echo "$as_me:$LINENO: $CC -c conftest.$ac_ext -o conftest2.$ac_objext" >&5 ($CC -c conftest.$ac_ext -o conftest2.$ac_objext) >&5 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } \ && test -f conftest2.$ac_objext; then : OK else am_cv_prog_cc_c_o=no break fi done rm -f core conftest* unset am_i fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_prog_cc_c_o" >&5 $as_echo "$am_cv_prog_cc_c_o" >&6; } if test "$am_cv_prog_cc_c_o" != yes; then # Losing compiler, so override with the script. # FIXME: It is wrong to rewrite CC. # But if we don't then we get into trouble of one sort or another. # A longer-term fix would be to have automake use am__CC in this case, # and then we could set am__CC="\$(top_srcdir)/compile \$(CC)" CC="$am_aux_dir/compile $CC" fi ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu depcc="$CC" am_compiler_list= { $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5 $as_echo_n "checking dependency style of $depcc... " >&6; } if ${am_cv_CC_dependencies_compiler_type+:} false; then : $as_echo_n "(cached) " >&6 else if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then # We make a subdir and do the tests there. 
Otherwise we can end up # making bogus files that we don't know about and never remove. For # instance it was reported that on HP-UX the gcc test will end up # making a dummy file named 'D' -- because '-MD' means "put the output # in D". rm -rf conftest.dir mkdir conftest.dir # Copy depcomp to subdir because otherwise we won't find it if we're # using a relative directory. cp "$am_depcomp" conftest.dir cd conftest.dir # We will build objects and dependencies in a subdirectory because # it helps to detect inapplicable dependency modes. For instance # both Tru64's cc and ICC support -MD to output dependencies as a # side effect of compilation, but ICC will put the dependencies in # the current directory while Tru64 will put them in the object # directory. mkdir sub am_cv_CC_dependencies_compiler_type=none if test "$am_compiler_list" = ""; then am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp` fi am__universal=false case " $depcc " in #( *\ -arch\ *\ -arch\ *) am__universal=true ;; esac for depmode in $am_compiler_list; do # Setup a source with many dependencies, because some compilers # like to wrap large dependency lists on column 80 (with \), and # we should not choose a depcomp mode which is confused by this. # # We need to recreate these files for each test, as the compiler may # overwrite some of them when testing with obscure command lines. # This happens at least with the AIX C compiler. : > sub/conftest.c for i in 1 2 3 4 5 6; do echo '#include "conftst'$i'.h"' >> sub/conftest.c # Using ": > sub/conftst$i.h" creates only sub/conftst1.h with # Solaris 10 /bin/sh. echo '/* dummy */' > sub/conftst$i.h done echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf # We check with '-c' and '-o' for the sake of the "dashmstdout" # mode. It turns out that the SunPro C++ compiler does not properly # handle '-M -o', and we need to detect this. Also, some Intel # versions had trouble with output in subdirs. 
am__obj=sub/conftest.${OBJEXT-o} am__minus_obj="-o $am__obj" case $depmode in gcc) # This depmode causes a compiler race in universal mode. test "$am__universal" = false || continue ;; nosideeffect) # After this tag, mechanisms are not by side-effect, so they'll # only be used when explicitly requested. if test "x$enable_dependency_tracking" = xyes; then continue else break fi ;; msvc7 | msvc7msys | msvisualcpp | msvcmsys) # This compiler won't grok '-c -o', but also, the minuso test has # not run yet. These depmodes are late enough in the game, and # so weak that their functioning should not be impacted. am__obj=conftest.${OBJEXT-o} am__minus_obj= ;; none) break ;; esac if depmode=$depmode \ source=sub/conftest.c object=$am__obj \ depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \ $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \ >/dev/null 2>conftest.err && grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 && grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 && grep $am__obj sub/conftest.Po > /dev/null 2>&1 && ${MAKE-make} -s -f confmf > /dev/null 2>&1; then # icc doesn't choke on unknown options, it will just issue warnings # or remarks (even with -Werror). So we grep stderr for any message # that says an option was ignored or not supported. # When given -MP, icc 7.0 and 7.1 complain thusly: # icc: Command line warning: ignoring option '-M'; no argument required # The diagnosis changed in icc 8.0: # icc: Command line remark: option '-MP' not supported if (grep 'ignoring option' conftest.err || grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else am_cv_CC_dependencies_compiler_type=$depmode break fi fi done cd .. 
rm -rf conftest.dir else am_cv_CC_dependencies_compiler_type=none fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_CC_dependencies_compiler_type" >&5 $as_echo "$am_cv_CC_dependencies_compiler_type" >&6; } CCDEPMODE=depmode=$am_cv_CC_dependencies_compiler_type if test "x$enable_dependency_tracking" != xno \ && test "$am_cv_CC_dependencies_compiler_type" = gcc3; then am__fastdepCC_TRUE= am__fastdepCC_FALSE='#' else am__fastdepCC_TRUE='#' am__fastdepCC_FALSE= fi # Make sure we can run config.sub. $SHELL "$ac_aux_dir/config.sub" sun4 >/dev/null 2>&1 || as_fn_error $? "cannot run $SHELL $ac_aux_dir/config.sub" "$LINENO" 5 { $as_echo "$as_me:${as_lineno-$LINENO}: checking build system type" >&5 $as_echo_n "checking build system type... " >&6; } if ${ac_cv_build+:} false; then : $as_echo_n "(cached) " >&6 else ac_build_alias=$build_alias test "x$ac_build_alias" = x && ac_build_alias=`$SHELL "$ac_aux_dir/config.guess"` test "x$ac_build_alias" = x && as_fn_error $? "cannot guess build type; you must specify one" "$LINENO" 5 ac_cv_build=`$SHELL "$ac_aux_dir/config.sub" $ac_build_alias` || as_fn_error $? "$SHELL $ac_aux_dir/config.sub $ac_build_alias failed" "$LINENO" 5 fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_build" >&5 $as_echo "$ac_cv_build" >&6; } case $ac_cv_build in *-*-*) ;; *) as_fn_error $? "invalid value of canonical build" "$LINENO" 5;; esac build=$ac_cv_build ac_save_IFS=$IFS; IFS='-' set x $ac_cv_build shift build_cpu=$1 build_vendor=$2 shift; shift # Remember, the first character of IFS is used to create $*, # except with old shells: build_os=$* IFS=$ac_save_IFS case $build_os in *\ *) build_os=`echo "$build_os" | sed 's/ /-/g'`;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: checking host system type" >&5 $as_echo_n "checking host system type... 
" >&6; } if ${ac_cv_host+:} false; then : $as_echo_n "(cached) " >&6 else if test "x$host_alias" = x; then ac_cv_host=$ac_cv_build else ac_cv_host=`$SHELL "$ac_aux_dir/config.sub" $host_alias` || as_fn_error $? "$SHELL $ac_aux_dir/config.sub $host_alias failed" "$LINENO" 5 fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_host" >&5 $as_echo "$ac_cv_host" >&6; } case $ac_cv_host in *-*-*) ;; *) as_fn_error $? "invalid value of canonical host" "$LINENO" 5;; esac host=$ac_cv_host ac_save_IFS=$IFS; IFS='-' set x $ac_cv_host shift host_cpu=$1 host_vendor=$2 shift; shift # Remember, the first character of IFS is used to create $*, # except with old shells: host_os=$* IFS=$ac_save_IFS case $host_os in *\ *) host_os=`echo "$host_os" | sed 's/ /-/g'`;; esac # Check whether --with-gnu-ld was given. if test "${with_gnu_ld+set}" = set; then : withval=$with_gnu_ld; test "$withval" = no || with_gnu_ld=yes else with_gnu_ld=no fi # Prepare PATH_SEPARATOR. # The user is always right. if test "${PATH_SEPARATOR+set}" != set; then # Determine PATH_SEPARATOR by trying to find /bin/sh in a PATH which # contains only /bin. Note that ksh looks also at the FPATH variable, # so we have to set that as well for the test. PATH_SEPARATOR=: (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 \ && { (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 \ || PATH_SEPARATOR=';' } fi ac_prog=ld if test "$GCC" = yes; then # Check if gcc -print-prog-name=ld gives a path. { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ld used by $CC" >&5 $as_echo_n "checking for ld used by $CC... " >&6; } case $host in *-*-mingw*) # gcc leaves a trailing carriage return which upsets mingw ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;; *) ac_prog=`($CC -print-prog-name=ld) 2>&5` ;; esac case $ac_prog in # Accept absolute paths. 
[\\/]* | ?:[\\/]*) re_direlt='/[^/][^/]*/\.\./' # Canonicalize the pathname of ld ac_prog=`echo "$ac_prog"| sed 's%\\\\%/%g'` while echo "$ac_prog" | grep "$re_direlt" > /dev/null 2>&1; do ac_prog=`echo $ac_prog| sed "s%$re_direlt%/%"` done test -z "$LD" && LD="$ac_prog" ;; "") # If it fails, then pretend we aren't using GCC. ac_prog=ld ;; *) # If it is relative, then search for the first ld in PATH. with_gnu_ld=unknown ;; esac elif test "$with_gnu_ld" = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU ld" >&5 $as_echo_n "checking for GNU ld... " >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for non-GNU ld" >&5 $as_echo_n "checking for non-GNU ld... " >&6; } fi if ${acl_cv_path_LD+:} false; then : $as_echo_n "(cached) " >&6 else if test -z "$LD"; then acl_save_ifs="$IFS"; IFS=$PATH_SEPARATOR for ac_dir in $PATH; do IFS="$acl_save_ifs" test -z "$ac_dir" && ac_dir=. if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then acl_cv_path_LD="$ac_dir/$ac_prog" # Check to see if the program is GNU ld. I'd rather use --version, # but apparently some variants of GNU ld only accept -v. # Break only if it was the GNU/non-GNU ld that we prefer. case `"$acl_cv_path_LD" -v 2>&1 &5 $as_echo "$LD" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -z "$LD" && as_fn_error $? "no acceptable ld found in \$PATH" "$LINENO" 5 { $as_echo "$as_me:${as_lineno-$LINENO}: checking if the linker ($LD) is GNU ld" >&5 $as_echo_n "checking if the linker ($LD) is GNU ld... " >&6; } if ${acl_cv_prog_gnu_ld+:} false; then : $as_echo_n "(cached) " >&6 else # I'd rather use --version here, but apparently some GNU lds only accept -v. case `$LD -v 2>&1 &5 $as_echo "$acl_cv_prog_gnu_ld" >&6; } with_gnu_ld=$acl_cv_prog_gnu_ld { $as_echo "$as_me:${as_lineno-$LINENO}: checking for shared library run path origin" >&5 $as_echo_n "checking for shared library run path origin... 
" >&6; } if ${acl_cv_rpath+:} false; then : $as_echo_n "(cached) " >&6 else CC="$CC" GCC="$GCC" LDFLAGS="$LDFLAGS" LD="$LD" with_gnu_ld="$with_gnu_ld" \ ${CONFIG_SHELL-/bin/sh} "$ac_aux_dir/config.rpath" "$host" > conftest.sh . ./conftest.sh rm -f ./conftest.sh acl_cv_rpath=done fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $acl_cv_rpath" >&5 $as_echo "$acl_cv_rpath" >&6; } wl="$acl_cv_wl" acl_libext="$acl_cv_libext" acl_shlibext="$acl_cv_shlibext" acl_libname_spec="$acl_cv_libname_spec" acl_library_names_spec="$acl_cv_library_names_spec" acl_hardcode_libdir_flag_spec="$acl_cv_hardcode_libdir_flag_spec" acl_hardcode_libdir_separator="$acl_cv_hardcode_libdir_separator" acl_hardcode_direct="$acl_cv_hardcode_direct" acl_hardcode_minus_L="$acl_cv_hardcode_minus_L" # Check whether --enable-rpath was given. if test "${enable_rpath+set}" = set; then : enableval=$enable_rpath; : else enable_rpath=yes fi ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu if test -z "$CXX"; then if test -n "$CCC"; then CXX=$CCC else if test -n "$ac_tool_prefix"; then for ac_prog in g++ c++ gpp aCC CC cxx cc++ cl.exe FCC KCC RCC xlC_r xlC do # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. set dummy $ac_tool_prefix$ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_CXX+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$CXX"; then ac_cv_prog_CXX="$CXX" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CXX="$ac_tool_prefix$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi CXX=$ac_cv_prog_CXX if test -n "$CXX"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CXX" >&5 $as_echo "$CXX" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$CXX" && break done fi if test -z "$CXX"; then ac_ct_CXX=$CXX for ac_prog in g++ c++ gpp aCC CC cxx cc++ cl.exe FCC KCC RCC xlC_r xlC do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_CXX+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_CXX"; then ac_cv_prog_ac_ct_CXX="$ac_ct_CXX" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_CXX="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_CXX=$ac_cv_prog_ac_ct_CXX if test -n "$ac_ct_CXX"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CXX" >&5 $as_echo "$ac_ct_CXX" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$ac_ct_CXX" && break done if test "x$ac_ct_CXX" = x; then CXX="g++" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac CXX=$ac_ct_CXX fi fi fi fi # Provide some information about the compiler. $as_echo "$as_me:${as_lineno-$LINENO}: checking for C++ compiler version" >&5 set X $ac_compile ac_compiler=$2 for ac_option in --version -v -V -qversion; do { { ac_try="$ac_compiler $ac_option >&5" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_compiler $ac_option >&5") 2>conftest.err ac_status=$? if test -s conftest.err; then sed '10a\ ... rest of stderr output deleted ... 10q' conftest.err >conftest.er1 cat conftest.er1 >&5 fi rm -f conftest.er1 conftest.err $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } done { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C++ compiler" >&5 $as_echo_n "checking whether we are using the GNU C++ compiler... " >&6; } if ${ac_cv_cxx_compiler_gnu+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ int main () { #ifndef __GNUC__ choke me #endif ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_compiler_gnu=yes else ac_compiler_gnu=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_cv_cxx_compiler_gnu=$ac_compiler_gnu fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_cxx_compiler_gnu" >&5 $as_echo "$ac_cv_cxx_compiler_gnu" >&6; } if test $ac_compiler_gnu = yes; then GXX=yes else GXX= fi ac_test_CXXFLAGS=${CXXFLAGS+set} ac_save_CXXFLAGS=$CXXFLAGS { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CXX accepts -g" >&5 $as_echo_n "checking whether $CXX accepts -g... " >&6; } if ${ac_cv_prog_cxx_g+:} false; then : $as_echo_n "(cached) " >&6 else ac_save_cxx_werror_flag=$ac_cxx_werror_flag ac_cxx_werror_flag=yes ac_cv_prog_cxx_g=no CXXFLAGS="-g" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_prog_cxx_g=yes else CXXFLAGS="" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : else ac_cxx_werror_flag=$ac_save_cxx_werror_flag CXXFLAGS="-g" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_prog_cxx_g=yes fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_cxx_werror_flag=$ac_save_cxx_werror_flag fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cxx_g" >&5 $as_echo "$ac_cv_prog_cxx_g" >&6; } if test "$ac_test_CXXFLAGS" = set; then CXXFLAGS=$ac_save_CXXFLAGS elif test $ac_cv_prog_cxx_g = yes; then if test "$GXX" = yes; then CXXFLAGS="-g -O2" else CXXFLAGS="-g" fi else if test "$GXX" = yes; then CXXFLAGS="-O2" else CXXFLAGS= fi fi ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu depcc="$CXX" am_compiler_list= { $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5 $as_echo_n "checking dependency style of $depcc... " >&6; } if ${am_cv_CXX_dependencies_compiler_type+:} false; then : $as_echo_n "(cached) " >&6 else if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then # We make a subdir and do the tests there. Otherwise we can end up # making bogus files that we don't know about and never remove. For # instance it was reported that on HP-UX the gcc test will end up # making a dummy file named 'D' -- because '-MD' means "put the output # in D". rm -rf conftest.dir mkdir conftest.dir # Copy depcomp to subdir because otherwise we won't find it if we're # using a relative directory. cp "$am_depcomp" conftest.dir cd conftest.dir # We will build objects and dependencies in a subdirectory because # it helps to detect inapplicable dependency modes. 
For instance # both Tru64's cc and ICC support -MD to output dependencies as a # side effect of compilation, but ICC will put the dependencies in # the current directory while Tru64 will put them in the object # directory. mkdir sub am_cv_CXX_dependencies_compiler_type=none if test "$am_compiler_list" = ""; then am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp` fi am__universal=false case " $depcc " in #( *\ -arch\ *\ -arch\ *) am__universal=true ;; esac for depmode in $am_compiler_list; do # Setup a source with many dependencies, because some compilers # like to wrap large dependency lists on column 80 (with \), and # we should not choose a depcomp mode which is confused by this. # # We need to recreate these files for each test, as the compiler may # overwrite some of them when testing with obscure command lines. # This happens at least with the AIX C compiler. : > sub/conftest.c for i in 1 2 3 4 5 6; do echo '#include "conftst'$i'.h"' >> sub/conftest.c # Using ": > sub/conftst$i.h" creates only sub/conftst1.h with # Solaris 10 /bin/sh. echo '/* dummy */' > sub/conftst$i.h done echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf # We check with '-c' and '-o' for the sake of the "dashmstdout" # mode. It turns out that the SunPro C++ compiler does not properly # handle '-M -o', and we need to detect this. Also, some Intel # versions had trouble with output in subdirs. am__obj=sub/conftest.${OBJEXT-o} am__minus_obj="-o $am__obj" case $depmode in gcc) # This depmode causes a compiler race in universal mode. test "$am__universal" = false || continue ;; nosideeffect) # After this tag, mechanisms are not by side-effect, so they'll # only be used when explicitly requested. if test "x$enable_dependency_tracking" = xyes; then continue else break fi ;; msvc7 | msvc7msys | msvisualcpp | msvcmsys) # This compiler won't grok '-c -o', but also, the minuso test has # not run yet. 
These depmodes are late enough in the game, and # so weak that their functioning should not be impacted. am__obj=conftest.${OBJEXT-o} am__minus_obj= ;; none) break ;; esac if depmode=$depmode \ source=sub/conftest.c object=$am__obj \ depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \ $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \ >/dev/null 2>conftest.err && grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 && grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 && grep $am__obj sub/conftest.Po > /dev/null 2>&1 && ${MAKE-make} -s -f confmf > /dev/null 2>&1; then # icc doesn't choke on unknown options, it will just issue warnings # or remarks (even with -Werror). So we grep stderr for any message # that says an option was ignored or not supported. # When given -MP, icc 7.0 and 7.1 complain thusly: # icc: Command line warning: ignoring option '-M'; no argument required # The diagnosis changed in icc 8.0: # icc: Command line remark: option '-MP' not supported if (grep 'ignoring option' conftest.err || grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else am_cv_CXX_dependencies_compiler_type=$depmode break fi fi done cd .. 
rm -rf conftest.dir else am_cv_CXX_dependencies_compiler_type=none fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_CXX_dependencies_compiler_type" >&5 $as_echo "$am_cv_CXX_dependencies_compiler_type" >&6; } CXXDEPMODE=depmode=$am_cv_CXX_dependencies_compiler_type if test "x$enable_dependency_tracking" != xno \ && test "$am_cv_CXX_dependencies_compiler_type" = gcc3; then am__fastdepCXX_TRUE= am__fastdepCXX_FALSE='#' else am__fastdepCXX_TRUE='#' am__fastdepCXX_FALSE= fi ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to run the C++ preprocessor" >&5 $as_echo_n "checking how to run the C++ preprocessor... " >&6; } if test -z "$CXXCPP"; then if ${ac_cv_prog_CXXCPP+:} false; then : $as_echo_n "(cached) " >&6 else # Double quotes because CXXCPP needs to be expanded for CXXCPP in "$CXX -E" "/lib/cpp" do ac_preproc_ok=false for ac_cxx_preproc_warn_flag in '' yes do # Use a header file that comes with gcc, so configuring glibc # with a fresh cross-compiler works. # Prefer to if __STDC__ is defined, since # exists even on freestanding compilers. # On the NeXT, cc -E runs the code through the compiler's parser, # not just through cpp. "Syntax error" is here to catch this case. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #ifdef __STDC__ # include #else # include #endif Syntax error _ACEOF if ac_fn_cxx_try_cpp "$LINENO"; then : else # Broken: fails on valid input. continue fi rm -f conftest.err conftest.i conftest.$ac_ext # OK, works on sane cases. Now check whether nonexistent headers # can be detected and how. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include _ACEOF if ac_fn_cxx_try_cpp "$LINENO"; then : # Broken: success on invalid input. continue else # Passes both tests. 
ac_preproc_ok=: break fi rm -f conftest.err conftest.i conftest.$ac_ext done # Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. rm -f conftest.i conftest.err conftest.$ac_ext if $ac_preproc_ok; then : break fi done ac_cv_prog_CXXCPP=$CXXCPP fi CXXCPP=$ac_cv_prog_CXXCPP else ac_cv_prog_CXXCPP=$CXXCPP fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CXXCPP" >&5 $as_echo "$CXXCPP" >&6; } ac_preproc_ok=false for ac_cxx_preproc_warn_flag in '' yes do # Use a header file that comes with gcc, so configuring glibc # with a fresh cross-compiler works. # Prefer to if __STDC__ is defined, since # exists even on freestanding compilers. # On the NeXT, cc -E runs the code through the compiler's parser, # not just through cpp. "Syntax error" is here to catch this case. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #ifdef __STDC__ # include #else # include #endif Syntax error _ACEOF if ac_fn_cxx_try_cpp "$LINENO"; then : else # Broken: fails on valid input. continue fi rm -f conftest.err conftest.i conftest.$ac_ext # OK, works on sane cases. Now check whether nonexistent headers # can be detected and how. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include _ACEOF if ac_fn_cxx_try_cpp "$LINENO"; then : # Broken: success on invalid input. continue else # Passes both tests. ac_preproc_ok=: break fi rm -f conftest.err conftest.i conftest.$ac_ext done # Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. rm -f conftest.i conftest.err conftest.$ac_ext if $ac_preproc_ok; then : else { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? 
"C++ preprocessor \"$CXXCPP\" fails sanity check See \`config.log' for more details" "$LINENO" 5; } fi ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu { $as_echo "$as_me:${as_lineno-$LINENO}: checking for grep that handles long lines and -e" >&5 $as_echo_n "checking for grep that handles long lines and -e... " >&6; } if ${ac_cv_path_GREP+:} false; then : $as_echo_n "(cached) " >&6 else if test -z "$GREP"; then ac_path_GREP_found=false # Loop through the user's path and test for each of PROGNAME-LIST as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_prog in grep ggrep; do for ac_exec_ext in '' $ac_executable_extensions; do ac_path_GREP="$as_dir/$ac_prog$ac_exec_ext" as_fn_executable_p "$ac_path_GREP" || continue # Check for GNU ac_path_GREP and select it if it is found. 
# Check for GNU $ac_path_GREP case `"$ac_path_GREP" --version 2>&1` in *GNU*) ac_cv_path_GREP="$ac_path_GREP" ac_path_GREP_found=:;; *) ac_count=0 $as_echo_n 0123456789 >"conftest.in" while : do cat "conftest.in" "conftest.in" >"conftest.tmp" mv "conftest.tmp" "conftest.in" cp "conftest.in" "conftest.nl" $as_echo 'GREP' >> "conftest.nl" "$ac_path_GREP" -e 'GREP$' -e '-(cannot match)-' < "conftest.nl" >"conftest.out" 2>/dev/null || break diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break as_fn_arith $ac_count + 1 && ac_count=$as_val if test $ac_count -gt ${ac_path_GREP_max-0}; then # Best one so far, save it but keep looking for a better one ac_cv_path_GREP="$ac_path_GREP" ac_path_GREP_max=$ac_count fi # 10*(2^10) chars as input seems more than enough test $ac_count -gt 10 && break done rm -f conftest.in conftest.tmp conftest.nl conftest.out;; esac $ac_path_GREP_found && break 3 done done done IFS=$as_save_IFS if test -z "$ac_cv_path_GREP"; then as_fn_error $? "no acceptable grep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 fi else ac_cv_path_GREP=$GREP fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_GREP" >&5 $as_echo "$ac_cv_path_GREP" >&6; } GREP="$ac_cv_path_GREP" { $as_echo "$as_me:${as_lineno-$LINENO}: checking for egrep" >&5 $as_echo_n "checking for egrep... " >&6; } if ${ac_cv_path_EGREP+:} false; then : $as_echo_n "(cached) " >&6 else if echo a | $GREP -E '(a|b)' >/dev/null 2>&1 then ac_cv_path_EGREP="$GREP -E" else if test -z "$EGREP"; then ac_path_EGREP_found=false # Loop through the user's path and test for each of PROGNAME-LIST as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_prog in egrep; do for ac_exec_ext in '' $ac_executable_extensions; do ac_path_EGREP="$as_dir/$ac_prog$ac_exec_ext" as_fn_executable_p "$ac_path_EGREP" || continue # Check for GNU ac_path_EGREP and select it if it is found. 
# Check for GNU $ac_path_EGREP case `"$ac_path_EGREP" --version 2>&1` in *GNU*) ac_cv_path_EGREP="$ac_path_EGREP" ac_path_EGREP_found=:;; *) ac_count=0 $as_echo_n 0123456789 >"conftest.in" while : do cat "conftest.in" "conftest.in" >"conftest.tmp" mv "conftest.tmp" "conftest.in" cp "conftest.in" "conftest.nl" $as_echo 'EGREP' >> "conftest.nl" "$ac_path_EGREP" 'EGREP$' < "conftest.nl" >"conftest.out" 2>/dev/null || break diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break as_fn_arith $ac_count + 1 && ac_count=$as_val if test $ac_count -gt ${ac_path_EGREP_max-0}; then # Best one so far, save it but keep looking for a better one ac_cv_path_EGREP="$ac_path_EGREP" ac_path_EGREP_max=$ac_count fi # 10*(2^10) chars as input seems more than enough test $ac_count -gt 10 && break done rm -f conftest.in conftest.tmp conftest.nl conftest.out;; esac $ac_path_EGREP_found && break 3 done done done IFS=$as_save_IFS if test -z "$ac_cv_path_EGREP"; then as_fn_error $? "no acceptable egrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 fi else ac_cv_path_EGREP=$EGREP fi fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_EGREP" >&5 $as_echo "$ac_cv_path_EGREP" >&6; } EGREP="$ac_cv_path_EGREP" acl_libdirstem=lib acl_libdirstem2= case "$host_os" in solaris*) { $as_echo "$as_me:${as_lineno-$LINENO}: checking for 64-bit host" >&5 $as_echo_n "checking for 64-bit host... " >&6; } if ${gl_cv_solaris_64bit+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #ifdef _LP64 sixtyfour bits #endif _ACEOF if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | $EGREP "sixtyfour bits" >/dev/null 2>&1; then : gl_cv_solaris_64bit=yes else gl_cv_solaris_64bit=no fi rm -f conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gl_cv_solaris_64bit" >&5 $as_echo "$gl_cv_solaris_64bit" >&6; } if test $gl_cv_solaris_64bit = yes; then acl_libdirstem=lib/64 case "$host_cpu" in sparc*) acl_libdirstem2=lib/sparcv9 ;; i*86 | x86_64) acl_libdirstem2=lib/amd64 ;; esac fi ;; *) searchpath=`(LC_ALL=C $CC -print-search-dirs) 2>/dev/null | sed -n -e 's,^libraries: ,,p' | sed -e 's,^=,,'` if test -n "$searchpath"; then acl_save_IFS="${IFS= }"; IFS=":" for searchdir in $searchpath; do if test -d "$searchdir"; then case "$searchdir" in */lib64/ | */lib64 ) acl_libdirstem=lib64 ;; */../ | */.. ) # Better ignore directories of this form. They are misleading. ;; *) searchdir=`cd "$searchdir" && pwd` case "$searchdir" in */lib64 ) acl_libdirstem=lib64 ;; esac ;; esac fi done IFS="$acl_save_IFS" fi ;; esac test -n "$acl_libdirstem2" || acl_libdirstem2="$acl_libdirstem" use_additional=yes acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval additional_includedir=\"$includedir\" eval additional_libdir=\"$libdir\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" # Check whether --with-libiconv-prefix was given. 
if test "${with_libiconv_prefix+set}" = set; then : withval=$with_libiconv_prefix; if test "X$withval" = "Xno"; then use_additional=no else if test "X$withval" = "X"; then acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval additional_includedir=\"$includedir\" eval additional_libdir=\"$libdir\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" else additional_includedir="$withval/include" additional_libdir="$withval/$acl_libdirstem" if test "$acl_libdirstem2" != "$acl_libdirstem" \ && ! test -d "$withval/$acl_libdirstem"; then additional_libdir="$withval/$acl_libdirstem2" fi fi fi fi LIBICONV= LTLIBICONV= INCICONV= LIBICONV_PREFIX= HAVE_LIBICONV= rpathdirs= ltrpathdirs= names_already_handled= names_next_round='iconv ' while test -n "$names_next_round"; do names_this_round="$names_next_round" names_next_round= for name in $names_this_round; do already_handled= for n in $names_already_handled; do if test "$n" = "$name"; then already_handled=yes break fi done if test -z "$already_handled"; then names_already_handled="$names_already_handled $name" uppername=`echo "$name" | sed -e 'y|abcdefghijklmnopqrstuvwxyz./+-|ABCDEFGHIJKLMNOPQRSTUVWXYZ____|'` eval value=\"\$HAVE_LIB$uppername\" if test -n "$value"; then if test "$value" = yes; then eval value=\"\$LIB$uppername\" test -z "$value" || LIBICONV="${LIBICONV}${LIBICONV:+ }$value" eval value=\"\$LTLIB$uppername\" test -z "$value" || LTLIBICONV="${LTLIBICONV}${LTLIBICONV:+ }$value" else : fi else found_dir= found_la= found_so= found_a= eval libname=\"$acl_libname_spec\" # typically: libname=lib$name if test -n "$acl_shlibext"; then shrext=".$acl_shlibext" # typically: shrext=.so else shrext= fi if test $use_additional = yes; then dir="$additional_libdir" if test -n "$acl_shlibext"; then if test -f "$dir/$libname$shrext"; then found_dir="$dir" found_so="$dir/$libname$shrext" else if test "$acl_library_names_spec" = 
'$libname$shrext$versuffix'; then ver=`(cd "$dir" && \ for f in "$libname$shrext".*; do echo "$f"; done \ | sed -e "s,^$libname$shrext\\\\.,," \ | sort -t '.' -n -r -k1,1 -k2,2 -k3,3 -k4,4 -k5,5 \ | sed 1q ) 2>/dev/null` if test -n "$ver" && test -f "$dir/$libname$shrext.$ver"; then found_dir="$dir" found_so="$dir/$libname$shrext.$ver" fi else eval library_names=\"$acl_library_names_spec\" for f in $library_names; do if test -f "$dir/$f"; then found_dir="$dir" found_so="$dir/$f" break fi done fi fi fi if test "X$found_dir" = "X"; then if test -f "$dir/$libname.$acl_libext"; then found_dir="$dir" found_a="$dir/$libname.$acl_libext" fi fi if test "X$found_dir" != "X"; then if test -f "$dir/$libname.la"; then found_la="$dir/$libname.la" fi fi fi if test "X$found_dir" = "X"; then for x in $LDFLAGS $LTLIBICONV; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" case "$x" in -L*) dir=`echo "X$x" | sed -e 's/^X-L//'` if test -n "$acl_shlibext"; then if test -f "$dir/$libname$shrext"; then found_dir="$dir" found_so="$dir/$libname$shrext" else if test "$acl_library_names_spec" = '$libname$shrext$versuffix'; then ver=`(cd "$dir" && \ for f in "$libname$shrext".*; do echo "$f"; done \ | sed -e "s,^$libname$shrext\\\\.,," \ | sort -t '.' 
-n -r -k1,1 -k2,2 -k3,3 -k4,4 -k5,5 \ | sed 1q ) 2>/dev/null` if test -n "$ver" && test -f "$dir/$libname$shrext.$ver"; then found_dir="$dir" found_so="$dir/$libname$shrext.$ver" fi else eval library_names=\"$acl_library_names_spec\" for f in $library_names; do if test -f "$dir/$f"; then found_dir="$dir" found_so="$dir/$f" break fi done fi fi fi if test "X$found_dir" = "X"; then if test -f "$dir/$libname.$acl_libext"; then found_dir="$dir" found_a="$dir/$libname.$acl_libext" fi fi if test "X$found_dir" != "X"; then if test -f "$dir/$libname.la"; then found_la="$dir/$libname.la" fi fi ;; esac if test "X$found_dir" != "X"; then break fi done fi if test "X$found_dir" != "X"; then LTLIBICONV="${LTLIBICONV}${LTLIBICONV:+ }-L$found_dir -l$name" if test "X$found_so" != "X"; then if test "$enable_rpath" = no \ || test "X$found_dir" = "X/usr/$acl_libdirstem" \ || test "X$found_dir" = "X/usr/$acl_libdirstem2"; then LIBICONV="${LIBICONV}${LIBICONV:+ }$found_so" else haveit= for x in $ltrpathdirs; do if test "X$x" = "X$found_dir"; then haveit=yes break fi done if test -z "$haveit"; then ltrpathdirs="$ltrpathdirs $found_dir" fi if test "$acl_hardcode_direct" = yes; then LIBICONV="${LIBICONV}${LIBICONV:+ }$found_so" else if test -n "$acl_hardcode_libdir_flag_spec" && test "$acl_hardcode_minus_L" = no; then LIBICONV="${LIBICONV}${LIBICONV:+ }$found_so" haveit= for x in $rpathdirs; do if test "X$x" = "X$found_dir"; then haveit=yes break fi done if test -z "$haveit"; then rpathdirs="$rpathdirs $found_dir" fi else haveit= for x in $LDFLAGS $LIBICONV; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" if test "X$x" = "X-L$found_dir"; then haveit=yes break fi done if test -z "$haveit"; then LIBICONV="${LIBICONV}${LIBICONV:+ }-L$found_dir" fi if test "$acl_hardcode_minus_L" != no; then LIBICONV="${LIBICONV}${LIBICONV:+ }$found_so" 
else LIBICONV="${LIBICONV}${LIBICONV:+ }-l$name" fi fi fi fi else if test "X$found_a" != "X"; then LIBICONV="${LIBICONV}${LIBICONV:+ }$found_a" else LIBICONV="${LIBICONV}${LIBICONV:+ }-L$found_dir -l$name" fi fi additional_includedir= case "$found_dir" in */$acl_libdirstem | */$acl_libdirstem/) basedir=`echo "X$found_dir" | sed -e 's,^X,,' -e "s,/$acl_libdirstem/"'*$,,'` if test "$name" = 'iconv'; then LIBICONV_PREFIX="$basedir" fi additional_includedir="$basedir/include" ;; */$acl_libdirstem2 | */$acl_libdirstem2/) basedir=`echo "X$found_dir" | sed -e 's,^X,,' -e "s,/$acl_libdirstem2/"'*$,,'` if test "$name" = 'iconv'; then LIBICONV_PREFIX="$basedir" fi additional_includedir="$basedir/include" ;; esac if test "X$additional_includedir" != "X"; then if test "X$additional_includedir" != "X/usr/include"; then haveit= if test "X$additional_includedir" = "X/usr/local/include"; then if test -n "$GCC"; then case $host_os in linux* | gnu* | k*bsd*-gnu) haveit=yes;; esac fi fi if test -z "$haveit"; then for x in $CPPFLAGS $INCICONV; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" if test "X$x" = "X-I$additional_includedir"; then haveit=yes break fi done if test -z "$haveit"; then if test -d "$additional_includedir"; then INCICONV="${INCICONV}${INCICONV:+ }-I$additional_includedir" fi fi fi fi fi if test -n "$found_la"; then save_libdir="$libdir" case "$found_la" in */* | *\\*) . "$found_la" ;; *) . 
"./$found_la" ;; esac libdir="$save_libdir" for dep in $dependency_libs; do case "$dep" in -L*) additional_libdir=`echo "X$dep" | sed -e 's/^X-L//'` if test "X$additional_libdir" != "X/usr/$acl_libdirstem" \ && test "X$additional_libdir" != "X/usr/$acl_libdirstem2"; then haveit= if test "X$additional_libdir" = "X/usr/local/$acl_libdirstem" \ || test "X$additional_libdir" = "X/usr/local/$acl_libdirstem2"; then if test -n "$GCC"; then case $host_os in linux* | gnu* | k*bsd*-gnu) haveit=yes;; esac fi fi if test -z "$haveit"; then haveit= for x in $LDFLAGS $LIBICONV; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" if test "X$x" = "X-L$additional_libdir"; then haveit=yes break fi done if test -z "$haveit"; then if test -d "$additional_libdir"; then LIBICONV="${LIBICONV}${LIBICONV:+ }-L$additional_libdir" fi fi haveit= for x in $LDFLAGS $LTLIBICONV; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" if test "X$x" = "X-L$additional_libdir"; then haveit=yes break fi done if test -z "$haveit"; then if test -d "$additional_libdir"; then LTLIBICONV="${LTLIBICONV}${LTLIBICONV:+ }-L$additional_libdir" fi fi fi fi ;; -R*) dir=`echo "X$dep" | sed -e 's/^X-R//'` if test "$enable_rpath" != no; then haveit= for x in $rpathdirs; do if test "X$x" = "X$dir"; then haveit=yes break fi done if test -z "$haveit"; then rpathdirs="$rpathdirs $dir" fi haveit= for x in $ltrpathdirs; do if test "X$x" = "X$dir"; then haveit=yes break fi done if test -z "$haveit"; then ltrpathdirs="$ltrpathdirs $dir" fi fi ;; -l*) names_next_round="$names_next_round "`echo "X$dep" | sed -e 's/^X-l//'` ;; *.la) names_next_round="$names_next_round "`echo "X$dep" | sed -e 's,^X.*/,,' -e 's,^lib,,' -e 
's,\.la$,,'` ;; *) LIBICONV="${LIBICONV}${LIBICONV:+ }$dep" LTLIBICONV="${LTLIBICONV}${LTLIBICONV:+ }$dep" ;; esac done fi else LIBICONV="${LIBICONV}${LIBICONV:+ }-l$name" LTLIBICONV="${LTLIBICONV}${LTLIBICONV:+ }-l$name" fi fi fi done done if test "X$rpathdirs" != "X"; then if test -n "$acl_hardcode_libdir_separator"; then alldirs= for found_dir in $rpathdirs; do alldirs="${alldirs}${alldirs:+$acl_hardcode_libdir_separator}$found_dir" done acl_save_libdir="$libdir" libdir="$alldirs" eval flag=\"$acl_hardcode_libdir_flag_spec\" libdir="$acl_save_libdir" LIBICONV="${LIBICONV}${LIBICONV:+ }$flag" else for found_dir in $rpathdirs; do acl_save_libdir="$libdir" libdir="$found_dir" eval flag=\"$acl_hardcode_libdir_flag_spec\" libdir="$acl_save_libdir" LIBICONV="${LIBICONV}${LIBICONV:+ }$flag" done fi fi if test "X$ltrpathdirs" != "X"; then for found_dir in $ltrpathdirs; do LTLIBICONV="${LTLIBICONV}${LTLIBICONV:+ }-R$found_dir" done fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for CFPreferencesCopyAppValue" >&5 $as_echo_n "checking for CFPreferencesCopyAppValue... " >&6; } if ${gt_cv_func_CFPreferencesCopyAppValue+:} false; then : $as_echo_n "(cached) " >&6 else gt_save_LIBS="$LIBS" LIBS="$LIBS -Wl,-framework -Wl,CoreFoundation" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include int main () { CFPreferencesCopyAppValue(NULL, NULL) ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : gt_cv_func_CFPreferencesCopyAppValue=yes else gt_cv_func_CFPreferencesCopyAppValue=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS="$gt_save_LIBS" fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gt_cv_func_CFPreferencesCopyAppValue" >&5 $as_echo "$gt_cv_func_CFPreferencesCopyAppValue" >&6; } if test $gt_cv_func_CFPreferencesCopyAppValue = yes; then $as_echo "#define HAVE_CFPREFERENCESCOPYAPPVALUE 1" >>confdefs.h fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for CFLocaleCopyCurrent" >&5 $as_echo_n "checking for CFLocaleCopyCurrent... " >&6; } if ${gt_cv_func_CFLocaleCopyCurrent+:} false; then : $as_echo_n "(cached) " >&6 else gt_save_LIBS="$LIBS" LIBS="$LIBS -Wl,-framework -Wl,CoreFoundation" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include int main () { CFLocaleCopyCurrent(); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : gt_cv_func_CFLocaleCopyCurrent=yes else gt_cv_func_CFLocaleCopyCurrent=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS="$gt_save_LIBS" fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gt_cv_func_CFLocaleCopyCurrent" >&5 $as_echo "$gt_cv_func_CFLocaleCopyCurrent" >&6; } if test $gt_cv_func_CFLocaleCopyCurrent = yes; then $as_echo "#define HAVE_CFLOCALECOPYCURRENT 1" >>confdefs.h fi INTL_MACOSX_LIBS= if test $gt_cv_func_CFPreferencesCopyAppValue = yes || test $gt_cv_func_CFLocaleCopyCurrent = yes; then INTL_MACOSX_LIBS="-Wl,-framework -Wl,CoreFoundation" fi LIBINTL= LTLIBINTL= POSUB= case " $gt_needs " in *" need-formatstring-macros "*) gt_api_version=3 ;; *" need-ngettext "*) gt_api_version=2 ;; *) gt_api_version=1 ;; esac gt_func_gnugettext_libc="gt_cv_func_gnugettext${gt_api_version}_libc" gt_func_gnugettext_libintl="gt_cv_func_gnugettext${gt_api_version}_libintl" if test 
"$USE_NLS" = "yes"; then gt_use_preinstalled_gnugettext=no if test $gt_api_version -ge 3; then gt_revision_test_code=' #ifndef __GNU_GETTEXT_SUPPORTED_REVISION #define __GNU_GETTEXT_SUPPORTED_REVISION(major) ((major) == 0 ? 0 : -1) #endif typedef int array [2 * (__GNU_GETTEXT_SUPPORTED_REVISION(0) >= 1) - 1]; ' else gt_revision_test_code= fi if test $gt_api_version -ge 2; then gt_expression_test_code=' + * ngettext ("", "", 0)' else gt_expression_test_code= fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU gettext in libc" >&5 $as_echo_n "checking for GNU gettext in libc... " >&6; } if eval \${$gt_func_gnugettext_libc+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #ifndef __GNU_GETTEXT_SUPPORTED_REVISION extern int _nl_msg_cat_cntr; extern int *_nl_domain_bindings; #define __GNU_GETTEXT_SYMBOL_EXPRESSION (_nl_msg_cat_cntr + *_nl_domain_bindings) #else #define __GNU_GETTEXT_SYMBOL_EXPRESSION 0 #endif $gt_revision_test_code int main () { bindtextdomain ("", ""); return * gettext ("")$gt_expression_test_code + __GNU_GETTEXT_SYMBOL_EXPRESSION ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : eval "$gt_func_gnugettext_libc=yes" else eval "$gt_func_gnugettext_libc=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi eval ac_res=\$$gt_func_gnugettext_libc { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if { eval "gt_val=\$$gt_func_gnugettext_libc"; test "$gt_val" != "yes"; }; then am_save_CPPFLAGS="$CPPFLAGS" for element in $INCICONV; do haveit= for x in $CPPFLAGS; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" if test "X$x" = "X$element"; then haveit=yes break fi done if test -z "$haveit"; then CPPFLAGS="${CPPFLAGS}${CPPFLAGS:+ }$element" fi 
done { $as_echo "$as_me:${as_lineno-$LINENO}: checking for iconv" >&5 $as_echo_n "checking for iconv... " >&6; } if ${am_cv_func_iconv+:} false; then : $as_echo_n "(cached) " >&6 else am_cv_func_iconv="no, consider installing GNU libiconv" am_cv_lib_iconv=no cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include int main () { iconv_t cd = iconv_open("",""); iconv(cd,NULL,NULL,NULL,NULL); iconv_close(cd); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : am_cv_func_iconv=yes fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext if test "$am_cv_func_iconv" != yes; then am_save_LIBS="$LIBS" LIBS="$LIBS $LIBICONV" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include int main () { iconv_t cd = iconv_open("",""); iconv(cd,NULL,NULL,NULL,NULL); iconv_close(cd); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : am_cv_lib_iconv=yes am_cv_func_iconv=yes fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS="$am_save_LIBS" fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_func_iconv" >&5 $as_echo "$am_cv_func_iconv" >&6; } if test "$am_cv_func_iconv" = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for working iconv" >&5 $as_echo_n "checking for working iconv... " >&6; } if ${am_cv_func_iconv_works+:} false; then : $as_echo_n "(cached) " >&6 else am_save_LIBS="$LIBS" if test $am_cv_lib_iconv = yes; then LIBS="$LIBS $LIBICONV" fi am_cv_func_iconv_works=no for ac_iconv_const in '' 'const'; do if test "$cross_compiling" = yes; then : case "$host_os" in aix* | hpux*) am_cv_func_iconv_works="guessing no" ;; *) am_cv_func_iconv_works="guessing yes" ;; esac else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include #include #ifndef ICONV_CONST # define ICONV_CONST $ac_iconv_const #endif int main () { int result = 0; /* Test against AIX 5.1 bug: Failures are not distinguishable from successful returns. */ { iconv_t cd_utf8_to_88591 = iconv_open ("ISO8859-1", "UTF-8"); if (cd_utf8_to_88591 != (iconv_t)(-1)) { static ICONV_CONST char input[] = "\342\202\254"; /* EURO SIGN */ char buf[10]; ICONV_CONST char *inptr = input; size_t inbytesleft = strlen (input); char *outptr = buf; size_t outbytesleft = sizeof (buf); size_t res = iconv (cd_utf8_to_88591, &inptr, &inbytesleft, &outptr, &outbytesleft); if (res == 0) result |= 1; iconv_close (cd_utf8_to_88591); } } /* Test against Solaris 10 bug: Failures are not distinguishable from successful returns. */ { iconv_t cd_ascii_to_88591 = iconv_open ("ISO8859-1", "646"); if (cd_ascii_to_88591 != (iconv_t)(-1)) { static ICONV_CONST char input[] = "\263"; char buf[10]; ICONV_CONST char *inptr = input; size_t inbytesleft = strlen (input); char *outptr = buf; size_t outbytesleft = sizeof (buf); size_t res = iconv (cd_ascii_to_88591, &inptr, &inbytesleft, &outptr, &outbytesleft); if (res == 0) result |= 2; iconv_close (cd_ascii_to_88591); } } /* Test against AIX 6.1..7.1 bug: Buffer overrun. */ { iconv_t cd_88591_to_utf8 = iconv_open ("UTF-8", "ISO-8859-1"); if (cd_88591_to_utf8 != (iconv_t)(-1)) { static ICONV_CONST char input[] = "\304"; static char buf[2] = { (char)0xDE, (char)0xAD }; ICONV_CONST char *inptr = input; size_t inbytesleft = 1; char *outptr = buf; size_t outbytesleft = 1; size_t res = iconv (cd_88591_to_utf8, &inptr, &inbytesleft, &outptr, &outbytesleft); if (res != (size_t)(-1) || outptr - buf > 1 || buf[1] != (char)0xAD) result |= 4; iconv_close (cd_88591_to_utf8); } } #if 0 /* This bug could be worked around by the caller. */ /* Test against HP-UX 11.11 bug: Positive return value instead of 0. 
*/ { iconv_t cd_88591_to_utf8 = iconv_open ("utf8", "iso88591"); if (cd_88591_to_utf8 != (iconv_t)(-1)) { static ICONV_CONST char input[] = "\304rger mit b\366sen B\374bchen ohne Augenma\337"; char buf[50]; ICONV_CONST char *inptr = input; size_t inbytesleft = strlen (input); char *outptr = buf; size_t outbytesleft = sizeof (buf); size_t res = iconv (cd_88591_to_utf8, &inptr, &inbytesleft, &outptr, &outbytesleft); if ((int)res > 0) result |= 8; iconv_close (cd_88591_to_utf8); } } #endif /* Test against HP-UX 11.11 bug: No converter from EUC-JP to UTF-8 is provided. */ if (/* Try standardized names. */ iconv_open ("UTF-8", "EUC-JP") == (iconv_t)(-1) /* Try IRIX, OSF/1 names. */ && iconv_open ("UTF-8", "eucJP") == (iconv_t)(-1) /* Try AIX names. */ && iconv_open ("UTF-8", "IBM-eucJP") == (iconv_t)(-1) /* Try HP-UX names. */ && iconv_open ("utf8", "eucJP") == (iconv_t)(-1)) result |= 16; return result; ; return 0; } _ACEOF if ac_fn_cxx_try_run "$LINENO"; then : am_cv_func_iconv_works=yes fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi test "$am_cv_func_iconv_works" = no || break done LIBS="$am_save_LIBS" fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_func_iconv_works" >&5 $as_echo "$am_cv_func_iconv_works" >&6; } case "$am_cv_func_iconv_works" in *no) am_func_iconv=no am_cv_lib_iconv=no ;; *) am_func_iconv=yes ;; esac else am_func_iconv=no am_cv_lib_iconv=no fi if test "$am_func_iconv" = yes; then $as_echo "#define HAVE_ICONV 1" >>confdefs.h fi if test "$am_cv_lib_iconv" = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to link with libiconv" >&5 $as_echo_n "checking how to link with libiconv... 
" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LIBICONV" >&5 $as_echo "$LIBICONV" >&6; } else CPPFLAGS="$am_save_CPPFLAGS" LIBICONV= LTLIBICONV= fi use_additional=yes acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval additional_includedir=\"$includedir\" eval additional_libdir=\"$libdir\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" # Check whether --with-libintl-prefix was given. if test "${with_libintl_prefix+set}" = set; then : withval=$with_libintl_prefix; if test "X$withval" = "Xno"; then use_additional=no else if test "X$withval" = "X"; then acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval additional_includedir=\"$includedir\" eval additional_libdir=\"$libdir\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" else additional_includedir="$withval/include" additional_libdir="$withval/$acl_libdirstem" if test "$acl_libdirstem2" != "$acl_libdirstem" \ && ! 
test -d "$withval/$acl_libdirstem"; then additional_libdir="$withval/$acl_libdirstem2" fi fi fi fi LIBINTL= LTLIBINTL= INCINTL= LIBINTL_PREFIX= HAVE_LIBINTL= rpathdirs= ltrpathdirs= names_already_handled= names_next_round='intl ' while test -n "$names_next_round"; do names_this_round="$names_next_round" names_next_round= for name in $names_this_round; do already_handled= for n in $names_already_handled; do if test "$n" = "$name"; then already_handled=yes break fi done if test -z "$already_handled"; then names_already_handled="$names_already_handled $name" uppername=`echo "$name" | sed -e 'y|abcdefghijklmnopqrstuvwxyz./+-|ABCDEFGHIJKLMNOPQRSTUVWXYZ____|'` eval value=\"\$HAVE_LIB$uppername\" if test -n "$value"; then if test "$value" = yes; then eval value=\"\$LIB$uppername\" test -z "$value" || LIBINTL="${LIBINTL}${LIBINTL:+ }$value" eval value=\"\$LTLIB$uppername\" test -z "$value" || LTLIBINTL="${LTLIBINTL}${LTLIBINTL:+ }$value" else : fi else found_dir= found_la= found_so= found_a= eval libname=\"$acl_libname_spec\" # typically: libname=lib$name if test -n "$acl_shlibext"; then shrext=".$acl_shlibext" # typically: shrext=.so else shrext= fi if test $use_additional = yes; then dir="$additional_libdir" if test -n "$acl_shlibext"; then if test -f "$dir/$libname$shrext"; then found_dir="$dir" found_so="$dir/$libname$shrext" else if test "$acl_library_names_spec" = '$libname$shrext$versuffix'; then ver=`(cd "$dir" && \ for f in "$libname$shrext".*; do echo "$f"; done \ | sed -e "s,^$libname$shrext\\\\.,," \ | sort -t '.' 
-n -r -k1,1 -k2,2 -k3,3 -k4,4 -k5,5 \ | sed 1q ) 2>/dev/null` if test -n "$ver" && test -f "$dir/$libname$shrext.$ver"; then found_dir="$dir" found_so="$dir/$libname$shrext.$ver" fi else eval library_names=\"$acl_library_names_spec\" for f in $library_names; do if test -f "$dir/$f"; then found_dir="$dir" found_so="$dir/$f" break fi done fi fi fi if test "X$found_dir" = "X"; then if test -f "$dir/$libname.$acl_libext"; then found_dir="$dir" found_a="$dir/$libname.$acl_libext" fi fi if test "X$found_dir" != "X"; then if test -f "$dir/$libname.la"; then found_la="$dir/$libname.la" fi fi fi if test "X$found_dir" = "X"; then for x in $LDFLAGS $LTLIBINTL; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" case "$x" in -L*) dir=`echo "X$x" | sed -e 's/^X-L//'` if test -n "$acl_shlibext"; then if test -f "$dir/$libname$shrext"; then found_dir="$dir" found_so="$dir/$libname$shrext" else if test "$acl_library_names_spec" = '$libname$shrext$versuffix'; then ver=`(cd "$dir" && \ for f in "$libname$shrext".*; do echo "$f"; done \ | sed -e "s,^$libname$shrext\\\\.,," \ | sort -t '.' 
-n -r -k1,1 -k2,2 -k3,3 -k4,4 -k5,5 \ | sed 1q ) 2>/dev/null` if test -n "$ver" && test -f "$dir/$libname$shrext.$ver"; then found_dir="$dir" found_so="$dir/$libname$shrext.$ver" fi else eval library_names=\"$acl_library_names_spec\" for f in $library_names; do if test -f "$dir/$f"; then found_dir="$dir" found_so="$dir/$f" break fi done fi fi fi if test "X$found_dir" = "X"; then if test -f "$dir/$libname.$acl_libext"; then found_dir="$dir" found_a="$dir/$libname.$acl_libext" fi fi if test "X$found_dir" != "X"; then if test -f "$dir/$libname.la"; then found_la="$dir/$libname.la" fi fi ;; esac if test "X$found_dir" != "X"; then break fi done fi if test "X$found_dir" != "X"; then LTLIBINTL="${LTLIBINTL}${LTLIBINTL:+ }-L$found_dir -l$name" if test "X$found_so" != "X"; then if test "$enable_rpath" = no \ || test "X$found_dir" = "X/usr/$acl_libdirstem" \ || test "X$found_dir" = "X/usr/$acl_libdirstem2"; then LIBINTL="${LIBINTL}${LIBINTL:+ }$found_so" else haveit= for x in $ltrpathdirs; do if test "X$x" = "X$found_dir"; then haveit=yes break fi done if test -z "$haveit"; then ltrpathdirs="$ltrpathdirs $found_dir" fi if test "$acl_hardcode_direct" = yes; then LIBINTL="${LIBINTL}${LIBINTL:+ }$found_so" else if test -n "$acl_hardcode_libdir_flag_spec" && test "$acl_hardcode_minus_L" = no; then LIBINTL="${LIBINTL}${LIBINTL:+ }$found_so" haveit= for x in $rpathdirs; do if test "X$x" = "X$found_dir"; then haveit=yes break fi done if test -z "$haveit"; then rpathdirs="$rpathdirs $found_dir" fi else haveit= for x in $LDFLAGS $LIBINTL; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" if test "X$x" = "X-L$found_dir"; then haveit=yes break fi done if test -z "$haveit"; then LIBINTL="${LIBINTL}${LIBINTL:+ }-L$found_dir" fi if test "$acl_hardcode_minus_L" != no; then LIBINTL="${LIBINTL}${LIBINTL:+ }$found_so" else 
LIBINTL="${LIBINTL}${LIBINTL:+ }-l$name" fi fi fi fi else if test "X$found_a" != "X"; then LIBINTL="${LIBINTL}${LIBINTL:+ }$found_a" else LIBINTL="${LIBINTL}${LIBINTL:+ }-L$found_dir -l$name" fi fi additional_includedir= case "$found_dir" in */$acl_libdirstem | */$acl_libdirstem/) basedir=`echo "X$found_dir" | sed -e 's,^X,,' -e "s,/$acl_libdirstem/"'*$,,'` if test "$name" = 'intl'; then LIBINTL_PREFIX="$basedir" fi additional_includedir="$basedir/include" ;; */$acl_libdirstem2 | */$acl_libdirstem2/) basedir=`echo "X$found_dir" | sed -e 's,^X,,' -e "s,/$acl_libdirstem2/"'*$,,'` if test "$name" = 'intl'; then LIBINTL_PREFIX="$basedir" fi additional_includedir="$basedir/include" ;; esac if test "X$additional_includedir" != "X"; then if test "X$additional_includedir" != "X/usr/include"; then haveit= if test "X$additional_includedir" = "X/usr/local/include"; then if test -n "$GCC"; then case $host_os in linux* | gnu* | k*bsd*-gnu) haveit=yes;; esac fi fi if test -z "$haveit"; then for x in $CPPFLAGS $INCINTL; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" if test "X$x" = "X-I$additional_includedir"; then haveit=yes break fi done if test -z "$haveit"; then if test -d "$additional_includedir"; then INCINTL="${INCINTL}${INCINTL:+ }-I$additional_includedir" fi fi fi fi fi if test -n "$found_la"; then save_libdir="$libdir" case "$found_la" in */* | *\\*) . "$found_la" ;; *) . 
"./$found_la" ;; esac libdir="$save_libdir" for dep in $dependency_libs; do case "$dep" in -L*) additional_libdir=`echo "X$dep" | sed -e 's/^X-L//'` if test "X$additional_libdir" != "X/usr/$acl_libdirstem" \ && test "X$additional_libdir" != "X/usr/$acl_libdirstem2"; then haveit= if test "X$additional_libdir" = "X/usr/local/$acl_libdirstem" \ || test "X$additional_libdir" = "X/usr/local/$acl_libdirstem2"; then if test -n "$GCC"; then case $host_os in linux* | gnu* | k*bsd*-gnu) haveit=yes;; esac fi fi if test -z "$haveit"; then haveit= for x in $LDFLAGS $LIBINTL; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" if test "X$x" = "X-L$additional_libdir"; then haveit=yes break fi done if test -z "$haveit"; then if test -d "$additional_libdir"; then LIBINTL="${LIBINTL}${LIBINTL:+ }-L$additional_libdir" fi fi haveit= for x in $LDFLAGS $LTLIBINTL; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" if test "X$x" = "X-L$additional_libdir"; then haveit=yes break fi done if test -z "$haveit"; then if test -d "$additional_libdir"; then LTLIBINTL="${LTLIBINTL}${LTLIBINTL:+ }-L$additional_libdir" fi fi fi fi ;; -R*) dir=`echo "X$dep" | sed -e 's/^X-R//'` if test "$enable_rpath" != no; then haveit= for x in $rpathdirs; do if test "X$x" = "X$dir"; then haveit=yes break fi done if test -z "$haveit"; then rpathdirs="$rpathdirs $dir" fi haveit= for x in $ltrpathdirs; do if test "X$x" = "X$dir"; then haveit=yes break fi done if test -z "$haveit"; then ltrpathdirs="$ltrpathdirs $dir" fi fi ;; -l*) names_next_round="$names_next_round "`echo "X$dep" | sed -e 's/^X-l//'` ;; *.la) names_next_round="$names_next_round "`echo "X$dep" | sed -e 's,^X.*/,,' -e 's,^lib,,' -e 's,\.la$,,'` ;; *) 
LIBINTL="${LIBINTL}${LIBINTL:+ }$dep" LTLIBINTL="${LTLIBINTL}${LTLIBINTL:+ }$dep" ;; esac done fi else LIBINTL="${LIBINTL}${LIBINTL:+ }-l$name" LTLIBINTL="${LTLIBINTL}${LTLIBINTL:+ }-l$name" fi fi fi done done if test "X$rpathdirs" != "X"; then if test -n "$acl_hardcode_libdir_separator"; then alldirs= for found_dir in $rpathdirs; do alldirs="${alldirs}${alldirs:+$acl_hardcode_libdir_separator}$found_dir" done acl_save_libdir="$libdir" libdir="$alldirs" eval flag=\"$acl_hardcode_libdir_flag_spec\" libdir="$acl_save_libdir" LIBINTL="${LIBINTL}${LIBINTL:+ }$flag" else for found_dir in $rpathdirs; do acl_save_libdir="$libdir" libdir="$found_dir" eval flag=\"$acl_hardcode_libdir_flag_spec\" libdir="$acl_save_libdir" LIBINTL="${LIBINTL}${LIBINTL:+ }$flag" done fi fi if test "X$ltrpathdirs" != "X"; then for found_dir in $ltrpathdirs; do LTLIBINTL="${LTLIBINTL}${LTLIBINTL:+ }-R$found_dir" done fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU gettext in libintl" >&5 $as_echo_n "checking for GNU gettext in libintl... " >&6; } if eval \${$gt_func_gnugettext_libintl+:} false; then : $as_echo_n "(cached) " >&6 else gt_save_CPPFLAGS="$CPPFLAGS" CPPFLAGS="$CPPFLAGS $INCINTL" gt_save_LIBS="$LIBS" LIBS="$LIBS $LIBINTL" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include #ifndef __GNU_GETTEXT_SUPPORTED_REVISION extern int _nl_msg_cat_cntr; extern #ifdef __cplusplus "C" #endif const char *_nl_expand_alias (const char *); #define __GNU_GETTEXT_SYMBOL_EXPRESSION (_nl_msg_cat_cntr + *_nl_expand_alias ("")) #else #define __GNU_GETTEXT_SYMBOL_EXPRESSION 0 #endif $gt_revision_test_code int main () { bindtextdomain ("", ""); return * gettext ("")$gt_expression_test_code + __GNU_GETTEXT_SYMBOL_EXPRESSION ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : eval "$gt_func_gnugettext_libintl=yes" else eval "$gt_func_gnugettext_libintl=no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext if { eval "gt_val=\$$gt_func_gnugettext_libintl"; test "$gt_val" != yes; } && test -n "$LIBICONV"; then LIBS="$LIBS $LIBICONV" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #ifndef __GNU_GETTEXT_SUPPORTED_REVISION extern int _nl_msg_cat_cntr; extern #ifdef __cplusplus "C" #endif const char *_nl_expand_alias (const char *); #define __GNU_GETTEXT_SYMBOL_EXPRESSION (_nl_msg_cat_cntr + *_nl_expand_alias ("")) #else #define __GNU_GETTEXT_SYMBOL_EXPRESSION 0 #endif $gt_revision_test_code int main () { bindtextdomain ("", ""); return * gettext ("")$gt_expression_test_code + __GNU_GETTEXT_SYMBOL_EXPRESSION ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : LIBINTL="$LIBINTL $LIBICONV" LTLIBINTL="$LTLIBINTL $LTLIBICONV" eval "$gt_func_gnugettext_libintl=yes" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi CPPFLAGS="$gt_save_CPPFLAGS" LIBS="$gt_save_LIBS" fi eval ac_res=\$$gt_func_gnugettext_libintl { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } fi if { eval "gt_val=\$$gt_func_gnugettext_libc"; test "$gt_val" = "yes"; } \ || { { eval "gt_val=\$$gt_func_gnugettext_libintl"; test "$gt_val" = "yes"; } \ && test "$PACKAGE" != gettext-runtime \ && test "$PACKAGE" != gettext-tools; }; then 
gt_use_preinstalled_gnugettext=yes else LIBINTL= LTLIBINTL= INCINTL= fi if test -n "$INTL_MACOSX_LIBS"; then if test "$gt_use_preinstalled_gnugettext" = "yes" \ || test "$nls_cv_use_gnu_gettext" = "yes"; then LIBINTL="$LIBINTL $INTL_MACOSX_LIBS" LTLIBINTL="$LTLIBINTL $INTL_MACOSX_LIBS" fi fi if test "$gt_use_preinstalled_gnugettext" = "yes" \ || test "$nls_cv_use_gnu_gettext" = "yes"; then $as_echo "#define ENABLE_NLS 1" >>confdefs.h else USE_NLS=no fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to use NLS" >&5 $as_echo_n "checking whether to use NLS... " >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: $USE_NLS" >&5 $as_echo "$USE_NLS" >&6; } if test "$USE_NLS" = "yes"; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking where the gettext function comes from" >&5 $as_echo_n "checking where the gettext function comes from... " >&6; } if test "$gt_use_preinstalled_gnugettext" = "yes"; then if { eval "gt_val=\$$gt_func_gnugettext_libintl"; test "$gt_val" = "yes"; }; then gt_source="external libintl" else gt_source="libc" fi else gt_source="included intl directory" fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $gt_source" >&5 $as_echo "$gt_source" >&6; } fi if test "$USE_NLS" = "yes"; then if test "$gt_use_preinstalled_gnugettext" = "yes"; then if { eval "gt_val=\$$gt_func_gnugettext_libintl"; test "$gt_val" = "yes"; }; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to link with libintl" >&5 $as_echo_n "checking how to link with libintl... 
" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LIBINTL" >&5 $as_echo "$LIBINTL" >&6; } for element in $INCINTL; do haveit= for x in $CPPFLAGS; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" if test "X$x" = "X$element"; then haveit=yes break fi done if test -z "$haveit"; then CPPFLAGS="${CPPFLAGS}${CPPFLAGS:+ }$element" fi done fi $as_echo "#define HAVE_GETTEXT 1" >>confdefs.h $as_echo "#define HAVE_DCGETTEXT 1" >>confdefs.h fi POSUB=po fi INTLLIBS="$LIBINTL" XGETTEXT_EXTRA_OPTIONS='--keyword=dar_gettext' am_save_CPPFLAGS="$CPPFLAGS" for element in $INCICONV; do haveit= for x in $CPPFLAGS; do acl_save_prefix="$prefix" prefix="$acl_final_prefix" acl_save_exec_prefix="$exec_prefix" exec_prefix="$acl_final_exec_prefix" eval x=\"$x\" exec_prefix="$acl_save_exec_prefix" prefix="$acl_save_prefix" if test "X$x" = "X$element"; then haveit=yes break fi done if test -z "$haveit"; then CPPFLAGS="${CPPFLAGS}${CPPFLAGS:+ }$element" fi done { $as_echo "$as_me:${as_lineno-$LINENO}: checking for iconv" >&5 $as_echo_n "checking for iconv... " >&6; } if ${am_cv_func_iconv+:} false; then : $as_echo_n "(cached) " >&6 else am_cv_func_iconv="no, consider installing GNU libiconv" am_cv_lib_iconv=no cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include int main () { iconv_t cd = iconv_open("",""); iconv(cd,NULL,NULL,NULL,NULL); iconv_close(cd); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : am_cv_func_iconv=yes fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext if test "$am_cv_func_iconv" != yes; then am_save_LIBS="$LIBS" LIBS="$LIBS $LIBICONV" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include #include int main () { iconv_t cd = iconv_open("",""); iconv(cd,NULL,NULL,NULL,NULL); iconv_close(cd); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : am_cv_lib_iconv=yes am_cv_func_iconv=yes fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS="$am_save_LIBS" fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_func_iconv" >&5 $as_echo "$am_cv_func_iconv" >&6; } if test "$am_cv_func_iconv" = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for working iconv" >&5 $as_echo_n "checking for working iconv... " >&6; } if ${am_cv_func_iconv_works+:} false; then : $as_echo_n "(cached) " >&6 else am_save_LIBS="$LIBS" if test $am_cv_lib_iconv = yes; then LIBS="$LIBS $LIBICONV" fi am_cv_func_iconv_works=no for ac_iconv_const in '' 'const'; do if test "$cross_compiling" = yes; then : case "$host_os" in aix* | hpux*) am_cv_func_iconv_works="guessing no" ;; *) am_cv_func_iconv_works="guessing yes" ;; esac else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include #ifndef ICONV_CONST # define ICONV_CONST $ac_iconv_const #endif int main () { int result = 0; /* Test against AIX 5.1 bug: Failures are not distinguishable from successful returns. */ { iconv_t cd_utf8_to_88591 = iconv_open ("ISO8859-1", "UTF-8"); if (cd_utf8_to_88591 != (iconv_t)(-1)) { static ICONV_CONST char input[] = "\342\202\254"; /* EURO SIGN */ char buf[10]; ICONV_CONST char *inptr = input; size_t inbytesleft = strlen (input); char *outptr = buf; size_t outbytesleft = sizeof (buf); size_t res = iconv (cd_utf8_to_88591, &inptr, &inbytesleft, &outptr, &outbytesleft); if (res == 0) result |= 1; iconv_close (cd_utf8_to_88591); } } /* Test against Solaris 10 bug: Failures are not distinguishable from successful returns. 
*/ { iconv_t cd_ascii_to_88591 = iconv_open ("ISO8859-1", "646"); if (cd_ascii_to_88591 != (iconv_t)(-1)) { static ICONV_CONST char input[] = "\263"; char buf[10]; ICONV_CONST char *inptr = input; size_t inbytesleft = strlen (input); char *outptr = buf; size_t outbytesleft = sizeof (buf); size_t res = iconv (cd_ascii_to_88591, &inptr, &inbytesleft, &outptr, &outbytesleft); if (res == 0) result |= 2; iconv_close (cd_ascii_to_88591); } } /* Test against AIX 6.1..7.1 bug: Buffer overrun. */ { iconv_t cd_88591_to_utf8 = iconv_open ("UTF-8", "ISO-8859-1"); if (cd_88591_to_utf8 != (iconv_t)(-1)) { static ICONV_CONST char input[] = "\304"; static char buf[2] = { (char)0xDE, (char)0xAD }; ICONV_CONST char *inptr = input; size_t inbytesleft = 1; char *outptr = buf; size_t outbytesleft = 1; size_t res = iconv (cd_88591_to_utf8, &inptr, &inbytesleft, &outptr, &outbytesleft); if (res != (size_t)(-1) || outptr - buf > 1 || buf[1] != (char)0xAD) result |= 4; iconv_close (cd_88591_to_utf8); } } #if 0 /* This bug could be worked around by the caller. */ /* Test against HP-UX 11.11 bug: Positive return value instead of 0. */ { iconv_t cd_88591_to_utf8 = iconv_open ("utf8", "iso88591"); if (cd_88591_to_utf8 != (iconv_t)(-1)) { static ICONV_CONST char input[] = "\304rger mit b\366sen B\374bchen ohne Augenma\337"; char buf[50]; ICONV_CONST char *inptr = input; size_t inbytesleft = strlen (input); char *outptr = buf; size_t outbytesleft = sizeof (buf); size_t res = iconv (cd_88591_to_utf8, &inptr, &inbytesleft, &outptr, &outbytesleft); if ((int)res > 0) result |= 8; iconv_close (cd_88591_to_utf8); } } #endif /* Test against HP-UX 11.11 bug: No converter from EUC-JP to UTF-8 is provided. */ if (/* Try standardized names. */ iconv_open ("UTF-8", "EUC-JP") == (iconv_t)(-1) /* Try IRIX, OSF/1 names. */ && iconv_open ("UTF-8", "eucJP") == (iconv_t)(-1) /* Try AIX names. */ && iconv_open ("UTF-8", "IBM-eucJP") == (iconv_t)(-1) /* Try HP-UX names. 
*/ && iconv_open ("utf8", "eucJP") == (iconv_t)(-1)) result |= 16; return result; ; return 0; } _ACEOF if ac_fn_cxx_try_run "$LINENO"; then : am_cv_func_iconv_works=yes fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi test "$am_cv_func_iconv_works" = no || break done LIBS="$am_save_LIBS" fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_func_iconv_works" >&5 $as_echo "$am_cv_func_iconv_works" >&6; } case "$am_cv_func_iconv_works" in *no) am_func_iconv=no am_cv_lib_iconv=no ;; *) am_func_iconv=yes ;; esac else am_func_iconv=no am_cv_lib_iconv=no fi if test "$am_func_iconv" = yes; then $as_echo "#define HAVE_ICONV 1" >>confdefs.h fi if test "$am_cv_lib_iconv" = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to link with libiconv" >&5 $as_echo_n "checking how to link with libiconv... " >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LIBICONV" >&5 $as_echo "$LIBICONV" >&6; } else CPPFLAGS="$am_save_CPPFLAGS" LIBICONV= LTLIBICONV= fi if test "$am_cv_func_iconv" = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for iconv declaration" >&5 $as_echo_n "checking for iconv declaration... " >&6; } if ${am_cv_proto_iconv+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include #include extern #ifdef __cplusplus "C" #endif #if defined(__STDC__) || defined(_MSC_VER) || defined(__cplusplus) size_t iconv (iconv_t cd, char * *inbuf, size_t *inbytesleft, char * *outbuf, size_t *outbytesleft); #else size_t iconv(); #endif int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : am_cv_proto_iconv_arg1="" else am_cv_proto_iconv_arg1="const" fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext am_cv_proto_iconv="extern size_t iconv (iconv_t cd, $am_cv_proto_iconv_arg1 char * *inbuf, size_t *inbytesleft, char * *outbuf, size_t *outbytesleft);" fi am_cv_proto_iconv=`echo "$am_cv_proto_iconv" | tr -s ' ' | sed -e 's/( /(/'` { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_proto_iconv" >&5 $as_echo " $am_cv_proto_iconv" >&6; } cat >>confdefs.h <<_ACEOF #define ICONV_CONST $am_cv_proto_iconv_arg1 _ACEOF fi #### # configure checks what is available from the operational system: # - it displays things on output for the user running the configure script has status information # - it sets some shell variable that are not used outside the configuration script # - it sets some shell variable that can be susbstitued in Makefile.in files (see AC_SUBST() and AC_CONFIG_FILES()) # also known as "output variables" # - it defines macros that get stored in config.h and used in source code (see AC_DEFINE()) # # header files: # header files are #included in source code if they HAVE_... macro has been defined in config.h # # libraries: # necessary library flags are stored in the "output variables" LIBS substituable shell variable and passed to Makefile.in # as well as other like LIBTHREADAR_LIBS/CFLAGS and LIBCURL_LIBSS/CFLAGS due to the use of PKG_CHECK_MODULE macro # that get substitued in Makefiles.in when the @LIBS@ form is met. 
# In addition, the AM_PATH_GPGME defines he GPGME_CFLAGS and GPGME_LIBS output variable # # have a specific variable for pkgconfig, setting the default value: pkgconfigdir=${libdir}/pkgconfig # Check whether --with-pkgconfigdir was given. if test "${with_pkgconfigdir+set}" = set; then : withval=$with_pkgconfigdir; if ! -z "$withval" ; then pkgconfigdir=$withval fi fi # Checks for programs. ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu if test -z "$CXX"; then if test -n "$CCC"; then CXX=$CCC else if test -n "$ac_tool_prefix"; then for ac_prog in g++ c++ gpp aCC CC cxx cc++ cl.exe FCC KCC RCC xlC_r xlC do # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. set dummy $ac_tool_prefix$ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_CXX+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$CXX"; then ac_cv_prog_CXX="$CXX" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CXX="$ac_tool_prefix$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi CXX=$ac_cv_prog_CXX if test -n "$CXX"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CXX" >&5 $as_echo "$CXX" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$CXX" && break done fi if test -z "$CXX"; then ac_ct_CXX=$CXX for ac_prog in g++ c++ gpp aCC CC cxx cc++ cl.exe FCC KCC RCC xlC_r xlC do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_CXX+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_CXX"; then ac_cv_prog_ac_ct_CXX="$ac_ct_CXX" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_CXX="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_CXX=$ac_cv_prog_ac_ct_CXX if test -n "$ac_ct_CXX"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CXX" >&5 $as_echo "$ac_ct_CXX" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$ac_ct_CXX" && break done if test "x$ac_ct_CXX" = x; then CXX="g++" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac CXX=$ac_ct_CXX fi fi fi fi # Provide some information about the compiler. $as_echo "$as_me:${as_lineno-$LINENO}: checking for C++ compiler version" >&5 set X $ac_compile ac_compiler=$2 for ac_option in --version -v -V -qversion; do { { ac_try="$ac_compiler $ac_option >&5" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_compiler $ac_option >&5") 2>conftest.err ac_status=$? if test -s conftest.err; then sed '10a\ ... rest of stderr output deleted ... 10q' conftest.err >conftest.er1 cat conftest.er1 >&5 fi rm -f conftest.er1 conftest.err $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } done { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C++ compiler" >&5 $as_echo_n "checking whether we are using the GNU C++ compiler... " >&6; } if ${ac_cv_cxx_compiler_gnu+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ int main () { #ifndef __GNUC__ choke me #endif ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_compiler_gnu=yes else ac_compiler_gnu=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_cv_cxx_compiler_gnu=$ac_compiler_gnu fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_cxx_compiler_gnu" >&5 $as_echo "$ac_cv_cxx_compiler_gnu" >&6; } if test $ac_compiler_gnu = yes; then GXX=yes else GXX= fi ac_test_CXXFLAGS=${CXXFLAGS+set} ac_save_CXXFLAGS=$CXXFLAGS { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CXX accepts -g" >&5 $as_echo_n "checking whether $CXX accepts -g... " >&6; } if ${ac_cv_prog_cxx_g+:} false; then : $as_echo_n "(cached) " >&6 else ac_save_cxx_werror_flag=$ac_cxx_werror_flag ac_cxx_werror_flag=yes ac_cv_prog_cxx_g=no CXXFLAGS="-g" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_prog_cxx_g=yes else CXXFLAGS="" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : else ac_cxx_werror_flag=$ac_save_cxx_werror_flag CXXFLAGS="-g" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_prog_cxx_g=yes fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_cxx_werror_flag=$ac_save_cxx_werror_flag fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cxx_g" >&5 $as_echo "$ac_cv_prog_cxx_g" >&6; } if test "$ac_test_CXXFLAGS" = set; then CXXFLAGS=$ac_save_CXXFLAGS elif test $ac_cv_prog_cxx_g = yes; then if test "$GXX" = yes; then CXXFLAGS="-g -O2" else CXXFLAGS="-g" fi else if test "$GXX" = yes; then CXXFLAGS="-O2" else CXXFLAGS= fi fi ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu depcc="$CXX" am_compiler_list= { $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5 $as_echo_n "checking dependency style of $depcc... " >&6; } if ${am_cv_CXX_dependencies_compiler_type+:} false; then : $as_echo_n "(cached) " >&6 else if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then # We make a subdir and do the tests there. Otherwise we can end up # making bogus files that we don't know about and never remove. For # instance it was reported that on HP-UX the gcc test will end up # making a dummy file named 'D' -- because '-MD' means "put the output # in D". rm -rf conftest.dir mkdir conftest.dir # Copy depcomp to subdir because otherwise we won't find it if we're # using a relative directory. cp "$am_depcomp" conftest.dir cd conftest.dir # We will build objects and dependencies in a subdirectory because # it helps to detect inapplicable dependency modes. 
For instance # both Tru64's cc and ICC support -MD to output dependencies as a # side effect of compilation, but ICC will put the dependencies in # the current directory while Tru64 will put them in the object # directory. mkdir sub am_cv_CXX_dependencies_compiler_type=none if test "$am_compiler_list" = ""; then am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp` fi am__universal=false case " $depcc " in #( *\ -arch\ *\ -arch\ *) am__universal=true ;; esac for depmode in $am_compiler_list; do # Setup a source with many dependencies, because some compilers # like to wrap large dependency lists on column 80 (with \), and # we should not choose a depcomp mode which is confused by this. # # We need to recreate these files for each test, as the compiler may # overwrite some of them when testing with obscure command lines. # This happens at least with the AIX C compiler. : > sub/conftest.c for i in 1 2 3 4 5 6; do echo '#include "conftst'$i'.h"' >> sub/conftest.c # Using ": > sub/conftst$i.h" creates only sub/conftst1.h with # Solaris 10 /bin/sh. echo '/* dummy */' > sub/conftst$i.h done echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf # We check with '-c' and '-o' for the sake of the "dashmstdout" # mode. It turns out that the SunPro C++ compiler does not properly # handle '-M -o', and we need to detect this. Also, some Intel # versions had trouble with output in subdirs. am__obj=sub/conftest.${OBJEXT-o} am__minus_obj="-o $am__obj" case $depmode in gcc) # This depmode causes a compiler race in universal mode. test "$am__universal" = false || continue ;; nosideeffect) # After this tag, mechanisms are not by side-effect, so they'll # only be used when explicitly requested. if test "x$enable_dependency_tracking" = xyes; then continue else break fi ;; msvc7 | msvc7msys | msvisualcpp | msvcmsys) # This compiler won't grok '-c -o', but also, the minuso test has # not run yet. 
These depmodes are late enough in the game, and # so weak that their functioning should not be impacted. am__obj=conftest.${OBJEXT-o} am__minus_obj= ;; none) break ;; esac if depmode=$depmode \ source=sub/conftest.c object=$am__obj \ depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \ $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \ >/dev/null 2>conftest.err && grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 && grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 && grep $am__obj sub/conftest.Po > /dev/null 2>&1 && ${MAKE-make} -s -f confmf > /dev/null 2>&1; then # icc doesn't choke on unknown options, it will just issue warnings # or remarks (even with -Werror). So we grep stderr for any message # that says an option was ignored or not supported. # When given -MP, icc 7.0 and 7.1 complain thusly: # icc: Command line warning: ignoring option '-M'; no argument required # The diagnosis changed in icc 8.0: # icc: Command line remark: option '-MP' not supported if (grep 'ignoring option' conftest.err || grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else am_cv_CXX_dependencies_compiler_type=$depmode break fi fi done cd .. rm -rf conftest.dir else am_cv_CXX_dependencies_compiler_type=none fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_CXX_dependencies_compiler_type" >&5 $as_echo "$am_cv_CXX_dependencies_compiler_type" >&6; } CXXDEPMODE=depmode=$am_cv_CXX_dependencies_compiler_type if test "x$enable_dependency_tracking" != xno \ && test "$am_cv_CXX_dependencies_compiler_type" = gcc3; then am__fastdepCXX_TRUE= am__fastdepCXX_FALSE='#' else am__fastdepCXX_TRUE='#' am__fastdepCXX_FALSE= fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}gcc", so it can be a program name with args. 
set dummy ${ac_tool_prefix}gcc; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_CC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CC="${ac_tool_prefix}gcc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 $as_echo "$CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_CC"; then ac_ct_CC=$CC # Extract the first word of "gcc", so it can be a program name with args. set dummy gcc; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_CC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_CC"; then ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_CC="gcc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_CC=$ac_cv_prog_ac_ct_CC if test -n "$ac_ct_CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 $as_echo "$ac_ct_CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_CC" = x; then CC="" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac CC=$ac_ct_CC fi else CC="$ac_cv_prog_CC" fi if test -z "$CC"; then if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}cc", so it can be a program name with args. set dummy ${ac_tool_prefix}cc; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_CC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CC="${ac_tool_prefix}cc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 $as_echo "$CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi fi if test -z "$CC"; then # Extract the first word of "cc", so it can be a program name with args. 
set dummy cc; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_CC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else ac_prog_rejected=no as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then if test "$as_dir/$ac_word$ac_exec_ext" = "/usr/ucb/cc"; then ac_prog_rejected=yes continue fi ac_cv_prog_CC="cc" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS if test $ac_prog_rejected = yes; then # We found a bogon in the path, so make sure we never use it. set dummy $ac_cv_prog_CC shift if test $# != 0; then # We chose a different compiler from the bogus one. # However, it has the same basename, so the bogon will be chosen # first if we set CC to just the basename; use the full file name. shift ac_cv_prog_CC="$as_dir/$ac_word${1+' '}$@" fi fi fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 $as_echo "$CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$CC"; then if test -n "$ac_tool_prefix"; then for ac_prog in cl.exe do # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. set dummy $ac_tool_prefix$ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_CC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$CC"; then ac_cv_prog_CC="$CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_CC="$ac_tool_prefix$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi CC=$ac_cv_prog_CC if test -n "$CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CC" >&5 $as_echo "$CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$CC" && break done fi if test -z "$CC"; then ac_ct_CC=$CC for ac_prog in cl.exe do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_CC+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_CC"; then ac_cv_prog_ac_ct_CC="$ac_ct_CC" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_CC="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_CC=$ac_cv_prog_ac_ct_CC if test -n "$ac_ct_CC"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_CC" >&5 $as_echo "$ac_ct_CC" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$ac_ct_CC" && break done if test "x$ac_ct_CC" = x; then CC="" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac CC=$ac_ct_CC fi fi fi test -z "$CC" && { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "no acceptable C compiler found in \$PATH See \`config.log' for more details" "$LINENO" 5; } # Provide some information about the compiler. $as_echo "$as_me:${as_lineno-$LINENO}: checking for C compiler version" >&5 set X $ac_compile ac_compiler=$2 for ac_option in --version -v -V -qversion; do { { ac_try="$ac_compiler $ac_option >&5" case "(($ac_try" in *\"* | *\`* | *\\*) ac_try_echo=\$ac_try;; *) ac_try_echo=$ac_try;; esac eval ac_try_echo="\"\$as_me:${as_lineno-$LINENO}: $ac_try_echo\"" $as_echo "$ac_try_echo"; } >&5 (eval "$ac_compiler $ac_option >&5") 2>conftest.err ac_status=$? if test -s conftest.err; then sed '10a\ ... rest of stderr output deleted ... 10q' conftest.err >conftest.er1 cat conftest.er1 >&5 fi rm -f conftest.er1 conftest.err $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 test $ac_status = 0; } done { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether we are using the GNU C compiler" >&5 $as_echo_n "checking whether we are using the GNU C compiler... " >&6; } if ${ac_cv_c_compiler_gnu+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { #ifndef __GNUC__ choke me #endif ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_compiler_gnu=yes else ac_compiler_gnu=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_cv_c_compiler_gnu=$ac_compiler_gnu fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_compiler_gnu" >&5 $as_echo "$ac_cv_c_compiler_gnu" >&6; } if test $ac_compiler_gnu = yes; then GCC=yes else GCC= fi ac_test_CFLAGS=${CFLAGS+set} ac_save_CFLAGS=$CFLAGS { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC accepts -g" >&5 $as_echo_n "checking whether $CC accepts -g... " >&6; } if ${ac_cv_prog_cc_g+:} false; then : $as_echo_n "(cached) " >&6 else ac_save_c_werror_flag=$ac_c_werror_flag ac_c_werror_flag=yes ac_cv_prog_cc_g=no CFLAGS="-g" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_prog_cc_g=yes else CFLAGS="" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : else ac_c_werror_flag=$ac_save_c_werror_flag CFLAGS="-g" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_prog_cc_g=yes fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_c_werror_flag=$ac_save_c_werror_flag fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_g" >&5 $as_echo "$ac_cv_prog_cc_g" >&6; } if test "$ac_test_CFLAGS" = set; then CFLAGS=$ac_save_CFLAGS elif test $ac_cv_prog_cc_g = yes; then if test "$GCC" = yes; then CFLAGS="-g -O2" else CFLAGS="-g" fi else if test "$GCC" = yes; then CFLAGS="-O2" else CFLAGS= fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $CC option to accept ISO C89" >&5 $as_echo_n "checking for $CC option to accept ISO C89... " >&6; } if ${ac_cv_prog_cc_c89+:} false; then : $as_echo_n "(cached) " >&6 else ac_cv_prog_cc_c89=no ac_save_CC=$CC cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include struct stat; /* Most of the following tests are stolen from RCS 5.7's src/conf.sh. */ struct buf { int x; }; FILE * (*rcsopen) (struct buf *, struct stat *, int); static char *e (p, i) char **p; int i; { return p[i]; } static char *f (char * (*g) (char **, int), char **p, ...) { char *s; va_list v; va_start (v,p); s = g (p, va_arg (v,int)); va_end (v); return s; } /* OSF 4.0 Compaq cc is some sort of almost-ANSI by default. It has function prototypes and stuff, but not '\xHH' hex character constants. These don't provoke an error unfortunately, instead are silently treated as 'x'. The following induces an error, until -std is added to get proper ANSI mode. Curiously '\x00'!='x' always comes out true, for an array size at least. It's necessary to write '\x00'==0 to get something that's true only with -std. */ int osf4_cc_array ['\x00' == 0 ? 1 : -1]; /* IBM C 6 for AIX is almost-ANSI by default, but it replaces macro parameters inside strings and character constants. 
*/ #define FOO(x) 'x' int xlc6_cc_array[FOO(a) == 'x' ? 1 : -1]; int test (int i, double x); struct s1 {int (*f) (int a);}; struct s2 {int (*f) (double a);}; int pairnames (int, char **, FILE *(*)(struct buf *, struct stat *, int), int, int); int argc; char **argv; int main () { return f (e, argv, 0) != argv[0] || f (e, argv, 1) != argv[1]; ; return 0; } _ACEOF for ac_arg in '' -qlanglvl=extc89 -qlanglvl=ansi -std \ -Ae "-Aa -D_HPUX_SOURCE" "-Xc -D__EXTENSIONS__" do CC="$ac_save_CC $ac_arg" if ac_fn_c_try_compile "$LINENO"; then : ac_cv_prog_cc_c89=$ac_arg fi rm -f core conftest.err conftest.$ac_objext test "x$ac_cv_prog_cc_c89" != "xno" && break done rm -f conftest.$ac_ext CC=$ac_save_CC fi # AC_CACHE_VAL case "x$ac_cv_prog_cc_c89" in x) { $as_echo "$as_me:${as_lineno-$LINENO}: result: none needed" >&5 $as_echo "none needed" >&6; } ;; xno) { $as_echo "$as_me:${as_lineno-$LINENO}: result: unsupported" >&5 $as_echo "unsupported" >&6; } ;; *) CC="$CC $ac_cv_prog_cc_c89" { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_cc_c89" >&5 $as_echo "$ac_cv_prog_cc_c89" >&6; } ;; esac if test "x$ac_cv_prog_cc_c89" != xno; then : fi ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC understands -c and -o together" >&5 $as_echo_n "checking whether $CC understands -c and -o together... " >&6; } if ${am_cv_prog_cc_c_o+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ int main () { ; return 0; } _ACEOF # Make sure it works both with $CC and with simple cc. # Following AC_PROG_CC_C_O, we do the test twice because some # compilers refuse to overwrite an existing .o file with -o, # though they will create one. am_cv_prog_cc_c_o=yes for am_i in 1 2; do if { echo "$as_me:$LINENO: $CC -c conftest.$ac_ext -o conftest2.$ac_objext" >&5 ($CC -c conftest.$ac_ext -o conftest2.$ac_objext) >&5 2>&5 ac_status=$? echo "$as_me:$LINENO: \$? = $ac_status" >&5 (exit $ac_status); } \ && test -f conftest2.$ac_objext; then : OK else am_cv_prog_cc_c_o=no break fi done rm -f core conftest* unset am_i fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_prog_cc_c_o" >&5 $as_echo "$am_cv_prog_cc_c_o" >&6; } if test "$am_cv_prog_cc_c_o" != yes; then # Losing compiler, so override with the script. # FIXME: It is wrong to rewrite CC. # But if we don't then we get into trouble of one sort or another. # A longer-term fix would be to have automake use am__CC in this case, # and then we could set am__CC="\$(top_srcdir)/compile \$(CC)" CC="$am_aux_dir/compile $CC" fi ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu depcc="$CC" am_compiler_list= { $as_echo "$as_me:${as_lineno-$LINENO}: checking dependency style of $depcc" >&5 $as_echo_n "checking dependency style of $depcc... " >&6; } if ${am_cv_CC_dependencies_compiler_type+:} false; then : $as_echo_n "(cached) " >&6 else if test -z "$AMDEP_TRUE" && test -f "$am_depcomp"; then # We make a subdir and do the tests there. Otherwise we can end up # making bogus files that we don't know about and never remove. For # instance it was reported that on HP-UX the gcc test will end up # making a dummy file named 'D' -- because '-MD' means "put the output # in D". 
rm -rf conftest.dir mkdir conftest.dir # Copy depcomp to subdir because otherwise we won't find it if we're # using a relative directory. cp "$am_depcomp" conftest.dir cd conftest.dir # We will build objects and dependencies in a subdirectory because # it helps to detect inapplicable dependency modes. For instance # both Tru64's cc and ICC support -MD to output dependencies as a # side effect of compilation, but ICC will put the dependencies in # the current directory while Tru64 will put them in the object # directory. mkdir sub am_cv_CC_dependencies_compiler_type=none if test "$am_compiler_list" = ""; then am_compiler_list=`sed -n 's/^#*\([a-zA-Z0-9]*\))$/\1/p' < ./depcomp` fi am__universal=false case " $depcc " in #( *\ -arch\ *\ -arch\ *) am__universal=true ;; esac for depmode in $am_compiler_list; do # Setup a source with many dependencies, because some compilers # like to wrap large dependency lists on column 80 (with \), and # we should not choose a depcomp mode which is confused by this. # # We need to recreate these files for each test, as the compiler may # overwrite some of them when testing with obscure command lines. # This happens at least with the AIX C compiler. : > sub/conftest.c for i in 1 2 3 4 5 6; do echo '#include "conftst'$i'.h"' >> sub/conftest.c # Using ": > sub/conftst$i.h" creates only sub/conftst1.h with # Solaris 10 /bin/sh. echo '/* dummy */' > sub/conftst$i.h done echo "${am__include} ${am__quote}sub/conftest.Po${am__quote}" > confmf # We check with '-c' and '-o' for the sake of the "dashmstdout" # mode. It turns out that the SunPro C++ compiler does not properly # handle '-M -o', and we need to detect this. Also, some Intel # versions had trouble with output in subdirs. am__obj=sub/conftest.${OBJEXT-o} am__minus_obj="-o $am__obj" case $depmode in gcc) # This depmode causes a compiler race in universal mode. 
test "$am__universal" = false || continue ;; nosideeffect) # After this tag, mechanisms are not by side-effect, so they'll # only be used when explicitly requested. if test "x$enable_dependency_tracking" = xyes; then continue else break fi ;; msvc7 | msvc7msys | msvisualcpp | msvcmsys) # This compiler won't grok '-c -o', but also, the minuso test has # not run yet. These depmodes are late enough in the game, and # so weak that their functioning should not be impacted. am__obj=conftest.${OBJEXT-o} am__minus_obj= ;; none) break ;; esac if depmode=$depmode \ source=sub/conftest.c object=$am__obj \ depfile=sub/conftest.Po tmpdepfile=sub/conftest.TPo \ $SHELL ./depcomp $depcc -c $am__minus_obj sub/conftest.c \ >/dev/null 2>conftest.err && grep sub/conftst1.h sub/conftest.Po > /dev/null 2>&1 && grep sub/conftst6.h sub/conftest.Po > /dev/null 2>&1 && grep $am__obj sub/conftest.Po > /dev/null 2>&1 && ${MAKE-make} -s -f confmf > /dev/null 2>&1; then # icc doesn't choke on unknown options, it will just issue warnings # or remarks (even with -Werror). So we grep stderr for any message # that says an option was ignored or not supported. # When given -MP, icc 7.0 and 7.1 complain thusly: # icc: Command line warning: ignoring option '-M'; no argument required # The diagnosis changed in icc 8.0: # icc: Command line remark: option '-MP' not supported if (grep 'ignoring option' conftest.err || grep 'not supported' conftest.err) >/dev/null 2>&1; then :; else am_cv_CC_dependencies_compiler_type=$depmode break fi fi done cd .. 
rm -rf conftest.dir else am_cv_CC_dependencies_compiler_type=none fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $am_cv_CC_dependencies_compiler_type" >&5 $as_echo "$am_cv_CC_dependencies_compiler_type" >&6; } CCDEPMODE=depmode=$am_cv_CC_dependencies_compiler_type if test "x$enable_dependency_tracking" != xno \ && test "$am_cv_CC_dependencies_compiler_type" = gcc3; then am__fastdepCC_TRUE= am__fastdepCC_FALSE='#' else am__fastdepCC_TRUE='#' am__fastdepCC_FALSE= fi case `pwd` in *\ * | *\ *) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Libtool does not cope well with whitespace in \`pwd\`" >&5 $as_echo "$as_me: WARNING: Libtool does not cope well with whitespace in \`pwd\`" >&2;} ;; esac macro_version='2.4.6' macro_revision='2.4.6' ltmain=$ac_aux_dir/ltmain.sh # Backslashify metacharacters that are still active within # double-quoted strings. sed_quote_subst='s/\(["`$\\]\)/\\\1/g' # Same as above, but do not quote variable references. double_quote_subst='s/\(["`\\]\)/\\\1/g' # Sed substitution to delay expansion of an escaped shell variable in a # double_quote_subst'ed string. delay_variable_subst='s/\\\\\\\\\\\$/\\\\\\$/g' # Sed substitution to delay expansion of an escaped single quote. delay_single_quote_subst='s/'\''/'\'\\\\\\\'\''/g' # Sed substitution to avoid accidental globbing in evaled expressions no_glob_subst='s/\*/\\\*/g' ECHO='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO ECHO=$ECHO$ECHO$ECHO$ECHO$ECHO$ECHO { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to print strings" >&5 $as_echo_n "checking how to print strings... " >&6; } # Test print first, because it will be a builtin if present. 
if test "X`( print -r -- -n ) 2>/dev/null`" = X-n && \ test "X`print -r -- $ECHO 2>/dev/null`" = "X$ECHO"; then ECHO='print -r --' elif test "X`printf %s $ECHO 2>/dev/null`" = "X$ECHO"; then ECHO='printf %s\n' else # Use this function as a fallback that always works. func_fallback_echo () { eval 'cat <<_LTECHO_EOF $1 _LTECHO_EOF' } ECHO='func_fallback_echo' fi # func_echo_all arg... # Invoke $ECHO with all args, space-separated. func_echo_all () { $ECHO "" } case $ECHO in printf*) { $as_echo "$as_me:${as_lineno-$LINENO}: result: printf" >&5 $as_echo "printf" >&6; } ;; print*) { $as_echo "$as_me:${as_lineno-$LINENO}: result: print -r" >&5 $as_echo "print -r" >&6; } ;; *) { $as_echo "$as_me:${as_lineno-$LINENO}: result: cat" >&5 $as_echo "cat" >&6; } ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: checking for a sed that does not truncate output" >&5 $as_echo_n "checking for a sed that does not truncate output... " >&6; } if ${ac_cv_path_SED+:} false; then : $as_echo_n "(cached) " >&6 else ac_script=s/aaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaaa/bbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbbb/ for ac_i in 1 2 3 4 5 6 7; do ac_script="$ac_script$as_nl$ac_script" done echo "$ac_script" 2>/dev/null | sed 99q >conftest.sed { ac_script=; unset ac_script;} if test -z "$SED"; then ac_path_SED_found=false # Loop through the user's path and test for each of PROGNAME-LIST as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_prog in sed gsed; do for ac_exec_ext in '' $ac_executable_extensions; do ac_path_SED="$as_dir/$ac_prog$ac_exec_ext" as_fn_executable_p "$ac_path_SED" || continue # Check for GNU ac_path_SED and select it if it is found. 
# Check for GNU $ac_path_SED case `"$ac_path_SED" --version 2>&1` in *GNU*) ac_cv_path_SED="$ac_path_SED" ac_path_SED_found=:;; *) ac_count=0 $as_echo_n 0123456789 >"conftest.in" while : do cat "conftest.in" "conftest.in" >"conftest.tmp" mv "conftest.tmp" "conftest.in" cp "conftest.in" "conftest.nl" $as_echo '' >> "conftest.nl" "$ac_path_SED" -f conftest.sed < "conftest.nl" >"conftest.out" 2>/dev/null || break diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break as_fn_arith $ac_count + 1 && ac_count=$as_val if test $ac_count -gt ${ac_path_SED_max-0}; then # Best one so far, save it but keep looking for a better one ac_cv_path_SED="$ac_path_SED" ac_path_SED_max=$ac_count fi # 10*(2^10) chars as input seems more than enough test $ac_count -gt 10 && break done rm -f conftest.in conftest.tmp conftest.nl conftest.out;; esac $ac_path_SED_found && break 3 done done done IFS=$as_save_IFS if test -z "$ac_cv_path_SED"; then as_fn_error $? "no acceptable sed could be found in \$PATH" "$LINENO" 5 fi else ac_cv_path_SED=$SED fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_SED" >&5 $as_echo "$ac_cv_path_SED" >&6; } SED="$ac_cv_path_SED" rm -f conftest.sed test -z "$SED" && SED=sed Xsed="$SED -e 1s/^X//" { $as_echo "$as_me:${as_lineno-$LINENO}: checking for fgrep" >&5 $as_echo_n "checking for fgrep... " >&6; } if ${ac_cv_path_FGREP+:} false; then : $as_echo_n "(cached) " >&6 else if echo 'ab*c' | $GREP -F 'ab*c' >/dev/null 2>&1 then ac_cv_path_FGREP="$GREP -F" else if test -z "$FGREP"; then ac_path_FGREP_found=false # Loop through the user's path and test for each of PROGNAME-LIST as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH$PATH_SEPARATOR/usr/xpg4/bin do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_prog in fgrep; do for ac_exec_ext in '' $ac_executable_extensions; do ac_path_FGREP="$as_dir/$ac_prog$ac_exec_ext" as_fn_executable_p "$ac_path_FGREP" || continue # Check for GNU ac_path_FGREP and select it if it is found. 
# Check for GNU $ac_path_FGREP case `"$ac_path_FGREP" --version 2>&1` in *GNU*) ac_cv_path_FGREP="$ac_path_FGREP" ac_path_FGREP_found=:;; *) ac_count=0 $as_echo_n 0123456789 >"conftest.in" while : do cat "conftest.in" "conftest.in" >"conftest.tmp" mv "conftest.tmp" "conftest.in" cp "conftest.in" "conftest.nl" $as_echo 'FGREP' >> "conftest.nl" "$ac_path_FGREP" FGREP < "conftest.nl" >"conftest.out" 2>/dev/null || break diff "conftest.out" "conftest.nl" >/dev/null 2>&1 || break as_fn_arith $ac_count + 1 && ac_count=$as_val if test $ac_count -gt ${ac_path_FGREP_max-0}; then # Best one so far, save it but keep looking for a better one ac_cv_path_FGREP="$ac_path_FGREP" ac_path_FGREP_max=$ac_count fi # 10*(2^10) chars as input seems more than enough test $ac_count -gt 10 && break done rm -f conftest.in conftest.tmp conftest.nl conftest.out;; esac $ac_path_FGREP_found && break 3 done done done IFS=$as_save_IFS if test -z "$ac_cv_path_FGREP"; then as_fn_error $? "no acceptable fgrep could be found in $PATH$PATH_SEPARATOR/usr/xpg4/bin" "$LINENO" 5 fi else ac_cv_path_FGREP=$FGREP fi fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_FGREP" >&5 $as_echo "$ac_cv_path_FGREP" >&6; } FGREP="$ac_cv_path_FGREP" test -z "$GREP" && GREP=grep # Check whether --with-gnu-ld was given. if test "${with_gnu_ld+set}" = set; then : withval=$with_gnu_ld; test no = "$withval" || with_gnu_ld=yes else with_gnu_ld=no fi ac_prog=ld if test yes = "$GCC"; then # Check if gcc -print-prog-name=ld gives a path. { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ld used by $CC" >&5 $as_echo_n "checking for ld used by $CC... " >&6; } case $host in *-*-mingw*) # gcc leaves a trailing carriage return, which upsets mingw ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;; *) ac_prog=`($CC -print-prog-name=ld) 2>&5` ;; esac case $ac_prog in # Accept absolute paths. 
[\\/]* | ?:[\\/]*) re_direlt='/[^/][^/]*/\.\./' # Canonicalize the pathname of ld ac_prog=`$ECHO "$ac_prog"| $SED 's%\\\\%/%g'` while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"` done test -z "$LD" && LD=$ac_prog ;; "") # If it fails, then pretend we aren't using GCC. ac_prog=ld ;; *) # If it is relative, then search for the first ld in PATH. with_gnu_ld=unknown ;; esac elif test yes = "$with_gnu_ld"; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU ld" >&5 $as_echo_n "checking for GNU ld... " >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for non-GNU ld" >&5 $as_echo_n "checking for non-GNU ld... " >&6; } fi if ${lt_cv_path_LD+:} false; then : $as_echo_n "(cached) " >&6 else if test -z "$LD"; then lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR for ac_dir in $PATH; do IFS=$lt_save_ifs test -z "$ac_dir" && ac_dir=. if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then lt_cv_path_LD=$ac_dir/$ac_prog # Check to see if the program is GNU ld. I'd rather use --version, # but apparently some variants of GNU ld only accept -v. # Break only if it was the GNU/non-GNU ld that we prefer. case `"$lt_cv_path_LD" -v 2>&1 &5 $as_echo "$LD" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -z "$LD" && as_fn_error $? "no acceptable ld found in \$PATH" "$LINENO" 5 { $as_echo "$as_me:${as_lineno-$LINENO}: checking if the linker ($LD) is GNU ld" >&5 $as_echo_n "checking if the linker ($LD) is GNU ld... " >&6; } if ${lt_cv_prog_gnu_ld+:} false; then : $as_echo_n "(cached) " >&6 else # I'd rather use --version here, but apparently some GNU lds only accept -v. case `$LD -v 2>&1 &5 $as_echo "$lt_cv_prog_gnu_ld" >&6; } with_gnu_ld=$lt_cv_prog_gnu_ld { $as_echo "$as_me:${as_lineno-$LINENO}: checking for BSD- or MS-compatible name lister (nm)" >&5 $as_echo_n "checking for BSD- or MS-compatible name lister (nm)... 
" >&6; } if ${lt_cv_path_NM+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$NM"; then # Let the user override the test. lt_cv_path_NM=$NM else lt_nm_to_check=${ac_tool_prefix}nm if test -n "$ac_tool_prefix" && test "$build" = "$host"; then lt_nm_to_check="$lt_nm_to_check nm" fi for lt_tmp_nm in $lt_nm_to_check; do lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR for ac_dir in $PATH /usr/ccs/bin/elf /usr/ccs/bin /usr/ucb /bin; do IFS=$lt_save_ifs test -z "$ac_dir" && ac_dir=. tmp_nm=$ac_dir/$lt_tmp_nm if test -f "$tmp_nm" || test -f "$tmp_nm$ac_exeext"; then # Check to see if the nm accepts a BSD-compat flag. # Adding the 'sed 1q' prevents false positives on HP-UX, which says: # nm: unknown option "B" ignored # Tru64's nm complains that /dev/null is an invalid object file # MSYS converts /dev/null to NUL, MinGW nm treats NUL as empty case $build_os in mingw*) lt_bad_file=conftest.nm/nofile ;; *) lt_bad_file=/dev/null ;; esac case `"$tmp_nm" -B $lt_bad_file 2>&1 | sed '1q'` in *$lt_bad_file* | *'Invalid file or object type'*) lt_cv_path_NM="$tmp_nm -B" break 2 ;; *) case `"$tmp_nm" -p /dev/null 2>&1 | sed '1q'` in */dev/null*) lt_cv_path_NM="$tmp_nm -p" break 2 ;; *) lt_cv_path_NM=${lt_cv_path_NM="$tmp_nm"} # keep the first match, but continue # so that we can try to find one that supports BSD flags ;; esac ;; esac fi done IFS=$lt_save_ifs done : ${lt_cv_path_NM=no} fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_NM" >&5 $as_echo "$lt_cv_path_NM" >&6; } if test no != "$lt_cv_path_NM"; then NM=$lt_cv_path_NM else # Didn't find any BSD compatible name lister, look for dumpbin. if test -n "$DUMPBIN"; then : # Let the user override the test. else if test -n "$ac_tool_prefix"; then for ac_prog in dumpbin "link -dump" do # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. 
set dummy $ac_tool_prefix$ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_DUMPBIN+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$DUMPBIN"; then ac_cv_prog_DUMPBIN="$DUMPBIN" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_DUMPBIN="$ac_tool_prefix$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi DUMPBIN=$ac_cv_prog_DUMPBIN if test -n "$DUMPBIN"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DUMPBIN" >&5 $as_echo "$DUMPBIN" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$DUMPBIN" && break done fi if test -z "$DUMPBIN"; then ac_ct_DUMPBIN=$DUMPBIN for ac_prog in dumpbin "link -dump" do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_DUMPBIN+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_DUMPBIN"; then ac_cv_prog_ac_ct_DUMPBIN="$ac_ct_DUMPBIN" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_DUMPBIN="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_DUMPBIN=$ac_cv_prog_ac_ct_DUMPBIN if test -n "$ac_ct_DUMPBIN"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DUMPBIN" >&5 $as_echo "$ac_ct_DUMPBIN" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$ac_ct_DUMPBIN" && break done if test "x$ac_ct_DUMPBIN" = x; then DUMPBIN=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac DUMPBIN=$ac_ct_DUMPBIN fi fi case `$DUMPBIN -symbols -headers /dev/null 2>&1 | sed '1q'` in *COFF*) DUMPBIN="$DUMPBIN -symbols -headers" ;; *) DUMPBIN=: ;; esac fi if test : != "$DUMPBIN"; then NM=$DUMPBIN fi fi test -z "$NM" && NM=nm { $as_echo "$as_me:${as_lineno-$LINENO}: checking the name lister ($NM) interface" >&5 $as_echo_n "checking the name lister ($NM) interface... 
" >&6; } if ${lt_cv_nm_interface+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_nm_interface="BSD nm" echo "int some_variable = 0;" > conftest.$ac_ext (eval echo "\"\$as_me:$LINENO: $ac_compile\"" >&5) (eval "$ac_compile" 2>conftest.err) cat conftest.err >&5 (eval echo "\"\$as_me:$LINENO: $NM \\\"conftest.$ac_objext\\\"\"" >&5) (eval "$NM \"conftest.$ac_objext\"" 2>conftest.err > conftest.out) cat conftest.err >&5 (eval echo "\"\$as_me:$LINENO: output\"" >&5) cat conftest.out >&5 if $GREP 'External.*some_variable' conftest.out > /dev/null; then lt_cv_nm_interface="MS dumpbin" fi rm -f conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_nm_interface" >&5 $as_echo "$lt_cv_nm_interface" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ln -s works" >&5 $as_echo_n "checking whether ln -s works... " >&6; } LN_S=$as_ln_s if test "$LN_S" = "ln -s"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no, using $LN_S" >&5 $as_echo "no, using $LN_S" >&6; } fi # find the maximum length of command line arguments { $as_echo "$as_me:${as_lineno-$LINENO}: checking the maximum length of command line arguments" >&5 $as_echo_n "checking the maximum length of command line arguments... " >&6; } if ${lt_cv_sys_max_cmd_len+:} false; then : $as_echo_n "(cached) " >&6 else i=0 teststring=ABCD case $build_os in msdosdjgpp*) # On DJGPP, this test can blow up pretty badly due to problems in libc # (any single argument exceeding 2000 bytes causes a buffer overrun # during glob expansion). Even if it were fixed, the result of this # check would be larger than it should be. lt_cv_sys_max_cmd_len=12288; # 12K is about right ;; gnu*) # Under GNU Hurd, this test is not required because there is # no limit to the length of command line arguments. 
# Libtool will interpret -1 as no limit whatsoever lt_cv_sys_max_cmd_len=-1; ;; cygwin* | mingw* | cegcc*) # On Win9x/ME, this test blows up -- it succeeds, but takes # about 5 minutes as the teststring grows exponentially. # Worse, since 9x/ME are not pre-emptively multitasking, # you end up with a "frozen" computer, even though with patience # the test eventually succeeds (with a max line length of 256k). # Instead, let's just punt: use the minimum linelength reported by # all of the supported platforms: 8192 (on NT/2K/XP). lt_cv_sys_max_cmd_len=8192; ;; mint*) # On MiNT this can take a long time and run out of memory. lt_cv_sys_max_cmd_len=8192; ;; amigaos*) # On AmigaOS with pdksh, this test takes hours, literally. # So we just punt and use a minimum line length of 8192. lt_cv_sys_max_cmd_len=8192; ;; bitrig* | darwin* | dragonfly* | freebsd* | netbsd* | openbsd*) # This has been around since 386BSD, at least. Likely further. if test -x /sbin/sysctl; then lt_cv_sys_max_cmd_len=`/sbin/sysctl -n kern.argmax` elif test -x /usr/sbin/sysctl; then lt_cv_sys_max_cmd_len=`/usr/sbin/sysctl -n kern.argmax` else lt_cv_sys_max_cmd_len=65536 # usable default for all BSDs fi # And add a safety zone lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4` lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3` ;; interix*) # We know the value 262144 and hardcode it with a safety zone (like BSD) lt_cv_sys_max_cmd_len=196608 ;; os2*) # The test takes a long time on OS/2. lt_cv_sys_max_cmd_len=8192 ;; osf*) # Dr. Hans Ekkehard Plesser reports seeing a kernel panic running configure # due to this test when exec_disable_arg_limit is 1 on Tru64. It is not # nice to cause kernel panics so lets avoid the loop below. # First set a reasonable default. 
lt_cv_sys_max_cmd_len=16384 # if test -x /sbin/sysconfig; then case `/sbin/sysconfig -q proc exec_disable_arg_limit` in *1*) lt_cv_sys_max_cmd_len=-1 ;; esac fi ;; sco3.2v5*) lt_cv_sys_max_cmd_len=102400 ;; sysv5* | sco5v6* | sysv4.2uw2*) kargmax=`grep ARG_MAX /etc/conf/cf.d/stune 2>/dev/null` if test -n "$kargmax"; then lt_cv_sys_max_cmd_len=`echo $kargmax | sed 's/.*[ ]//'` else lt_cv_sys_max_cmd_len=32768 fi ;; *) lt_cv_sys_max_cmd_len=`(getconf ARG_MAX) 2> /dev/null` if test -n "$lt_cv_sys_max_cmd_len" && \ test undefined != "$lt_cv_sys_max_cmd_len"; then lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 4` lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \* 3` else # Make teststring a little bigger before we do anything with it. # a 1K string should be a reasonable start. for i in 1 2 3 4 5 6 7 8; do teststring=$teststring$teststring done SHELL=${SHELL-${CONFIG_SHELL-/bin/sh}} # If test is not a shell built-in, we'll probably end up computing a # maximum length that is only half of the actual maximum length, but # we can't tell. while { test X`env echo "$teststring$teststring" 2>/dev/null` \ = "X$teststring$teststring"; } >/dev/null 2>&1 && test 17 != "$i" # 1/2 MB should be enough do i=`expr $i + 1` teststring=$teststring$teststring done # Only check the string length outside the loop. lt_cv_sys_max_cmd_len=`expr "X$teststring" : ".*" 2>&1` teststring= # Add a significant safety factor because C++ compilers can tack on # massive amounts of additional arguments before passing them to the # linker. It appears as though 1/2 is a usable value. 
lt_cv_sys_max_cmd_len=`expr $lt_cv_sys_max_cmd_len \/ 2` fi ;; esac fi if test -n "$lt_cv_sys_max_cmd_len"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sys_max_cmd_len" >&5 $as_echo "$lt_cv_sys_max_cmd_len" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: none" >&5 $as_echo "none" >&6; } fi max_cmd_len=$lt_cv_sys_max_cmd_len : ${CP="cp -f"} : ${MV="mv -f"} : ${RM="rm -f"} if ( (MAIL=60; unset MAIL) || exit) >/dev/null 2>&1; then lt_unset=unset else lt_unset=false fi # test EBCDIC or ASCII case `echo X|tr X '\101'` in A) # ASCII based system # \n is not interpreted correctly by Solaris 8 /usr/ucb/tr lt_SP2NL='tr \040 \012' lt_NL2SP='tr \015\012 \040\040' ;; *) # EBCDIC based system lt_SP2NL='tr \100 \n' lt_NL2SP='tr \r\n \100\100' ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to $host format" >&5 $as_echo_n "checking how to convert $build file names to $host format... " >&6; } if ${lt_cv_to_host_file_cmd+:} false; then : $as_echo_n "(cached) " >&6 else case $host in *-*-mingw* ) case $build in *-*-mingw* ) # actually msys lt_cv_to_host_file_cmd=func_convert_file_msys_to_w32 ;; *-*-cygwin* ) lt_cv_to_host_file_cmd=func_convert_file_cygwin_to_w32 ;; * ) # otherwise, assume *nix lt_cv_to_host_file_cmd=func_convert_file_nix_to_w32 ;; esac ;; *-*-cygwin* ) case $build in *-*-mingw* ) # actually msys lt_cv_to_host_file_cmd=func_convert_file_msys_to_cygwin ;; *-*-cygwin* ) lt_cv_to_host_file_cmd=func_convert_file_noop ;; * ) # otherwise, assume *nix lt_cv_to_host_file_cmd=func_convert_file_nix_to_cygwin ;; esac ;; * ) # unhandled hosts (and "normal" native builds) lt_cv_to_host_file_cmd=func_convert_file_noop ;; esac fi to_host_file_cmd=$lt_cv_to_host_file_cmd { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_host_file_cmd" >&5 $as_echo "$lt_cv_to_host_file_cmd" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to convert $build file names to toolchain format" >&5 $as_echo_n 
"checking how to convert $build file names to toolchain format... " >&6; } if ${lt_cv_to_tool_file_cmd+:} false; then : $as_echo_n "(cached) " >&6 else #assume ordinary cross tools, or native build. lt_cv_to_tool_file_cmd=func_convert_file_noop case $host in *-*-mingw* ) case $build in *-*-mingw* ) # actually msys lt_cv_to_tool_file_cmd=func_convert_file_msys_to_w32 ;; esac ;; esac fi to_tool_file_cmd=$lt_cv_to_tool_file_cmd { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_to_tool_file_cmd" >&5 $as_echo "$lt_cv_to_tool_file_cmd" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $LD option to reload object files" >&5 $as_echo_n "checking for $LD option to reload object files... " >&6; } if ${lt_cv_ld_reload_flag+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_ld_reload_flag='-r' fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_reload_flag" >&5 $as_echo "$lt_cv_ld_reload_flag" >&6; } reload_flag=$lt_cv_ld_reload_flag case $reload_flag in "" | " "*) ;; *) reload_flag=" $reload_flag" ;; esac reload_cmds='$LD$reload_flag -o $output$reload_objs' case $host_os in cygwin* | mingw* | pw32* | cegcc*) if test yes != "$GCC"; then reload_cmds=false fi ;; darwin*) if test yes = "$GCC"; then reload_cmds='$LTCC $LTCFLAGS -nostdlib $wl-r -o $output$reload_objs' else reload_cmds='$LD$reload_flag -o $output$reload_objs' fi ;; esac if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}objdump", so it can be a program name with args. set dummy ${ac_tool_prefix}objdump; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_OBJDUMP+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$OBJDUMP"; then ac_cv_prog_OBJDUMP="$OBJDUMP" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_OBJDUMP="${ac_tool_prefix}objdump" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi OBJDUMP=$ac_cv_prog_OBJDUMP if test -n "$OBJDUMP"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OBJDUMP" >&5 $as_echo "$OBJDUMP" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_OBJDUMP"; then ac_ct_OBJDUMP=$OBJDUMP # Extract the first word of "objdump", so it can be a program name with args. set dummy objdump; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_OBJDUMP+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_OBJDUMP"; then ac_cv_prog_ac_ct_OBJDUMP="$ac_ct_OBJDUMP" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_OBJDUMP="objdump" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_OBJDUMP=$ac_cv_prog_ac_ct_OBJDUMP if test -n "$ac_ct_OBJDUMP"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OBJDUMP" >&5 $as_echo "$ac_ct_OBJDUMP" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_OBJDUMP" = x; then OBJDUMP="false" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac OBJDUMP=$ac_ct_OBJDUMP fi else OBJDUMP="$ac_cv_prog_OBJDUMP" fi test -z "$OBJDUMP" && OBJDUMP=objdump { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to recognize dependent libraries" >&5 $as_echo_n "checking how to recognize dependent libraries... " >&6; } if ${lt_cv_deplibs_check_method+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_file_magic_cmd='$MAGIC_CMD' lt_cv_file_magic_test_file= lt_cv_deplibs_check_method='unknown' # Need to set the preceding variable on all platforms that support # interlibrary dependencies. # 'none' -- dependencies not supported. # 'unknown' -- same as none, but documents that we really don't know. # 'pass_all' -- all dependencies passed with no checks. # 'test_compile' -- check by making test program. # 'file_magic [[regex]]' -- check by looking for files in library path # that responds to the $file_magic_cmd with a given extended regex. # If you have 'file' or equivalent on your system and you're not sure # whether 'pass_all' will *always* work, you probably want this one. 
case $host_os in aix[4-9]*) lt_cv_deplibs_check_method=pass_all ;; beos*) lt_cv_deplibs_check_method=pass_all ;; bsdi[45]*) lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (shared object|dynamic lib)' lt_cv_file_magic_cmd='/usr/bin/file -L' lt_cv_file_magic_test_file=/shlib/libc.so ;; cygwin*) # func_win32_libid is a shell function defined in ltmain.sh lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' lt_cv_file_magic_cmd='func_win32_libid' ;; mingw* | pw32*) # Base MSYS/MinGW do not provide the 'file' command needed by # func_win32_libid shell function, so use a weaker test based on 'objdump', # unless we find 'file', for example because we are cross-compiling. if ( file / ) >/dev/null 2>&1; then lt_cv_deplibs_check_method='file_magic ^x86 archive import|^x86 DLL' lt_cv_file_magic_cmd='func_win32_libid' else # Keep this pattern in sync with the one in func_win32_libid. lt_cv_deplibs_check_method='file_magic file format (pei*-i386(.*architecture: i386)?|pe-arm-wince|pe-x86-64)' lt_cv_file_magic_cmd='$OBJDUMP -f' fi ;; cegcc*) # use the weaker test based on 'objdump'. See mingw*. lt_cv_deplibs_check_method='file_magic file format pe-arm-.*little(.*architecture: arm)?' lt_cv_file_magic_cmd='$OBJDUMP -f' ;; darwin* | rhapsody*) lt_cv_deplibs_check_method=pass_all ;; freebsd* | dragonfly*) if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then case $host_cpu in i*86 ) # Not sure whether the presence of OpenBSD here was a mistake. # Let's accept both of them until this is cleared up. 
lt_cv_deplibs_check_method='file_magic (FreeBSD|OpenBSD|DragonFly)/i[3-9]86 (compact )?demand paged shared library' lt_cv_file_magic_cmd=/usr/bin/file lt_cv_file_magic_test_file=`echo /usr/lib/libc.so.*` ;; esac else lt_cv_deplibs_check_method=pass_all fi ;; haiku*) lt_cv_deplibs_check_method=pass_all ;; hpux10.20* | hpux11*) lt_cv_file_magic_cmd=/usr/bin/file case $host_cpu in ia64*) lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF-[0-9][0-9]) shared object file - IA64' lt_cv_file_magic_test_file=/usr/lib/hpux32/libc.so ;; hppa*64*) lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|ELF[ -][0-9][0-9])(-bit)?( [LM]SB)? shared object( file)?[, -]* PA-RISC [0-9]\.[0-9]' lt_cv_file_magic_test_file=/usr/lib/pa20_64/libc.sl ;; *) lt_cv_deplibs_check_method='file_magic (s[0-9][0-9][0-9]|PA-RISC[0-9]\.[0-9]) shared library' lt_cv_file_magic_test_file=/usr/lib/libc.sl ;; esac ;; interix[3-9]*) # PIC code is broken on Interix 3.x, that's why |\.a not |_pic\.a here lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so|\.a)$' ;; irix5* | irix6* | nonstopux*) case $LD in *-32|*"-32 ") libmagic=32-bit;; *-n32|*"-n32 ") libmagic=N32;; *-64|*"-64 ") libmagic=64-bit;; *) libmagic=never-match;; esac lt_cv_deplibs_check_method=pass_all ;; # This must be glibc/ELF. 
linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) lt_cv_deplibs_check_method=pass_all ;; netbsd* | netbsdelf*-gnu) if echo __ELF__ | $CC -E - | $GREP __ELF__ > /dev/null; then lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$' else lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so|_pic\.a)$' fi ;; newos6*) lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (executable|dynamic lib)' lt_cv_file_magic_cmd=/usr/bin/file lt_cv_file_magic_test_file=/usr/lib/libnls.so ;; *nto* | *qnx*) lt_cv_deplibs_check_method=pass_all ;; openbsd* | bitrig*) if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|\.so|_pic\.a)$' else lt_cv_deplibs_check_method='match_pattern /lib[^/]+(\.so\.[0-9]+\.[0-9]+|_pic\.a)$' fi ;; osf3* | osf4* | osf5*) lt_cv_deplibs_check_method=pass_all ;; rdos*) lt_cv_deplibs_check_method=pass_all ;; solaris*) lt_cv_deplibs_check_method=pass_all ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) lt_cv_deplibs_check_method=pass_all ;; sysv4 | sysv4.3*) case $host_vendor in motorola) lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [ML]SB (shared object|dynamic lib) M[0-9][0-9]* Version [0-9]' lt_cv_file_magic_test_file=`echo /usr/lib/libc.so*` ;; ncr) lt_cv_deplibs_check_method=pass_all ;; sequent) lt_cv_file_magic_cmd='/bin/file' lt_cv_deplibs_check_method='file_magic ELF [0-9][0-9]*-bit [LM]SB (shared object|dynamic lib )' ;; sni) lt_cv_file_magic_cmd='/bin/file' lt_cv_deplibs_check_method="file_magic ELF [0-9][0-9]*-bit [LM]SB dynamic lib" lt_cv_file_magic_test_file=/lib/libc.so ;; siemens) lt_cv_deplibs_check_method=pass_all ;; pc) lt_cv_deplibs_check_method=pass_all ;; esac ;; tpf*) lt_cv_deplibs_check_method=pass_all ;; os2*) lt_cv_deplibs_check_method=pass_all ;; esac fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_deplibs_check_method" >&5 $as_echo "$lt_cv_deplibs_check_method" >&6; } 
file_magic_glob= want_nocaseglob=no if test "$build" = "$host"; then case $host_os in mingw* | pw32*) if ( shopt | grep nocaseglob ) >/dev/null 2>&1; then want_nocaseglob=yes else file_magic_glob=`echo aAbBcCdDeEfFgGhHiIjJkKlLmMnNoOpPqQrRsStTuUvVwWxXyYzZ | $SED -e "s/\(..\)/s\/[\1]\/[\1]\/g;/g"` fi ;; esac fi file_magic_cmd=$lt_cv_file_magic_cmd deplibs_check_method=$lt_cv_deplibs_check_method test -z "$deplibs_check_method" && deplibs_check_method=unknown if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}dlltool", so it can be a program name with args. set dummy ${ac_tool_prefix}dlltool; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_DLLTOOL+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$DLLTOOL"; then ac_cv_prog_DLLTOOL="$DLLTOOL" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_DLLTOOL="${ac_tool_prefix}dlltool" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi DLLTOOL=$ac_cv_prog_DLLTOOL if test -n "$DLLTOOL"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DLLTOOL" >&5 $as_echo "$DLLTOOL" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_DLLTOOL"; then ac_ct_DLLTOOL=$DLLTOOL # Extract the first word of "dlltool", so it can be a program name with args. set dummy dlltool; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... 
" >&6; } if ${ac_cv_prog_ac_ct_DLLTOOL+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_DLLTOOL"; then ac_cv_prog_ac_ct_DLLTOOL="$ac_ct_DLLTOOL" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_DLLTOOL="dlltool" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_DLLTOOL=$ac_cv_prog_ac_ct_DLLTOOL if test -n "$ac_ct_DLLTOOL"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DLLTOOL" >&5 $as_echo "$ac_ct_DLLTOOL" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_DLLTOOL" = x; then DLLTOOL="false" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac DLLTOOL=$ac_ct_DLLTOOL fi else DLLTOOL="$ac_cv_prog_DLLTOOL" fi test -z "$DLLTOOL" && DLLTOOL=dlltool { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to associate runtime and link libraries" >&5 $as_echo_n "checking how to associate runtime and link libraries... 
" >&6; } if ${lt_cv_sharedlib_from_linklib_cmd+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_sharedlib_from_linklib_cmd='unknown' case $host_os in cygwin* | mingw* | pw32* | cegcc*) # two different shell functions defined in ltmain.sh; # decide which one to use based on capabilities of $DLLTOOL case `$DLLTOOL --help 2>&1` in *--identify-strict*) lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib ;; *) lt_cv_sharedlib_from_linklib_cmd=func_cygming_dll_for_implib_fallback ;; esac ;; *) # fallback: assume linklib IS sharedlib lt_cv_sharedlib_from_linklib_cmd=$ECHO ;; esac fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_sharedlib_from_linklib_cmd" >&5 $as_echo "$lt_cv_sharedlib_from_linklib_cmd" >&6; } sharedlib_from_linklib_cmd=$lt_cv_sharedlib_from_linklib_cmd test -z "$sharedlib_from_linklib_cmd" && sharedlib_from_linklib_cmd=$ECHO if test -n "$ac_tool_prefix"; then for ac_prog in ar do # Extract the first word of "$ac_tool_prefix$ac_prog", so it can be a program name with args. set dummy $ac_tool_prefix$ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_AR+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$AR"; then ac_cv_prog_AR="$AR" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_AR="$ac_tool_prefix$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi AR=$ac_cv_prog_AR if test -n "$AR"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $AR" >&5 $as_echo "$AR" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$AR" && break done fi if test -z "$AR"; then ac_ct_AR=$AR for ac_prog in ar do # Extract the first word of "$ac_prog", so it can be a program name with args. set dummy $ac_prog; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_AR+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_AR"; then ac_cv_prog_ac_ct_AR="$ac_ct_AR" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_AR="$ac_prog" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_AR=$ac_cv_prog_ac_ct_AR if test -n "$ac_ct_AR"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_AR" >&5 $as_echo "$ac_ct_AR" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -n "$ac_ct_AR" && break done if test "x$ac_ct_AR" = x; then AR="false" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac AR=$ac_ct_AR fi fi : ${AR=ar} : ${AR_FLAGS=cru} { $as_echo "$as_me:${as_lineno-$LINENO}: checking for archiver @FILE support" >&5 $as_echo_n "checking for archiver @FILE support... " >&6; } if ${lt_cv_ar_at_file+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_ar_at_file=no cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : echo conftest.$ac_objext > conftest.lst lt_ar_try='$AR $AR_FLAGS libconftest.a @conftest.lst >&5' { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 (eval $lt_ar_try) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } if test 0 -eq "$ac_status"; then # Ensure the archiver fails upon bogus file names. rm -f conftest.$ac_objext libconftest.a { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$lt_ar_try\""; } >&5 (eval $lt_ar_try) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 test $ac_status = 0; } if test 0 -ne "$ac_status"; then lt_cv_ar_at_file=@ fi fi rm -f conftest.* libconftest.a fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ar_at_file" >&5 $as_echo "$lt_cv_ar_at_file" >&6; } if test no = "$lt_cv_ar_at_file"; then archiver_list_spec= else archiver_list_spec=$lt_cv_ar_at_file fi if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}strip", so it can be a program name with args. set dummy ${ac_tool_prefix}strip; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_STRIP+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$STRIP"; then ac_cv_prog_STRIP="$STRIP" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_STRIP="${ac_tool_prefix}strip" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi STRIP=$ac_cv_prog_STRIP if test -n "$STRIP"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $STRIP" >&5 $as_echo "$STRIP" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_STRIP"; then ac_ct_STRIP=$STRIP # Extract the first word of "strip", so it can be a program name with args. set dummy strip; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_STRIP+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_STRIP"; then ac_cv_prog_ac_ct_STRIP="$ac_ct_STRIP" # Let the user override the test. 
else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_STRIP="strip" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_STRIP=$ac_cv_prog_ac_ct_STRIP if test -n "$ac_ct_STRIP"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_STRIP" >&5 $as_echo "$ac_ct_STRIP" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_STRIP" = x; then STRIP=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac STRIP=$ac_ct_STRIP fi else STRIP="$ac_cv_prog_STRIP" fi test -z "$STRIP" && STRIP=: if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}ranlib", so it can be a program name with args. set dummy ${ac_tool_prefix}ranlib; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_RANLIB+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$RANLIB"; then ac_cv_prog_RANLIB="$RANLIB" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_RANLIB="${ac_tool_prefix}ranlib" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi RANLIB=$ac_cv_prog_RANLIB if test -n "$RANLIB"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $RANLIB" >&5 $as_echo "$RANLIB" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_RANLIB"; then ac_ct_RANLIB=$RANLIB # Extract the first word of "ranlib", so it can be a program name with args. set dummy ranlib; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_RANLIB+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_RANLIB"; then ac_cv_prog_ac_ct_RANLIB="$ac_ct_RANLIB" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_RANLIB="ranlib" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_RANLIB=$ac_cv_prog_ac_ct_RANLIB if test -n "$ac_ct_RANLIB"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_RANLIB" >&5 $as_echo "$ac_ct_RANLIB" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_RANLIB" = x; then RANLIB=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac RANLIB=$ac_ct_RANLIB fi else RANLIB="$ac_cv_prog_RANLIB" fi test -z "$RANLIB" && RANLIB=: # Determine commands to create old-style static archives. old_archive_cmds='$AR $AR_FLAGS $oldlib$oldobjs' old_postinstall_cmds='chmod 644 $oldlib' old_postuninstall_cmds= if test -n "$RANLIB"; then case $host_os in bitrig* | openbsd*) old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB -t \$tool_oldlib" ;; *) old_postinstall_cmds="$old_postinstall_cmds~\$RANLIB \$tool_oldlib" ;; esac old_archive_cmds="$old_archive_cmds~\$RANLIB \$tool_oldlib" fi case $host_os in darwin*) lock_old_archive_extraction=yes ;; *) lock_old_archive_extraction=no ;; esac # If no C compiler was specified, use CC. LTCC=${LTCC-"$CC"} # If no C compiler flags were specified, use CFLAGS. LTCFLAGS=${LTCFLAGS-"$CFLAGS"} # Allow CC to be a program name with arguments. compiler=$CC # Check for command to grab the raw symbol name followed by C symbol from nm. { $as_echo "$as_me:${as_lineno-$LINENO}: checking command to parse $NM output from $compiler object" >&5 $as_echo_n "checking command to parse $NM output from $compiler object... 
" >&6; } if ${lt_cv_sys_global_symbol_pipe+:} false; then : $as_echo_n "(cached) " >&6 else # These are sane defaults that work on at least a few old systems. # [They come from Ultrix. What could be older than Ultrix?!! ;)] # Character class describing NM global symbol codes. symcode='[BCDEGRST]' # Regexp to match symbols that can be accessed directly from C. sympat='\([_A-Za-z][_A-Za-z0-9]*\)' # Define system-specific variables. case $host_os in aix*) symcode='[BCDT]' ;; cygwin* | mingw* | pw32* | cegcc*) symcode='[ABCDGISTW]' ;; hpux*) if test ia64 = "$host_cpu"; then symcode='[ABCDEGRST]' fi ;; irix* | nonstopux*) symcode='[BCDEGRST]' ;; osf*) symcode='[BCDEGQRST]' ;; solaris*) symcode='[BDRT]' ;; sco3.2v5*) symcode='[DT]' ;; sysv4.2uw2*) symcode='[DT]' ;; sysv5* | sco5v6* | unixware* | OpenUNIX*) symcode='[ABDT]' ;; sysv4) symcode='[DFNSTU]' ;; esac # If we're using GNU nm, then use its standard symbol codes. case `$NM -V 2>&1` in *GNU* | *'with BFD'*) symcode='[ABCDGIRSTW]' ;; esac if test "$lt_cv_nm_interface" = "MS dumpbin"; then # Gets list of data symbols to import. lt_cv_sys_global_symbol_to_import="sed -n -e 's/^I .* \(.*\)$/\1/p'" # Adjust the below global symbol transforms to fixup imported variables. lt_cdecl_hook=" -e 's/^I .* \(.*\)$/extern __declspec(dllimport) char \1;/p'" lt_c_name_hook=" -e 's/^I .* \(.*\)$/ {\"\1\", (void *) 0},/p'" lt_c_name_lib_hook="\ -e 's/^I .* \(lib.*\)$/ {\"\1\", (void *) 0},/p'\ -e 's/^I .* \(.*\)$/ {\"lib\1\", (void *) 0},/p'" else # Disable hooks by default. lt_cv_sys_global_symbol_to_import= lt_cdecl_hook= lt_c_name_hook= lt_c_name_lib_hook= fi # Transform an extracted symbol line into a proper C declaration. # Some systems (esp. on ia64) link data and code symbols differently, # so use this general approach. 
lt_cv_sys_global_symbol_to_cdecl="sed -n"\ $lt_cdecl_hook\ " -e 's/^T .* \(.*\)$/extern int \1();/p'"\ " -e 's/^$symcode$symcode* .* \(.*\)$/extern char \1;/p'" # Transform an extracted symbol line into symbol name and symbol address lt_cv_sys_global_symbol_to_c_name_address="sed -n"\ $lt_c_name_hook\ " -e 's/^: \(.*\) .*$/ {\"\1\", (void *) 0},/p'"\ " -e 's/^$symcode$symcode* .* \(.*\)$/ {\"\1\", (void *) \&\1},/p'" # Transform an extracted symbol line into symbol name with lib prefix and # symbol address. lt_cv_sys_global_symbol_to_c_name_address_lib_prefix="sed -n"\ $lt_c_name_lib_hook\ " -e 's/^: \(.*\) .*$/ {\"\1\", (void *) 0},/p'"\ " -e 's/^$symcode$symcode* .* \(lib.*\)$/ {\"\1\", (void *) \&\1},/p'"\ " -e 's/^$symcode$symcode* .* \(.*\)$/ {\"lib\1\", (void *) \&\1},/p'" # Handle CRLF in mingw tool chain opt_cr= case $build_os in mingw*) opt_cr=`$ECHO 'x\{0,1\}' | tr x '\015'` # option cr in regexp ;; esac # Try without a prefix underscore, then with it. for ac_symprfx in "" "_"; do # Transform symcode, sympat, and symprfx into a raw symbol and a C symbol. symxfrm="\\1 $ac_symprfx\\2 \\2" # Write the raw and C identifiers. if test "$lt_cv_nm_interface" = "MS dumpbin"; then # Fake it for dumpbin and say T for any non-static function, # D for any global variable and I for any imported variable. # Also find C++ and __fastcall symbols from MSVC++, # which start with @ or ?. 
lt_cv_sys_global_symbol_pipe="$AWK '"\ " {last_section=section; section=\$ 3};"\ " /^COFF SYMBOL TABLE/{for(i in hide) delete hide[i]};"\ " /Section length .*#relocs.*(pick any)/{hide[last_section]=1};"\ " /^ *Symbol name *: /{split(\$ 0,sn,\":\"); si=substr(sn[2],2)};"\ " /^ *Type *: code/{print \"T\",si,substr(si,length(prfx))};"\ " /^ *Type *: data/{print \"I\",si,substr(si,length(prfx))};"\ " \$ 0!~/External *\|/{next};"\ " / 0+ UNDEF /{next}; / UNDEF \([^|]\)*()/{next};"\ " {if(hide[section]) next};"\ " {f=\"D\"}; \$ 0~/\(\).*\|/{f=\"T\"};"\ " {split(\$ 0,a,/\||\r/); split(a[2],s)};"\ " s[1]~/^[@?]/{print f,s[1],s[1]; next};"\ " s[1]~prfx {split(s[1],t,\"@\"); print f,t[1],substr(t[1],length(prfx))}"\ " ' prfx=^$ac_symprfx" else lt_cv_sys_global_symbol_pipe="sed -n -e 's/^.*[ ]\($symcode$symcode*\)[ ][ ]*$ac_symprfx$sympat$opt_cr$/$symxfrm/p'" fi lt_cv_sys_global_symbol_pipe="$lt_cv_sys_global_symbol_pipe | sed '/ __gnu_lto/d'" # Check to see that the pipe works correctly. pipe_works=no rm -f conftest* cat > conftest.$ac_ext <<_LT_EOF #ifdef __cplusplus extern "C" { #endif char nm_test_var; void nm_test_func(void); void nm_test_func(void){} #ifdef __cplusplus } #endif int main(){nm_test_var='a';nm_test_func();return(0);} _LT_EOF if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then # Now try to grab the symbols. nlist=conftest.nm if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist\""; } >&5 (eval $NM conftest.$ac_objext \| "$lt_cv_sys_global_symbol_pipe" \> $nlist) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && test -s "$nlist"; then # Try sorting and uniquifying the output. 
if sort "$nlist" | uniq > "$nlist"T; then mv -f "$nlist"T "$nlist" else rm -f "$nlist"T fi # Make sure that we snagged all the symbols we need. if $GREP ' nm_test_var$' "$nlist" >/dev/null; then if $GREP ' nm_test_func$' "$nlist" >/dev/null; then cat <<_LT_EOF > conftest.$ac_ext /* Keep this code in sync between libtool.m4, ltmain, lt_system.h, and tests. */ #if defined _WIN32 || defined __CYGWIN__ || defined _WIN32_WCE /* DATA imports from DLLs on WIN32 can't be const, because runtime relocations are performed -- see ld's documentation on pseudo-relocs. */ # define LT_DLSYM_CONST #elif defined __osf__ /* This system does not cope well with relocations in const data. */ # define LT_DLSYM_CONST #else # define LT_DLSYM_CONST const #endif #ifdef __cplusplus extern "C" { #endif _LT_EOF # Now generate the symbol file. eval "$lt_cv_sys_global_symbol_to_cdecl"' < "$nlist" | $GREP -v main >> conftest.$ac_ext' cat <<_LT_EOF >> conftest.$ac_ext /* The mapping between symbol names and symbols. */ LT_DLSYM_CONST struct { const char *name; void *address; } lt__PROGRAM__LTX_preloaded_symbols[] = { { "@PROGRAM@", (void *) 0 }, _LT_EOF $SED "s/^$symcode$symcode* .* \(.*\)$/ {\"\1\", (void *) \&\1},/" < "$nlist" | $GREP -v main >> conftest.$ac_ext cat <<\_LT_EOF >> conftest.$ac_ext {0, (void *) 0} }; /* This works around a problem in FreeBSD linker */ #ifdef FREEBSD_WORKAROUND static const void *lt_preloaded_setup() { return lt__PROGRAM__LTX_preloaded_symbols; } #endif #ifdef __cplusplus } #endif _LT_EOF # Now try linking the two files. mv conftest.$ac_objext conftstm.$ac_objext lt_globsym_save_LIBS=$LIBS lt_globsym_save_CFLAGS=$CFLAGS LIBS=conftstm.$ac_objext CFLAGS="$CFLAGS$lt_prog_compiler_no_builtin_flag" if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 (eval $ac_link) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 test $ac_status = 0; } && test -s conftest$ac_exeext; then pipe_works=yes fi LIBS=$lt_globsym_save_LIBS CFLAGS=$lt_globsym_save_CFLAGS else echo "cannot find nm_test_func in $nlist" >&5 fi else echo "cannot find nm_test_var in $nlist" >&5 fi else echo "cannot run $lt_cv_sys_global_symbol_pipe" >&5 fi else echo "$progname: failed program was:" >&5 cat conftest.$ac_ext >&5 fi rm -rf conftest* conftst* # Do not use the global_symbol_pipe unless it works. if test yes = "$pipe_works"; then break else lt_cv_sys_global_symbol_pipe= fi done fi if test -z "$lt_cv_sys_global_symbol_pipe"; then lt_cv_sys_global_symbol_to_cdecl= fi if test -z "$lt_cv_sys_global_symbol_pipe$lt_cv_sys_global_symbol_to_cdecl"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: failed" >&5 $as_echo "failed" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: ok" >&5 $as_echo "ok" >&6; } fi # Response file support. if test "$lt_cv_nm_interface" = "MS dumpbin"; then nm_file_list_spec='@' elif $NM --help 2>/dev/null | grep '[@]FILE' >/dev/null; then nm_file_list_spec='@' fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for sysroot" >&5 $as_echo_n "checking for sysroot... " >&6; } # Check whether --with-sysroot was given. if test "${with_sysroot+set}" = set; then : withval=$with_sysroot; else with_sysroot=no fi lt_sysroot= case $with_sysroot in #( yes) if test yes = "$GCC"; then lt_sysroot=`$CC --print-sysroot 2>/dev/null` fi ;; #( /*) lt_sysroot=`echo "$with_sysroot" | sed -e "$sed_quote_subst"` ;; #( no|'') ;; #( *) { $as_echo "$as_me:${as_lineno-$LINENO}: result: $with_sysroot" >&5 $as_echo "$with_sysroot" >&6; } as_fn_error $? "The sysroot must be an absolute path." "$LINENO" 5 ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: result: ${lt_sysroot:-no}" >&5 $as_echo "${lt_sysroot:-no}" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking for a working dd" >&5 $as_echo_n "checking for a working dd... 
" >&6; } if ${ac_cv_path_lt_DD+:} false; then : $as_echo_n "(cached) " >&6 else printf 0123456789abcdef0123456789abcdef >conftest.i cat conftest.i conftest.i >conftest2.i : ${lt_DD:=$DD} if test -z "$lt_DD"; then ac_path_lt_DD_found=false # Loop through the user's path and test for each of PROGNAME-LIST as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_prog in dd; do for ac_exec_ext in '' $ac_executable_extensions; do ac_path_lt_DD="$as_dir/$ac_prog$ac_exec_ext" as_fn_executable_p "$ac_path_lt_DD" || continue if "$ac_path_lt_DD" bs=32 count=1 conftest.out 2>/dev/null; then cmp -s conftest.i conftest.out \ && ac_cv_path_lt_DD="$ac_path_lt_DD" ac_path_lt_DD_found=: fi $ac_path_lt_DD_found && break 3 done done done IFS=$as_save_IFS if test -z "$ac_cv_path_lt_DD"; then : fi else ac_cv_path_lt_DD=$lt_DD fi rm -f conftest.i conftest2.i conftest.out fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_path_lt_DD" >&5 $as_echo "$ac_cv_path_lt_DD" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to truncate binary pipes" >&5 $as_echo_n "checking how to truncate binary pipes... " >&6; } if ${lt_cv_truncate_bin+:} false; then : $as_echo_n "(cached) " >&6 else printf 0123456789abcdef0123456789abcdef >conftest.i cat conftest.i conftest.i >conftest2.i lt_cv_truncate_bin= if "$ac_cv_path_lt_DD" bs=32 count=1 conftest.out 2>/dev/null; then cmp -s conftest.i conftest.out \ && lt_cv_truncate_bin="$ac_cv_path_lt_DD bs=4096 count=1" fi rm -f conftest.i conftest2.i conftest.out test -z "$lt_cv_truncate_bin" && lt_cv_truncate_bin="$SED -e 4q" fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_truncate_bin" >&5 $as_echo "$lt_cv_truncate_bin" >&6; } # Calculate cc_basename. Skip known compiler wrappers and cross-prefix. 
func_cc_basename () { for cc_temp in $*""; do case $cc_temp in compile | *[\\/]compile | ccache | *[\\/]ccache ) ;; distcc | *[\\/]distcc | purify | *[\\/]purify ) ;; \-*) ;; *) break;; esac done func_cc_basename_result=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"` } # Check whether --enable-libtool-lock was given. if test "${enable_libtool_lock+set}" = set; then : enableval=$enable_libtool_lock; fi test no = "$enable_libtool_lock" || enable_libtool_lock=yes # Some flags need to be propagated to the compiler or linker for good # libtool support. case $host in ia64-*-hpux*) # Find out what ABI is being produced by ac_compile, and set mode # options accordingly. echo 'int i;' > conftest.$ac_ext if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then case `/usr/bin/file conftest.$ac_objext` in *ELF-32*) HPUX_IA64_MODE=32 ;; *ELF-64*) HPUX_IA64_MODE=64 ;; esac fi rm -rf conftest* ;; *-*-irix6*) # Find out what ABI is being produced by ac_compile, and set linker # options accordingly. echo '#line '$LINENO' "configure"' > conftest.$ac_ext if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then if test yes = "$lt_cv_prog_gnu_ld"; then case `/usr/bin/file conftest.$ac_objext` in *32-bit*) LD="${LD-ld} -melf32bsmip" ;; *N32*) LD="${LD-ld} -melf32bmipn32" ;; *64-bit*) LD="${LD-ld} -melf64bmip" ;; esac else case `/usr/bin/file conftest.$ac_objext` in *32-bit*) LD="${LD-ld} -32" ;; *N32*) LD="${LD-ld} -n32" ;; *64-bit*) LD="${LD-ld} -64" ;; esac fi fi rm -rf conftest* ;; mips64*-*linux*) # Find out what ABI is being produced by ac_compile, and set linker # options accordingly. 
echo '#line '$LINENO' "configure"' > conftest.$ac_ext if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then emul=elf case `/usr/bin/file conftest.$ac_objext` in *32-bit*) emul="${emul}32" ;; *64-bit*) emul="${emul}64" ;; esac case `/usr/bin/file conftest.$ac_objext` in *MSB*) emul="${emul}btsmip" ;; *LSB*) emul="${emul}ltsmip" ;; esac case `/usr/bin/file conftest.$ac_objext` in *N32*) emul="${emul}n32" ;; esac LD="${LD-ld} -m $emul" fi rm -rf conftest* ;; x86_64-*kfreebsd*-gnu|x86_64-*linux*|powerpc*-*linux*| \ s390*-*linux*|s390*-*tpf*|sparc*-*linux*) # Find out what ABI is being produced by ac_compile, and set linker # options accordingly. Note that the listed cases only cover the # situations where additional linker options are needed (such as when # doing 32-bit compilation for a host where ld defaults to 64-bit, or # vice versa); the common cases where no linker options are needed do # not appear in the list. echo 'int i;' > conftest.$ac_ext if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 test $ac_status = 0; }; then case `/usr/bin/file conftest.o` in *32-bit*) case $host in x86_64-*kfreebsd*-gnu) LD="${LD-ld} -m elf_i386_fbsd" ;; x86_64-*linux*) case `/usr/bin/file conftest.o` in *x86-64*) LD="${LD-ld} -m elf32_x86_64" ;; *) LD="${LD-ld} -m elf_i386" ;; esac ;; powerpc64le-*linux*) LD="${LD-ld} -m elf32lppclinux" ;; powerpc64-*linux*) LD="${LD-ld} -m elf32ppclinux" ;; s390x-*linux*) LD="${LD-ld} -m elf_s390" ;; sparc64-*linux*) LD="${LD-ld} -m elf32_sparc" ;; esac ;; *64-bit*) case $host in x86_64-*kfreebsd*-gnu) LD="${LD-ld} -m elf_x86_64_fbsd" ;; x86_64-*linux*) LD="${LD-ld} -m elf_x86_64" ;; powerpcle-*linux*) LD="${LD-ld} -m elf64lppc" ;; powerpc-*linux*) LD="${LD-ld} -m elf64ppc" ;; s390*-*linux*|s390*-*tpf*) LD="${LD-ld} -m elf64_s390" ;; sparc*-*linux*) LD="${LD-ld} -m elf64_sparc" ;; esac ;; esac fi rm -rf conftest* ;; *-*-sco3.2v5*) # On SCO OpenServer 5, we need -belf to get full-featured binaries. SAVE_CFLAGS=$CFLAGS CFLAGS="$CFLAGS -belf" { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the C compiler needs -belf" >&5 $as_echo_n "checking whether the C compiler needs -belf... " >&6; } if ${lt_cv_cc_needs_belf+:} false; then : $as_echo_n "(cached) " >&6 else ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ int main () { ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : lt_cv_cc_needs_belf=yes else lt_cv_cc_needs_belf=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_cc_needs_belf" >&5 $as_echo "$lt_cv_cc_needs_belf" >&6; } if test yes != "$lt_cv_cc_needs_belf"; then # this is probably gcc 2.8.0, egcs 1.0 or newer; no need for -belf CFLAGS=$SAVE_CFLAGS fi ;; *-*solaris*) # Find out what ABI is being produced by ac_compile, and set linker # options accordingly. echo 'int i;' > conftest.$ac_ext if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then case `/usr/bin/file conftest.o` in *64-bit*) case $lt_cv_prog_gnu_ld in yes*) case $host in i?86-*-solaris*|x86_64-*-solaris*) LD="${LD-ld} -m elf_x86_64" ;; sparc*-*-solaris*) LD="${LD-ld} -m elf64_sparc" ;; esac # GNU ld 2.21 introduced _sol2 emulations. Use them if available. if ${LD-ld} -V | grep _sol2 >/dev/null 2>&1; then LD=${LD-ld}_sol2 fi ;; *) if ${LD-ld} -64 -r -o conftest2.o conftest.o >/dev/null 2>&1; then LD="${LD-ld} -64" fi ;; esac ;; esac fi rm -rf conftest* ;; esac need_locks=$enable_libtool_lock if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}mt", so it can be a program name with args. set dummy ${ac_tool_prefix}mt; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... 
" >&6; } if ${ac_cv_prog_MANIFEST_TOOL+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$MANIFEST_TOOL"; then ac_cv_prog_MANIFEST_TOOL="$MANIFEST_TOOL" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_MANIFEST_TOOL="${ac_tool_prefix}mt" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi MANIFEST_TOOL=$ac_cv_prog_MANIFEST_TOOL if test -n "$MANIFEST_TOOL"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MANIFEST_TOOL" >&5 $as_echo "$MANIFEST_TOOL" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_MANIFEST_TOOL"; then ac_ct_MANIFEST_TOOL=$MANIFEST_TOOL # Extract the first word of "mt", so it can be a program name with args. set dummy mt; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_MANIFEST_TOOL+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_MANIFEST_TOOL"; then ac_cv_prog_ac_ct_MANIFEST_TOOL="$ac_ct_MANIFEST_TOOL" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_MANIFEST_TOOL="mt" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_MANIFEST_TOOL=$ac_cv_prog_ac_ct_MANIFEST_TOOL if test -n "$ac_ct_MANIFEST_TOOL"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_MANIFEST_TOOL" >&5 $as_echo "$ac_ct_MANIFEST_TOOL" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_MANIFEST_TOOL" = x; then MANIFEST_TOOL=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac MANIFEST_TOOL=$ac_ct_MANIFEST_TOOL fi else MANIFEST_TOOL="$ac_cv_prog_MANIFEST_TOOL" fi test -z "$MANIFEST_TOOL" && MANIFEST_TOOL=mt { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $MANIFEST_TOOL is a manifest tool" >&5 $as_echo_n "checking if $MANIFEST_TOOL is a manifest tool... " >&6; } if ${lt_cv_path_mainfest_tool+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_path_mainfest_tool=no echo "$as_me:$LINENO: $MANIFEST_TOOL '-?'" >&5 $MANIFEST_TOOL '-?' 2>conftest.err > conftest.out cat conftest.err >&5 if $GREP 'Manifest Tool' conftest.out > /dev/null; then lt_cv_path_mainfest_tool=yes fi rm -f conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_path_mainfest_tool" >&5 $as_echo "$lt_cv_path_mainfest_tool" >&6; } if test yes != "$lt_cv_path_mainfest_tool"; then MANIFEST_TOOL=: fi case $host_os in rhapsody* | darwin*) if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}dsymutil", so it can be a program name with args. 
set dummy ${ac_tool_prefix}dsymutil; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_DSYMUTIL+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$DSYMUTIL"; then ac_cv_prog_DSYMUTIL="$DSYMUTIL" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_DSYMUTIL="${ac_tool_prefix}dsymutil" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi DSYMUTIL=$ac_cv_prog_DSYMUTIL if test -n "$DSYMUTIL"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $DSYMUTIL" >&5 $as_echo "$DSYMUTIL" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_DSYMUTIL"; then ac_ct_DSYMUTIL=$DSYMUTIL # Extract the first word of "dsymutil", so it can be a program name with args. set dummy dsymutil; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_DSYMUTIL+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_DSYMUTIL"; then ac_cv_prog_ac_ct_DSYMUTIL="$ac_ct_DSYMUTIL" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_DSYMUTIL="dsymutil" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_DSYMUTIL=$ac_cv_prog_ac_ct_DSYMUTIL if test -n "$ac_ct_DSYMUTIL"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_DSYMUTIL" >&5 $as_echo "$ac_ct_DSYMUTIL" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_DSYMUTIL" = x; then DSYMUTIL=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac DSYMUTIL=$ac_ct_DSYMUTIL fi else DSYMUTIL="$ac_cv_prog_DSYMUTIL" fi if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}nmedit", so it can be a program name with args. set dummy ${ac_tool_prefix}nmedit; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_NMEDIT+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$NMEDIT"; then ac_cv_prog_NMEDIT="$NMEDIT" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_NMEDIT="${ac_tool_prefix}nmedit" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi NMEDIT=$ac_cv_prog_NMEDIT if test -n "$NMEDIT"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $NMEDIT" >&5 $as_echo "$NMEDIT" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_NMEDIT"; then ac_ct_NMEDIT=$NMEDIT # Extract the first word of "nmedit", so it can be a program name with args. set dummy nmedit; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_NMEDIT+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_NMEDIT"; then ac_cv_prog_ac_ct_NMEDIT="$ac_ct_NMEDIT" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_NMEDIT="nmedit" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_NMEDIT=$ac_cv_prog_ac_ct_NMEDIT if test -n "$ac_ct_NMEDIT"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_NMEDIT" >&5 $as_echo "$ac_ct_NMEDIT" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_NMEDIT" = x; then NMEDIT=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac NMEDIT=$ac_ct_NMEDIT fi else NMEDIT="$ac_cv_prog_NMEDIT" fi if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}lipo", so it can be a program name with args. set dummy ${ac_tool_prefix}lipo; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_LIPO+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$LIPO"; then ac_cv_prog_LIPO="$LIPO" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_LIPO="${ac_tool_prefix}lipo" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi LIPO=$ac_cv_prog_LIPO if test -n "$LIPO"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $LIPO" >&5 $as_echo "$LIPO" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_LIPO"; then ac_ct_LIPO=$LIPO # Extract the first word of "lipo", so it can be a program name with args. set dummy lipo; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_LIPO+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_LIPO"; then ac_cv_prog_ac_ct_LIPO="$ac_ct_LIPO" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_LIPO="lipo" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_LIPO=$ac_cv_prog_ac_ct_LIPO if test -n "$ac_ct_LIPO"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_LIPO" >&5 $as_echo "$ac_ct_LIPO" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_LIPO" = x; then LIPO=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac LIPO=$ac_ct_LIPO fi else LIPO="$ac_cv_prog_LIPO" fi if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}otool", so it can be a program name with args. set dummy ${ac_tool_prefix}otool; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_OTOOL+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$OTOOL"; then ac_cv_prog_OTOOL="$OTOOL" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_OTOOL="${ac_tool_prefix}otool" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi OTOOL=$ac_cv_prog_OTOOL if test -n "$OTOOL"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OTOOL" >&5 $as_echo "$OTOOL" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_OTOOL"; then ac_ct_OTOOL=$OTOOL # Extract the first word of "otool", so it can be a program name with args. set dummy otool; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_OTOOL+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_OTOOL"; then ac_cv_prog_ac_ct_OTOOL="$ac_ct_OTOOL" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_OTOOL="otool" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_OTOOL=$ac_cv_prog_ac_ct_OTOOL if test -n "$ac_ct_OTOOL"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OTOOL" >&5 $as_echo "$ac_ct_OTOOL" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_OTOOL" = x; then OTOOL=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac OTOOL=$ac_ct_OTOOL fi else OTOOL="$ac_cv_prog_OTOOL" fi if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}otool64", so it can be a program name with args. set dummy ${ac_tool_prefix}otool64; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_OTOOL64+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$OTOOL64"; then ac_cv_prog_OTOOL64="$OTOOL64" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_OTOOL64="${ac_tool_prefix}otool64" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi OTOOL64=$ac_cv_prog_OTOOL64 if test -n "$OTOOL64"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $OTOOL64" >&5 $as_echo "$OTOOL64" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_OTOOL64"; then ac_ct_OTOOL64=$OTOOL64 # Extract the first word of "otool64", so it can be a program name with args. set dummy otool64; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_OTOOL64+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_OTOOL64"; then ac_cv_prog_ac_ct_OTOOL64="$ac_ct_OTOOL64" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_OTOOL64="otool64" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_OTOOL64=$ac_cv_prog_ac_ct_OTOOL64 if test -n "$ac_ct_OTOOL64"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_OTOOL64" >&5 $as_echo "$ac_ct_OTOOL64" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_OTOOL64" = x; then OTOOL64=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac OTOOL64=$ac_ct_OTOOL64 fi else OTOOL64="$ac_cv_prog_OTOOL64" fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -single_module linker flag" >&5 $as_echo_n "checking for -single_module linker flag... " >&6; } if ${lt_cv_apple_cc_single_mod+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_apple_cc_single_mod=no if test -z "$LT_MULTI_MODULE"; then # By default we will add the -single_module flag. You can override # by either setting the environment variable LT_MULTI_MODULE # non-empty at configure time, or by adding -multi_module to the # link flags. rm -rf libconftest.dylib* echo "int foo(void){return 1;}" > conftest.c echo "$LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \ -dynamiclib -Wl,-single_module conftest.c" >&5 $LTCC $LTCFLAGS $LDFLAGS -o libconftest.dylib \ -dynamiclib -Wl,-single_module conftest.c 2>conftest.err _lt_result=$? # If there is a non-empty error log, and "single_module" # appears in it, assume the flag caused a linker warning if test -s conftest.err && $GREP single_module conftest.err; then cat conftest.err >&5 # Otherwise, if the output was created with a 0 exit code from # the compiler, it worked. 
elif test -f libconftest.dylib && test 0 = "$_lt_result"; then lt_cv_apple_cc_single_mod=yes else cat conftest.err >&5 fi rm -rf libconftest.dylib* rm -f conftest.* fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_apple_cc_single_mod" >&5 $as_echo "$lt_cv_apple_cc_single_mod" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -exported_symbols_list linker flag" >&5 $as_echo_n "checking for -exported_symbols_list linker flag... " >&6; } if ${lt_cv_ld_exported_symbols_list+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_ld_exported_symbols_list=no save_LDFLAGS=$LDFLAGS echo "_main" > conftest.sym LDFLAGS="$LDFLAGS -Wl,-exported_symbols_list,conftest.sym" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : lt_cv_ld_exported_symbols_list=yes else lt_cv_ld_exported_symbols_list=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LDFLAGS=$save_LDFLAGS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_exported_symbols_list" >&5 $as_echo "$lt_cv_ld_exported_symbols_list" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking for -force_load linker flag" >&5 $as_echo_n "checking for -force_load linker flag... " >&6; } if ${lt_cv_ld_force_load+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_ld_force_load=no cat > conftest.c << _LT_EOF int forced_loaded() { return 2;} _LT_EOF echo "$LTCC $LTCFLAGS -c -o conftest.o conftest.c" >&5 $LTCC $LTCFLAGS -c -o conftest.o conftest.c 2>&5 echo "$AR cru libconftest.a conftest.o" >&5 $AR cru libconftest.a conftest.o 2>&5 echo "$RANLIB libconftest.a" >&5 $RANLIB libconftest.a 2>&5 cat > conftest.c << _LT_EOF int main() { return 0;} _LT_EOF echo "$LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a" >&5 $LTCC $LTCFLAGS $LDFLAGS -o conftest conftest.c -Wl,-force_load,./libconftest.a 2>conftest.err _lt_result=$? 
if test -s conftest.err && $GREP force_load conftest.err; then cat conftest.err >&5 elif test -f conftest && test 0 = "$_lt_result" && $GREP forced_load conftest >/dev/null 2>&1; then lt_cv_ld_force_load=yes else cat conftest.err >&5 fi rm -f conftest.err libconftest.a conftest conftest.c rm -rf conftest.dSYM fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_ld_force_load" >&5 $as_echo "$lt_cv_ld_force_load" >&6; } case $host_os in rhapsody* | darwin1.[012]) _lt_dar_allow_undefined='$wl-undefined ${wl}suppress' ;; darwin1.*) _lt_dar_allow_undefined='$wl-flat_namespace $wl-undefined ${wl}suppress' ;; darwin*) # darwin 5.x on # if running on 10.5 or later, the deployment target defaults # to the OS version, if on x86, and 10.4, the deployment # target defaults to 10.4. Don't you love it? case ${MACOSX_DEPLOYMENT_TARGET-10.0},$host in 10.0,*86*-darwin8*|10.0,*-darwin[91]*) _lt_dar_allow_undefined='$wl-undefined ${wl}dynamic_lookup' ;; 10.[012][,.]*) _lt_dar_allow_undefined='$wl-flat_namespace $wl-undefined ${wl}suppress' ;; 10.*) _lt_dar_allow_undefined='$wl-undefined ${wl}dynamic_lookup' ;; esac ;; esac if test yes = "$lt_cv_apple_cc_single_mod"; then _lt_dar_single_mod='$single_module' fi if test yes = "$lt_cv_ld_exported_symbols_list"; then _lt_dar_export_syms=' $wl-exported_symbols_list,$output_objdir/$libname-symbols.expsym' else _lt_dar_export_syms='~$NMEDIT -s $output_objdir/$libname-symbols.expsym $lib' fi if test : != "$DSYMUTIL" && test no = "$lt_cv_ld_force_load"; then _lt_dsymutil='~$DSYMUTIL $lib || :' else _lt_dsymutil= fi ;; esac # func_munge_path_list VARIABLE PATH # ----------------------------------- # VARIABLE is name of variable containing _space_ separated list of # directories to be munged by the contents of PATH, which is string # having a format: # "DIR[:DIR]:" # string "DIR[ DIR]" will be prepended to VARIABLE # ":DIR[:DIR]" # string "DIR[ DIR]" will be appended to VARIABLE # "DIRP[:DIRP]::[DIRA:]DIRA" # string "DIRP[ DIRP]" will be 
prepended to VARIABLE and string # "DIRA[ DIRA]" will be appended to VARIABLE # "DIR[:DIR]" # VARIABLE will be replaced by "DIR[ DIR]" func_munge_path_list () { case x$2 in x) ;; *:) eval $1=\"`$ECHO $2 | $SED 's/:/ /g'` \$$1\" ;; x:*) eval $1=\"\$$1 `$ECHO $2 | $SED 's/:/ /g'`\" ;; *::*) eval $1=\"\$$1\ `$ECHO $2 | $SED -e 's/.*:://' -e 's/:/ /g'`\" eval $1=\"`$ECHO $2 | $SED -e 's/::.*//' -e 's/:/ /g'`\ \$$1\" ;; *) eval $1=\"`$ECHO $2 | $SED 's/:/ /g'`\" ;; esac } ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to run the C preprocessor" >&5 $as_echo_n "checking how to run the C preprocessor... " >&6; } # On Suns, sometimes $CPP names a directory. if test -n "$CPP" && test -d "$CPP"; then CPP= fi if test -z "$CPP"; then if ${ac_cv_prog_CPP+:} false; then : $as_echo_n "(cached) " >&6 else # Double quotes because CPP needs to be expanded for CPP in "$CC -E" "$CC -E -traditional-cpp" "/lib/cpp" do ac_preproc_ok=false for ac_c_preproc_warn_flag in '' yes do # Use a header file that comes with gcc, so configuring glibc # with a fresh cross-compiler works. # Prefer to if __STDC__ is defined, since # exists even on freestanding compilers. # On the NeXT, cc -E runs the code through the compiler's parser, # not just through cpp. "Syntax error" is here to catch this case. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #ifdef __STDC__ # include #else # include #endif Syntax error _ACEOF if ac_fn_c_try_cpp "$LINENO"; then : else # Broken: fails on valid input. continue fi rm -f conftest.err conftest.i conftest.$ac_ext # OK, works on sane cases. Now check whether nonexistent headers # can be detected and how. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include _ACEOF if ac_fn_c_try_cpp "$LINENO"; then : # Broken: success on invalid input. continue else # Passes both tests. ac_preproc_ok=: break fi rm -f conftest.err conftest.i conftest.$ac_ext done # Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. rm -f conftest.i conftest.err conftest.$ac_ext if $ac_preproc_ok; then : break fi done ac_cv_prog_CPP=$CPP fi CPP=$ac_cv_prog_CPP else ac_cv_prog_CPP=$CPP fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CPP" >&5 $as_echo "$CPP" >&6; } ac_preproc_ok=false for ac_c_preproc_warn_flag in '' yes do # Use a header file that comes with gcc, so configuring glibc # with a fresh cross-compiler works. # Prefer to if __STDC__ is defined, since # exists even on freestanding compilers. # On the NeXT, cc -E runs the code through the compiler's parser, # not just through cpp. "Syntax error" is here to catch this case. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #ifdef __STDC__ # include #else # include #endif Syntax error _ACEOF if ac_fn_c_try_cpp "$LINENO"; then : else # Broken: fails on valid input. continue fi rm -f conftest.err conftest.i conftest.$ac_ext # OK, works on sane cases. Now check whether nonexistent headers # can be detected and how. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include _ACEOF if ac_fn_c_try_cpp "$LINENO"; then : # Broken: success on invalid input. continue else # Passes both tests. ac_preproc_ok=: break fi rm -f conftest.err conftest.i conftest.$ac_ext done # Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. rm -f conftest.i conftest.err conftest.$ac_ext if $ac_preproc_ok; then : else { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? 
"C preprocessor \"$CPP\" fails sanity check See \`config.log' for more details" "$LINENO" 5; } fi ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ANSI C header files" >&5 $as_echo_n "checking for ANSI C header files... " >&6; } if ${ac_cv_header_stdc+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include #include #include int main () { ; return 0; } _ACEOF if ac_fn_c_try_compile "$LINENO"; then : ac_cv_header_stdc=yes else ac_cv_header_stdc=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext if test $ac_cv_header_stdc = yes; then # SunOS 4.x string.h does not declare mem*, contrary to ANSI. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include _ACEOF if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | $EGREP "memchr" >/dev/null 2>&1; then : else ac_cv_header_stdc=no fi rm -f conftest* fi if test $ac_cv_header_stdc = yes; then # ISC 2.0.2 stdlib.h does not declare free, contrary to ANSI. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include _ACEOF if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | $EGREP "free" >/dev/null 2>&1; then : else ac_cv_header_stdc=no fi rm -f conftest* fi if test $ac_cv_header_stdc = yes; then # /bin/cc in Irix-4.0.5 gets non-ANSI ctype macros unless using -ansi. if test "$cross_compiling" = yes; then : : else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include #if ((' ' & 0x0FF) == 0x020) # define ISLOWER(c) ('a' <= (c) && (c) <= 'z') # define TOUPPER(c) (ISLOWER(c) ? 'A' + ((c) - 'a') : (c)) #else # define ISLOWER(c) \ (('a' <= (c) && (c) <= 'i') \ || ('j' <= (c) && (c) <= 'r') \ || ('s' <= (c) && (c) <= 'z')) # define TOUPPER(c) (ISLOWER(c) ? 
((c) | 0x40) : (c)) #endif #define XOR(e, f) (((e) && !(f)) || (!(e) && (f))) int main () { int i; for (i = 0; i < 256; i++) if (XOR (islower (i), ISLOWER (i)) || toupper (i) != TOUPPER (i)) return 2; return 0; } _ACEOF if ac_fn_c_try_run "$LINENO"; then : else ac_cv_header_stdc=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_stdc" >&5 $as_echo "$ac_cv_header_stdc" >&6; } if test $ac_cv_header_stdc = yes; then $as_echo "#define STDC_HEADERS 1" >>confdefs.h fi # On IRIX 5.3, sys/types and inttypes.h are conflicting. for ac_header in sys/types.h sys/stat.h stdlib.h string.h memory.h strings.h \ inttypes.h stdint.h unistd.h do : as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` ac_fn_c_check_header_compile "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default " if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 _ACEOF fi done for ac_header in dlfcn.h do : ac_fn_c_check_header_compile "$LINENO" "dlfcn.h" "ac_cv_header_dlfcn_h" "$ac_includes_default " if test "x$ac_cv_header_dlfcn_h" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_DLFCN_H 1 _ACEOF fi done func_stripname_cnf () { case $2 in .*) func_stripname_result=`$ECHO "$3" | $SED "s%^$1%%; s%\\\\$2\$%%"`;; *) func_stripname_result=`$ECHO "$3" | $SED "s%^$1%%; s%$2\$%%"`;; esac } # func_stripname_cnf # Set options enable_dlopen=no enable_win32_dll=no # Check whether --enable-shared was given. if test "${enable_shared+set}" = set; then : enableval=$enable_shared; p=${PACKAGE-default} case $enableval in yes) enable_shared=yes ;; no) enable_shared=no ;; *) enable_shared=no # Look at the argument we got. We use all the common list separators. 
lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR, for pkg in $enableval; do IFS=$lt_save_ifs if test "X$pkg" = "X$p"; then enable_shared=yes fi done IFS=$lt_save_ifs ;; esac else enable_shared=yes fi # Check whether --enable-static was given. if test "${enable_static+set}" = set; then : enableval=$enable_static; p=${PACKAGE-default} case $enableval in yes) enable_static=yes ;; no) enable_static=no ;; *) enable_static=no # Look at the argument we got. We use all the common list separators. lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR, for pkg in $enableval; do IFS=$lt_save_ifs if test "X$pkg" = "X$p"; then enable_static=yes fi done IFS=$lt_save_ifs ;; esac else enable_static=yes fi # Check whether --with-pic was given. if test "${with_pic+set}" = set; then : withval=$with_pic; lt_p=${PACKAGE-default} case $withval in yes|no) pic_mode=$withval ;; *) pic_mode=default # Look at the argument we got. We use all the common list separators. lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR, for lt_pkg in $withval; do IFS=$lt_save_ifs if test "X$lt_pkg" = "X$lt_p"; then pic_mode=yes fi done IFS=$lt_save_ifs ;; esac else pic_mode=default fi # Check whether --enable-fast-install was given. if test "${enable_fast_install+set}" = set; then : enableval=$enable_fast_install; p=${PACKAGE-default} case $enableval in yes) enable_fast_install=yes ;; no) enable_fast_install=no ;; *) enable_fast_install=no # Look at the argument we got. We use all the common list separators. lt_save_ifs=$IFS; IFS=$IFS$PATH_SEPARATOR, for pkg in $enableval; do IFS=$lt_save_ifs if test "X$pkg" = "X$p"; then enable_fast_install=yes fi done IFS=$lt_save_ifs ;; esac else enable_fast_install=yes fi shared_archive_member_spec= case $host,$enable_shared in power*-*-aix[5-9]*,yes) { $as_echo "$as_me:${as_lineno-$LINENO}: checking which variant of shared library versioning to provide" >&5 $as_echo_n "checking which variant of shared library versioning to provide... " >&6; } # Check whether --with-aix-soname was given. 
if test "${with_aix_soname+set}" = set; then : withval=$with_aix_soname; case $withval in aix|svr4|both) ;; *) as_fn_error $? "Unknown argument to --with-aix-soname" "$LINENO" 5 ;; esac lt_cv_with_aix_soname=$with_aix_soname else if ${lt_cv_with_aix_soname+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_with_aix_soname=aix fi with_aix_soname=$lt_cv_with_aix_soname fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $with_aix_soname" >&5 $as_echo "$with_aix_soname" >&6; } if test aix != "$with_aix_soname"; then # For the AIX way of multilib, we name the shared archive member # based on the bitwidth used, traditionally 'shr.o' or 'shr_64.o', # and 'shr.imp' or 'shr_64.imp', respectively, for the Import File. # Even when GNU compilers ignore OBJECT_MODE but need '-maix64' flag, # the AIX toolchain works better with OBJECT_MODE set (default 32). if test 64 = "${OBJECT_MODE-32}"; then shared_archive_member_spec=shr_64 else shared_archive_member_spec=shr fi fi ;; *) with_aix_soname=aix ;; esac # This can be used to rebuild libtool when needed LIBTOOL_DEPS=$ltmain # Always use our own libtool. LIBTOOL='$(SHELL) $(top_builddir)/libtool' test -z "$LN_S" && LN_S="ln -s" if test -n "${ZSH_VERSION+set}"; then setopt NO_GLOB_SUBST fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for objdir" >&5 $as_echo_n "checking for objdir... " >&6; } if ${lt_cv_objdir+:} false; then : $as_echo_n "(cached) " >&6 else rm -f .libs 2>/dev/null mkdir .libs 2>/dev/null if test -d .libs; then lt_cv_objdir=.libs else # MS-DOS does not allow filenames that begin with a dot. lt_cv_objdir=_libs fi rmdir .libs 2>/dev/null fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_objdir" >&5 $as_echo "$lt_cv_objdir" >&6; } objdir=$lt_cv_objdir cat >>confdefs.h <<_ACEOF #define LT_OBJDIR "$lt_cv_objdir/" _ACEOF case $host_os in aix3*) # AIX sometimes has problems with the GCC collect2 program. 
For some # reason, if we set the COLLECT_NAMES environment variable, the problems # vanish in a puff of smoke. if test set != "${COLLECT_NAMES+set}"; then COLLECT_NAMES= export COLLECT_NAMES fi ;; esac # Global variables: ofile=libtool can_build_shared=yes # All known linkers require a '.a' archive for static linking (except MSVC, # which needs '.lib'). libext=a with_gnu_ld=$lt_cv_prog_gnu_ld old_CC=$CC old_CFLAGS=$CFLAGS # Set sane defaults for various variables test -z "$CC" && CC=cc test -z "$LTCC" && LTCC=$CC test -z "$LTCFLAGS" && LTCFLAGS=$CFLAGS test -z "$LD" && LD=ld test -z "$ac_objext" && ac_objext=o func_cc_basename $compiler cc_basename=$func_cc_basename_result # Only perform the check for file, if the check method requires it test -z "$MAGIC_CMD" && MAGIC_CMD=file case $deplibs_check_method in file_magic*) if test "$file_magic_cmd" = '$MAGIC_CMD'; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ${ac_tool_prefix}file" >&5 $as_echo_n "checking for ${ac_tool_prefix}file... " >&6; } if ${lt_cv_path_MAGIC_CMD+:} false; then : $as_echo_n "(cached) " >&6 else case $MAGIC_CMD in [\\/*] | ?:[\\/]*) lt_cv_path_MAGIC_CMD=$MAGIC_CMD # Let the user override the test with a path. ;; *) lt_save_MAGIC_CMD=$MAGIC_CMD lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR ac_dummy="/usr/bin$PATH_SEPARATOR$PATH" for ac_dir in $ac_dummy; do IFS=$lt_save_ifs test -z "$ac_dir" && ac_dir=. if test -f "$ac_dir/${ac_tool_prefix}file"; then lt_cv_path_MAGIC_CMD=$ac_dir/"${ac_tool_prefix}file" if test -n "$file_magic_test_file"; then case $deplibs_check_method in "file_magic "*) file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"` MAGIC_CMD=$lt_cv_path_MAGIC_CMD if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null | $EGREP "$file_magic_regex" > /dev/null; then : else cat <<_LT_EOF 1>&2 *** Warning: the command libtool uses to detect shared libraries, *** $file_magic_cmd, produces output that libtool cannot recognize. 
*** The result is that libtool may fail to recognize shared libraries *** as such. This will affect the creation of libtool libraries that *** depend on shared libraries, but programs linked with such libtool *** libraries will work regardless of this problem. Nevertheless, you *** may want to report the problem to your system manager and/or to *** bug-libtool@gnu.org _LT_EOF fi ;; esac fi break fi done IFS=$lt_save_ifs MAGIC_CMD=$lt_save_MAGIC_CMD ;; esac fi MAGIC_CMD=$lt_cv_path_MAGIC_CMD if test -n "$MAGIC_CMD"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5 $as_echo "$MAGIC_CMD" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test -z "$lt_cv_path_MAGIC_CMD"; then if test -n "$ac_tool_prefix"; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for file" >&5 $as_echo_n "checking for file... " >&6; } if ${lt_cv_path_MAGIC_CMD+:} false; then : $as_echo_n "(cached) " >&6 else case $MAGIC_CMD in [\\/*] | ?:[\\/]*) lt_cv_path_MAGIC_CMD=$MAGIC_CMD # Let the user override the test with a path. ;; *) lt_save_MAGIC_CMD=$MAGIC_CMD lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR ac_dummy="/usr/bin$PATH_SEPARATOR$PATH" for ac_dir in $ac_dummy; do IFS=$lt_save_ifs test -z "$ac_dir" && ac_dir=. if test -f "$ac_dir/file"; then lt_cv_path_MAGIC_CMD=$ac_dir/"file" if test -n "$file_magic_test_file"; then case $deplibs_check_method in "file_magic "*) file_magic_regex=`expr "$deplibs_check_method" : "file_magic \(.*\)"` MAGIC_CMD=$lt_cv_path_MAGIC_CMD if eval $file_magic_cmd \$file_magic_test_file 2> /dev/null | $EGREP "$file_magic_regex" > /dev/null; then : else cat <<_LT_EOF 1>&2 *** Warning: the command libtool uses to detect shared libraries, *** $file_magic_cmd, produces output that libtool cannot recognize. *** The result is that libtool may fail to recognize shared libraries *** as such. 
This will affect the creation of libtool libraries that *** depend on shared libraries, but programs linked with such libtool *** libraries will work regardless of this problem. Nevertheless, you *** may want to report the problem to your system manager and/or to *** bug-libtool@gnu.org _LT_EOF fi ;; esac fi break fi done IFS=$lt_save_ifs MAGIC_CMD=$lt_save_MAGIC_CMD ;; esac fi MAGIC_CMD=$lt_cv_path_MAGIC_CMD if test -n "$MAGIC_CMD"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $MAGIC_CMD" >&5 $as_echo "$MAGIC_CMD" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi else MAGIC_CMD=: fi fi fi ;; esac # Use C for the default configuration in the libtool script lt_save_CC=$CC ac_ext=c ac_cpp='$CPP $CPPFLAGS' ac_compile='$CC -c $CFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CC -o conftest$ac_exeext $CFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_c_compiler_gnu # Source file extension for C test sources. ac_ext=c # Object file extension for compiled C test sources. objext=o objext=$objext # Code to be used in simple compile tests lt_simple_compile_test_code="int some_variable = 0;" # Code to be used in simple link tests lt_simple_link_test_code='int main(){return(0);}' # If no C compiler was specified, use CC. LTCC=${LTCC-"$CC"} # If no C compiler flags were specified, use CFLAGS. LTCFLAGS=${LTCFLAGS-"$CFLAGS"} # Allow CC to be a program name with arguments. compiler=$CC # Save the default compiler, since it gets overwritten when the other # tags are being tested, and _LT_TAGVAR(compiler, []) is a NOP. 
compiler_DEFAULT=$CC # save warnings/boilerplate of simple test code ac_outfile=conftest.$ac_objext echo "$lt_simple_compile_test_code" >conftest.$ac_ext eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err _lt_compiler_boilerplate=`cat conftest.err` $RM conftest* ac_outfile=conftest.$ac_objext echo "$lt_simple_link_test_code" >conftest.$ac_ext eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err _lt_linker_boilerplate=`cat conftest.err` $RM -r conftest* if test -n "$compiler"; then lt_prog_compiler_no_builtin_flag= if test yes = "$GCC"; then case $cc_basename in nvcc*) lt_prog_compiler_no_builtin_flag=' -Xcompiler -fno-builtin' ;; *) lt_prog_compiler_no_builtin_flag=' -fno-builtin' ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -fno-rtti -fno-exceptions" >&5 $as_echo_n "checking if $compiler supports -fno-rtti -fno-exceptions... " >&6; } if ${lt_cv_prog_compiler_rtti_exceptions+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_rtti_exceptions=no ac_outfile=conftest.$ac_objext echo "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="-fno-rtti -fno-exceptions" ## exclude from sc_useless_quotes_in_assignment # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. # The option is referenced via a variable to avoid confusing sed. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) (eval "$lt_compile" 2>conftest.err) ac_status=$? cat conftest.err >&5 echo "$as_me:$LINENO: \$? 
= $ac_status" >&5 if (exit $ac_status) && test -s "$ac_outfile"; then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings other than the usual output. $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then lt_cv_prog_compiler_rtti_exceptions=yes fi fi $RM conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_rtti_exceptions" >&5 $as_echo "$lt_cv_prog_compiler_rtti_exceptions" >&6; } if test yes = "$lt_cv_prog_compiler_rtti_exceptions"; then lt_prog_compiler_no_builtin_flag="$lt_prog_compiler_no_builtin_flag -fno-rtti -fno-exceptions" else : fi fi lt_prog_compiler_wl= lt_prog_compiler_pic= lt_prog_compiler_static= if test yes = "$GCC"; then lt_prog_compiler_wl='-Wl,' lt_prog_compiler_static='-static' case $host_os in aix*) # All AIX code is PIC. if test ia64 = "$host_cpu"; then # AIX 5 now supports IA64 processor lt_prog_compiler_static='-Bstatic' fi lt_prog_compiler_pic='-fPIC' ;; amigaos*) case $host_cpu in powerpc) # see comment about AmigaOS4 .so support lt_prog_compiler_pic='-fPIC' ;; m68k) # FIXME: we need at least 68020 code to build shared libraries, but # adding the '-m68020' flag to GCC prevents building anything better, # like '-m68040'. lt_prog_compiler_pic='-m68020 -resident32 -malways-restore-a4' ;; esac ;; beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*) # PIC is the default for these OSes. ;; mingw* | cygwin* | pw32* | os2* | cegcc*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). 
# Although the cygwin gcc ignores -fPIC, still need this for old-style # (--disable-auto-import) libraries lt_prog_compiler_pic='-DDLL_EXPORT' case $host_os in os2*) lt_prog_compiler_static='$wl-static' ;; esac ;; darwin* | rhapsody*) # PIC is the default on this platform # Common symbols not allowed in MH_DYLIB files lt_prog_compiler_pic='-fno-common' ;; haiku*) # PIC is the default for Haiku. # The "-static" flag exists, but is broken. lt_prog_compiler_static= ;; hpux*) # PIC is the default for 64-bit PA HP-UX, but not for 32-bit # PA HP-UX. On IA64 HP-UX, PIC is the default but the pic flag # sets the default TLS model and affects inlining. case $host_cpu in hppa*64*) # +Z the default ;; *) lt_prog_compiler_pic='-fPIC' ;; esac ;; interix[3-9]*) # Interix 3.x gcc -fpic/-fPIC options generate broken code. # Instead, we relocate shared libraries at runtime. ;; msdosdjgpp*) # Just because we use GCC doesn't mean we suddenly get shared libraries # on systems that don't support them. lt_prog_compiler_can_build_shared=no enable_shared=no ;; *nto* | *qnx*) # QNX uses GNU C++, but need to define -shared option too, otherwise # it will coredump. lt_prog_compiler_pic='-fPIC -shared' ;; sysv4*MP*) if test -d /usr/nec; then lt_prog_compiler_pic=-Kconform_pic fi ;; *) lt_prog_compiler_pic='-fPIC' ;; esac case $cc_basename in nvcc*) # Cuda Compiler Driver 2.2 lt_prog_compiler_wl='-Xlinker ' if test -n "$lt_prog_compiler_pic"; then lt_prog_compiler_pic="-Xcompiler $lt_prog_compiler_pic" fi ;; esac else # PORTME Check for flag to pass linker flags through the system compiler. 
case $host_os in aix*) lt_prog_compiler_wl='-Wl,' if test ia64 = "$host_cpu"; then # AIX 5 now supports IA64 processor lt_prog_compiler_static='-Bstatic' else lt_prog_compiler_static='-bnso -bI:/lib/syscalls.exp' fi ;; darwin* | rhapsody*) # PIC is the default on this platform # Common symbols not allowed in MH_DYLIB files lt_prog_compiler_pic='-fno-common' case $cc_basename in nagfor*) # NAG Fortran compiler lt_prog_compiler_wl='-Wl,-Wl,,' lt_prog_compiler_pic='-PIC' lt_prog_compiler_static='-Bstatic' ;; esac ;; mingw* | cygwin* | pw32* | os2* | cegcc*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). lt_prog_compiler_pic='-DDLL_EXPORT' case $host_os in os2*) lt_prog_compiler_static='$wl-static' ;; esac ;; hpux9* | hpux10* | hpux11*) lt_prog_compiler_wl='-Wl,' # PIC is the default for IA64 HP-UX and 64-bit HP-UX, but # not for PA HP-UX. case $host_cpu in hppa*64*|ia64*) # +Z the default ;; *) lt_prog_compiler_pic='+Z' ;; esac # Is there a better lt_prog_compiler_static that works with the bundled CC? lt_prog_compiler_static='$wl-a ${wl}archive' ;; irix5* | irix6* | nonstopux*) lt_prog_compiler_wl='-Wl,' # PIC (with -KPIC) is the default. lt_prog_compiler_static='-non_shared' ;; linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) case $cc_basename in # old Intel for x86_64, which still supported -KPIC. ecc*) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='-KPIC' lt_prog_compiler_static='-static' ;; # icc used to be incompatible with GCC. # ICC 10 doesn't accept -KPIC any more. icc* | ifort*) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='-fPIC' lt_prog_compiler_static='-static' ;; # Lahey Fortran 8.1. 
lf95*) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='--shared' lt_prog_compiler_static='--static' ;; nagfor*) # NAG Fortran compiler lt_prog_compiler_wl='-Wl,-Wl,,' lt_prog_compiler_pic='-PIC' lt_prog_compiler_static='-Bstatic' ;; tcc*) # Fabrice Bellard et al's Tiny C Compiler lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='-fPIC' lt_prog_compiler_static='-static' ;; pgcc* | pgf77* | pgf90* | pgf95* | pgfortran*) # Portland Group compilers (*not* the Pentium gcc compiler, # which looks to be a dead project) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='-fpic' lt_prog_compiler_static='-Bstatic' ;; ccc*) lt_prog_compiler_wl='-Wl,' # All Alpha code is PIC. lt_prog_compiler_static='-non_shared' ;; xl* | bgxl* | bgf* | mpixl*) # IBM XL C 8.0/Fortran 10.1, 11.1 on PPC and BlueGene lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='-qpic' lt_prog_compiler_static='-qstaticlink' ;; *) case `$CC -V 2>&1 | sed 5q` in *Sun\ Ceres\ Fortran* | *Sun*Fortran*\ [1-7].* | *Sun*Fortran*\ 8.[0-3]*) # Sun Fortran 8.3 passes all unrecognized flags to the linker lt_prog_compiler_pic='-KPIC' lt_prog_compiler_static='-Bstatic' lt_prog_compiler_wl='' ;; *Sun\ F* | *Sun*Fortran*) lt_prog_compiler_pic='-KPIC' lt_prog_compiler_static='-Bstatic' lt_prog_compiler_wl='-Qoption ld ' ;; *Sun\ C*) # Sun C 5.9 lt_prog_compiler_pic='-KPIC' lt_prog_compiler_static='-Bstatic' lt_prog_compiler_wl='-Wl,' ;; *Intel*\ [CF]*Compiler*) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='-fPIC' lt_prog_compiler_static='-static' ;; *Portland\ Group*) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='-fpic' lt_prog_compiler_static='-Bstatic' ;; esac ;; esac ;; newsos6) lt_prog_compiler_pic='-KPIC' lt_prog_compiler_static='-Bstatic' ;; *nto* | *qnx*) # QNX uses GNU C++, but need to define -shared option too, otherwise # it will coredump. lt_prog_compiler_pic='-fPIC -shared' ;; osf3* | osf4* | osf5*) lt_prog_compiler_wl='-Wl,' # All OSF/1 code is PIC. 
lt_prog_compiler_static='-non_shared' ;; rdos*) lt_prog_compiler_static='-non_shared' ;; solaris*) lt_prog_compiler_pic='-KPIC' lt_prog_compiler_static='-Bstatic' case $cc_basename in f77* | f90* | f95* | sunf77* | sunf90* | sunf95*) lt_prog_compiler_wl='-Qoption ld ';; *) lt_prog_compiler_wl='-Wl,';; esac ;; sunos4*) lt_prog_compiler_wl='-Qoption ld ' lt_prog_compiler_pic='-PIC' lt_prog_compiler_static='-Bstatic' ;; sysv4 | sysv4.2uw2* | sysv4.3*) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='-KPIC' lt_prog_compiler_static='-Bstatic' ;; sysv4*MP*) if test -d /usr/nec; then lt_prog_compiler_pic='-Kconform_pic' lt_prog_compiler_static='-Bstatic' fi ;; sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_pic='-KPIC' lt_prog_compiler_static='-Bstatic' ;; unicos*) lt_prog_compiler_wl='-Wl,' lt_prog_compiler_can_build_shared=no ;; uts4*) lt_prog_compiler_pic='-pic' lt_prog_compiler_static='-Bstatic' ;; *) lt_prog_compiler_can_build_shared=no ;; esac fi case $host_os in # For platforms that do not support PIC, -DPIC is meaningless: *djgpp*) lt_prog_compiler_pic= ;; *) lt_prog_compiler_pic="$lt_prog_compiler_pic -DPIC" ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 $as_echo_n "checking for $compiler option to produce PIC... " >&6; } if ${lt_cv_prog_compiler_pic+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_pic=$lt_prog_compiler_pic fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic" >&5 $as_echo "$lt_cv_prog_compiler_pic" >&6; } lt_prog_compiler_pic=$lt_cv_prog_compiler_pic # # Check to make sure the PIC flag actually works. # if test -n "$lt_prog_compiler_pic"; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler PIC flag $lt_prog_compiler_pic works" >&5 $as_echo_n "checking if $compiler PIC flag $lt_prog_compiler_pic works... 
" >&6; } if ${lt_cv_prog_compiler_pic_works+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_pic_works=no ac_outfile=conftest.$ac_objext echo "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="$lt_prog_compiler_pic -DPIC" ## exclude from sc_useless_quotes_in_assignment # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. # The option is referenced via a variable to avoid confusing sed. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) (eval "$lt_compile" 2>conftest.err) ac_status=$? cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 if (exit $ac_status) && test -s "$ac_outfile"; then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings other than the usual output. $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then lt_cv_prog_compiler_pic_works=yes fi fi $RM conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_works" >&5 $as_echo "$lt_cv_prog_compiler_pic_works" >&6; } if test yes = "$lt_cv_prog_compiler_pic_works"; then case $lt_prog_compiler_pic in "" | " "*) ;; *) lt_prog_compiler_pic=" $lt_prog_compiler_pic" ;; esac else lt_prog_compiler_pic= lt_prog_compiler_can_build_shared=no fi fi # # Check to make sure the static flag actually works. 
# wl=$lt_prog_compiler_wl eval lt_tmp_static_flag=\"$lt_prog_compiler_static\" { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler static flag $lt_tmp_static_flag works" >&5 $as_echo_n "checking if $compiler static flag $lt_tmp_static_flag works... " >&6; } if ${lt_cv_prog_compiler_static_works+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_static_works=no save_LDFLAGS=$LDFLAGS LDFLAGS="$LDFLAGS $lt_tmp_static_flag" echo "$lt_simple_link_test_code" > conftest.$ac_ext if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then # The linker can only warn and ignore the option if not recognized # So say no if there are warnings if test -s conftest.err; then # Append any errors to the config.log. cat conftest.err 1>&5 $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 if diff conftest.exp conftest.er2 >/dev/null; then lt_cv_prog_compiler_static_works=yes fi else lt_cv_prog_compiler_static_works=yes fi fi $RM -r conftest* LDFLAGS=$save_LDFLAGS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_static_works" >&5 $as_echo "$lt_cv_prog_compiler_static_works" >&6; } if test yes = "$lt_cv_prog_compiler_static_works"; then : else lt_prog_compiler_static= fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5 $as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; } if ${lt_cv_prog_compiler_c_o+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_c_o=no $RM -r conftest 2>/dev/null mkdir conftest cd conftest mkdir out echo "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="-o out/conftest2.$ac_objext" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. 
# Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) (eval "$lt_compile" 2>out/conftest.err) ac_status=$? cat out/conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 if (exit $ac_status) && test -s out/conftest2.$ac_objext then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then lt_cv_prog_compiler_c_o=yes fi fi chmod u+w . 2>&5 $RM conftest* # SGI C++ compiler will create directory out/ii_files/ for # template instantiation test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files $RM out/* && rmdir out cd .. $RM -r conftest $RM conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o" >&5 $as_echo "$lt_cv_prog_compiler_c_o" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5 $as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; } if ${lt_cv_prog_compiler_c_o+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_c_o=no $RM -r conftest 2>/dev/null mkdir conftest cd conftest mkdir out echo "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="-o out/conftest2.$ac_objext" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. 
lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) (eval "$lt_compile" 2>out/conftest.err) ac_status=$? cat out/conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 if (exit $ac_status) && test -s out/conftest2.$ac_objext then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then lt_cv_prog_compiler_c_o=yes fi fi chmod u+w . 2>&5 $RM conftest* # SGI C++ compiler will create directory out/ii_files/ for # template instantiation test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files $RM out/* && rmdir out cd .. $RM -r conftest $RM conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o" >&5 $as_echo "$lt_cv_prog_compiler_c_o" >&6; } hard_links=nottested if test no = "$lt_cv_prog_compiler_c_o" && test no != "$need_locks"; then # do not overwrite the value of need_locks provided by the user { $as_echo "$as_me:${as_lineno-$LINENO}: checking if we can lock with hard links" >&5 $as_echo_n "checking if we can lock with hard links... 
" >&6; } hard_links=yes $RM conftest* ln conftest.a conftest.b 2>/dev/null && hard_links=no touch conftest.a ln conftest.a conftest.b 2>&5 || hard_links=no ln conftest.a conftest.b 2>/dev/null && hard_links=no { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hard_links" >&5 $as_echo "$hard_links" >&6; } if test no = "$hard_links"; then { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: '$CC' does not support '-c -o', so 'make -j' may be unsafe" >&5 $as_echo "$as_me: WARNING: '$CC' does not support '-c -o', so 'make -j' may be unsafe" >&2;} need_locks=warn fi else need_locks=no fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $compiler linker ($LD) supports shared libraries" >&5 $as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... " >&6; } runpath_var= allow_undefined_flag= always_export_symbols=no archive_cmds= archive_expsym_cmds= compiler_needs_object=no enable_shared_with_static_runtimes=no export_dynamic_flag_spec= export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' hardcode_automatic=no hardcode_direct=no hardcode_direct_absolute=no hardcode_libdir_flag_spec= hardcode_libdir_separator= hardcode_minus_L=no hardcode_shlibpath_var=unsupported inherit_rpath=no link_all_deplibs=unknown module_cmds= module_expsym_cmds= old_archive_from_new_cmds= old_archive_from_expsyms_cmds= thread_safe_flag_spec= whole_archive_flag_spec= # include_expsyms should be a list of space-separated symbols to be *always* # included in the symbol list include_expsyms= # exclude_expsyms can be an extended regexp of symbols to exclude # it will be wrapped by ' (' and ')$', so one must not match beginning or # end of line. Example: 'a|bc|.*d.*' will exclude the symbols 'a' and 'bc', # as well as any symbol that contains 'd'. 
exclude_expsyms='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*' # Although _GLOBAL_OFFSET_TABLE_ is a valid symbol C name, most a.out # platforms (ab)use it in PIC code, but their linkers get confused if # the symbol is explicitly referenced. Since portable code cannot # rely on this symbol name, it's probably fine to never include it in # preloaded symbol tables. # Exclude shared library initialization/finalization symbols. extract_expsyms_cmds= case $host_os in cygwin* | mingw* | pw32* | cegcc*) # FIXME: the MSVC++ port hasn't been tested in a loooong time # When not using gcc, we currently assume that we are using # Microsoft Visual C++. if test yes != "$GCC"; then with_gnu_ld=no fi ;; interix*) # we just hope/assume this is gcc and not c89 (= MSVC++) with_gnu_ld=yes ;; openbsd* | bitrig*) with_gnu_ld=no ;; linux* | k*bsd*-gnu | gnu*) link_all_deplibs=no ;; esac ld_shlibs=yes # On some targets, GNU ld is compatible enough with the native linker # that we're better off using the native interface for both. lt_use_gnu_ld_interface=no if test yes = "$with_gnu_ld"; then case $host_os in aix*) # The AIX port of GNU ld has always aspired to compatibility # with the native linker. However, as the warning in the GNU ld # block says, versions before 2.19.5* couldn't really create working # shared libraries, regardless of the interface used. case `$LD -v 2>&1` in *\ \(GNU\ Binutils\)\ 2.19.5*) ;; *\ \(GNU\ Binutils\)\ 2.[2-9]*) ;; *\ \(GNU\ Binutils\)\ [3-9]*) ;; *) lt_use_gnu_ld_interface=yes ;; esac ;; *) lt_use_gnu_ld_interface=yes ;; esac fi if test yes = "$lt_use_gnu_ld_interface"; then # If archive_cmds runs LD, not CC, wlarc should be empty wlarc='$wl' # Set some defaults for GNU ld with shared library support. These # are reset later if shared libraries are not supported. Putting them # here allows them to be overridden if necessary. 
runpath_var=LD_RUN_PATH hardcode_libdir_flag_spec='$wl-rpath $wl$libdir' export_dynamic_flag_spec='$wl--export-dynamic' # ancient GNU ld didn't support --whole-archive et. al. if $LD --help 2>&1 | $GREP 'no-whole-archive' > /dev/null; then whole_archive_flag_spec=$wlarc'--whole-archive$convenience '$wlarc'--no-whole-archive' else whole_archive_flag_spec= fi supports_anon_versioning=no case `$LD -v | $SED -e 's/(^)\+)\s\+//' 2>&1` in *GNU\ gold*) supports_anon_versioning=yes ;; *\ [01].* | *\ 2.[0-9].* | *\ 2.10.*) ;; # catch versions < 2.11 *\ 2.11.93.0.2\ *) supports_anon_versioning=yes ;; # RH7.3 ... *\ 2.11.92.0.12\ *) supports_anon_versioning=yes ;; # Mandrake 8.2 ... *\ 2.11.*) ;; # other 2.11 versions *) supports_anon_versioning=yes ;; esac # See if GNU ld supports shared libraries. case $host_os in aix[3-9]*) # On AIX/PPC, the GNU linker is very broken if test ia64 != "$host_cpu"; then ld_shlibs=no cat <<_LT_EOF 1>&2 *** Warning: the GNU linker, at least up to release 2.19, is reported *** to be unable to reliably create shared libraries on AIX. *** Therefore, libtool is disabling shared libraries support. If you *** really care for shared libraries, you may want to install binutils *** 2.20 or above, or modify your PATH so that a non-GNU linker is found. *** You will then need to restart the configuration process. 
_LT_EOF fi ;; amigaos*) case $host_cpu in powerpc) # see comment about AmigaOS4 .so support archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' archive_expsym_cmds='' ;; m68k) archive_cmds='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' hardcode_libdir_flag_spec='-L$libdir' hardcode_minus_L=yes ;; esac ;; beos*) if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then allow_undefined_flag=unsupported # Joseph Beckenbach says some releases of gcc # support --undefined. This deserves some investigation. FIXME archive_cmds='$CC -nostart $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' else ld_shlibs=no fi ;; cygwin* | mingw* | pw32* | cegcc*) # _LT_TAGVAR(hardcode_libdir_flag_spec, ) is actually meaningless, # as there is no search path for DLLs. hardcode_libdir_flag_spec='-L$libdir' export_dynamic_flag_spec='$wl--export-all-symbols' allow_undefined_flag=unsupported always_export_symbols=no enable_shared_with_static_runtimes=yes export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols' exclude_expsyms='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname' if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' # If the export-symbols file already is a .def file, use it as # is; otherwise, prepend EXPORTS... 
archive_expsym_cmds='if test DEF = "`$SED -n -e '\''s/^[ ]*//'\'' -e '\''/^\(;.*\)*$/d'\'' -e '\''s/^\(EXPORTS\|LIBRARY\)\([ ].*\)*$/DEF/p'\'' -e q $export_symbols`" ; then cp $export_symbols $output_objdir/$soname.def; else echo EXPORTS > $output_objdir/$soname.def; cat $export_symbols >> $output_objdir/$soname.def; fi~ $CC -shared $output_objdir/$soname.def $libobjs $deplibs $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' else ld_shlibs=no fi ;; haiku*) archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' link_all_deplibs=yes ;; os2*) hardcode_libdir_flag_spec='-L$libdir' hardcode_minus_L=yes allow_undefined_flag=unsupported shrext_cmds=.dll archive_cmds='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ $ECHO EXPORTS >> $output_objdir/$libname.def~ emxexp $libobjs | $SED /"_DLL_InitTerm"/d >> $output_objdir/$libname.def~ $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ emximp -o $lib $output_objdir/$libname.def' archive_expsym_cmds='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ $ECHO EXPORTS >> $output_objdir/$libname.def~ prefix_cmds="$SED"~ if test EXPORTS = "`$SED 1q $export_symbols`"; then prefix_cmds="$prefix_cmds -e 1d"; fi~ prefix_cmds="$prefix_cmds -e \"s/^\(.*\)$/_\1/g\""~ cat $export_symbols | $prefix_cmds >> $output_objdir/$libname.def~ $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ emximp -o $lib $output_objdir/$libname.def' old_archive_From_new_cmds='emximp -o $output_objdir/${libname}_dll.a 
$output_objdir/$libname.def' enable_shared_with_static_runtimes=yes ;; interix[3-9]*) hardcode_direct=no hardcode_shlibpath_var=no hardcode_libdir_flag_spec='$wl-rpath,$libdir' export_dynamic_flag_spec='$wl-E' # Hack: On Interix 3.x, we cannot compile PIC because of a broken gcc. # Instead, shared libraries are loaded at an image base (0x10000000 by # default) and relocated if they conflict, which is a slow very memory # consuming and fragmenting process. To avoid this, we pick a random, # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link # time. Moving up from 0x10000000 also allows more sbrk(2) space. archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' archive_expsym_cmds='sed "s|^|_|" $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--retain-symbols-file,$output_objdir/$soname.expsym $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' ;; gnu* | linux* | tpf* | k*bsd*-gnu | kopensolaris*-gnu) tmp_diet=no if test linux-dietlibc = "$host_os"; then case $cc_basename in diet\ *) tmp_diet=yes;; # linux-dietlibc with static linking (!diet-dyn) esac fi if $LD --help 2>&1 | $EGREP ': supported targets:.* elf' > /dev/null \ && test no = "$tmp_diet" then tmp_addflag=' $pic_flag' tmp_sharedflag='-shared' case $cc_basename,$host_cpu in pgcc*) # Portland Group C compiler whole_archive_flag_spec='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' tmp_addflag=' $pic_flag' ;; pgf77* | pgf90* | pgf95* | pgfortran*) # Portland Group f77 and f90 compilers whole_archive_flag_spec='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all 
\"$new_convenience\"` $wl--no-whole-archive' tmp_addflag=' $pic_flag -Mnomain' ;; ecc*,ia64* | icc*,ia64*) # Intel C compiler on ia64 tmp_addflag=' -i_dynamic' ;; efc*,ia64* | ifort*,ia64*) # Intel Fortran compiler on ia64 tmp_addflag=' -i_dynamic -nofor_main' ;; ifc* | ifort*) # Intel Fortran compiler tmp_addflag=' -nofor_main' ;; lf95*) # Lahey Fortran 8.1 whole_archive_flag_spec= tmp_sharedflag='--shared' ;; nagfor*) # NAGFOR 5.3 tmp_sharedflag='-Wl,-shared' ;; xl[cC]* | bgxl[cC]* | mpixl[cC]*) # IBM XL C 8.0 on PPC (deal with xlf below) tmp_sharedflag='-qmkshrobj' tmp_addflag= ;; nvcc*) # Cuda Compiler Driver 2.2 whole_archive_flag_spec='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' compiler_needs_object=yes ;; esac case `$CC -V 2>&1 | sed 5q` in *Sun\ C*) # Sun C 5.9 whole_archive_flag_spec='$wl--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' compiler_needs_object=yes tmp_sharedflag='-G' ;; *Sun\ F*) # Sun Fortran 8.3 tmp_sharedflag='-G' ;; esac archive_cmds='$CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' if test yes = "$supports_anon_versioning"; then archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ echo "local: *; };" >> $output_objdir/$libname.ver~ $CC '"$tmp_sharedflag""$tmp_addflag"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-version-script $wl$output_objdir/$libname.ver -o $lib' fi case $cc_basename in tcc*) export_dynamic_flag_spec='-rdynamic' ;; xlf* | bgf* | bgxlf* | mpixlf*) # IBM XL Fortran 10.1 on PPC cannot create shared libs itself whole_archive_flag_spec='--whole-archive$convenience 
--no-whole-archive' hardcode_libdir_flag_spec='$wl-rpath $wl$libdir' archive_cmds='$LD -shared $libobjs $deplibs $linker_flags -soname $soname -o $lib' if test yes = "$supports_anon_versioning"; then archive_expsym_cmds='echo "{ global:" > $output_objdir/$libname.ver~ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ echo "local: *; };" >> $output_objdir/$libname.ver~ $LD -shared $libobjs $deplibs $linker_flags -soname $soname -version-script $output_objdir/$libname.ver -o $lib' fi ;; esac else ld_shlibs=no fi ;; netbsd* | netbsdelf*-gnu) if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then archive_cmds='$LD -Bshareable $libobjs $deplibs $linker_flags -o $lib' wlarc= else archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' fi ;; solaris*) if $LD -v 2>&1 | $GREP 'BFD 2\.8' > /dev/null; then ld_shlibs=no cat <<_LT_EOF 1>&2 *** Warning: The releases 2.8.* of the GNU linker cannot reliably *** create shared libraries on Solaris systems. Therefore, libtool *** is disabling shared libraries support. We urge you to upgrade GNU *** binutils to release 2.9.1 or newer. Another option is to modify *** your PATH or compiler configuration so that the native linker is *** used, and then restart. 
_LT_EOF elif $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' else ld_shlibs=no fi ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX*) case `$LD -v 2>&1` in *\ [01].* | *\ 2.[0-9].* | *\ 2.1[0-5].*) ld_shlibs=no cat <<_LT_EOF 1>&2 *** Warning: Releases of the GNU linker prior to 2.16.91.0.3 cannot *** reliably create shared libraries on SCO systems. Therefore, libtool *** is disabling shared libraries support. We urge you to upgrade GNU *** binutils to release 2.16.91.0.3 or newer. Another option is to modify *** your PATH or compiler configuration so that the native linker is *** used, and then restart. _LT_EOF ;; *) # For security reasons, it is highly recommended that you always # use absolute paths for naming shared libraries, and exclude the # DT_RUNPATH tag from executables and libraries. But doing so # requires that you compile everything twice, which is a pain. 
if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then hardcode_libdir_flag_spec='$wl-rpath $wl$libdir' archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' else ld_shlibs=no fi ;; esac ;; sunos4*) archive_cmds='$LD -assert pure-text -Bshareable -o $lib $libobjs $deplibs $linker_flags' wlarc= hardcode_direct=yes hardcode_shlibpath_var=no ;; *) if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' else ld_shlibs=no fi ;; esac if test no = "$ld_shlibs"; then runpath_var= hardcode_libdir_flag_spec= export_dynamic_flag_spec= whole_archive_flag_spec= fi else # PORTME fill in a description of your system's linker (not GNU ld) case $host_os in aix3*) allow_undefined_flag=unsupported always_export_symbols=yes archive_expsym_cmds='$LD -o $output_objdir/$soname $libobjs $deplibs $linker_flags -bE:$export_symbols -T512 -H512 -bM:SRE~$AR $AR_FLAGS $lib $output_objdir/$soname' # Note: this linker hardcodes the directories in LIBPATH if there # are no directories specified by -L. hardcode_minus_L=yes if test yes = "$GCC" && test -z "$lt_prog_compiler_static"; then # Neither direct hardcoding nor static linking is supported with a # broken collect2. hardcode_direct=unsupported fi ;; aix[4-9]*) if test ia64 = "$host_cpu"; then # On IA64, the linker does run time linking by default, so we don't # have to do anything special. aix_use_runtimelinking=no exp_sym_flag='-Bexport' no_entry_flag= else # If we're using GNU nm, then we don't want the "-C" option. # -C means demangle to GNU nm, but means don't demangle to AIX nm. 
# Without the "-l" option, or with the "-B" option, AIX nm treats # weak defined symbols like other global defined symbols, whereas # GNU nm marks them as "W". # While the 'weak' keyword is ignored in the Export File, we need # it in the Import File for the 'aix-soname' feature, so we have # to replace the "-B" option with "-P" for AIX nm. if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then export_symbols_cmds='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && (substr(\$ 3,1,1) != ".")) { if (\$ 2 == "W") { print \$ 3 " weak" } else { print \$ 3 } } }'\'' | sort -u > $export_symbols' else export_symbols_cmds='`func_echo_all $NM | $SED -e '\''s/B\([^B]*\)$/P\1/'\''` -PCpgl $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) && (substr(\$ 1,1,1) != ".")) { if ((\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) { print \$ 1 " weak" } else { print \$ 1 } } }'\'' | sort -u > $export_symbols' fi aix_use_runtimelinking=no # Test if we are trying to use run time linking or normal # AIX style linking. If -brtl is somewhere in LDFLAGS, we # have runtime linking enabled, and use it for executables. 
# For shared libraries, we enable/disable runtime linking # depending on the kind of the shared library created - # when "with_aix_soname,aix_use_runtimelinking" is: # "aix,no" lib.a(lib.so.V) shared, rtl:no, for executables # "aix,yes" lib.so shared, rtl:yes, for executables # lib.a static archive # "both,no" lib.so.V(shr.o) shared, rtl:yes # lib.a(lib.so.V) shared, rtl:no, for executables # "both,yes" lib.so.V(shr.o) shared, rtl:yes, for executables # lib.a(lib.so.V) shared, rtl:no # "svr4,*" lib.so.V(shr.o) shared, rtl:yes, for executables # lib.a static archive case $host_os in aix4.[23]|aix4.[23].*|aix[5-9]*) for ld_flag in $LDFLAGS; do if (test x-brtl = "x$ld_flag" || test x-Wl,-brtl = "x$ld_flag"); then aix_use_runtimelinking=yes break fi done if test svr4,no = "$with_aix_soname,$aix_use_runtimelinking"; then # With aix-soname=svr4, we create the lib.so.V shared archives only, # so we don't have lib.a shared libs to link our executables. # We have to force runtime linking in this case. aix_use_runtimelinking=yes LDFLAGS="$LDFLAGS -Wl,-brtl" fi ;; esac exp_sym_flag='-bexport' no_entry_flag='-bnoentry' fi # When large executables or shared objects are built, AIX ld can # have problems creating the table of contents. If linking a library # or program results in "error TOC overflow" add -mminimal-toc to # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS. archive_cmds='' hardcode_direct=yes hardcode_direct_absolute=yes hardcode_libdir_separator=':' link_all_deplibs=yes file_list_spec='$wl-f,' case $with_aix_soname,$aix_use_runtimelinking in aix,*) ;; # traditional, no import file svr4,* | *,yes) # use import file # The Import File defines what to hardcode. 
hardcode_direct=no hardcode_direct_absolute=no ;; esac if test yes = "$GCC"; then case $host_os in aix4.[012]|aix4.[012].*) # We only want to do this on AIX 4.2 and lower, the check # below for broken collect2 doesn't work under 4.3+ collect2name=`$CC -print-prog-name=collect2` if test -f "$collect2name" && strings "$collect2name" | $GREP resolve_lib_name >/dev/null then # We have reworked collect2 : else # We have old collect2 hardcode_direct=unsupported # It fails to find uninstalled libraries when the uninstalled # path is not listed in the libpath. Setting hardcode_minus_L # to unsupported forces relinking hardcode_minus_L=yes hardcode_libdir_flag_spec='-L$libdir' hardcode_libdir_separator= fi ;; esac shared_flag='-shared' if test yes = "$aix_use_runtimelinking"; then shared_flag="$shared_flag "'$wl-G' fi # Need to ensure runtime linking is disabled for the traditional # shared library, or the linker may eventually find shared libraries # /with/ Import File - we do not want to mix them. shared_flag_aix='-shared' shared_flag_svr4='-shared $wl-G' else # not using gcc if test ia64 = "$host_cpu"; then # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release # chokes on -Wl,-G. The following line is correct: shared_flag='-G' else if test yes = "$aix_use_runtimelinking"; then shared_flag='$wl-G' else shared_flag='$wl-bM:SRE' fi shared_flag_aix='$wl-bM:SRE' shared_flag_svr4='$wl-G' fi fi export_dynamic_flag_spec='$wl-bexpall' # It seems that -bexpall does not export symbols beginning with # underscore (_), so it is better to generate a list of symbols to export. always_export_symbols=yes if test aix,yes = "$with_aix_soname,$aix_use_runtimelinking"; then # Warning - without using the other runtime loading flags (-brtl), # -berok will link without error, but may produce a broken library. allow_undefined_flag='-berok' # Determine the default libpath from the value encoded in an # empty executable. 
if test set = "${lt_cv_aix_libpath+set}"; then aix_libpath=$lt_cv_aix_libpath else if ${lt_cv_aix_libpath_+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : lt_aix_libpath_sed=' /Import File Strings/,/^$/ { /^0/ { s/^0 *\([^ ]*\) *$/\1/ p } }' lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` # Check for a 64-bit object if we didn't find anything. if test -z "$lt_cv_aix_libpath_"; then lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` fi fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext if test -z "$lt_cv_aix_libpath_"; then lt_cv_aix_libpath_=/usr/lib:/lib fi fi aix_libpath=$lt_cv_aix_libpath_ fi hardcode_libdir_flag_spec='$wl-blibpath:$libdir:'"$aix_libpath" archive_expsym_cmds='$CC -o $output_objdir/$soname $libobjs $deplibs $wl'$no_entry_flag' $compiler_flags `if test -n "$allow_undefined_flag"; then func_echo_all "$wl$allow_undefined_flag"; else :; fi` $wl'$exp_sym_flag:\$export_symbols' '$shared_flag else if test ia64 = "$host_cpu"; then hardcode_libdir_flag_spec='$wl-R $libdir:/usr/lib:/lib' allow_undefined_flag="-z nodefs" archive_expsym_cmds="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\$wl$no_entry_flag"' $compiler_flags $wl$allow_undefined_flag '"\$wl$exp_sym_flag:\$export_symbols" else # Determine the default libpath from the value encoded in an # empty executable. if test set = "${lt_cv_aix_libpath+set}"; then aix_libpath=$lt_cv_aix_libpath else if ${lt_cv_aix_libpath_+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ int main () { ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : lt_aix_libpath_sed=' /Import File Strings/,/^$/ { /^0/ { s/^0 *\([^ ]*\) *$/\1/ p } }' lt_cv_aix_libpath_=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` # Check for a 64-bit object if we didn't find anything. if test -z "$lt_cv_aix_libpath_"; then lt_cv_aix_libpath_=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` fi fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext if test -z "$lt_cv_aix_libpath_"; then lt_cv_aix_libpath_=/usr/lib:/lib fi fi aix_libpath=$lt_cv_aix_libpath_ fi hardcode_libdir_flag_spec='$wl-blibpath:$libdir:'"$aix_libpath" # Warning - without using the other run time loading flags, # -berok will link without error, but may produce a broken library. no_undefined_flag=' $wl-bernotok' allow_undefined_flag=' $wl-berok' if test yes = "$with_gnu_ld"; then # We only use this code for GNU lds that support --whole-archive. whole_archive_flag_spec='$wl--whole-archive$convenience $wl--no-whole-archive' else # Exported symbols can be pulled into shared objects from archives whole_archive_flag_spec='$convenience' fi archive_cmds_need_lc=yes archive_expsym_cmds='$RM -r $output_objdir/$realname.d~$MKDIR $output_objdir/$realname.d' # -brtl affects multiple linker settings, -berok does not and is overridden later compiler_flags_filtered='`func_echo_all "$compiler_flags " | $SED -e "s%-brtl\\([, ]\\)%-berok\\1%g"`' if test svr4 != "$with_aix_soname"; then # This is similar to how AIX traditionally builds its shared libraries. 
archive_expsym_cmds="$archive_expsym_cmds"'~$CC '$shared_flag_aix' -o $output_objdir/$realname.d/$soname $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$realname.d/$soname' fi if test aix != "$with_aix_soname"; then archive_expsym_cmds="$archive_expsym_cmds"'~$CC '$shared_flag_svr4' -o $output_objdir/$realname.d/$shared_archive_member_spec.o $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$STRIP -e $output_objdir/$realname.d/$shared_archive_member_spec.o~( func_echo_all "#! $soname($shared_archive_member_spec.o)"; if test shr_64 = "$shared_archive_member_spec"; then func_echo_all "# 64"; else func_echo_all "# 32"; fi; cat $export_symbols ) > $output_objdir/$realname.d/$shared_archive_member_spec.imp~$AR $AR_FLAGS $output_objdir/$soname $output_objdir/$realname.d/$shared_archive_member_spec.o $output_objdir/$realname.d/$shared_archive_member_spec.imp' else # used by -dlpreopen to get the symbols archive_expsym_cmds="$archive_expsym_cmds"'~$MV $output_objdir/$realname.d/$soname $output_objdir' fi archive_expsym_cmds="$archive_expsym_cmds"'~$RM -r $output_objdir/$realname.d' fi fi ;; amigaos*) case $host_cpu in powerpc) # see comment about AmigaOS4 .so support archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' archive_expsym_cmds='' ;; m68k) archive_cmds='$RM $output_objdir/a2ixlibrary.data~$ECHO "#define NAME $libname" > $output_objdir/a2ixlibrary.data~$ECHO "#define LIBRARY_ID 1" >> $output_objdir/a2ixlibrary.data~$ECHO "#define VERSION $major" >> $output_objdir/a2ixlibrary.data~$ECHO "#define REVISION $revision" >> $output_objdir/a2ixlibrary.data~$AR $AR_FLAGS $lib $libobjs~$RANLIB $lib~(cd $output_objdir && a2ixlibrary -32)' hardcode_libdir_flag_spec='-L$libdir' hardcode_minus_L=yes ;; esac ;; bsdi[45]*) export_dynamic_flag_spec=-rdynamic ;; cygwin* | mingw* 
| pw32* | cegcc*) # When not using gcc, we currently assume that we are using # Microsoft Visual C++. # hardcode_libdir_flag_spec is actually meaningless, as there is # no search path for DLLs. case $cc_basename in cl*) # Native MSVC hardcode_libdir_flag_spec=' ' allow_undefined_flag=unsupported always_export_symbols=yes file_list_spec='@' # Tell ltmain to make .lib files, not .a files. libext=lib # Tell ltmain to make .dll files, not .so files. shrext_cmds=.dll # FIXME: Setting linknames here is a bad hack. archive_cmds='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~linknames=' archive_expsym_cmds='if test DEF = "`$SED -n -e '\''s/^[ ]*//'\'' -e '\''/^\(;.*\)*$/d'\'' -e '\''s/^\(EXPORTS\|LIBRARY\)\([ ].*\)*$/DEF/p'\'' -e q $export_symbols`" ; then cp "$export_symbols" "$output_objdir/$soname.def"; echo "$tool_output_objdir$soname.def" > "$output_objdir/$soname.exp"; else $SED -e '\''s/^/-link -EXPORT:/'\'' < $export_symbols > $output_objdir/$soname.exp; fi~ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ linknames=' # The linker will not automatically build a static lib if we build a DLL. 
# _LT_TAGVAR(old_archive_from_new_cmds, )='true' enable_shared_with_static_runtimes=yes exclude_expsyms='_NULL_IMPORT_DESCRIPTOR|_IMPORT_DESCRIPTOR_.*' export_symbols_cmds='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1,DATA/'\'' | $SED -e '\''/^[AITW][ ]/s/.*[ ]//'\'' | sort | uniq > $export_symbols' # Don't use ranlib old_postinstall_cmds='chmod 644 $oldlib' postlink_cmds='lt_outputfile="@OUTPUT@"~ lt_tool_outputfile="@TOOL_OUTPUT@"~ case $lt_outputfile in *.exe|*.EXE) ;; *) lt_outputfile=$lt_outputfile.exe lt_tool_outputfile=$lt_tool_outputfile.exe ;; esac~ if test : != "$MANIFEST_TOOL" && test -f "$lt_outputfile.manifest"; then $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; $RM "$lt_outputfile.manifest"; fi' ;; *) # Assume MSVC wrapper hardcode_libdir_flag_spec=' ' allow_undefined_flag=unsupported # Tell ltmain to make .lib files, not .a files. libext=lib # Tell ltmain to make .dll files, not .so files. shrext_cmds=.dll # FIXME: Setting linknames here is a bad hack. archive_cmds='$CC -o $lib $libobjs $compiler_flags `func_echo_all "$deplibs" | $SED '\''s/ -lc$//'\''` -link -dll~linknames=' # The linker will automatically build a .lib file if we build a DLL. old_archive_from_new_cmds='true' # FIXME: Should let the user specify the lib program. 
old_archive_cmds='lib -OUT:$oldlib$oldobjs$old_deplibs' enable_shared_with_static_runtimes=yes ;; esac ;; darwin* | rhapsody*) archive_cmds_need_lc=no hardcode_direct=no hardcode_automatic=yes hardcode_shlibpath_var=unsupported if test yes = "$lt_cv_ld_force_load"; then whole_archive_flag_spec='`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience $wl-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`' else whole_archive_flag_spec='' fi link_all_deplibs=yes allow_undefined_flag=$_lt_dar_allow_undefined case $cc_basename in ifort*|nagfor*) _lt_dar_can_shared=yes ;; *) _lt_dar_can_shared=$GCC ;; esac if test yes = "$_lt_dar_can_shared"; then output_verbose_link_cmd=func_echo_all archive_cmds="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod$_lt_dsymutil" module_cmds="\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags$_lt_dsymutil" archive_expsym_cmds="sed 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod$_lt_dar_export_syms$_lt_dsymutil" module_expsym_cmds="sed -e 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags$_lt_dar_export_syms$_lt_dsymutil" else ld_shlibs=no fi ;; dgux*) archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_libdir_flag_spec='-L$libdir' hardcode_shlibpath_var=no ;; # FreeBSD 2.2.[012] allows us to include c++rt0.o to get C++ constructor # support. Future versions do this automatically, but an explicit c++rt0.o # does not break anything, and helps significantly (at the cost of a little # extra space). 
freebsd2.2*) archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags /usr/lib/c++rt0.o' hardcode_libdir_flag_spec='-R$libdir' hardcode_direct=yes hardcode_shlibpath_var=no ;; # Unfortunately, older versions of FreeBSD 2 do not have this feature. freebsd2.*) archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' hardcode_direct=yes hardcode_minus_L=yes hardcode_shlibpath_var=no ;; # FreeBSD 3 and greater uses gcc -shared to do shared libraries. freebsd* | dragonfly*) archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' hardcode_libdir_flag_spec='-R$libdir' hardcode_direct=yes hardcode_shlibpath_var=no ;; hpux9*) if test yes = "$GCC"; then archive_cmds='$RM $output_objdir/$soname~$CC -shared $pic_flag $wl+b $wl$install_libdir -o $output_objdir/$soname $libobjs $deplibs $compiler_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib' else archive_cmds='$RM $output_objdir/$soname~$LD -b +b $install_libdir -o $output_objdir/$soname $libobjs $deplibs $linker_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib' fi hardcode_libdir_flag_spec='$wl+b $wl$libdir' hardcode_libdir_separator=: hardcode_direct=yes # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. hardcode_minus_L=yes export_dynamic_flag_spec='$wl-E' ;; hpux10*) if test yes,no = "$GCC,$with_gnu_ld"; then archive_cmds='$CC -shared $pic_flag $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags' else archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' fi if test no = "$with_gnu_ld"; then hardcode_libdir_flag_spec='$wl+b $wl$libdir' hardcode_libdir_separator=: hardcode_direct=yes hardcode_direct_absolute=yes export_dynamic_flag_spec='$wl-E' # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. 
hardcode_minus_L=yes fi ;; hpux11*) if test yes,no = "$GCC,$with_gnu_ld"; then case $host_cpu in hppa*64*) archive_cmds='$CC -shared $wl+h $wl$soname -o $lib $libobjs $deplibs $compiler_flags' ;; ia64*) archive_cmds='$CC -shared $pic_flag $wl+h $wl$soname $wl+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' ;; *) archive_cmds='$CC -shared $pic_flag $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags' ;; esac else case $host_cpu in hppa*64*) archive_cmds='$CC -b $wl+h $wl$soname -o $lib $libobjs $deplibs $compiler_flags' ;; ia64*) archive_cmds='$CC -b $wl+h $wl$soname $wl+nodefaultrpath -o $lib $libobjs $deplibs $compiler_flags' ;; *) # Older versions of the 11.00 compiler do not understand -b yet # (HP92453-01 A.11.01.20 doesn't, HP92453-01 B.11.X.35175-35176.GP does) { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $CC understands -b" >&5 $as_echo_n "checking if $CC understands -b... " >&6; } if ${lt_cv_prog_compiler__b+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler__b=no save_LDFLAGS=$LDFLAGS LDFLAGS="$LDFLAGS -b" echo "$lt_simple_link_test_code" > conftest.$ac_ext if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then # The linker can only warn and ignore the option if not recognized # So say no if there are warnings if test -s conftest.err; then # Append any errors to the config.log. 
cat conftest.err 1>&5 $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 if diff conftest.exp conftest.er2 >/dev/null; then lt_cv_prog_compiler__b=yes fi else lt_cv_prog_compiler__b=yes fi fi $RM -r conftest* LDFLAGS=$save_LDFLAGS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler__b" >&5 $as_echo "$lt_cv_prog_compiler__b" >&6; } if test yes = "$lt_cv_prog_compiler__b"; then archive_cmds='$CC -b $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $libobjs $deplibs $compiler_flags' else archive_cmds='$LD -b +h $soname +b $install_libdir -o $lib $libobjs $deplibs $linker_flags' fi ;; esac fi if test no = "$with_gnu_ld"; then hardcode_libdir_flag_spec='$wl+b $wl$libdir' hardcode_libdir_separator=: case $host_cpu in hppa*64*|ia64*) hardcode_direct=no hardcode_shlibpath_var=no ;; *) hardcode_direct=yes hardcode_direct_absolute=yes export_dynamic_flag_spec='$wl-E' # hardcode_minus_L: Not really in the search PATH, # but as the default location of the library. hardcode_minus_L=yes ;; esac fi ;; irix5* | irix6* | nonstopux*) if test yes = "$GCC"; then archive_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' # Try to use the -exported_symbol ld option, if it does not # work, assume that -exports_file does not work either and # implicitly export all symbols. # This should be the same for all languages, so no per-tag cache variable. { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $host_os linker accepts -exported_symbol" >&5 $as_echo_n "checking whether the $host_os linker accepts -exported_symbol... 
" >&6; } if ${lt_cv_irix_exported_symbol+:} false; then : $as_echo_n "(cached) " >&6 else save_LDFLAGS=$LDFLAGS LDFLAGS="$LDFLAGS -shared $wl-exported_symbol ${wl}foo $wl-update_registry $wl/dev/null" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int foo (void) { return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : lt_cv_irix_exported_symbol=yes else lt_cv_irix_exported_symbol=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LDFLAGS=$save_LDFLAGS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_irix_exported_symbol" >&5 $as_echo "$lt_cv_irix_exported_symbol" >&6; } if test yes = "$lt_cv_irix_exported_symbol"; then archive_expsym_cmds='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations $wl-exports_file $wl$export_symbols -o $lib' fi link_all_deplibs=no else archive_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' archive_expsym_cmds='$CC -shared $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -exports_file $export_symbols -o $lib' fi archive_cmds_need_lc='no' hardcode_libdir_flag_spec='$wl-rpath $wl$libdir' hardcode_libdir_separator=: inherit_rpath=yes link_all_deplibs=yes ;; linux*) case $cc_basename in tcc*) # Fabrice Bellard et al's Tiny C Compiler ld_shlibs=yes archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' ;; esac ;; netbsd* | netbsdelf*-gnu) if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then archive_cmds='$LD -Bshareable -o $lib $libobjs $deplibs $linker_flags' # a.out else archive_cmds='$LD -shared -o $lib $libobjs $deplibs $linker_flags' # ELF fi 
hardcode_libdir_flag_spec='-R$libdir' hardcode_direct=yes hardcode_shlibpath_var=no ;; newsos6) archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_direct=yes hardcode_libdir_flag_spec='$wl-rpath $wl$libdir' hardcode_libdir_separator=: hardcode_shlibpath_var=no ;; *nto* | *qnx*) ;; openbsd* | bitrig*) if test -f /usr/libexec/ld.so; then hardcode_direct=yes hardcode_shlibpath_var=no hardcode_direct_absolute=yes if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags $wl-retain-symbols-file,$export_symbols' hardcode_libdir_flag_spec='$wl-rpath,$libdir' export_dynamic_flag_spec='$wl-E' else archive_cmds='$CC -shared $pic_flag -o $lib $libobjs $deplibs $compiler_flags' hardcode_libdir_flag_spec='$wl-rpath,$libdir' fi else ld_shlibs=no fi ;; os2*) hardcode_libdir_flag_spec='-L$libdir' hardcode_minus_L=yes allow_undefined_flag=unsupported shrext_cmds=.dll archive_cmds='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ $ECHO EXPORTS >> $output_objdir/$libname.def~ emxexp $libobjs | $SED /"_DLL_InitTerm"/d >> $output_objdir/$libname.def~ $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ emximp -o $lib $output_objdir/$libname.def' archive_expsym_cmds='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ $ECHO EXPORTS >> $output_objdir/$libname.def~ prefix_cmds="$SED"~ if test EXPORTS = "`$SED 1q $export_symbols`"; then prefix_cmds="$prefix_cmds -e 1d"; fi~ prefix_cmds="$prefix_cmds 
-e \"s/^\(.*\)$/_\1/g\""~ cat $export_symbols | $prefix_cmds >> $output_objdir/$libname.def~ $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ emximp -o $lib $output_objdir/$libname.def' old_archive_From_new_cmds='emximp -o $output_objdir/${libname}_dll.a $output_objdir/$libname.def' enable_shared_with_static_runtimes=yes ;; osf3*) if test yes = "$GCC"; then allow_undefined_flag=' $wl-expect_unresolved $wl\*' archive_cmds='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' else allow_undefined_flag=' -expect_unresolved \*' archive_cmds='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' fi archive_cmds_need_lc='no' hardcode_libdir_flag_spec='$wl-rpath $wl$libdir' hardcode_libdir_separator=: ;; osf4* | osf5*) # as osf3* with the addition of -msym flag if test yes = "$GCC"; then allow_undefined_flag=' $wl-expect_unresolved $wl\*' archive_cmds='$CC -shared$allow_undefined_flag $pic_flag $libobjs $deplibs $compiler_flags $wl-msym $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' hardcode_libdir_flag_spec='$wl-rpath $wl$libdir' else allow_undefined_flag=' -expect_unresolved \*' archive_cmds='$CC -shared$allow_undefined_flag $libobjs $deplibs $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' archive_expsym_cmds='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done; printf "%s\\n" "-hidden">> $lib.exp~ $CC -shared$allow_undefined_flag $wl-input 
$wl$lib.exp $compiler_flags $libobjs $deplibs -soname $soname `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib~$RM $lib.exp' # Both c and cxx compiler support -rpath directly hardcode_libdir_flag_spec='-rpath $libdir' fi archive_cmds_need_lc='no' hardcode_libdir_separator=: ;; solaris*) no_undefined_flag=' -z defs' if test yes = "$GCC"; then wlarc='$wl' archive_cmds='$CC -shared $pic_flag $wl-z ${wl}text $wl-h $wl$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -shared $pic_flag $wl-z ${wl}text $wl-M $wl$lib.exp $wl-h $wl$soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' else case `$CC -V 2>&1` in *"Compilers 5.0"*) wlarc='' archive_cmds='$LD -G$allow_undefined_flag -h $soname -o $lib $libobjs $deplibs $linker_flags' archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $LD -G$allow_undefined_flag -M $lib.exp -h $soname -o $lib $libobjs $deplibs $linker_flags~$RM $lib.exp' ;; *) wlarc='$wl' archive_cmds='$CC -G$allow_undefined_flag -h $soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -G$allow_undefined_flag -M $lib.exp -h $soname -o $lib $libobjs $deplibs $compiler_flags~$RM $lib.exp' ;; esac fi hardcode_libdir_flag_spec='-R$libdir' hardcode_shlibpath_var=no case $host_os in solaris2.[0-5] | solaris2.[0-5].*) ;; *) # The compiler driver will combine and reorder linker options, # but understands '-z linker_flag'. GCC discards it without '$wl', # but is careful enough not to reorder. # Supported since Solaris 2.6 (maybe 2.5.1?) 
if test yes = "$GCC"; then whole_archive_flag_spec='$wl-z ${wl}allextract$convenience $wl-z ${wl}defaultextract' else whole_archive_flag_spec='-z allextract$convenience -z defaultextract' fi ;; esac link_all_deplibs=yes ;; sunos4*) if test sequent = "$host_vendor"; then # Use $CC to link under sequent, because it throws in some extra .o # files that make .init and .fini sections work. archive_cmds='$CC -G $wl-h $soname -o $lib $libobjs $deplibs $compiler_flags' else archive_cmds='$LD -assert pure-text -Bstatic -o $lib $libobjs $deplibs $linker_flags' fi hardcode_libdir_flag_spec='-L$libdir' hardcode_direct=yes hardcode_minus_L=yes hardcode_shlibpath_var=no ;; sysv4) case $host_vendor in sni) archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_direct=yes # is this really true??? ;; siemens) ## LD is ld it makes a PLAMLIB ## CC just makes a GrossModule. archive_cmds='$LD -G -o $lib $libobjs $deplibs $linker_flags' reload_cmds='$CC -r -o $output$reload_objs' hardcode_direct=no ;; motorola) archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_direct=no #Motorola manual says yes, but my tests say they lie ;; esac runpath_var='LD_RUN_PATH' hardcode_shlibpath_var=no ;; sysv4.3*) archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_shlibpath_var=no export_dynamic_flag_spec='-Bexport' ;; sysv4*MP*) if test -d /usr/nec; then archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_shlibpath_var=no runpath_var=LD_RUN_PATH hardcode_runpath_var=yes ld_shlibs=yes fi ;; sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7* | sco3.2v5.0.[024]*) no_undefined_flag='$wl-z,text' archive_cmds_need_lc=no hardcode_shlibpath_var=no runpath_var='LD_RUN_PATH' if test yes = "$GCC"; then archive_cmds='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs 
$deplibs $compiler_flags' else archive_cmds='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' fi ;; sysv5* | sco3.2v5* | sco5v6*) # Note: We CANNOT use -z defs as we might desire, because we do not # link with -lc, and that would cause any symbols used from libc to # always be unresolved, which means just about no library would # ever link correctly. If we're not using GNU ld we use -z text # though, which does catch some bad symbols but isn't as heavy-handed # as -z defs. no_undefined_flag='$wl-z,text' allow_undefined_flag='$wl-z,nodefs' archive_cmds_need_lc=no hardcode_shlibpath_var=no hardcode_libdir_flag_spec='$wl-R,$libdir' hardcode_libdir_separator=':' link_all_deplibs=yes export_dynamic_flag_spec='$wl-Bexport' runpath_var='LD_RUN_PATH' if test yes = "$GCC"; then archive_cmds='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' else archive_cmds='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' fi ;; uts4*) archive_cmds='$LD -G -h $soname -o $lib $libobjs $deplibs $linker_flags' hardcode_libdir_flag_spec='-L$libdir' hardcode_shlibpath_var=no ;; *) ld_shlibs=no ;; esac if test sni = "$host_vendor"; then case $host in sysv4 | sysv4.2uw2* | sysv4.3* | sysv5*) export_dynamic_flag_spec='$wl-Blargedynsym' ;; esac fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs" >&5 $as_echo "$ld_shlibs" >&6; } test no = "$ld_shlibs" && can_build_shared=no with_gnu_ld=$with_gnu_ld # # Do we need to explicitly link libc? 
# case "x$archive_cmds_need_lc" in x|xyes) # Assume -lc should be added archive_cmds_need_lc=yes if test yes,yes = "$GCC,$enable_shared"; then case $archive_cmds in *'~'*) # FIXME: we may have to deal with multi-command sequences. ;; '$CC '*) # Test whether the compiler implicitly links with -lc since on some # systems, -lgcc has to come before -lc. If gcc already passes -lc # to ld, don't add -lc before -lgcc. { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether -lc should be explicitly linked in" >&5 $as_echo_n "checking whether -lc should be explicitly linked in... " >&6; } if ${lt_cv_archive_cmds_need_lc+:} false; then : $as_echo_n "(cached) " >&6 else $RM conftest* echo "$lt_simple_compile_test_code" > conftest.$ac_ext if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } 2>conftest.err; then soname=conftest lib=conftest libobjs=conftest.$ac_objext deplibs= wl=$lt_prog_compiler_wl pic_flag=$lt_prog_compiler_pic compiler_flags=-v linker_flags=-v verstring= output_objdir=. libname=conftest lt_save_allow_undefined_flag=$allow_undefined_flag allow_undefined_flag= if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$archive_cmds 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1\""; } >&5 (eval $archive_cmds 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 test $ac_status = 0; } then lt_cv_archive_cmds_need_lc=no else lt_cv_archive_cmds_need_lc=yes fi allow_undefined_flag=$lt_save_allow_undefined_flag else cat conftest.err 1>&5 fi $RM conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_archive_cmds_need_lc" >&5 $as_echo "$lt_cv_archive_cmds_need_lc" >&6; } archive_cmds_need_lc=$lt_cv_archive_cmds_need_lc ;; esac fi ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: checking dynamic linker characteristics" >&5 $as_echo_n "checking dynamic linker characteristics... " >&6; } if test yes = "$GCC"; then case $host_os in darwin*) lt_awk_arg='/^libraries:/,/LR/' ;; *) lt_awk_arg='/^libraries:/' ;; esac case $host_os in mingw* | cegcc*) lt_sed_strip_eq='s|=\([A-Za-z]:\)|\1|g' ;; *) lt_sed_strip_eq='s|=/|/|g' ;; esac lt_search_path_spec=`$CC -print-search-dirs | awk $lt_awk_arg | $SED -e "s/^libraries://" -e $lt_sed_strip_eq` case $lt_search_path_spec in *\;*) # if the path contains ";" then we assume it to be the separator # otherwise default to the standard path separator (i.e. ":") - it is # assumed that no part of a normal pathname contains ";" but that should # okay in the real world where ";" in dirpaths is itself problematic. lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED 's/;/ /g'` ;; *) lt_search_path_spec=`$ECHO "$lt_search_path_spec" | $SED "s/$PATH_SEPARATOR/ /g"` ;; esac # Ok, now we have the path, separated by spaces, we can step through it # and add multilib dir if necessary... lt_tmp_lt_search_path_spec= lt_multi_os_dir=/`$CC $CPPFLAGS $CFLAGS $LDFLAGS -print-multi-os-directory 2>/dev/null` # ...but if some path component already ends with the multilib dir we assume # that all is fine and trust -print-search-dirs as is (GCC 4.2? or newer). 
case "$lt_multi_os_dir; $lt_search_path_spec " in "/; "* | "/.; "* | "/./; "* | *"$lt_multi_os_dir "* | *"$lt_multi_os_dir/ "*) lt_multi_os_dir= ;; esac for lt_sys_path in $lt_search_path_spec; do if test -d "$lt_sys_path$lt_multi_os_dir"; then lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path$lt_multi_os_dir" elif test -n "$lt_multi_os_dir"; then test -d "$lt_sys_path" && \ lt_tmp_lt_search_path_spec="$lt_tmp_lt_search_path_spec $lt_sys_path" fi done lt_search_path_spec=`$ECHO "$lt_tmp_lt_search_path_spec" | awk ' BEGIN {RS = " "; FS = "/|\n";} { lt_foo = ""; lt_count = 0; for (lt_i = NF; lt_i > 0; lt_i--) { if ($lt_i != "" && $lt_i != ".") { if ($lt_i == "..") { lt_count++; } else { if (lt_count == 0) { lt_foo = "/" $lt_i lt_foo; } else { lt_count--; } } } } if (lt_foo != "") { lt_freq[lt_foo]++; } if (lt_freq[lt_foo] == 1) { print lt_foo; } }'` # AWK program above erroneously prepends '/' to C:/dos/paths # for these hosts. case $host_os in mingw* | cegcc*) lt_search_path_spec=`$ECHO "$lt_search_path_spec" |\ $SED 's|/\([A-Za-z]:\)|\1|g'` ;; esac sys_lib_search_path_spec=`$ECHO "$lt_search_path_spec" | $lt_NL2SP` else sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib" fi library_names_spec= libname_spec='lib$name' soname_spec= shrext_cmds=.so postinstall_cmds= postuninstall_cmds= finish_cmds= finish_eval= shlibpath_var= shlibpath_overrides_runpath=unknown version_type=none dynamic_linker="$host_os ld.so" sys_lib_dlsearch_path_spec="/lib /usr/lib" need_lib_prefix=unknown hardcode_into_libs=no # when you set need_version to no, make sure it does not cause -set_version # flags to be left without arguments need_version=unknown case $host_os in aix3*) version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='$libname$release$shared_ext$versuffix $libname.a' shlibpath_var=LIBPATH # AIX 3 has no versioning support, so we append a major version to the name. 
soname_spec='$libname$release$shared_ext$major' ;; aix[4-9]*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no hardcode_into_libs=yes if test ia64 = "$host_cpu"; then # AIX 5 supports IA64 library_names_spec='$libname$release$shared_ext$major $libname$release$shared_ext$versuffix $libname$shared_ext' shlibpath_var=LD_LIBRARY_PATH else # With GCC up to 2.95.x, collect2 would create an import file # for dependence libraries. The import file would start with # the line '#! .'. This would cause the generated library to # depend on '.', always an invalid library. This was fixed in # development snapshots of GCC prior to 3.0. case $host_os in aix4 | aix4.[01] | aix4.[01].*) if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)' echo ' yes ' echo '#endif'; } | $CC -E - | $GREP yes > /dev/null; then : else can_build_shared=no fi ;; esac # Using Import Files as archive members, it is possible to support # filename-based versioning of shared library archives on AIX. While # this would work for both with and without runtime linking, it will # prevent static linking of such archives. So we do filename-based # shared library versioning with .so extension only, which is used # when both runtime linking and shared linking is enabled. # Unfortunately, runtime linking may impact performance, so we do # not want this to be the default eventually. Also, we use the # versioned .so libs for executables only if there is the -brtl # linker flag in LDFLAGS as well, or --with-aix-soname=svr4 only. 
# To allow for filename-based versioning support, we need to create # libNAME.so.V as an archive file, containing: # *) an Import File, referring to the versioned filename of the # archive as well as the shared archive member, telling the # bitwidth (32 or 64) of that shared object, and providing the # list of exported symbols of that shared object, eventually # decorated with the 'weak' keyword # *) the shared object with the F_LOADONLY flag set, to really avoid # it being seen by the linker. # At run time we better use the real file rather than another symlink, # but for link time we create the symlink libNAME.so -> libNAME.so.V case $with_aix_soname,$aix_use_runtimelinking in # AIX (on Power*) has no versioning support, so currently we cannot hardcode correct # soname into executable. Probably we can add versioning support to # collect2, so additional links can be useful in future. aix,yes) # traditional libtool dynamic_linker='AIX unversionable lib.so' # If using run time linking (on AIX 4.2 or later) use lib.so # instead of lib.a to let people know that these are not # typical AIX shared libraries. library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' ;; aix,no) # traditional AIX only dynamic_linker='AIX lib.a(lib.so.V)' # We preserve .a as extension for shared libraries through AIX4.2 # and later when we are not doing run time linking. library_names_spec='$libname$release.a $libname.a' soname_spec='$libname$release$shared_ext$major' ;; svr4,*) # full svr4 only dynamic_linker="AIX lib.so.V($shared_archive_member_spec.o)" library_names_spec='$libname$release$shared_ext$major $libname$shared_ext' # We do not specify a path in Import Files, so LIBPATH fires. 
shlibpath_overrides_runpath=yes ;; *,yes) # both, prefer svr4 dynamic_linker="AIX lib.so.V($shared_archive_member_spec.o), lib.a(lib.so.V)" library_names_spec='$libname$release$shared_ext$major $libname$shared_ext' # unpreferred sharedlib libNAME.a needs extra handling postinstall_cmds='test -n "$linkname" || linkname="$realname"~func_stripname "" ".so" "$linkname"~$install_shared_prog "$dir/$func_stripname_result.$libext" "$destdir/$func_stripname_result.$libext"~test -z "$tstripme" || test -z "$striplib" || $striplib "$destdir/$func_stripname_result.$libext"' postuninstall_cmds='for n in $library_names $old_library; do :; done~func_stripname "" ".so" "$n"~test "$func_stripname_result" = "$n" || func_append rmfiles " $odir/$func_stripname_result.$libext"' # We do not specify a path in Import Files, so LIBPATH fires. shlibpath_overrides_runpath=yes ;; *,no) # both, prefer aix dynamic_linker="AIX lib.a(lib.so.V), lib.so.V($shared_archive_member_spec.o)" library_names_spec='$libname$release.a $libname.a' soname_spec='$libname$release$shared_ext$major' # unpreferred sharedlib libNAME.so.V and symlink libNAME.so need extra handling postinstall_cmds='test -z "$dlname" || $install_shared_prog $dir/$dlname $destdir/$dlname~test -z "$tstripme" || test -z "$striplib" || $striplib $destdir/$dlname~test -n "$linkname" || linkname=$realname~func_stripname "" ".a" "$linkname"~(cd "$destdir" && $LN_S -f $dlname $func_stripname_result.so)' postuninstall_cmds='test -z "$dlname" || func_append rmfiles " $odir/$dlname"~for n in $old_library $library_names; do :; done~func_stripname "" ".a" "$n"~func_append rmfiles " $odir/$func_stripname_result.so"' ;; esac shlibpath_var=LIBPATH fi ;; amigaos*) case $host_cpu in powerpc) # Since July 2007 AmigaOS4 officially supports .so libraries. # When compiling the executable, add -use-dynld -Lsobjs: to the compileline. 
library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' ;; m68k) library_names_spec='$libname.ixlibrary $libname.a' # Create ${libname}_ixlibrary.a entries in /sys/libs. finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' ;; esac ;; beos*) library_names_spec='$libname$shared_ext' dynamic_linker="$host_os ld.so" shlibpath_var=LIBRARY_PATH ;; bsdi[45]*) version_type=linux # correct to gnu/linux during the next big refactor need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir' shlibpath_var=LD_LIBRARY_PATH sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib" sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib" # the default ld.so.conf also contains /usr/contrib/lib and # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow # libtool to hard-code these into programs ;; cygwin* | mingw* | pw32* | cegcc*) version_type=windows shrext_cmds=.dll need_version=no need_lib_prefix=no case $GCC,$cc_basename in yes,*) # gcc library_names_spec='$libname.dll.a' # DLL is installed to $(libdir)/../bin by postinstall_cmds postinstall_cmds='base_file=`basename \$file`~ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; echo \$dlname'\''`~ dldir=$destdir/`dirname \$dlpath`~ test -d \$dldir || mkdir -p \$dldir~ $install_prog $dir/$dlname \$dldir/$dlname~ chmod a+x \$dldir/$dlname~ if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then eval '\''$striplib \$dldir/$dlname'\'' || exit \$?; fi' postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. 
$file; echo \$dlname'\''`~ dlpath=$dir/\$dldll~ $RM \$dlpath' shlibpath_overrides_runpath=yes case $host_os in cygwin*) # Cygwin DLLs use 'cyg' prefix rather than 'lib' soname_spec='`echo $libname | sed -e 's/^lib/cyg/'``echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext' sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/lib/w32api" ;; mingw* | cegcc*) # MinGW DLLs use traditional 'lib' prefix soname_spec='$libname`echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext' ;; pw32*) # pw32 DLLs use 'pw' prefix rather than 'lib' library_names_spec='`echo $libname | sed -e 's/^lib/pw/'``echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext' ;; esac dynamic_linker='Win32 ld.exe' ;; *,cl*) # Native MSVC libname_spec='$name' soname_spec='$libname`echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext' library_names_spec='$libname.dll.lib' case $build_os in mingw*) sys_lib_search_path_spec= lt_save_ifs=$IFS IFS=';' for lt_path in $LIB do IFS=$lt_save_ifs # Let DOS variable expansion print the short 8.3 style file name. lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"` sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path" done IFS=$lt_save_ifs # Convert to MSYS style. sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'` ;; cygwin*) # Convert to unix form, then to dos form, then back to unix form # but this time dos style (no spaces!) so that the unix form looks # like /cygdrive/c/PROGRA~1:/cygdr... sys_lib_search_path_spec=`cygpath --path --unix "$LIB"` sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null` sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` ;; *) sys_lib_search_path_spec=$LIB if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then # It is most probably a Windows format PATH. 
sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` else sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` fi # FIXME: find the short name or the path components, as spaces are # common. (e.g. "Program Files" -> "PROGRA~1") ;; esac # DLL is installed to $(libdir)/../bin by postinstall_cmds postinstall_cmds='base_file=`basename \$file`~ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; echo \$dlname'\''`~ dldir=$destdir/`dirname \$dlpath`~ test -d \$dldir || mkdir -p \$dldir~ $install_prog $dir/$dlname \$dldir/$dlname' postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ dlpath=$dir/\$dldll~ $RM \$dlpath' shlibpath_overrides_runpath=yes dynamic_linker='Win32 link.exe' ;; *) # Assume MSVC wrapper library_names_spec='$libname`echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext $libname.lib' dynamic_linker='Win32 ld.exe' ;; esac # FIXME: first we should search . and the directory the executable is in shlibpath_var=PATH ;; darwin* | rhapsody*) dynamic_linker="$host_os dyld" version_type=darwin need_lib_prefix=no need_version=no library_names_spec='$libname$release$major$shared_ext $libname$shared_ext' soname_spec='$libname$release$major$shared_ext' shlibpath_overrides_runpath=yes shlibpath_var=DYLD_LIBRARY_PATH shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`' sys_lib_search_path_spec="$sys_lib_search_path_spec /usr/local/lib" sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib' ;; dgux*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH ;; freebsd* | dragonfly*) # DragonFly does not have aout. When/if they implement a new # versioning mechanism, adjust this. 
if test -x /usr/bin/objformat; then objformat=`/usr/bin/objformat` else case $host_os in freebsd[23].*) objformat=aout ;; *) objformat=elf ;; esac fi version_type=freebsd-$objformat case $version_type in freebsd-elf*) library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' need_version=no need_lib_prefix=no ;; freebsd-*) library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' need_version=yes ;; esac shlibpath_var=LD_LIBRARY_PATH case $host_os in freebsd2.*) shlibpath_overrides_runpath=yes ;; freebsd3.[01]* | freebsdelf3.[01]*) shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; freebsd3.[2-9]* | freebsdelf3.[2-9]* | \ freebsd4.[0-5] | freebsdelf4.[0-5] | freebsd4.1.1 | freebsdelf4.1.1) shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; *) # from 4.6 on, and DragonFly shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; esac ;; haiku*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no dynamic_linker="$host_os runtime_loader" library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LIBRARY_PATH shlibpath_overrides_runpath=no sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' hardcode_into_libs=yes ;; hpux9* | hpux10* | hpux11*) # Give a soname corresponding to the major version so that dld.sl refuses to # link against other versions. version_type=sunos need_lib_prefix=no need_version=no case $host_cpu in ia64*) shrext_cmds='.so' hardcode_into_libs=yes dynamic_linker="$host_os dld.so" shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. 
library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' if test 32 = "$HPUX_IA64_MODE"; then sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib" sys_lib_dlsearch_path_spec=/usr/lib/hpux32 else sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64" sys_lib_dlsearch_path_spec=/usr/lib/hpux64 fi ;; hppa*64*) shrext_cmds='.sl' hardcode_into_libs=yes dynamic_linker="$host_os dld.sl" shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64" sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec ;; *) shrext_cmds='.sl' dynamic_linker="$host_os dld.sl" shlibpath_var=SHLIB_PATH shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' ;; esac # HP-UX runs *really* slowly unless shared libraries are mode 555, ... 
postinstall_cmds='chmod 555 $lib' # or fails outright, so override atomically: install_override_mode=555 ;; interix[3-9]*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; irix5* | irix6* | nonstopux*) case $host_os in nonstopux*) version_type=nonstopux ;; *) if test yes = "$lt_cv_prog_gnu_ld"; then version_type=linux # correct to gnu/linux during the next big refactor else version_type=irix fi ;; esac need_lib_prefix=no need_version=no soname_spec='$libname$release$shared_ext$major' library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$release$shared_ext $libname$shared_ext' case $host_os in irix5* | nonstopux*) libsuff= shlibsuff= ;; *) case $LD in # libtool.m4 will add one of these switches to LD *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ") libsuff= shlibsuff= libmagic=32-bit;; *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ") libsuff=32 shlibsuff=N32 libmagic=N32;; *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ") libsuff=64 shlibsuff=64 libmagic=64-bit;; *) libsuff= shlibsuff= libmagic=never-match;; esac ;; esac shlibpath_var=LD_LIBRARY${shlibsuff}_PATH shlibpath_overrides_runpath=no sys_lib_search_path_spec="/usr/lib$libsuff /lib$libsuff /usr/local/lib$libsuff" sys_lib_dlsearch_path_spec="/usr/lib$libsuff /lib$libsuff" hardcode_into_libs=yes ;; # No shared lib support for Linux oldld, aout, or coff. linux*oldld* | linux*aout* | linux*coff*) dynamic_linker=no ;; linux*android*) version_type=none # Android doesn't support versioned libraries. 
need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext' soname_spec='$libname$release$shared_ext' finish_cmds= shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes # This implies no fast_install, which is unacceptable. # Some rework will be needed to allow for fast_install # before this can be enabled. hardcode_into_libs=yes dynamic_linker='Android linker' # Don't embed -rpath directories since the linker doesn't support them. hardcode_libdir_flag_spec='-L$libdir' ;; # This must be glibc/ELF. linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no # Some binutils ld are patched to set DT_RUNPATH if ${lt_cv_shlibpath_overrides_runpath+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_shlibpath_overrides_runpath=no save_LDFLAGS=$LDFLAGS save_libdir=$libdir eval "libdir=/foo; wl=\"$lt_prog_compiler_wl\"; \ LDFLAGS=\"\$LDFLAGS $hardcode_libdir_flag_spec\"" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : if ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null; then : lt_cv_shlibpath_overrides_runpath=yes fi fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LDFLAGS=$save_LDFLAGS libdir=$save_libdir fi shlibpath_overrides_runpath=$lt_cv_shlibpath_overrides_runpath # This implies no fast_install, which is unacceptable. # Some rework will be needed to allow for fast_install # before this can be enabled. 
hardcode_into_libs=yes # Ideally, we could use ldconfig to report *all* directores which are # searched for libraries, however this is still not possible. Aside from not # being certain /sbin/ldconfig is available, command # 'ldconfig -N -X -v | grep ^/' on 64bit Fedora does not report /usr/lib64, # even though it is searched at run-time. Try to do the best guess by # appending ld.so.conf contents (and includes) to the search path. if test -f /etc/ld.so.conf; then lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \$2)); skip = 1; } { if (!skip) print \$0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '` sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra" fi # We used to test for /lib/ld.so.1 and disable shared libraries on # powerpc, because MkLinux only supported shared libraries with the # GNU dynamic linker. Since this was broken with cross compilers, # most powerpc-linux boxes support dynamic linking these days and # people can always --disable-shared, the test was removed, and we # assume the GNU/Linux dynamic linker is in use. 
dynamic_linker='GNU/Linux ld.so' ;; netbsdelf*-gnu) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes dynamic_linker='NetBSD ld.elf_so' ;; netbsd*) version_type=sunos need_lib_prefix=no need_version=no if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' dynamic_linker='NetBSD (a.out) ld.so' else library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' dynamic_linker='NetBSD ld.elf_so' fi shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; newsos6) version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes ;; *nto* | *qnx*) version_type=qnx need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes dynamic_linker='ldqnx.so' ;; openbsd* | bitrig*) version_type=sunos sys_lib_dlsearch_path_spec=/usr/lib need_lib_prefix=no if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then need_version=no else need_version=yes fi library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' shlibpath_var=LD_LIBRARY_PATH 
shlibpath_overrides_runpath=yes ;; os2*) libname_spec='$name' version_type=windows shrext_cmds=.dll need_version=no need_lib_prefix=no # OS/2 can only load a DLL with a base name of 8 characters or less. soname_spec='`test -n "$os2dllname" && libname="$os2dllname"; v=$($ECHO $release$versuffix | tr -d .-); n=$($ECHO $libname | cut -b -$((8 - ${#v})) | tr . _); $ECHO $n$v`$shared_ext' library_names_spec='${libname}_dll.$libext' dynamic_linker='OS/2 ld.exe' shlibpath_var=BEGINLIBPATH sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib" sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec postinstall_cmds='base_file=`basename \$file`~ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; $ECHO \$dlname'\''`~ dldir=$destdir/`dirname \$dlpath`~ test -d \$dldir || mkdir -p \$dldir~ $install_prog $dir/$dlname \$dldir/$dlname~ chmod a+x \$dldir/$dlname~ if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then eval '\''$striplib \$dldir/$dlname'\'' || exit \$?; fi' postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. 
$file; $ECHO \$dlname'\''`~ dlpath=$dir/\$dldll~ $RM \$dlpath' ;; osf3* | osf4* | osf5*) version_type=osf need_lib_prefix=no need_version=no soname_spec='$libname$release$shared_ext$major' library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' shlibpath_var=LD_LIBRARY_PATH sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib" sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec ;; rdos*) dynamic_linker=no ;; solaris*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes # ldd complains unless libraries are executable postinstall_cmds='chmod +x $lib' ;; sunos4*) version_type=sunos library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes if test yes = "$with_gnu_ld"; then need_lib_prefix=no fi need_version=yes ;; sysv4 | sysv4.3*) version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH case $host_vendor in sni) shlibpath_overrides_runpath=no need_lib_prefix=no runpath_var=LD_RUN_PATH ;; siemens) need_lib_prefix=no ;; motorola) need_lib_prefix=no need_version=no shlibpath_overrides_runpath=no sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib' ;; esac ;; sysv4*MP*) if test -d /usr/nec; then version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='$libname$shared_ext.$versuffix 
$libname$shared_ext.$major $libname$shared_ext' soname_spec='$libname$shared_ext.$major' shlibpath_var=LD_LIBRARY_PATH fi ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) version_type=sco need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes if test yes = "$with_gnu_ld"; then sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib' else sys_lib_search_path_spec='/usr/ccs/lib /usr/lib' case $host_os in sco3.2v5*) sys_lib_search_path_spec="$sys_lib_search_path_spec /lib" ;; esac fi sys_lib_dlsearch_path_spec='/usr/lib' ;; tpf*) # TPF is a cross-target only. Preferred cross-host = GNU/Linux. version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; uts4*) version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH ;; *) dynamic_linker=no ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: result: $dynamic_linker" >&5 $as_echo "$dynamic_linker" >&6; } test no = "$dynamic_linker" && can_build_shared=no variables_saved_for_relink="PATH $shlibpath_var $runpath_var" if test yes = "$GCC"; then variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH" fi if test set = "${lt_cv_sys_lib_search_path_spec+set}"; then sys_lib_search_path_spec=$lt_cv_sys_lib_search_path_spec fi if test set = "${lt_cv_sys_lib_dlsearch_path_spec+set}"; then 
sys_lib_dlsearch_path_spec=$lt_cv_sys_lib_dlsearch_path_spec fi # remember unaugmented sys_lib_dlsearch_path content for libtool script decls... configure_time_dlsearch_path=$sys_lib_dlsearch_path_spec # ... but it needs LT_SYS_LIBRARY_PATH munging for other configure-time code func_munge_path_list sys_lib_dlsearch_path_spec "$LT_SYS_LIBRARY_PATH" # to be used as default LT_SYS_LIBRARY_PATH value in generated libtool configure_time_lt_sys_library_path=$LT_SYS_LIBRARY_PATH { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to hardcode library paths into programs" >&5 $as_echo_n "checking how to hardcode library paths into programs... " >&6; } hardcode_action= if test -n "$hardcode_libdir_flag_spec" || test -n "$runpath_var" || test yes = "$hardcode_automatic"; then # We can hardcode non-existent directories. if test no != "$hardcode_direct" && # If the only mechanism to avoid hardcoding is shlibpath_var, we # have to relink, otherwise we might link with an installed library # when we should be linking with a yet-to-be-installed one ## test no != "$_LT_TAGVAR(hardcode_shlibpath_var, )" && test no != "$hardcode_minus_L"; then # Linking always hardcodes the temporary library directory. hardcode_action=relink else # We can link without hardcoding, and we can hardcode nonexisting dirs. hardcode_action=immediate fi else # We cannot hardcode anything, or else we can only hardcode existing # directories. 
hardcode_action=unsupported fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hardcode_action" >&5 $as_echo "$hardcode_action" >&6; } if test relink = "$hardcode_action" || test yes = "$inherit_rpath"; then # Fast installation is not supported enable_fast_install=no elif test yes = "$shlibpath_overrides_runpath" || test no = "$enable_shared"; then # Fast installation is not necessary enable_fast_install=needless fi if test yes != "$enable_dlopen"; then enable_dlopen=unknown enable_dlopen_self=unknown enable_dlopen_self_static=unknown else lt_cv_dlopen=no lt_cv_dlopen_libs= case $host_os in beos*) lt_cv_dlopen=load_add_on lt_cv_dlopen_libs= lt_cv_dlopen_self=yes ;; mingw* | pw32* | cegcc*) lt_cv_dlopen=LoadLibrary lt_cv_dlopen_libs= ;; cygwin*) lt_cv_dlopen=dlopen lt_cv_dlopen_libs= ;; darwin*) # if libdl is installed we need to link against it { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5 $as_echo_n "checking for dlopen in -ldl... " >&6; } if ${ac_cv_lib_dl_dlopen+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldl $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char dlopen (); int main () { return dlopen (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_dl_dlopen=yes else ac_cv_lib_dl_dlopen=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5 $as_echo "$ac_cv_lib_dl_dlopen" >&6; } if test "x$ac_cv_lib_dl_dlopen" = xyes; then : lt_cv_dlopen=dlopen lt_cv_dlopen_libs=-ldl else lt_cv_dlopen=dyld lt_cv_dlopen_libs= lt_cv_dlopen_self=yes fi ;; tpf*) # Don't try to run any link tests for TPF. 
We know it's impossible # because TPF is a cross-compiler, and we know how we open DSOs. lt_cv_dlopen=dlopen lt_cv_dlopen_libs= lt_cv_dlopen_self=no ;; *) ac_fn_c_check_func "$LINENO" "shl_load" "ac_cv_func_shl_load" if test "x$ac_cv_func_shl_load" = xyes; then : lt_cv_dlopen=shl_load else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for shl_load in -ldld" >&5 $as_echo_n "checking for shl_load in -ldld... " >&6; } if ${ac_cv_lib_dld_shl_load+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldld $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char shl_load (); int main () { return shl_load (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_dld_shl_load=yes else ac_cv_lib_dld_shl_load=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_shl_load" >&5 $as_echo "$ac_cv_lib_dld_shl_load" >&6; } if test "x$ac_cv_lib_dld_shl_load" = xyes; then : lt_cv_dlopen=shl_load lt_cv_dlopen_libs=-ldld else ac_fn_c_check_func "$LINENO" "dlopen" "ac_cv_func_dlopen" if test "x$ac_cv_func_dlopen" = xyes; then : lt_cv_dlopen=dlopen else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -ldl" >&5 $as_echo_n "checking for dlopen in -ldl... " >&6; } if ${ac_cv_lib_dl_dlopen+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldl $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char dlopen (); int main () { return dlopen (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_dl_dlopen=yes else ac_cv_lib_dl_dlopen=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlopen" >&5 $as_echo "$ac_cv_lib_dl_dlopen" >&6; } if test "x$ac_cv_lib_dl_dlopen" = xyes; then : lt_cv_dlopen=dlopen lt_cv_dlopen_libs=-ldl else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlopen in -lsvld" >&5 $as_echo_n "checking for dlopen in -lsvld... " >&6; } if ${ac_cv_lib_svld_dlopen+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lsvld $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char dlopen (); int main () { return dlopen (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_svld_dlopen=yes else ac_cv_lib_svld_dlopen=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_svld_dlopen" >&5 $as_echo "$ac_cv_lib_svld_dlopen" >&6; } if test "x$ac_cv_lib_svld_dlopen" = xyes; then : lt_cv_dlopen=dlopen lt_cv_dlopen_libs=-lsvld else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dld_link in -ldld" >&5 $as_echo_n "checking for dld_link in -ldld... " >&6; } if ${ac_cv_lib_dld_dld_link+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldld $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. 
Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char dld_link (); int main () { return dld_link (); ; return 0; } _ACEOF if ac_fn_c_try_link "$LINENO"; then : ac_cv_lib_dld_dld_link=yes else ac_cv_lib_dld_dld_link=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dld_dld_link" >&5 $as_echo "$ac_cv_lib_dld_dld_link" >&6; } if test "x$ac_cv_lib_dld_dld_link" = xyes; then : lt_cv_dlopen=dld_link lt_cv_dlopen_libs=-ldld fi fi fi fi fi fi ;; esac if test no = "$lt_cv_dlopen"; then enable_dlopen=no else enable_dlopen=yes fi case $lt_cv_dlopen in dlopen) save_CPPFLAGS=$CPPFLAGS test yes = "$ac_cv_header_dlfcn_h" && CPPFLAGS="$CPPFLAGS -DHAVE_DLFCN_H" save_LDFLAGS=$LDFLAGS wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $export_dynamic_flag_spec\" save_LIBS=$LIBS LIBS="$lt_cv_dlopen_libs $LIBS" { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a program can dlopen itself" >&5 $as_echo_n "checking whether a program can dlopen itself... " >&6; } if ${lt_cv_dlopen_self+:} false; then : $as_echo_n "(cached) " >&6 else if test yes = "$cross_compiling"; then : lt_cv_dlopen_self=cross else lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 lt_status=$lt_dlunknown cat > conftest.$ac_ext <<_LT_EOF #line $LINENO "configure" #include "confdefs.h" #if HAVE_DLFCN_H #include #endif #include #ifdef RTLD_GLOBAL # define LT_DLGLOBAL RTLD_GLOBAL #else # ifdef DL_GLOBAL # define LT_DLGLOBAL DL_GLOBAL # else # define LT_DLGLOBAL 0 # endif #endif /* We may have to define LT_DLLAZY_OR_NOW in the command line if we find out it does not work in some platform. 
*/ #ifndef LT_DLLAZY_OR_NOW # ifdef RTLD_LAZY # define LT_DLLAZY_OR_NOW RTLD_LAZY # else # ifdef DL_LAZY # define LT_DLLAZY_OR_NOW DL_LAZY # else # ifdef RTLD_NOW # define LT_DLLAZY_OR_NOW RTLD_NOW # else # ifdef DL_NOW # define LT_DLLAZY_OR_NOW DL_NOW # else # define LT_DLLAZY_OR_NOW 0 # endif # endif # endif # endif #endif /* When -fvisibility=hidden is used, assume the code has been annotated correspondingly for the symbols needed. */ #if defined __GNUC__ && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) int fnord () __attribute__((visibility("default"))); #endif int fnord () { return 42; } int main () { void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); int status = $lt_dlunknown; if (self) { if (dlsym (self,"fnord")) status = $lt_dlno_uscore; else { if (dlsym( self,"_fnord")) status = $lt_dlneed_uscore; else puts (dlerror ()); } /* dlclose (self); */ } else puts (dlerror ()); return status; } _LT_EOF if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 (eval $ac_link) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && test -s "conftest$ac_exeext" 2>/dev/null; then (./conftest; exit; ) >&5 2>/dev/null lt_status=$? case x$lt_status in x$lt_dlno_uscore) lt_cv_dlopen_self=yes ;; x$lt_dlneed_uscore) lt_cv_dlopen_self=yes ;; x$lt_dlunknown|x*) lt_cv_dlopen_self=no ;; esac else : # compilation failed lt_cv_dlopen_self=no fi fi rm -fr conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_dlopen_self" >&5 $as_echo "$lt_cv_dlopen_self" >&6; } if test yes = "$lt_cv_dlopen_self"; then wl=$lt_prog_compiler_wl eval LDFLAGS=\"\$LDFLAGS $lt_prog_compiler_static\" { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether a statically linked program can dlopen itself" >&5 $as_echo_n "checking whether a statically linked program can dlopen itself... 
" >&6; } if ${lt_cv_dlopen_self_static+:} false; then : $as_echo_n "(cached) " >&6 else if test yes = "$cross_compiling"; then : lt_cv_dlopen_self_static=cross else lt_dlunknown=0; lt_dlno_uscore=1; lt_dlneed_uscore=2 lt_status=$lt_dlunknown cat > conftest.$ac_ext <<_LT_EOF #line $LINENO "configure" #include "confdefs.h" #if HAVE_DLFCN_H #include #endif #include #ifdef RTLD_GLOBAL # define LT_DLGLOBAL RTLD_GLOBAL #else # ifdef DL_GLOBAL # define LT_DLGLOBAL DL_GLOBAL # else # define LT_DLGLOBAL 0 # endif #endif /* We may have to define LT_DLLAZY_OR_NOW in the command line if we find out it does not work in some platform. */ #ifndef LT_DLLAZY_OR_NOW # ifdef RTLD_LAZY # define LT_DLLAZY_OR_NOW RTLD_LAZY # else # ifdef DL_LAZY # define LT_DLLAZY_OR_NOW DL_LAZY # else # ifdef RTLD_NOW # define LT_DLLAZY_OR_NOW RTLD_NOW # else # ifdef DL_NOW # define LT_DLLAZY_OR_NOW DL_NOW # else # define LT_DLLAZY_OR_NOW 0 # endif # endif # endif # endif #endif /* When -fvisibility=hidden is used, assume the code has been annotated correspondingly for the symbols needed. */ #if defined __GNUC__ && (((__GNUC__ == 3) && (__GNUC_MINOR__ >= 3)) || (__GNUC__ > 3)) int fnord () __attribute__((visibility("default"))); #endif int fnord () { return 42; } int main () { void *self = dlopen (0, LT_DLGLOBAL|LT_DLLAZY_OR_NOW); int status = $lt_dlunknown; if (self) { if (dlsym (self,"fnord")) status = $lt_dlno_uscore; else { if (dlsym( self,"_fnord")) status = $lt_dlneed_uscore; else puts (dlerror ()); } /* dlclose (self); */ } else puts (dlerror ()); return status; } _LT_EOF if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_link\""; } >&5 (eval $ac_link) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } && test -s "conftest$ac_exeext" 2>/dev/null; then (./conftest; exit; ) >&5 2>/dev/null lt_status=$? 
case x$lt_status in x$lt_dlno_uscore) lt_cv_dlopen_self_static=yes ;; x$lt_dlneed_uscore) lt_cv_dlopen_self_static=yes ;; x$lt_dlunknown|x*) lt_cv_dlopen_self_static=no ;; esac else : # compilation failed lt_cv_dlopen_self_static=no fi fi rm -fr conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_dlopen_self_static" >&5 $as_echo "$lt_cv_dlopen_self_static" >&6; } fi CPPFLAGS=$save_CPPFLAGS LDFLAGS=$save_LDFLAGS LIBS=$save_LIBS ;; esac case $lt_cv_dlopen_self in yes|no) enable_dlopen_self=$lt_cv_dlopen_self ;; *) enable_dlopen_self=unknown ;; esac case $lt_cv_dlopen_self_static in yes|no) enable_dlopen_self_static=$lt_cv_dlopen_self_static ;; *) enable_dlopen_self_static=unknown ;; esac fi striplib= old_striplib= { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether stripping libraries is possible" >&5 $as_echo_n "checking whether stripping libraries is possible... " >&6; } if test -n "$STRIP" && $STRIP -V 2>&1 | $GREP "GNU strip" >/dev/null; then test -z "$old_striplib" && old_striplib="$STRIP --strip-debug" test -z "$striplib" && striplib="$STRIP --strip-unneeded" { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } else # FIXME - insert some real tests, host_os isn't really good enough case $host_os in darwin*) if test -n "$STRIP"; then striplib="$STRIP -x" old_striplib="$STRIP -S" { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi ;; *) { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } ;; esac fi # Report what library types will actually be built { $as_echo "$as_me:${as_lineno-$LINENO}: checking if libtool supports shared libraries" >&5 $as_echo_n "checking if libtool supports shared libraries... 
" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: $can_build_shared" >&5 $as_echo "$can_build_shared" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build shared libraries" >&5 $as_echo_n "checking whether to build shared libraries... " >&6; } test no = "$can_build_shared" && enable_shared=no # On AIX, shared libraries and static libraries use the same namespace, and # are all built from PIC. case $host_os in aix3*) test yes = "$enable_shared" && enable_static=no if test -n "$RANLIB"; then archive_cmds="$archive_cmds~\$RANLIB \$lib" postinstall_cmds='$RANLIB $lib' fi ;; aix[4-9]*) if test ia64 != "$host_cpu"; then case $enable_shared,$with_aix_soname,$aix_use_runtimelinking in yes,aix,yes) ;; # shared object as lib.so file only yes,svr4,*) ;; # shared object as lib.so archive member only yes,*) enable_static=no ;; # shared object in lib.a archive as well esac fi ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_shared" >&5 $as_echo "$enable_shared" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether to build static libraries" >&5 $as_echo_n "checking whether to build static libraries... " >&6; } # Make sure either enable_shared or enable_static is yes. 
test yes = "$enable_shared" || enable_static=yes { $as_echo "$as_me:${as_lineno-$LINENO}: result: $enable_static" >&5 $as_echo "$enable_static" >&6; } fi ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu CC=$lt_save_CC if test -n "$CXX" && ( test no != "$CXX" && ( (test g++ = "$CXX" && `g++ -v >/dev/null 2>&1` ) || (test g++ != "$CXX"))); then ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to run the C++ preprocessor" >&5 $as_echo_n "checking how to run the C++ preprocessor... " >&6; } if test -z "$CXXCPP"; then if ${ac_cv_prog_CXXCPP+:} false; then : $as_echo_n "(cached) " >&6 else # Double quotes because CXXCPP needs to be expanded for CXXCPP in "$CXX -E" "/lib/cpp" do ac_preproc_ok=false for ac_cxx_preproc_warn_flag in '' yes do # Use a header file that comes with gcc, so configuring glibc # with a fresh cross-compiler works. # Prefer to if __STDC__ is defined, since # exists even on freestanding compilers. # On the NeXT, cc -E runs the code through the compiler's parser, # not just through cpp. "Syntax error" is here to catch this case. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #ifdef __STDC__ # include #else # include #endif Syntax error _ACEOF if ac_fn_cxx_try_cpp "$LINENO"; then : else # Broken: fails on valid input. continue fi rm -f conftest.err conftest.i conftest.$ac_ext # OK, works on sane cases. Now check whether nonexistent headers # can be detected and how. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include _ACEOF if ac_fn_cxx_try_cpp "$LINENO"; then : # Broken: success on invalid input. 
continue else # Passes both tests. ac_preproc_ok=: break fi rm -f conftest.err conftest.i conftest.$ac_ext done # Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. rm -f conftest.i conftest.err conftest.$ac_ext if $ac_preproc_ok; then : break fi done ac_cv_prog_CXXCPP=$CXXCPP fi CXXCPP=$ac_cv_prog_CXXCPP else ac_cv_prog_CXXCPP=$CXXCPP fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $CXXCPP" >&5 $as_echo "$CXXCPP" >&6; } ac_preproc_ok=false for ac_cxx_preproc_warn_flag in '' yes do # Use a header file that comes with gcc, so configuring glibc # with a fresh cross-compiler works. # Prefer to if __STDC__ is defined, since # exists even on freestanding compilers. # On the NeXT, cc -E runs the code through the compiler's parser, # not just through cpp. "Syntax error" is here to catch this case. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #ifdef __STDC__ # include #else # include #endif Syntax error _ACEOF if ac_fn_cxx_try_cpp "$LINENO"; then : else # Broken: fails on valid input. continue fi rm -f conftest.err conftest.i conftest.$ac_ext # OK, works on sane cases. Now check whether nonexistent headers # can be detected and how. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include _ACEOF if ac_fn_cxx_try_cpp "$LINENO"; then : # Broken: success on invalid input. continue else # Passes both tests. ac_preproc_ok=: break fi rm -f conftest.err conftest.i conftest.$ac_ext done # Because of `break', _AC_PREPROC_IFELSE's cleaning code was skipped. rm -f conftest.i conftest.err conftest.$ac_ext if $ac_preproc_ok; then : else { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? 
"C++ preprocessor \"$CXXCPP\" fails sanity check See \`config.log' for more details" "$LINENO" 5; } fi ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu else _lt_caught_CXX_error=yes fi ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu archive_cmds_need_lc_CXX=no allow_undefined_flag_CXX= always_export_symbols_CXX=no archive_expsym_cmds_CXX= compiler_needs_object_CXX=no export_dynamic_flag_spec_CXX= hardcode_direct_CXX=no hardcode_direct_absolute_CXX=no hardcode_libdir_flag_spec_CXX= hardcode_libdir_separator_CXX= hardcode_minus_L_CXX=no hardcode_shlibpath_var_CXX=unsupported hardcode_automatic_CXX=no inherit_rpath_CXX=no module_cmds_CXX= module_expsym_cmds_CXX= link_all_deplibs_CXX=unknown old_archive_cmds_CXX=$old_archive_cmds reload_flag_CXX=$reload_flag reload_cmds_CXX=$reload_cmds no_undefined_flag_CXX= whole_archive_flag_spec_CXX= enable_shared_with_static_runtimes_CXX=no # Source file extension for C++ test sources. ac_ext=cpp # Object file extension for compiled C++ test sources. objext=o objext_CXX=$objext # No sense in running all these tests if we already determined that # the CXX compiler isn't working. Some variables (like enable_shared) # are currently assumed to apply to all compilers on this platform, # and will be corrupted by setting them based on a non-working compiler. if test yes != "$_lt_caught_CXX_error"; then # Code to be used in simple compile tests lt_simple_compile_test_code="int some_variable = 0;" # Code to be used in simple link tests lt_simple_link_test_code='int main(int, char *[]) { return(0); }' # ltmain only uses $CC for tagged configurations so make sure $CC is set. 
# If no C compiler was specified, use CC. LTCC=${LTCC-"$CC"} # If no C compiler flags were specified, use CFLAGS. LTCFLAGS=${LTCFLAGS-"$CFLAGS"} # Allow CC to be a program name with arguments. compiler=$CC # save warnings/boilerplate of simple test code ac_outfile=conftest.$ac_objext echo "$lt_simple_compile_test_code" >conftest.$ac_ext eval "$ac_compile" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err _lt_compiler_boilerplate=`cat conftest.err` $RM conftest* ac_outfile=conftest.$ac_objext echo "$lt_simple_link_test_code" >conftest.$ac_ext eval "$ac_link" 2>&1 >/dev/null | $SED '/^$/d; /^ *+/d' >conftest.err _lt_linker_boilerplate=`cat conftest.err` $RM -r conftest* # Allow CC to be a program name with arguments. lt_save_CC=$CC lt_save_CFLAGS=$CFLAGS lt_save_LD=$LD lt_save_GCC=$GCC GCC=$GXX lt_save_with_gnu_ld=$with_gnu_ld lt_save_path_LD=$lt_cv_path_LD if test -n "${lt_cv_prog_gnu_ldcxx+set}"; then lt_cv_prog_gnu_ld=$lt_cv_prog_gnu_ldcxx else $as_unset lt_cv_prog_gnu_ld fi if test -n "${lt_cv_path_LDCXX+set}"; then lt_cv_path_LD=$lt_cv_path_LDCXX else $as_unset lt_cv_path_LD fi test -z "${LDCXX+set}" || LD=$LDCXX CC=${CXX-"c++"} CFLAGS=$CXXFLAGS compiler=$CC compiler_CXX=$CC func_cc_basename $compiler cc_basename=$func_cc_basename_result if test -n "$compiler"; then # We don't want -fno-exception when compiling C++ code, so set the # no_builtin_flag separately if test yes = "$GXX"; then lt_prog_compiler_no_builtin_flag_CXX=' -fno-builtin' else lt_prog_compiler_no_builtin_flag_CXX= fi if test yes = "$GXX"; then # Set up default GNU C++ configuration # Check whether --with-gnu-ld was given. if test "${with_gnu_ld+set}" = set; then : withval=$with_gnu_ld; test no = "$withval" || with_gnu_ld=yes else with_gnu_ld=no fi ac_prog=ld if test yes = "$GCC"; then # Check if gcc -print-prog-name=ld gives a path. { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ld used by $CC" >&5 $as_echo_n "checking for ld used by $CC... 
" >&6; } case $host in *-*-mingw*) # gcc leaves a trailing carriage return, which upsets mingw ac_prog=`($CC -print-prog-name=ld) 2>&5 | tr -d '\015'` ;; *) ac_prog=`($CC -print-prog-name=ld) 2>&5` ;; esac case $ac_prog in # Accept absolute paths. [\\/]* | ?:[\\/]*) re_direlt='/[^/][^/]*/\.\./' # Canonicalize the pathname of ld ac_prog=`$ECHO "$ac_prog"| $SED 's%\\\\%/%g'` while $ECHO "$ac_prog" | $GREP "$re_direlt" > /dev/null 2>&1; do ac_prog=`$ECHO $ac_prog| $SED "s%$re_direlt%/%"` done test -z "$LD" && LD=$ac_prog ;; "") # If it fails, then pretend we aren't using GCC. ac_prog=ld ;; *) # If it is relative, then search for the first ld in PATH. with_gnu_ld=unknown ;; esac elif test yes = "$with_gnu_ld"; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU ld" >&5 $as_echo_n "checking for GNU ld... " >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for non-GNU ld" >&5 $as_echo_n "checking for non-GNU ld... " >&6; } fi if ${lt_cv_path_LD+:} false; then : $as_echo_n "(cached) " >&6 else if test -z "$LD"; then lt_save_ifs=$IFS; IFS=$PATH_SEPARATOR for ac_dir in $PATH; do IFS=$lt_save_ifs test -z "$ac_dir" && ac_dir=. if test -f "$ac_dir/$ac_prog" || test -f "$ac_dir/$ac_prog$ac_exeext"; then lt_cv_path_LD=$ac_dir/$ac_prog # Check to see if the program is GNU ld. I'd rather use --version, # but apparently some variants of GNU ld only accept -v. # Break only if it was the GNU/non-GNU ld that we prefer. case `"$lt_cv_path_LD" -v 2>&1 &5 $as_echo "$LD" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi test -z "$LD" && as_fn_error $? "no acceptable ld found in \$PATH" "$LINENO" 5 { $as_echo "$as_me:${as_lineno-$LINENO}: checking if the linker ($LD) is GNU ld" >&5 $as_echo_n "checking if the linker ($LD) is GNU ld... " >&6; } if ${lt_cv_prog_gnu_ld+:} false; then : $as_echo_n "(cached) " >&6 else # I'd rather use --version here, but apparently some GNU lds only accept -v. 
case `$LD -v 2>&1 &5 $as_echo "$lt_cv_prog_gnu_ld" >&6; } with_gnu_ld=$lt_cv_prog_gnu_ld # Check if GNU C++ uses GNU ld as the underlying linker, since the # archiving commands below assume that GNU ld is being used. if test yes = "$with_gnu_ld"; then archive_cmds_CXX='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib' archive_expsym_cmds_CXX='$CC $pic_flag -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' hardcode_libdir_flag_spec_CXX='$wl-rpath $wl$libdir' export_dynamic_flag_spec_CXX='$wl--export-dynamic' # If archive_cmds runs LD, not CC, wlarc should be empty # XXX I think wlarc can be eliminated in ltcf-cxx, but I need to # investigate it a little bit more. (MM) wlarc='$wl' # ancient GNU ld didn't support --whole-archive et. al. if eval "`$CC -print-prog-name=ld` --help 2>&1" | $GREP 'no-whole-archive' > /dev/null; then whole_archive_flag_spec_CXX=$wlarc'--whole-archive$convenience '$wlarc'--no-whole-archive' else whole_archive_flag_spec_CXX= fi else with_gnu_ld=no wlarc= # A generic and very simple default shared library creation # command for GNU C++ for the case where it uses the native # linker, instead of GNU ld. If possible, this setting should # overridden to take advantage of the native linker features on # the platform it is being used on. archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib' fi # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. 
output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"' else GXX=no with_gnu_ld=no wlarc= fi # PORTME: fill in a description of your system's C++ link characteristics { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $compiler linker ($LD) supports shared libraries" >&5 $as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... " >&6; } ld_shlibs_CXX=yes case $host_os in aix3*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; aix[4-9]*) if test ia64 = "$host_cpu"; then # On IA64, the linker does run time linking by default, so we don't # have to do anything special. aix_use_runtimelinking=no exp_sym_flag='-Bexport' no_entry_flag= else aix_use_runtimelinking=no # Test if we are trying to use run time linking or normal # AIX style linking. If -brtl is somewhere in LDFLAGS, we # have runtime linking enabled, and use it for executables. # For shared libraries, we enable/disable runtime linking # depending on the kind of the shared library created - # when "with_aix_soname,aix_use_runtimelinking" is: # "aix,no" lib.a(lib.so.V) shared, rtl:no, for executables # "aix,yes" lib.so shared, rtl:yes, for executables # lib.a static archive # "both,no" lib.so.V(shr.o) shared, rtl:yes # lib.a(lib.so.V) shared, rtl:no, for executables # "both,yes" lib.so.V(shr.o) shared, rtl:yes, for executables # lib.a(lib.so.V) shared, rtl:no # "svr4,*" lib.so.V(shr.o) shared, rtl:yes, for executables # lib.a static archive case $host_os in aix4.[23]|aix4.[23].*|aix[5-9]*) for ld_flag in $LDFLAGS; do case $ld_flag in *-brtl*) aix_use_runtimelinking=yes break ;; esac done if test svr4,no = "$with_aix_soname,$aix_use_runtimelinking"; then # With aix-soname=svr4, we create the lib.so.V shared archives only, # so we don't have lib.a shared libs to link our executables. # We have to force runtime linking in this case. 
aix_use_runtimelinking=yes LDFLAGS="$LDFLAGS -Wl,-brtl" fi ;; esac exp_sym_flag='-bexport' no_entry_flag='-bnoentry' fi # When large executables or shared objects are built, AIX ld can # have problems creating the table of contents. If linking a library # or program results in "error TOC overflow" add -mminimal-toc to # CXXFLAGS/CFLAGS for g++/gcc. In the cases where that is not # enough to fix the problem, add -Wl,-bbigtoc to LDFLAGS. archive_cmds_CXX='' hardcode_direct_CXX=yes hardcode_direct_absolute_CXX=yes hardcode_libdir_separator_CXX=':' link_all_deplibs_CXX=yes file_list_spec_CXX='$wl-f,' case $with_aix_soname,$aix_use_runtimelinking in aix,*) ;; # no import file svr4,* | *,yes) # use import file # The Import File defines what to hardcode. hardcode_direct_CXX=no hardcode_direct_absolute_CXX=no ;; esac if test yes = "$GXX"; then case $host_os in aix4.[012]|aix4.[012].*) # We only want to do this on AIX 4.2 and lower, the check # below for broken collect2 doesn't work under 4.3+ collect2name=`$CC -print-prog-name=collect2` if test -f "$collect2name" && strings "$collect2name" | $GREP resolve_lib_name >/dev/null then # We have reworked collect2 : else # We have old collect2 hardcode_direct_CXX=unsupported # It fails to find uninstalled libraries when the uninstalled # path is not listed in the libpath. Setting hardcode_minus_L # to unsupported forces relinking hardcode_minus_L_CXX=yes hardcode_libdir_flag_spec_CXX='-L$libdir' hardcode_libdir_separator_CXX= fi esac shared_flag='-shared' if test yes = "$aix_use_runtimelinking"; then shared_flag=$shared_flag' $wl-G' fi # Need to ensure runtime linking is disabled for the traditional # shared library, or the linker may eventually find shared libraries # /with/ Import File - we do not want to mix them. shared_flag_aix='-shared' shared_flag_svr4='-shared $wl-G' else # not using gcc if test ia64 = "$host_cpu"; then # VisualAge C++, Version 5.5 for AIX 5L for IA-64, Beta 3 Release # chokes on -Wl,-G. 
The following line is correct: shared_flag='-G' else if test yes = "$aix_use_runtimelinking"; then shared_flag='$wl-G' else shared_flag='$wl-bM:SRE' fi shared_flag_aix='$wl-bM:SRE' shared_flag_svr4='$wl-G' fi fi export_dynamic_flag_spec_CXX='$wl-bexpall' # It seems that -bexpall does not export symbols beginning with # underscore (_), so it is better to generate a list of symbols to # export. always_export_symbols_CXX=yes if test aix,yes = "$with_aix_soname,$aix_use_runtimelinking"; then # Warning - without using the other runtime loading flags (-brtl), # -berok will link without error, but may produce a broken library. # The "-G" linker flag allows undefined symbols. no_undefined_flag_CXX='-bernotok' # Determine the default libpath from the value encoded in an empty # executable. if test set = "${lt_cv_aix_libpath+set}"; then aix_libpath=$lt_cv_aix_libpath else if ${lt_cv_aix_libpath__CXX+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : lt_aix_libpath_sed=' /Import File Strings/,/^$/ { /^0/ { s/^0 *\([^ ]*\) *$/\1/ p } }' lt_cv_aix_libpath__CXX=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` # Check for a 64-bit object if we didn't find anything. 
if test -z "$lt_cv_aix_libpath__CXX"; then lt_cv_aix_libpath__CXX=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` fi fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext if test -z "$lt_cv_aix_libpath__CXX"; then lt_cv_aix_libpath__CXX=/usr/lib:/lib fi fi aix_libpath=$lt_cv_aix_libpath__CXX fi hardcode_libdir_flag_spec_CXX='$wl-blibpath:$libdir:'"$aix_libpath" archive_expsym_cmds_CXX='$CC -o $output_objdir/$soname $libobjs $deplibs $wl'$no_entry_flag' $compiler_flags `if test -n "$allow_undefined_flag"; then func_echo_all "$wl$allow_undefined_flag"; else :; fi` $wl'$exp_sym_flag:\$export_symbols' '$shared_flag else if test ia64 = "$host_cpu"; then hardcode_libdir_flag_spec_CXX='$wl-R $libdir:/usr/lib:/lib' allow_undefined_flag_CXX="-z nodefs" archive_expsym_cmds_CXX="\$CC $shared_flag"' -o $output_objdir/$soname $libobjs $deplibs '"\$wl$no_entry_flag"' $compiler_flags $wl$allow_undefined_flag '"\$wl$exp_sym_flag:\$export_symbols" else # Determine the default libpath from the value encoded in an # empty executable. if test set = "${lt_cv_aix_libpath+set}"; then aix_libpath=$lt_cv_aix_libpath else if ${lt_cv_aix_libpath__CXX+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : lt_aix_libpath_sed=' /Import File Strings/,/^$/ { /^0/ { s/^0 *\([^ ]*\) *$/\1/ p } }' lt_cv_aix_libpath__CXX=`dump -H conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` # Check for a 64-bit object if we didn't find anything. 
if test -z "$lt_cv_aix_libpath__CXX"; then lt_cv_aix_libpath__CXX=`dump -HX64 conftest$ac_exeext 2>/dev/null | $SED -n -e "$lt_aix_libpath_sed"` fi fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext if test -z "$lt_cv_aix_libpath__CXX"; then lt_cv_aix_libpath__CXX=/usr/lib:/lib fi fi aix_libpath=$lt_cv_aix_libpath__CXX fi hardcode_libdir_flag_spec_CXX='$wl-blibpath:$libdir:'"$aix_libpath" # Warning - without using the other run time loading flags, # -berok will link without error, but may produce a broken library. no_undefined_flag_CXX=' $wl-bernotok' allow_undefined_flag_CXX=' $wl-berok' if test yes = "$with_gnu_ld"; then # We only use this code for GNU lds that support --whole-archive. whole_archive_flag_spec_CXX='$wl--whole-archive$convenience $wl--no-whole-archive' else # Exported symbols can be pulled into shared objects from archives whole_archive_flag_spec_CXX='$convenience' fi archive_cmds_need_lc_CXX=yes archive_expsym_cmds_CXX='$RM -r $output_objdir/$realname.d~$MKDIR $output_objdir/$realname.d' # -brtl affects multiple linker settings, -berok does not and is overridden later compiler_flags_filtered='`func_echo_all "$compiler_flags " | $SED -e "s%-brtl\\([, ]\\)%-berok\\1%g"`' if test svr4 != "$with_aix_soname"; then # This is similar to how AIX traditionally builds its shared # libraries. Need -bnortl late, we may have -brtl in LDFLAGS. 
archive_expsym_cmds_CXX="$archive_expsym_cmds_CXX"'~$CC '$shared_flag_aix' -o $output_objdir/$realname.d/$soname $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$AR $AR_FLAGS $output_objdir/$libname$release.a $output_objdir/$realname.d/$soname' fi if test aix != "$with_aix_soname"; then archive_expsym_cmds_CXX="$archive_expsym_cmds_CXX"'~$CC '$shared_flag_svr4' -o $output_objdir/$realname.d/$shared_archive_member_spec.o $libobjs $deplibs $wl-bnoentry '$compiler_flags_filtered'$wl-bE:$export_symbols$allow_undefined_flag~$STRIP -e $output_objdir/$realname.d/$shared_archive_member_spec.o~( func_echo_all "#! $soname($shared_archive_member_spec.o)"; if test shr_64 = "$shared_archive_member_spec"; then func_echo_all "# 64"; else func_echo_all "# 32"; fi; cat $export_symbols ) > $output_objdir/$realname.d/$shared_archive_member_spec.imp~$AR $AR_FLAGS $output_objdir/$soname $output_objdir/$realname.d/$shared_archive_member_spec.o $output_objdir/$realname.d/$shared_archive_member_spec.imp' else # used by -dlpreopen to get the symbols archive_expsym_cmds_CXX="$archive_expsym_cmds_CXX"'~$MV $output_objdir/$realname.d/$soname $output_objdir' fi archive_expsym_cmds_CXX="$archive_expsym_cmds_CXX"'~$RM -r $output_objdir/$realname.d' fi fi ;; beos*) if $LD --help 2>&1 | $GREP ': supported targets:.* elf' > /dev/null; then allow_undefined_flag_CXX=unsupported # Joseph Beckenbach says some releases of gcc # support --undefined. This deserves some investigation. FIXME archive_cmds_CXX='$CC -nostart $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' else ld_shlibs_CXX=no fi ;; chorus*) case $cc_basename in *) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; esac ;; cygwin* | mingw* | pw32* | cegcc*) case $GXX,$cc_basename in ,cl* | no,cl*) # Native MSVC # hardcode_libdir_flag_spec is actually meaningless, as there is # no search path for DLLs. 
hardcode_libdir_flag_spec_CXX=' ' allow_undefined_flag_CXX=unsupported always_export_symbols_CXX=yes file_list_spec_CXX='@' # Tell ltmain to make .lib files, not .a files. libext=lib # Tell ltmain to make .dll files, not .so files. shrext_cmds=.dll # FIXME: Setting linknames here is a bad hack. archive_cmds_CXX='$CC -o $output_objdir/$soname $libobjs $compiler_flags $deplibs -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~linknames=' archive_expsym_cmds_CXX='if test DEF = "`$SED -n -e '\''s/^[ ]*//'\'' -e '\''/^\(;.*\)*$/d'\'' -e '\''s/^\(EXPORTS\|LIBRARY\)\([ ].*\)*$/DEF/p'\'' -e q $export_symbols`" ; then cp "$export_symbols" "$output_objdir/$soname.def"; echo "$tool_output_objdir$soname.def" > "$output_objdir/$soname.exp"; else $SED -e '\''s/^/-link -EXPORT:/'\'' < $export_symbols > $output_objdir/$soname.exp; fi~ $CC -o $tool_output_objdir$soname $libobjs $compiler_flags $deplibs "@$tool_output_objdir$soname.exp" -Wl,-DLL,-IMPLIB:"$tool_output_objdir$libname.dll.lib"~ linknames=' # The linker will not automatically build a static lib if we build a DLL. # _LT_TAGVAR(old_archive_from_new_cmds, CXX)='true' enable_shared_with_static_runtimes_CXX=yes # Don't use ranlib old_postinstall_cmds_CXX='chmod 644 $oldlib' postlink_cmds_CXX='lt_outputfile="@OUTPUT@"~ lt_tool_outputfile="@TOOL_OUTPUT@"~ case $lt_outputfile in *.exe|*.EXE) ;; *) lt_outputfile=$lt_outputfile.exe lt_tool_outputfile=$lt_tool_outputfile.exe ;; esac~ func_to_tool_file "$lt_outputfile"~ if test : != "$MANIFEST_TOOL" && test -f "$lt_outputfile.manifest"; then $MANIFEST_TOOL -manifest "$lt_tool_outputfile.manifest" -outputresource:"$lt_tool_outputfile" || exit 1; $RM "$lt_outputfile.manifest"; fi' ;; *) # g++ # _LT_TAGVAR(hardcode_libdir_flag_spec, CXX) is actually meaningless, # as there is no search path for DLLs. 
hardcode_libdir_flag_spec_CXX='-L$libdir' export_dynamic_flag_spec_CXX='$wl--export-all-symbols' allow_undefined_flag_CXX=unsupported always_export_symbols_CXX=no enable_shared_with_static_runtimes_CXX=yes if $LD --help 2>&1 | $GREP 'auto-import' > /dev/null; then archive_cmds_CXX='$CC -shared -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' # If the export-symbols file already is a .def file, use it as # is; otherwise, prepend EXPORTS... archive_expsym_cmds_CXX='if test DEF = "`$SED -n -e '\''s/^[ ]*//'\'' -e '\''/^\(;.*\)*$/d'\'' -e '\''s/^\(EXPORTS\|LIBRARY\)\([ ].*\)*$/DEF/p'\'' -e q $export_symbols`" ; then cp $export_symbols $output_objdir/$soname.def; else echo EXPORTS > $output_objdir/$soname.def; cat $export_symbols >> $output_objdir/$soname.def; fi~ $CC -shared -nostdlib $output_objdir/$soname.def $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $output_objdir/$soname $wl--enable-auto-image-base -Xlinker --out-implib -Xlinker $lib' else ld_shlibs_CXX=no fi ;; esac ;; darwin* | rhapsody*) archive_cmds_need_lc_CXX=no hardcode_direct_CXX=no hardcode_automatic_CXX=yes hardcode_shlibpath_var_CXX=unsupported if test yes = "$lt_cv_ld_force_load"; then whole_archive_flag_spec_CXX='`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience $wl-force_load,$conv\"; done; func_echo_all \"$new_convenience\"`' else whole_archive_flag_spec_CXX='' fi link_all_deplibs_CXX=yes allow_undefined_flag_CXX=$_lt_dar_allow_undefined case $cc_basename in ifort*|nagfor*) _lt_dar_can_shared=yes ;; *) _lt_dar_can_shared=$GCC ;; esac if test yes = "$_lt_dar_can_shared"; then output_verbose_link_cmd=func_echo_all archive_cmds_CXX="\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod$_lt_dsymutil" module_cmds_CXX="\$CC 
\$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags$_lt_dsymutil" archive_expsym_cmds_CXX="sed 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$libobjs \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring $_lt_dar_single_mod$_lt_dar_export_syms$_lt_dsymutil" module_expsym_cmds_CXX="sed -e 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC \$allow_undefined_flag -o \$lib -bundle \$libobjs \$deplibs \$compiler_flags$_lt_dar_export_syms$_lt_dsymutil" if test yes != "$lt_cv_apple_cc_single_mod"; then archive_cmds_CXX="\$CC -r -keep_private_externs -nostdlib -o \$lib-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$lib-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring$_lt_dsymutil" archive_expsym_cmds_CXX="sed 's|^|_|' < \$export_symbols > \$output_objdir/\$libname-symbols.expsym~\$CC -r -keep_private_externs -nostdlib -o \$lib-master.o \$libobjs~\$CC -dynamiclib \$allow_undefined_flag -o \$lib \$lib-master.o \$deplibs \$compiler_flags -install_name \$rpath/\$soname \$verstring$_lt_dar_export_syms$_lt_dsymutil" fi else ld_shlibs_CXX=no fi ;; os2*) hardcode_libdir_flag_spec_CXX='-L$libdir' hardcode_minus_L_CXX=yes allow_undefined_flag_CXX=unsupported shrext_cmds=.dll archive_cmds_CXX='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > $output_objdir/$libname.def~ $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ $ECHO EXPORTS >> $output_objdir/$libname.def~ emxexp $libobjs | $SED /"_DLL_InitTerm"/d >> $output_objdir/$libname.def~ $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ emximp -o $lib $output_objdir/$libname.def' archive_expsym_cmds_CXX='$ECHO "LIBRARY ${soname%$shared_ext} INITINSTANCE TERMINSTANCE" > 
$output_objdir/$libname.def~ $ECHO "DESCRIPTION \"$libname\"" >> $output_objdir/$libname.def~ $ECHO "DATA MULTIPLE NONSHARED" >> $output_objdir/$libname.def~ $ECHO EXPORTS >> $output_objdir/$libname.def~ prefix_cmds="$SED"~ if test EXPORTS = "`$SED 1q $export_symbols`"; then prefix_cmds="$prefix_cmds -e 1d"; fi~ prefix_cmds="$prefix_cmds -e \"s/^\(.*\)$/_\1/g\""~ cat $export_symbols | $prefix_cmds >> $output_objdir/$libname.def~ $CC -Zdll -Zcrtdll -o $output_objdir/$soname $libobjs $deplibs $compiler_flags $output_objdir/$libname.def~ emximp -o $lib $output_objdir/$libname.def' old_archive_From_new_cmds_CXX='emximp -o $output_objdir/${libname}_dll.a $output_objdir/$libname.def' enable_shared_with_static_runtimes_CXX=yes ;; dgux*) case $cc_basename in ec++*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; ghcx*) # Green Hills C++ Compiler # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; *) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; esac ;; freebsd2.*) # C++ shared libraries reported to be fairly broken before # switch to ELF ld_shlibs_CXX=no ;; freebsd-elf*) archive_cmds_need_lc_CXX=no ;; freebsd* | dragonfly*) # FreeBSD 3 and later use GNU C++ and GNU ld with standard ELF # conventions ld_shlibs_CXX=yes ;; haiku*) archive_cmds_CXX='$CC -shared $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' link_all_deplibs_CXX=yes ;; hpux9*) hardcode_libdir_flag_spec_CXX='$wl+b $wl$libdir' hardcode_libdir_separator_CXX=: export_dynamic_flag_spec_CXX='$wl-E' hardcode_direct_CXX=yes hardcode_minus_L_CXX=yes # Not in the search PATH, # but as the default # location of the library. 
case $cc_basename in CC*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; aCC*) archive_cmds_CXX='$RM $output_objdir/$soname~$CC -b $wl+b $wl$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $EGREP "\-L"`; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' ;; *) if test yes = "$GXX"; then archive_cmds_CXX='$RM $output_objdir/$soname~$CC -shared -nostdlib $pic_flag $wl+b $wl$install_libdir -o $output_objdir/$soname $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~test "x$output_objdir/$soname" = "x$lib" || mv $output_objdir/$soname $lib' else # FIXME: insert proper C++ library support ld_shlibs_CXX=no fi ;; esac ;; hpux10*|hpux11*) if test no = "$with_gnu_ld"; then hardcode_libdir_flag_spec_CXX='$wl+b $wl$libdir' hardcode_libdir_separator_CXX=: case $host_cpu in hppa*64*|ia64*) ;; *) export_dynamic_flag_spec_CXX='$wl-E' ;; esac fi case $host_cpu in hppa*64*|ia64*) hardcode_direct_CXX=no hardcode_shlibpath_var_CXX=no ;; *) hardcode_direct_CXX=yes hardcode_direct_absolute_CXX=yes hardcode_minus_L_CXX=yes # Not in the search PATH, # but as the default # location of the library. 
;; esac case $cc_basename in CC*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; aCC*) case $host_cpu in hppa*64*) archive_cmds_CXX='$CC -b $wl+h $wl$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; ia64*) archive_cmds_CXX='$CC -b $wl+h $wl$soname $wl+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; *) archive_cmds_CXX='$CC -b $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; esac # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. output_verbose_link_cmd='templist=`($CC -b $CFLAGS -v conftest.$objext 2>&1) | $GREP "\-L"`; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' ;; *) if test yes = "$GXX"; then if test no = "$with_gnu_ld"; then case $host_cpu in hppa*64*) archive_cmds_CXX='$CC -shared -nostdlib -fPIC $wl+h $wl$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; ia64*) archive_cmds_CXX='$CC -shared -nostdlib $pic_flag $wl+h $wl$soname $wl+nodefaultrpath -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; *) archive_cmds_CXX='$CC -shared -nostdlib $pic_flag $wl+h $wl$soname $wl+b $wl$install_libdir -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' ;; esac fi else # FIXME: insert proper C++ library support ld_shlibs_CXX=no fi ;; esac ;; interix[3-9]*) hardcode_direct_CXX=no hardcode_shlibpath_var_CXX=no hardcode_libdir_flag_spec_CXX='$wl-rpath,$libdir' export_dynamic_flag_spec_CXX='$wl-E' # Hack: On 
Interix 3.x, we cannot compile PIC because of a broken gcc. # Instead, shared libraries are loaded at an image base (0x10000000 by # default) and relocated if they conflict, which is a slow very memory # consuming and fragmenting process. To avoid this, we pick a random, # 256 KiB-aligned image base between 0x50000000 and 0x6FFC0000 at link # time. Moving up from 0x10000000 also allows more sbrk(2) space. archive_cmds_CXX='$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' archive_expsym_cmds_CXX='sed "s|^|_|" $export_symbols >$output_objdir/$soname.expsym~$CC -shared $pic_flag $libobjs $deplibs $compiler_flags $wl-h,$soname $wl--retain-symbols-file,$output_objdir/$soname.expsym $wl--image-base,`expr ${RANDOM-$$} % 4096 / 2 \* 262144 + 1342177280` -o $lib' ;; irix5* | irix6*) case $cc_basename in CC*) # SGI C++ archive_cmds_CXX='$CC -shared -all -multigot $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' # Archives containing C++ object files must be created using # "CC -ar", where "CC" is the IRIX C++ compiler. This is # necessary to make sure instantiated templates are included # in the archive. 
old_archive_cmds_CXX='$CC -ar -WR,-u -o $oldlib $oldobjs' ;; *) if test yes = "$GXX"; then if test no = "$with_gnu_ld"; then archive_cmds_CXX='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' else archive_cmds_CXX='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` -o $lib' fi fi link_all_deplibs_CXX=yes ;; esac hardcode_libdir_flag_spec_CXX='$wl-rpath $wl$libdir' hardcode_libdir_separator_CXX=: inherit_rpath_CXX=yes ;; linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) case $cc_basename in KCC*) # Kuck and Associates, Inc. (KAI) C++ Compiler # KCC will only create a shared library if the output file # ends with ".so" (or ".sl" for HP-UX), so rename the library # to its proper name (with version) after linking. archive_cmds_CXX='tempext=`echo $shared_ext | $SED -e '\''s/\([^()0-9A-Za-z{}]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\$tempext\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib' archive_expsym_cmds_CXX='tempext=`echo $shared_ext | $SED -e '\''s/\([^()0-9A-Za-z{}]\)/\\\\\1/g'\''`; templib=`echo $lib | $SED -e "s/\$tempext\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib $wl-retain-symbols-file,$export_symbols; mv \$templib $lib' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. 
# # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. output_verbose_link_cmd='templist=`$CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1 | $GREP "ld"`; rm -f libconftest$shared_ext; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' hardcode_libdir_flag_spec_CXX='$wl-rpath,$libdir' export_dynamic_flag_spec_CXX='$wl--export-dynamic' # Archives containing C++ object files must be created using # "CC -Bstatic", where "CC" is the KAI C++ compiler. old_archive_cmds_CXX='$CC -Bstatic -o $oldlib $oldobjs' ;; icpc* | ecpc* ) # Intel C++ with_gnu_ld=yes # version 8.0 and above of icpc choke on multiply defined symbols # if we add $predep_objects and $postdep_objects, however 7.1 and # earlier do not add the objects themselves. case `$CC -V 2>&1` in *"Version 7."*) archive_cmds_CXX='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib' archive_expsym_cmds_CXX='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' ;; *) # Version 8.0 or newer tmp_idyn= case $host_cpu in ia64*) tmp_idyn=' -i_dynamic';; esac archive_cmds_CXX='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' archive_expsym_cmds_CXX='$CC -shared'"$tmp_idyn"' $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' ;; esac archive_cmds_need_lc_CXX=no hardcode_libdir_flag_spec_CXX='$wl-rpath,$libdir' export_dynamic_flag_spec_CXX='$wl--export-dynamic' whole_archive_flag_spec_CXX='$wl--whole-archive$convenience $wl--no-whole-archive' ;; pgCC* | pgcpp*) # Portland Group C++ compiler case `$CC -V` in *pgCC\ 
[1-5].* | *pgcpp\ [1-5].*) prelink_cmds_CXX='tpldir=Template.dir~ rm -rf $tpldir~ $CC --prelink_objects --instantiation_dir $tpldir $objs $libobjs $compile_deplibs~ compile_command="$compile_command `find $tpldir -name \*.o | sort | $NL2SP`"' old_archive_cmds_CXX='tpldir=Template.dir~ rm -rf $tpldir~ $CC --prelink_objects --instantiation_dir $tpldir $oldobjs$old_deplibs~ $AR $AR_FLAGS $oldlib$oldobjs$old_deplibs `find $tpldir -name \*.o | sort | $NL2SP`~ $RANLIB $oldlib' archive_cmds_CXX='tpldir=Template.dir~ rm -rf $tpldir~ $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib' archive_expsym_cmds_CXX='tpldir=Template.dir~ rm -rf $tpldir~ $CC --prelink_objects --instantiation_dir $tpldir $predep_objects $libobjs $deplibs $convenience $postdep_objects~ $CC -shared $pic_flag $predep_objects $libobjs $deplibs `find $tpldir -name \*.o | sort | $NL2SP` $postdep_objects $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' ;; *) # Version 6 and above use weak symbols archive_cmds_CXX='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib' archive_expsym_cmds_CXX='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname $wl-retain-symbols-file $wl$export_symbols -o $lib' ;; esac hardcode_libdir_flag_spec_CXX='$wl--rpath $wl$libdir' export_dynamic_flag_spec_CXX='$wl--export-dynamic' whole_archive_flag_spec_CXX='$wl--whole-archive`for conv in $convenience\"\"; do test -n \"$conv\" && new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' ;; cxx*) # Compaq C++ archive_cmds_CXX='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects 
$compiler_flags $wl-soname $wl$soname -o $lib' archive_expsym_cmds_CXX='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname -o $lib $wl-retain-symbols-file $wl$export_symbols' runpath_var=LD_RUN_PATH hardcode_libdir_flag_spec_CXX='-rpath $libdir' hardcode_libdir_separator_CXX=: # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld"`; templist=`func_echo_all "$templist" | $SED "s/\(^.*ld.*\)\( .*ld .*$\)/\1/"`; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "X$list" | $Xsed' ;; xl* | mpixl* | bgxl*) # IBM XL 8.0 on PPC, with GNU ld hardcode_libdir_flag_spec_CXX='$wl-rpath $wl$libdir' export_dynamic_flag_spec_CXX='$wl--export-dynamic' archive_cmds_CXX='$CC -qmkshrobj $libobjs $deplibs $compiler_flags $wl-soname $wl$soname -o $lib' if test yes = "$supports_anon_versioning"; then archive_expsym_cmds_CXX='echo "{ global:" > $output_objdir/$libname.ver~ cat $export_symbols | sed -e "s/\(.*\)/\1;/" >> $output_objdir/$libname.ver~ echo "local: *; };" >> $output_objdir/$libname.ver~ $CC -qmkshrobj $libobjs $deplibs $compiler_flags $wl-soname $wl$soname $wl-version-script $wl$output_objdir/$libname.ver -o $lib' fi ;; *) case `$CC -V 2>&1 | sed 5q` in *Sun\ C*) # Sun C++ 5.9 no_undefined_flag_CXX=' -zdefs' archive_cmds_CXX='$CC -G$allow_undefined_flag -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' archive_expsym_cmds_CXX='$CC -G$allow_undefined_flag -h$soname -o $lib $predep_objects $libobjs $deplibs 
$postdep_objects $compiler_flags $wl-retain-symbols-file $wl$export_symbols' hardcode_libdir_flag_spec_CXX='-R$libdir' whole_archive_flag_spec_CXX='$wl--whole-archive`new_convenience=; for conv in $convenience\"\"; do test -z \"$conv\" || new_convenience=\"$new_convenience,$conv\"; done; func_echo_all \"$new_convenience\"` $wl--no-whole-archive' compiler_needs_object_CXX=yes # Not sure whether something based on # $CC $CFLAGS -v conftest.$objext -o libconftest$shared_ext 2>&1 # would be better. output_verbose_link_cmd='func_echo_all' # Archives containing C++ object files must be created using # "CC -xar", where "CC" is the Sun C++ compiler. This is # necessary to make sure instantiated templates are included # in the archive. old_archive_cmds_CXX='$CC -xar -o $oldlib $oldobjs' ;; esac ;; esac ;; lynxos*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; m88k*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; mvs*) case $cc_basename in cxx*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; *) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; esac ;; netbsd*) if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then archive_cmds_CXX='$LD -Bshareable -o $lib $predep_objects $libobjs $deplibs $postdep_objects $linker_flags' wlarc= hardcode_libdir_flag_spec_CXX='-R$libdir' hardcode_direct_CXX=yes hardcode_shlibpath_var_CXX=no fi # Workaround some broken pre-1.5 toolchains output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP conftest.$objext | $SED -e "s:-lgcc -lc -lgcc::"' ;; *nto* | *qnx*) ld_shlibs_CXX=yes ;; openbsd* | bitrig*) if test -f /usr/libexec/ld.so; then hardcode_direct_CXX=yes hardcode_shlibpath_var_CXX=no hardcode_direct_absolute_CXX=yes archive_cmds_CXX='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -o $lib' hardcode_libdir_flag_spec_CXX='$wl-rpath,$libdir' if test -z "`echo __ELF__ | $CC -E - | grep __ELF__`"; then 
archive_expsym_cmds_CXX='$CC -shared $pic_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-retain-symbols-file,$export_symbols -o $lib' export_dynamic_flag_spec_CXX='$wl-E' whole_archive_flag_spec_CXX=$wlarc'--whole-archive$convenience '$wlarc'--no-whole-archive' fi output_verbose_link_cmd=func_echo_all else ld_shlibs_CXX=no fi ;; osf3* | osf4* | osf5*) case $cc_basename in KCC*) # Kuck and Associates, Inc. (KAI) C++ Compiler # KCC will only create a shared library if the output file # ends with ".so" (or ".sl" for HP-UX), so rename the library # to its proper name (with version) after linking. archive_cmds_CXX='tempext=`echo $shared_ext | $SED -e '\''s/\([^()0-9A-Za-z{}]\)/\\\\\1/g'\''`; templib=`echo "$lib" | $SED -e "s/\$tempext\..*/.so/"`; $CC $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags --soname $soname -o \$templib; mv \$templib $lib' hardcode_libdir_flag_spec_CXX='$wl-rpath,$libdir' hardcode_libdir_separator_CXX=: # Archives containing C++ object files must be created using # the KAI C++ compiler. 
case $host in osf3*) old_archive_cmds_CXX='$CC -Bstatic -o $oldlib $oldobjs' ;; *) old_archive_cmds_CXX='$CC -o $oldlib $oldobjs' ;; esac ;; RCC*) # Rational C++ 2.4.1 # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; cxx*) case $host in osf3*) allow_undefined_flag_CXX=' $wl-expect_unresolved $wl\*' archive_cmds_CXX='$CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $soname `test -n "$verstring" && func_echo_all "$wl-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' hardcode_libdir_flag_spec_CXX='$wl-rpath $wl$libdir' ;; *) allow_undefined_flag_CXX=' -expect_unresolved \*' archive_cmds_CXX='$CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname `test -n "$verstring" && func_echo_all "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib' archive_expsym_cmds_CXX='for i in `cat $export_symbols`; do printf "%s %s\\n" -exported_symbol "\$i" >> $lib.exp; done~ echo "-hidden">> $lib.exp~ $CC -shared$allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags -msym -soname $soname $wl-input $wl$lib.exp `test -n "$verstring" && $ECHO "-set_version $verstring"` -update_registry $output_objdir/so_locations -o $lib~ $RM $lib.exp' hardcode_libdir_flag_spec_CXX='-rpath $libdir' ;; esac hardcode_libdir_separator_CXX=: # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. # # There doesn't appear to be a way to prevent this compiler from # explicitly linking system object files so we need to strip them # from the output so that they don't get included in the library # dependencies. 
output_verbose_link_cmd='templist=`$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP "ld" | $GREP -v "ld:"`; templist=`func_echo_all "$templist" | $SED "s/\(^.*ld.*\)\( .*ld.*$\)/\1/"`; list= ; for z in $templist; do case $z in conftest.$objext) list="$list $z";; *.$objext);; *) list="$list $z";;esac; done; func_echo_all "$list"' ;; *) if test yes,no = "$GXX,$with_gnu_ld"; then allow_undefined_flag_CXX=' $wl-expect_unresolved $wl\*' case $host in osf3*) archive_cmds_CXX='$CC -shared -nostdlib $allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' ;; *) archive_cmds_CXX='$CC -shared $pic_flag -nostdlib $allow_undefined_flag $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-msym $wl-soname $wl$soname `test -n "$verstring" && func_echo_all "$wl-set_version $wl$verstring"` $wl-update_registry $wl$output_objdir/so_locations -o $lib' ;; esac hardcode_libdir_flag_spec_CXX='$wl-rpath $wl$libdir' hardcode_libdir_separator_CXX=: # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. 
output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"' else # FIXME: insert proper C++ library support ld_shlibs_CXX=no fi ;; esac ;; psos*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; sunos4*) case $cc_basename in CC*) # Sun C++ 4.x # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; lcc*) # Lucid # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; *) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; esac ;; solaris*) case $cc_basename in CC* | sunCC*) # Sun C++ 4.2, 5.x and Centerline C++ archive_cmds_need_lc_CXX=yes no_undefined_flag_CXX=' -zdefs' archive_cmds_CXX='$CC -G$allow_undefined_flag -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags' archive_expsym_cmds_CXX='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -G$allow_undefined_flag $wl-M $wl$lib.exp -h$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' hardcode_libdir_flag_spec_CXX='-R$libdir' hardcode_shlibpath_var_CXX=no case $host_os in solaris2.[0-5] | solaris2.[0-5].*) ;; *) # The compiler driver will combine and reorder linker options, # but understands '-z linker_flag'. # Supported since Solaris 2.6 (maybe 2.5.1?) whole_archive_flag_spec_CXX='-z allextract$convenience -z defaultextract' ;; esac link_all_deplibs_CXX=yes output_verbose_link_cmd='func_echo_all' # Archives containing C++ object files must be created using # "CC -xar", where "CC" is the Sun C++ compiler. This is # necessary to make sure instantiated templates are included # in the archive. old_archive_cmds_CXX='$CC -xar -o $oldlib $oldobjs' ;; gcx*) # Green Hills C++ Compiler archive_cmds_CXX='$CC -shared $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-h $wl$soname -o $lib' # The C++ compiler must be used to create the archive. 
old_archive_cmds_CXX='$CC $LDFLAGS -archive -o $oldlib $oldobjs' ;; *) # GNU C++ compiler with Solaris linker if test yes,no = "$GXX,$with_gnu_ld"; then no_undefined_flag_CXX=' $wl-z ${wl}defs' if $CC --version | $GREP -v '^2\.7' > /dev/null; then archive_cmds_CXX='$CC -shared $pic_flag -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-h $wl$soname -o $lib' archive_expsym_cmds_CXX='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -shared $pic_flag -nostdlib $wl-M $wl$lib.exp $wl-h $wl$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. output_verbose_link_cmd='$CC -shared $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"' else # g++ 2.7 appears to require '-G' NOT '-shared' on this # platform. archive_cmds_CXX='$CC -G -nostdlib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags $wl-h $wl$soname -o $lib' archive_expsym_cmds_CXX='echo "{ global:" > $lib.exp~cat $export_symbols | $SED -e "s/\(.*\)/\1;/" >> $lib.exp~echo "local: *; };" >> $lib.exp~ $CC -G -nostdlib $wl-M $wl$lib.exp $wl-h $wl$soname -o $lib $predep_objects $libobjs $deplibs $postdep_objects $compiler_flags~$RM $lib.exp' # Commands to make compiler produce verbose output that lists # what "hidden" libraries, object files and flags are used when # linking a shared library. 
output_verbose_link_cmd='$CC -G $CFLAGS -v conftest.$objext 2>&1 | $GREP -v "^Configured with:" | $GREP "\-L"' fi hardcode_libdir_flag_spec_CXX='$wl-R $wl$libdir' case $host_os in solaris2.[0-5] | solaris2.[0-5].*) ;; *) whole_archive_flag_spec_CXX='$wl-z ${wl}allextract$convenience $wl-z ${wl}defaultextract' ;; esac fi ;; esac ;; sysv4*uw2* | sysv5OpenUNIX* | sysv5UnixWare7.[01].[10]* | unixware7* | sco3.2v5.0.[024]*) no_undefined_flag_CXX='$wl-z,text' archive_cmds_need_lc_CXX=no hardcode_shlibpath_var_CXX=no runpath_var='LD_RUN_PATH' case $cc_basename in CC*) archive_cmds_CXX='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds_CXX='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' ;; *) archive_cmds_CXX='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds_CXX='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' ;; esac ;; sysv5* | sco3.2v5* | sco5v6*) # Note: We CANNOT use -z defs as we might desire, because we do not # link with -lc, and that would cause any symbols used from libc to # always be unresolved, which means just about no library would # ever link correctly. If we're not using GNU ld we use -z text # though, which does catch some bad symbols but isn't as heavy-handed # as -z defs. 
no_undefined_flag_CXX='$wl-z,text' allow_undefined_flag_CXX='$wl-z,nodefs' archive_cmds_need_lc_CXX=no hardcode_shlibpath_var_CXX=no hardcode_libdir_flag_spec_CXX='$wl-R,$libdir' hardcode_libdir_separator_CXX=':' link_all_deplibs_CXX=yes export_dynamic_flag_spec_CXX='$wl-Bexport' runpath_var='LD_RUN_PATH' case $cc_basename in CC*) archive_cmds_CXX='$CC -G $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds_CXX='$CC -G $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' old_archive_cmds_CXX='$CC -Tprelink_objects $oldobjs~ '"$old_archive_cmds_CXX" reload_cmds_CXX='$CC -Tprelink_objects $reload_objs~ '"$reload_cmds_CXX" ;; *) archive_cmds_CXX='$CC -shared $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' archive_expsym_cmds_CXX='$CC -shared $wl-Bexport:$export_symbols $wl-h,$soname -o $lib $libobjs $deplibs $compiler_flags' ;; esac ;; tandem*) case $cc_basename in NCC*) # NonStop-UX NCC 3.20 # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; *) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; esac ;; vxworks*) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; *) # FIXME: insert proper C++ library support ld_shlibs_CXX=no ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs_CXX" >&5 $as_echo "$ld_shlibs_CXX" >&6; } test no = "$ld_shlibs_CXX" && can_build_shared=no GCC_CXX=$GXX LD_CXX=$LD ## CAVEAT EMPTOR: ## There is no encapsulation within the following macros, do not change ## the running order or otherwise move them around unless you know exactly ## what you are doing... 
# Dependencies to place before and after the object being linked: predep_objects_CXX= postdep_objects_CXX= predeps_CXX= postdeps_CXX= compiler_lib_search_path_CXX= cat > conftest.$ac_ext <<_LT_EOF class Foo { public: Foo (void) { a = 0; } private: int a; }; _LT_EOF _lt_libdeps_save_CFLAGS=$CFLAGS case "$CC $CFLAGS " in #( *\ -flto*\ *) CFLAGS="$CFLAGS -fno-lto" ;; *\ -fwhopr*\ *) CFLAGS="$CFLAGS -fno-whopr" ;; *\ -fuse-linker-plugin*\ *) CFLAGS="$CFLAGS -fno-use-linker-plugin" ;; esac if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then # Parse the compiler output and extract the necessary # objects, libraries and library flags. # Sentinel used to keep track of whether or not we are before # the conftest object file. pre_test_object_deps_done=no for p in `eval "$output_verbose_link_cmd"`; do case $prev$p in -L* | -R* | -l*) # Some compilers place space between "-{L,R}" and the path. # Remove the space. if test x-L = "$p" || test x-R = "$p"; then prev=$p continue fi # Expand the sysroot to ease extracting the directories later. if test -z "$prev"; then case $p in -L*) func_stripname_cnf '-L' '' "$p"; prev=-L; p=$func_stripname_result ;; -R*) func_stripname_cnf '-R' '' "$p"; prev=-R; p=$func_stripname_result ;; -l*) func_stripname_cnf '-l' '' "$p"; prev=-l; p=$func_stripname_result ;; esac fi case $p in =*) func_stripname_cnf '=' '' "$p"; p=$lt_sysroot$func_stripname_result ;; esac if test no = "$pre_test_object_deps_done"; then case $prev in -L | -R) # Internal compiler library paths should come after those # provided the user. The postdeps already come after the # user supplied libs so there is no need to process them. 
if test -z "$compiler_lib_search_path_CXX"; then compiler_lib_search_path_CXX=$prev$p else compiler_lib_search_path_CXX="${compiler_lib_search_path_CXX} $prev$p" fi ;; # The "-l" case would never come before the object being # linked, so don't bother handling this case. esac else if test -z "$postdeps_CXX"; then postdeps_CXX=$prev$p else postdeps_CXX="${postdeps_CXX} $prev$p" fi fi prev= ;; *.lto.$objext) ;; # Ignore GCC LTO objects *.$objext) # This assumes that the test object file only shows up # once in the compiler output. if test "$p" = "conftest.$objext"; then pre_test_object_deps_done=yes continue fi if test no = "$pre_test_object_deps_done"; then if test -z "$predep_objects_CXX"; then predep_objects_CXX=$p else predep_objects_CXX="$predep_objects_CXX $p" fi else if test -z "$postdep_objects_CXX"; then postdep_objects_CXX=$p else postdep_objects_CXX="$postdep_objects_CXX $p" fi fi ;; *) ;; # Ignore the rest. esac done # Clean up. rm -f a.out a.exe else echo "libtool.m4: error: problem compiling CXX test program" fi $RM -f confest.$objext CFLAGS=$_lt_libdeps_save_CFLAGS # PORTME: override above test on systems where it is broken case $host_os in interix[3-9]*) # Interix 3.5 installs completely hosed .la files for C++, so rather than # hack all around it, let's just trust "g++" to DTRT. predep_objects_CXX= postdep_objects_CXX= postdeps_CXX= ;; esac case " $postdeps_CXX " in *" -lc "*) archive_cmds_need_lc_CXX=no ;; esac compiler_lib_search_dirs_CXX= if test -n "${compiler_lib_search_path_CXX}"; then compiler_lib_search_dirs_CXX=`echo " ${compiler_lib_search_path_CXX}" | $SED -e 's! -L! !g' -e 's!^ !!'` fi lt_prog_compiler_wl_CXX= lt_prog_compiler_pic_CXX= lt_prog_compiler_static_CXX= # C++ specific cases for pic, static, wl, etc. if test yes = "$GXX"; then lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_static_CXX='-static' case $host_os in aix*) # All AIX code is PIC. 
if test ia64 = "$host_cpu"; then # AIX 5 now supports IA64 processor lt_prog_compiler_static_CXX='-Bstatic' fi lt_prog_compiler_pic_CXX='-fPIC' ;; amigaos*) case $host_cpu in powerpc) # see comment about AmigaOS4 .so support lt_prog_compiler_pic_CXX='-fPIC' ;; m68k) # FIXME: we need at least 68020 code to build shared libraries, but # adding the '-m68020' flag to GCC prevents building anything better, # like '-m68040'. lt_prog_compiler_pic_CXX='-m68020 -resident32 -malways-restore-a4' ;; esac ;; beos* | irix5* | irix6* | nonstopux* | osf3* | osf4* | osf5*) # PIC is the default for these OSes. ;; mingw* | cygwin* | os2* | pw32* | cegcc*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). # Although the cygwin gcc ignores -fPIC, still need this for old-style # (--disable-auto-import) libraries lt_prog_compiler_pic_CXX='-DDLL_EXPORT' case $host_os in os2*) lt_prog_compiler_static_CXX='$wl-static' ;; esac ;; darwin* | rhapsody*) # PIC is the default on this platform # Common symbols not allowed in MH_DYLIB files lt_prog_compiler_pic_CXX='-fno-common' ;; *djgpp*) # DJGPP does not support shared libraries at all lt_prog_compiler_pic_CXX= ;; haiku*) # PIC is the default for Haiku. # The "-static" flag exists, but is broken. lt_prog_compiler_static_CXX= ;; interix[3-9]*) # Interix 3.x gcc -fpic/-fPIC options generate broken code. # Instead, we relocate shared libraries at runtime. ;; sysv4*MP*) if test -d /usr/nec; then lt_prog_compiler_pic_CXX=-Kconform_pic fi ;; hpux*) # PIC is the default for 64-bit PA HP-UX, but not for 32-bit # PA HP-UX. On IA64 HP-UX, PIC is the default but the pic flag # sets the default TLS model and affects inlining. case $host_cpu in hppa*64*) ;; *) lt_prog_compiler_pic_CXX='-fPIC' ;; esac ;; *qnx* | *nto*) # QNX uses GNU C++, but need to define -shared option too, otherwise # it will coredump. 
lt_prog_compiler_pic_CXX='-fPIC -shared' ;; *) lt_prog_compiler_pic_CXX='-fPIC' ;; esac else case $host_os in aix[4-9]*) # All AIX code is PIC. if test ia64 = "$host_cpu"; then # AIX 5 now supports IA64 processor lt_prog_compiler_static_CXX='-Bstatic' else lt_prog_compiler_static_CXX='-bnso -bI:/lib/syscalls.exp' fi ;; chorus*) case $cc_basename in cxch68*) # Green Hills C++ Compiler # _LT_TAGVAR(lt_prog_compiler_static, CXX)="--no_auto_instantiation -u __main -u __premain -u _abort -r $COOL_DIR/lib/libOrb.a $MVME_DIR/lib/CC/libC.a $MVME_DIR/lib/classix/libcx.s.a" ;; esac ;; mingw* | cygwin* | os2* | pw32* | cegcc*) # This hack is so that the source file can tell whether it is being # built for inclusion in a dll (and should export symbols for example). lt_prog_compiler_pic_CXX='-DDLL_EXPORT' ;; dgux*) case $cc_basename in ec++*) lt_prog_compiler_pic_CXX='-KPIC' ;; ghcx*) # Green Hills C++ Compiler lt_prog_compiler_pic_CXX='-pic' ;; *) ;; esac ;; freebsd* | dragonfly*) # FreeBSD uses GNU C++ ;; hpux9* | hpux10* | hpux11*) case $cc_basename in CC*) lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_static_CXX='$wl-a ${wl}archive' if test ia64 != "$host_cpu"; then lt_prog_compiler_pic_CXX='+Z' fi ;; aCC*) lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_static_CXX='$wl-a ${wl}archive' case $host_cpu in hppa*64*|ia64*) # +Z the default ;; *) lt_prog_compiler_pic_CXX='+Z' ;; esac ;; *) ;; esac ;; interix*) # This is c89, which is MS Visual C++ (no shared libs) # Anyone wants to do a port? ;; irix5* | irix6* | nonstopux*) case $cc_basename in CC*) lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_static_CXX='-non_shared' # CC pic flag -KPIC is the default. ;; *) ;; esac ;; linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) case $cc_basename in KCC*) # KAI C++ Compiler lt_prog_compiler_wl_CXX='--backend -Wl,' lt_prog_compiler_pic_CXX='-fPIC' ;; ecpc* ) # old Intel C++ for x86_64, which still supported -KPIC. 
lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_pic_CXX='-KPIC' lt_prog_compiler_static_CXX='-static' ;; icpc* ) # Intel C++, used to be incompatible with GCC. # ICC 10 doesn't accept -KPIC any more. lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_pic_CXX='-fPIC' lt_prog_compiler_static_CXX='-static' ;; pgCC* | pgcpp*) # Portland Group C++ compiler lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_pic_CXX='-fpic' lt_prog_compiler_static_CXX='-Bstatic' ;; cxx*) # Compaq C++ # Make sure the PIC flag is empty. It appears that all Alpha # Linux and Compaq Tru64 Unix objects are PIC. lt_prog_compiler_pic_CXX= lt_prog_compiler_static_CXX='-non_shared' ;; xlc* | xlC* | bgxl[cC]* | mpixl[cC]*) # IBM XL 8.0, 9.0 on PPC and BlueGene lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_pic_CXX='-qpic' lt_prog_compiler_static_CXX='-qstaticlink' ;; *) case `$CC -V 2>&1 | sed 5q` in *Sun\ C*) # Sun C++ 5.9 lt_prog_compiler_pic_CXX='-KPIC' lt_prog_compiler_static_CXX='-Bstatic' lt_prog_compiler_wl_CXX='-Qoption ld ' ;; esac ;; esac ;; lynxos*) ;; m88k*) ;; mvs*) case $cc_basename in cxx*) lt_prog_compiler_pic_CXX='-W c,exportall' ;; *) ;; esac ;; netbsd* | netbsdelf*-gnu) ;; *qnx* | *nto*) # QNX uses GNU C++, but need to define -shared option too, otherwise # it will coredump. lt_prog_compiler_pic_CXX='-fPIC -shared' ;; osf3* | osf4* | osf5*) case $cc_basename in KCC*) lt_prog_compiler_wl_CXX='--backend -Wl,' ;; RCC*) # Rational C++ 2.4.1 lt_prog_compiler_pic_CXX='-pic' ;; cxx*) # Digital/Compaq C++ lt_prog_compiler_wl_CXX='-Wl,' # Make sure the PIC flag is empty. It appears that all Alpha # Linux and Compaq Tru64 Unix objects are PIC. 
lt_prog_compiler_pic_CXX= lt_prog_compiler_static_CXX='-non_shared' ;; *) ;; esac ;; psos*) ;; solaris*) case $cc_basename in CC* | sunCC*) # Sun C++ 4.2, 5.x and Centerline C++ lt_prog_compiler_pic_CXX='-KPIC' lt_prog_compiler_static_CXX='-Bstatic' lt_prog_compiler_wl_CXX='-Qoption ld ' ;; gcx*) # Green Hills C++ Compiler lt_prog_compiler_pic_CXX='-PIC' ;; *) ;; esac ;; sunos4*) case $cc_basename in CC*) # Sun C++ 4.x lt_prog_compiler_pic_CXX='-pic' lt_prog_compiler_static_CXX='-Bstatic' ;; lcc*) # Lucid lt_prog_compiler_pic_CXX='-pic' ;; *) ;; esac ;; sysv5* | unixware* | sco3.2v5* | sco5v6* | OpenUNIX*) case $cc_basename in CC*) lt_prog_compiler_wl_CXX='-Wl,' lt_prog_compiler_pic_CXX='-KPIC' lt_prog_compiler_static_CXX='-Bstatic' ;; esac ;; tandem*) case $cc_basename in NCC*) # NonStop-UX NCC 3.20 lt_prog_compiler_pic_CXX='-KPIC' ;; *) ;; esac ;; vxworks*) ;; *) lt_prog_compiler_can_build_shared_CXX=no ;; esac fi case $host_os in # For platforms that do not support PIC, -DPIC is meaningless: *djgpp*) lt_prog_compiler_pic_CXX= ;; *) lt_prog_compiler_pic_CXX="$lt_prog_compiler_pic_CXX -DPIC" ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $compiler option to produce PIC" >&5 $as_echo_n "checking for $compiler option to produce PIC... " >&6; } if ${lt_cv_prog_compiler_pic_CXX+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_pic_CXX=$lt_prog_compiler_pic_CXX fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_CXX" >&5 $as_echo "$lt_cv_prog_compiler_pic_CXX" >&6; } lt_prog_compiler_pic_CXX=$lt_cv_prog_compiler_pic_CXX # # Check to make sure the PIC flag actually works. # if test -n "$lt_prog_compiler_pic_CXX"; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler PIC flag $lt_prog_compiler_pic_CXX works" >&5 $as_echo_n "checking if $compiler PIC flag $lt_prog_compiler_pic_CXX works... 
" >&6; } if ${lt_cv_prog_compiler_pic_works_CXX+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_pic_works_CXX=no ac_outfile=conftest.$ac_objext echo "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="$lt_prog_compiler_pic_CXX -DPIC" ## exclude from sc_useless_quotes_in_assignment # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. # Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. # The option is referenced via a variable to avoid confusing sed. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) (eval "$lt_compile" 2>conftest.err) ac_status=$? cat conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 if (exit $ac_status) && test -s "$ac_outfile"; then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings other than the usual output. $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' >conftest.exp $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 if test ! -s conftest.er2 || diff conftest.exp conftest.er2 >/dev/null; then lt_cv_prog_compiler_pic_works_CXX=yes fi fi $RM conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_pic_works_CXX" >&5 $as_echo "$lt_cv_prog_compiler_pic_works_CXX" >&6; } if test yes = "$lt_cv_prog_compiler_pic_works_CXX"; then case $lt_prog_compiler_pic_CXX in "" | " "*) ;; *) lt_prog_compiler_pic_CXX=" $lt_prog_compiler_pic_CXX" ;; esac else lt_prog_compiler_pic_CXX= lt_prog_compiler_can_build_shared_CXX=no fi fi # # Check to make sure the static flag actually works. 
# wl=$lt_prog_compiler_wl_CXX eval lt_tmp_static_flag=\"$lt_prog_compiler_static_CXX\" { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler static flag $lt_tmp_static_flag works" >&5 $as_echo_n "checking if $compiler static flag $lt_tmp_static_flag works... " >&6; } if ${lt_cv_prog_compiler_static_works_CXX+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_static_works_CXX=no save_LDFLAGS=$LDFLAGS LDFLAGS="$LDFLAGS $lt_tmp_static_flag" echo "$lt_simple_link_test_code" > conftest.$ac_ext if (eval $ac_link 2>conftest.err) && test -s conftest$ac_exeext; then # The linker can only warn and ignore the option if not recognized # So say no if there are warnings if test -s conftest.err; then # Append any errors to the config.log. cat conftest.err 1>&5 $ECHO "$_lt_linker_boilerplate" | $SED '/^$/d' > conftest.exp $SED '/^$/d; /^ *+/d' conftest.err >conftest.er2 if diff conftest.exp conftest.er2 >/dev/null; then lt_cv_prog_compiler_static_works_CXX=yes fi else lt_cv_prog_compiler_static_works_CXX=yes fi fi $RM -r conftest* LDFLAGS=$save_LDFLAGS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_static_works_CXX" >&5 $as_echo "$lt_cv_prog_compiler_static_works_CXX" >&6; } if test yes = "$lt_cv_prog_compiler_static_works_CXX"; then : else lt_prog_compiler_static_CXX= fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5 $as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; } if ${lt_cv_prog_compiler_c_o_CXX+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_c_o_CXX=no $RM -r conftest 2>/dev/null mkdir conftest cd conftest mkdir out echo "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="-o out/conftest2.$ac_objext" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. 
# Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) (eval "$lt_compile" 2>out/conftest.err) ac_status=$? cat out/conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 if (exit $ac_status) && test -s out/conftest2.$ac_objext then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then lt_cv_prog_compiler_c_o_CXX=yes fi fi chmod u+w . 2>&5 $RM conftest* # SGI C++ compiler will create directory out/ii_files/ for # template instantiation test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files $RM out/* && rmdir out cd .. $RM -r conftest $RM conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o_CXX" >&5 $as_echo "$lt_cv_prog_compiler_c_o_CXX" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking if $compiler supports -c -o file.$ac_objext" >&5 $as_echo_n "checking if $compiler supports -c -o file.$ac_objext... " >&6; } if ${lt_cv_prog_compiler_c_o_CXX+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_prog_compiler_c_o_CXX=no $RM -r conftest 2>/dev/null mkdir conftest cd conftest mkdir out echo "$lt_simple_compile_test_code" > conftest.$ac_ext lt_compiler_flag="-o out/conftest2.$ac_objext" # Insert the option either (1) after the last *FLAGS variable, or # (2) before a word containing "conftest.", or (3) at the end. 
# Note that $ac_compile itself does not contain backslashes and begins # with a dollar sign (not a hyphen), so the echo should work correctly. lt_compile=`echo "$ac_compile" | $SED \ -e 's:.*FLAGS}\{0,1\} :&$lt_compiler_flag :; t' \ -e 's: [^ ]*conftest\.: $lt_compiler_flag&:; t' \ -e 's:$: $lt_compiler_flag:'` (eval echo "\"\$as_me:$LINENO: $lt_compile\"" >&5) (eval "$lt_compile" 2>out/conftest.err) ac_status=$? cat out/conftest.err >&5 echo "$as_me:$LINENO: \$? = $ac_status" >&5 if (exit $ac_status) && test -s out/conftest2.$ac_objext then # The compiler can only warn and ignore the option if not recognized # So say no if there are warnings $ECHO "$_lt_compiler_boilerplate" | $SED '/^$/d' > out/conftest.exp $SED '/^$/d; /^ *+/d' out/conftest.err >out/conftest.er2 if test ! -s out/conftest.er2 || diff out/conftest.exp out/conftest.er2 >/dev/null; then lt_cv_prog_compiler_c_o_CXX=yes fi fi chmod u+w . 2>&5 $RM conftest* # SGI C++ compiler will create directory out/ii_files/ for # template instantiation test -d out/ii_files && $RM out/ii_files/* && rmdir out/ii_files $RM out/* && rmdir out cd .. $RM -r conftest $RM conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_prog_compiler_c_o_CXX" >&5 $as_echo "$lt_cv_prog_compiler_c_o_CXX" >&6; } hard_links=nottested if test no = "$lt_cv_prog_compiler_c_o_CXX" && test no != "$need_locks"; then # do not overwrite the value of need_locks provided by the user { $as_echo "$as_me:${as_lineno-$LINENO}: checking if we can lock with hard links" >&5 $as_echo_n "checking if we can lock with hard links... 
" >&6; } hard_links=yes $RM conftest* ln conftest.a conftest.b 2>/dev/null && hard_links=no touch conftest.a ln conftest.a conftest.b 2>&5 || hard_links=no ln conftest.a conftest.b 2>/dev/null && hard_links=no { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hard_links" >&5 $as_echo "$hard_links" >&6; } if test no = "$hard_links"; then { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: '$CC' does not support '-c -o', so 'make -j' may be unsafe" >&5 $as_echo "$as_me: WARNING: '$CC' does not support '-c -o', so 'make -j' may be unsafe" >&2;} need_locks=warn fi else need_locks=no fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether the $compiler linker ($LD) supports shared libraries" >&5 $as_echo_n "checking whether the $compiler linker ($LD) supports shared libraries... " >&6; } export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' exclude_expsyms_CXX='_GLOBAL_OFFSET_TABLE_|_GLOBAL__F[ID]_.*' case $host_os in aix[4-9]*) # If we're using GNU nm, then we don't want the "-C" option. # -C means demangle to GNU nm, but means don't demangle to AIX nm. # Without the "-l" option, or with the "-B" option, AIX nm treats # weak defined symbols like other global defined symbols, whereas # GNU nm marks them as "W". # While the 'weak' keyword is ignored in the Export File, we need # it in the Import File for the 'aix-soname' feature, so we have # to replace the "-B" option with "-P" for AIX nm. 
if $NM -V 2>&1 | $GREP 'GNU' > /dev/null; then export_symbols_cmds_CXX='$NM -Bpg $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W")) && (substr(\$ 3,1,1) != ".")) { if (\$ 2 == "W") { print \$ 3 " weak" } else { print \$ 3 } } }'\'' | sort -u > $export_symbols' else export_symbols_cmds_CXX='`func_echo_all $NM | $SED -e '\''s/B\([^B]*\)$/P\1/'\''` -PCpgl $libobjs $convenience | awk '\''{ if (((\$ 2 == "T") || (\$ 2 == "D") || (\$ 2 == "B") || (\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) && (substr(\$ 1,1,1) != ".")) { if ((\$ 2 == "W") || (\$ 2 == "V") || (\$ 2 == "Z")) { print \$ 1 " weak" } else { print \$ 1 } } }'\'' | sort -u > $export_symbols' fi ;; pw32*) export_symbols_cmds_CXX=$ltdll_cmds ;; cygwin* | mingw* | cegcc*) case $cc_basename in cl*) exclude_expsyms_CXX='_NULL_IMPORT_DESCRIPTOR|_IMPORT_DESCRIPTOR_.*' ;; *) export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED -e '\''/^[BCDGRS][ ]/s/.*[ ]\([^ ]*\)/\1 DATA/;s/^.*[ ]__nm__\([^ ]*\)[ ][^ ]*/\1 DATA/;/^I[ ]/d;/^[AITW][ ]/s/.* //'\'' | sort | uniq > $export_symbols' exclude_expsyms_CXX='[_]+GLOBAL_OFFSET_TABLE_|[_]+GLOBAL__[FID]_.*|[_]+head_[A-Za-z0-9_]+_dll|[A-Za-z0-9_]+_dll_iname' ;; esac ;; linux* | k*bsd*-gnu | gnu*) link_all_deplibs_CXX=no ;; *) export_symbols_cmds_CXX='$NM $libobjs $convenience | $global_symbol_pipe | $SED '\''s/.* //'\'' | sort | uniq > $export_symbols' ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ld_shlibs_CXX" >&5 $as_echo "$ld_shlibs_CXX" >&6; } test no = "$ld_shlibs_CXX" && can_build_shared=no with_gnu_ld_CXX=$with_gnu_ld # # Do we need to explicitly link libc? # case "x$archive_cmds_need_lc_CXX" in x|xyes) # Assume -lc should be added archive_cmds_need_lc_CXX=yes if test yes,yes = "$GCC,$enable_shared"; then case $archive_cmds_CXX in *'~'*) # FIXME: we may have to deal with multi-command sequences. 
;; '$CC '*) # Test whether the compiler implicitly links with -lc since on some # systems, -lgcc has to come before -lc. If gcc already passes -lc # to ld, don't add -lc before -lgcc. { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether -lc should be explicitly linked in" >&5 $as_echo_n "checking whether -lc should be explicitly linked in... " >&6; } if ${lt_cv_archive_cmds_need_lc_CXX+:} false; then : $as_echo_n "(cached) " >&6 else $RM conftest* echo "$lt_simple_compile_test_code" > conftest.$ac_ext if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$ac_compile\""; } >&5 (eval $ac_compile) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } 2>conftest.err; then soname=conftest lib=conftest libobjs=conftest.$ac_objext deplibs= wl=$lt_prog_compiler_wl_CXX pic_flag=$lt_prog_compiler_pic_CXX compiler_flags=-v linker_flags=-v verstring= output_objdir=. libname=conftest lt_save_allow_undefined_flag=$allow_undefined_flag_CXX allow_undefined_flag_CXX= if { { eval echo "\"\$as_me\":${as_lineno-$LINENO}: \"$archive_cmds_CXX 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1\""; } >&5 (eval $archive_cmds_CXX 2\>\&1 \| $GREP \" -lc \" \>/dev/null 2\>\&1) 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; } then lt_cv_archive_cmds_need_lc_CXX=no else lt_cv_archive_cmds_need_lc_CXX=yes fi allow_undefined_flag_CXX=$lt_save_allow_undefined_flag else cat conftest.err 1>&5 fi $RM conftest* fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $lt_cv_archive_cmds_need_lc_CXX" >&5 $as_echo "$lt_cv_archive_cmds_need_lc_CXX" >&6; } archive_cmds_need_lc_CXX=$lt_cv_archive_cmds_need_lc_CXX ;; esac fi ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: checking dynamic linker characteristics" >&5 $as_echo_n "checking dynamic linker characteristics... 
" >&6; } library_names_spec= libname_spec='lib$name' soname_spec= shrext_cmds=.so postinstall_cmds= postuninstall_cmds= finish_cmds= finish_eval= shlibpath_var= shlibpath_overrides_runpath=unknown version_type=none dynamic_linker="$host_os ld.so" sys_lib_dlsearch_path_spec="/lib /usr/lib" need_lib_prefix=unknown hardcode_into_libs=no # when you set need_version to no, make sure it does not cause -set_version # flags to be left without arguments need_version=unknown case $host_os in aix3*) version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='$libname$release$shared_ext$versuffix $libname.a' shlibpath_var=LIBPATH # AIX 3 has no versioning support, so we append a major version to the name. soname_spec='$libname$release$shared_ext$major' ;; aix[4-9]*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no hardcode_into_libs=yes if test ia64 = "$host_cpu"; then # AIX 5 supports IA64 library_names_spec='$libname$release$shared_ext$major $libname$release$shared_ext$versuffix $libname$shared_ext' shlibpath_var=LD_LIBRARY_PATH else # With GCC up to 2.95.x, collect2 would create an import file # for dependence libraries. The import file would start with # the line '#! .'. This would cause the generated library to # depend on '.', always an invalid library. This was fixed in # development snapshots of GCC prior to 3.0. case $host_os in aix4 | aix4.[01] | aix4.[01].*) if { echo '#if __GNUC__ > 2 || (__GNUC__ == 2 && __GNUC_MINOR__ >= 97)' echo ' yes ' echo '#endif'; } | $CC -E - | $GREP yes > /dev/null; then : else can_build_shared=no fi ;; esac # Using Import Files as archive members, it is possible to support # filename-based versioning of shared library archives on AIX. While # this would work for both with and without runtime linking, it will # prevent static linking of such archives. 
So we do filename-based # shared library versioning with .so extension only, which is used # when both runtime linking and shared linking is enabled. # Unfortunately, runtime linking may impact performance, so we do # not want this to be the default eventually. Also, we use the # versioned .so libs for executables only if there is the -brtl # linker flag in LDFLAGS as well, or --with-aix-soname=svr4 only. # To allow for filename-based versioning support, we need to create # libNAME.so.V as an archive file, containing: # *) an Import File, referring to the versioned filename of the # archive as well as the shared archive member, telling the # bitwidth (32 or 64) of that shared object, and providing the # list of exported symbols of that shared object, eventually # decorated with the 'weak' keyword # *) the shared object with the F_LOADONLY flag set, to really avoid # it being seen by the linker. # At run time we better use the real file rather than another symlink, # but for link time we create the symlink libNAME.so -> libNAME.so.V case $with_aix_soname,$aix_use_runtimelinking in # AIX (on Power*) has no versioning support, so currently we cannot hardcode correct # soname into executable. Probably we can add versioning support to # collect2, so additional links can be useful in future. aix,yes) # traditional libtool dynamic_linker='AIX unversionable lib.so' # If using run time linking (on AIX 4.2 or later) use lib.so # instead of lib.a to let people know that these are not # typical AIX shared libraries. library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' ;; aix,no) # traditional AIX only dynamic_linker='AIX lib.a(lib.so.V)' # We preserve .a as extension for shared libraries through AIX4.2 # and later when we are not doing run time linking. 
library_names_spec='$libname$release.a $libname.a' soname_spec='$libname$release$shared_ext$major' ;; svr4,*) # full svr4 only dynamic_linker="AIX lib.so.V($shared_archive_member_spec.o)" library_names_spec='$libname$release$shared_ext$major $libname$shared_ext' # We do not specify a path in Import Files, so LIBPATH fires. shlibpath_overrides_runpath=yes ;; *,yes) # both, prefer svr4 dynamic_linker="AIX lib.so.V($shared_archive_member_spec.o), lib.a(lib.so.V)" library_names_spec='$libname$release$shared_ext$major $libname$shared_ext' # unpreferred sharedlib libNAME.a needs extra handling postinstall_cmds='test -n "$linkname" || linkname="$realname"~func_stripname "" ".so" "$linkname"~$install_shared_prog "$dir/$func_stripname_result.$libext" "$destdir/$func_stripname_result.$libext"~test -z "$tstripme" || test -z "$striplib" || $striplib "$destdir/$func_stripname_result.$libext"' postuninstall_cmds='for n in $library_names $old_library; do :; done~func_stripname "" ".so" "$n"~test "$func_stripname_result" = "$n" || func_append rmfiles " $odir/$func_stripname_result.$libext"' # We do not specify a path in Import Files, so LIBPATH fires. 
shlibpath_overrides_runpath=yes ;; *,no) # both, prefer aix dynamic_linker="AIX lib.a(lib.so.V), lib.so.V($shared_archive_member_spec.o)" library_names_spec='$libname$release.a $libname.a' soname_spec='$libname$release$shared_ext$major' # unpreferred sharedlib libNAME.so.V and symlink libNAME.so need extra handling postinstall_cmds='test -z "$dlname" || $install_shared_prog $dir/$dlname $destdir/$dlname~test -z "$tstripme" || test -z "$striplib" || $striplib $destdir/$dlname~test -n "$linkname" || linkname=$realname~func_stripname "" ".a" "$linkname"~(cd "$destdir" && $LN_S -f $dlname $func_stripname_result.so)' postuninstall_cmds='test -z "$dlname" || func_append rmfiles " $odir/$dlname"~for n in $old_library $library_names; do :; done~func_stripname "" ".a" "$n"~func_append rmfiles " $odir/$func_stripname_result.so"' ;; esac shlibpath_var=LIBPATH fi ;; amigaos*) case $host_cpu in powerpc) # Since July 2007 AmigaOS4 officially supports .so libraries. # When compiling the executable, add -use-dynld -Lsobjs: to the compileline. library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' ;; m68k) library_names_spec='$libname.ixlibrary $libname.a' # Create ${libname}_ixlibrary.a entries in /sys/libs. 
finish_eval='for lib in `ls $libdir/*.ixlibrary 2>/dev/null`; do libname=`func_echo_all "$lib" | $SED '\''s%^.*/\([^/]*\)\.ixlibrary$%\1%'\''`; $RM /sys/libs/${libname}_ixlibrary.a; $show "cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a"; cd /sys/libs && $LN_S $lib ${libname}_ixlibrary.a || exit 1; done' ;; esac ;; beos*) library_names_spec='$libname$shared_ext' dynamic_linker="$host_os ld.so" shlibpath_var=LIBRARY_PATH ;; bsdi[45]*) version_type=linux # correct to gnu/linux during the next big refactor need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' finish_cmds='PATH="\$PATH:/sbin" ldconfig $libdir' shlibpath_var=LD_LIBRARY_PATH sys_lib_search_path_spec="/shlib /usr/lib /usr/X11/lib /usr/contrib/lib /lib /usr/local/lib" sys_lib_dlsearch_path_spec="/shlib /usr/lib /usr/local/lib" # the default ld.so.conf also contains /usr/contrib/lib and # /usr/X11R6/lib (/usr/X11 is a link to /usr/X11R6), but let us allow # libtool to hard-code these into programs ;; cygwin* | mingw* | pw32* | cegcc*) version_type=windows shrext_cmds=.dll need_version=no need_lib_prefix=no case $GCC,$cc_basename in yes,*) # gcc library_names_spec='$libname.dll.a' # DLL is installed to $(libdir)/../bin by postinstall_cmds postinstall_cmds='base_file=`basename \$file`~ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; echo \$dlname'\''`~ dldir=$destdir/`dirname \$dlpath`~ test -d \$dldir || mkdir -p \$dldir~ $install_prog $dir/$dlname \$dldir/$dlname~ chmod a+x \$dldir/$dlname~ if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then eval '\''$striplib \$dldir/$dlname'\'' || exit \$?; fi' postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. 
$file; echo \$dlname'\''`~ dlpath=$dir/\$dldll~ $RM \$dlpath' shlibpath_overrides_runpath=yes case $host_os in cygwin*) # Cygwin DLLs use 'cyg' prefix rather than 'lib' soname_spec='`echo $libname | sed -e 's/^lib/cyg/'``echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext' ;; mingw* | cegcc*) # MinGW DLLs use traditional 'lib' prefix soname_spec='$libname`echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext' ;; pw32*) # pw32 DLLs use 'pw' prefix rather than 'lib' library_names_spec='`echo $libname | sed -e 's/^lib/pw/'``echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext' ;; esac dynamic_linker='Win32 ld.exe' ;; *,cl*) # Native MSVC libname_spec='$name' soname_spec='$libname`echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext' library_names_spec='$libname.dll.lib' case $build_os in mingw*) sys_lib_search_path_spec= lt_save_ifs=$IFS IFS=';' for lt_path in $LIB do IFS=$lt_save_ifs # Let DOS variable expansion print the short 8.3 style file name. lt_path=`cd "$lt_path" 2>/dev/null && cmd //C "for %i in (".") do @echo %~si"` sys_lib_search_path_spec="$sys_lib_search_path_spec $lt_path" done IFS=$lt_save_ifs # Convert to MSYS style. sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | sed -e 's|\\\\|/|g' -e 's| \\([a-zA-Z]\\):| /\\1|g' -e 's|^ ||'` ;; cygwin*) # Convert to unix form, then to dos form, then back to unix form # but this time dos style (no spaces!) so that the unix form looks # like /cygdrive/c/PROGRA~1:/cygdr... sys_lib_search_path_spec=`cygpath --path --unix "$LIB"` sys_lib_search_path_spec=`cygpath --path --dos "$sys_lib_search_path_spec" 2>/dev/null` sys_lib_search_path_spec=`cygpath --path --unix "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` ;; *) sys_lib_search_path_spec=$LIB if $ECHO "$sys_lib_search_path_spec" | $GREP ';[c-zC-Z]:/' >/dev/null; then # It is most probably a Windows format PATH. 
sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e 's/;/ /g'` else sys_lib_search_path_spec=`$ECHO "$sys_lib_search_path_spec" | $SED -e "s/$PATH_SEPARATOR/ /g"` fi # FIXME: find the short name or the path components, as spaces are # common. (e.g. "Program Files" -> "PROGRA~1") ;; esac # DLL is installed to $(libdir)/../bin by postinstall_cmds postinstall_cmds='base_file=`basename \$file`~ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; echo \$dlname'\''`~ dldir=$destdir/`dirname \$dlpath`~ test -d \$dldir || mkdir -p \$dldir~ $install_prog $dir/$dlname \$dldir/$dlname' postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. $file; echo \$dlname'\''`~ dlpath=$dir/\$dldll~ $RM \$dlpath' shlibpath_overrides_runpath=yes dynamic_linker='Win32 link.exe' ;; *) # Assume MSVC wrapper library_names_spec='$libname`echo $release | $SED -e 's/[.]/-/g'`$versuffix$shared_ext $libname.lib' dynamic_linker='Win32 ld.exe' ;; esac # FIXME: first we should search . and the directory the executable is in shlibpath_var=PATH ;; darwin* | rhapsody*) dynamic_linker="$host_os dyld" version_type=darwin need_lib_prefix=no need_version=no library_names_spec='$libname$release$major$shared_ext $libname$shared_ext' soname_spec='$libname$release$major$shared_ext' shlibpath_overrides_runpath=yes shlibpath_var=DYLD_LIBRARY_PATH shrext_cmds='`test .$module = .yes && echo .so || echo .dylib`' sys_lib_dlsearch_path_spec='/usr/local/lib /lib /usr/lib' ;; dgux*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH ;; freebsd* | dragonfly*) # DragonFly does not have aout. When/if they implement a new # versioning mechanism, adjust this. 
if test -x /usr/bin/objformat; then objformat=`/usr/bin/objformat` else case $host_os in freebsd[23].*) objformat=aout ;; *) objformat=elf ;; esac fi version_type=freebsd-$objformat case $version_type in freebsd-elf*) library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' need_version=no need_lib_prefix=no ;; freebsd-*) library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' need_version=yes ;; esac shlibpath_var=LD_LIBRARY_PATH case $host_os in freebsd2.*) shlibpath_overrides_runpath=yes ;; freebsd3.[01]* | freebsdelf3.[01]*) shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; freebsd3.[2-9]* | freebsdelf3.[2-9]* | \ freebsd4.[0-5] | freebsdelf4.[0-5] | freebsd4.1.1 | freebsdelf4.1.1) shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; *) # from 4.6 on, and DragonFly shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; esac ;; haiku*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no dynamic_linker="$host_os runtime_loader" library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LIBRARY_PATH shlibpath_overrides_runpath=no sys_lib_dlsearch_path_spec='/boot/home/config/lib /boot/common/lib /boot/system/lib' hardcode_into_libs=yes ;; hpux9* | hpux10* | hpux11*) # Give a soname corresponding to the major version so that dld.sl refuses to # link against other versions. version_type=sunos need_lib_prefix=no need_version=no case $host_cpu in ia64*) shrext_cmds='.so' hardcode_into_libs=yes dynamic_linker="$host_os dld.so" shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. 
library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' if test 32 = "$HPUX_IA64_MODE"; then sys_lib_search_path_spec="/usr/lib/hpux32 /usr/local/lib/hpux32 /usr/local/lib" sys_lib_dlsearch_path_spec=/usr/lib/hpux32 else sys_lib_search_path_spec="/usr/lib/hpux64 /usr/local/lib/hpux64" sys_lib_dlsearch_path_spec=/usr/lib/hpux64 fi ;; hppa*64*) shrext_cmds='.sl' hardcode_into_libs=yes dynamic_linker="$host_os dld.sl" shlibpath_var=LD_LIBRARY_PATH # How should we handle SHLIB_PATH shlibpath_overrides_runpath=yes # Unless +noenvvar is specified. library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' sys_lib_search_path_spec="/usr/lib/pa20_64 /usr/ccs/lib/pa20_64" sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec ;; *) shrext_cmds='.sl' dynamic_linker="$host_os dld.sl" shlibpath_var=SHLIB_PATH shlibpath_overrides_runpath=no # +s is required to enable SHLIB_PATH library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' ;; esac # HP-UX runs *really* slowly unless shared libraries are mode 555, ... 
postinstall_cmds='chmod 555 $lib' # or fails outright, so override atomically: install_override_mode=555 ;; interix[3-9]*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' dynamic_linker='Interix 3.x ld.so.1 (PE, like ELF)' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; irix5* | irix6* | nonstopux*) case $host_os in nonstopux*) version_type=nonstopux ;; *) if test yes = "$lt_cv_prog_gnu_ld"; then version_type=linux # correct to gnu/linux during the next big refactor else version_type=irix fi ;; esac need_lib_prefix=no need_version=no soname_spec='$libname$release$shared_ext$major' library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$release$shared_ext $libname$shared_ext' case $host_os in irix5* | nonstopux*) libsuff= shlibsuff= ;; *) case $LD in # libtool.m4 will add one of these switches to LD *-32|*"-32 "|*-melf32bsmip|*"-melf32bsmip ") libsuff= shlibsuff= libmagic=32-bit;; *-n32|*"-n32 "|*-melf32bmipn32|*"-melf32bmipn32 ") libsuff=32 shlibsuff=N32 libmagic=N32;; *-64|*"-64 "|*-melf64bmip|*"-melf64bmip ") libsuff=64 shlibsuff=64 libmagic=64-bit;; *) libsuff= shlibsuff= libmagic=never-match;; esac ;; esac shlibpath_var=LD_LIBRARY${shlibsuff}_PATH shlibpath_overrides_runpath=no sys_lib_search_path_spec="/usr/lib$libsuff /lib$libsuff /usr/local/lib$libsuff" sys_lib_dlsearch_path_spec="/usr/lib$libsuff /lib$libsuff" hardcode_into_libs=yes ;; # No shared lib support for Linux oldld, aout, or coff. linux*oldld* | linux*aout* | linux*coff*) dynamic_linker=no ;; linux*android*) version_type=none # Android doesn't support versioned libraries. 
need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext' soname_spec='$libname$release$shared_ext' finish_cmds= shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes # This implies no fast_install, which is unacceptable. # Some rework will be needed to allow for fast_install # before this can be enabled. hardcode_into_libs=yes dynamic_linker='Android linker' # Don't embed -rpath directories since the linker doesn't support them. hardcode_libdir_flag_spec_CXX='-L$libdir' ;; # This must be glibc/ELF. linux* | k*bsd*-gnu | kopensolaris*-gnu | gnu*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' finish_cmds='PATH="\$PATH:/sbin" ldconfig -n $libdir' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no # Some binutils ld are patched to set DT_RUNPATH if ${lt_cv_shlibpath_overrides_runpath+:} false; then : $as_echo_n "(cached) " >&6 else lt_cv_shlibpath_overrides_runpath=no save_LDFLAGS=$LDFLAGS save_libdir=$libdir eval "libdir=/foo; wl=\"$lt_prog_compiler_wl_CXX\"; \ LDFLAGS=\"\$LDFLAGS $hardcode_libdir_flag_spec_CXX\"" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : if ($OBJDUMP -p conftest$ac_exeext) 2>/dev/null | grep "RUNPATH.*$libdir" >/dev/null; then : lt_cv_shlibpath_overrides_runpath=yes fi fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LDFLAGS=$save_LDFLAGS libdir=$save_libdir fi shlibpath_overrides_runpath=$lt_cv_shlibpath_overrides_runpath # This implies no fast_install, which is unacceptable. # Some rework will be needed to allow for fast_install # before this can be enabled. 
hardcode_into_libs=yes # Ideally, we could use ldconfig to report *all* directores which are # searched for libraries, however this is still not possible. Aside from not # being certain /sbin/ldconfig is available, command # 'ldconfig -N -X -v | grep ^/' on 64bit Fedora does not report /usr/lib64, # even though it is searched at run-time. Try to do the best guess by # appending ld.so.conf contents (and includes) to the search path. if test -f /etc/ld.so.conf; then lt_ld_extra=`awk '/^include / { system(sprintf("cd /etc; cat %s 2>/dev/null", \$2)); skip = 1; } { if (!skip) print \$0; skip = 0; }' < /etc/ld.so.conf | $SED -e 's/#.*//;/^[ ]*hwcap[ ]/d;s/[:, ]/ /g;s/=[^=]*$//;s/=[^= ]* / /g;s/"//g;/^$/d' | tr '\n' ' '` sys_lib_dlsearch_path_spec="/lib /usr/lib $lt_ld_extra" fi # We used to test for /lib/ld.so.1 and disable shared libraries on # powerpc, because MkLinux only supported shared libraries with the # GNU dynamic linker. Since this was broken with cross compilers, # most powerpc-linux boxes support dynamic linking these days and # people can always --disable-shared, the test was removed, and we # assume the GNU/Linux dynamic linker is in use. 
dynamic_linker='GNU/Linux ld.so' ;; netbsdelf*-gnu) version_type=linux need_lib_prefix=no need_version=no library_names_spec='${libname}${release}${shared_ext}$versuffix ${libname}${release}${shared_ext}$major ${libname}${shared_ext}' soname_spec='${libname}${release}${shared_ext}$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes dynamic_linker='NetBSD ld.elf_so' ;; netbsd*) version_type=sunos need_lib_prefix=no need_version=no if echo __ELF__ | $CC -E - | $GREP __ELF__ >/dev/null; then library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' dynamic_linker='NetBSD (a.out) ld.so' else library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' dynamic_linker='NetBSD ld.elf_so' fi shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes ;; newsos6) version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes ;; *nto* | *qnx*) version_type=qnx need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes dynamic_linker='ldqnx.so' ;; openbsd* | bitrig*) version_type=sunos sys_lib_dlsearch_path_spec=/usr/lib need_lib_prefix=no if test -z "`echo __ELF__ | $CC -E - | $GREP __ELF__`"; then need_version=no else need_version=yes fi library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' finish_cmds='PATH="\$PATH:/sbin" ldconfig -m $libdir' shlibpath_var=LD_LIBRARY_PATH 
shlibpath_overrides_runpath=yes ;; os2*) libname_spec='$name' version_type=windows shrext_cmds=.dll need_version=no need_lib_prefix=no # OS/2 can only load a DLL with a base name of 8 characters or less. soname_spec='`test -n "$os2dllname" && libname="$os2dllname"; v=$($ECHO $release$versuffix | tr -d .-); n=$($ECHO $libname | cut -b -$((8 - ${#v})) | tr . _); $ECHO $n$v`$shared_ext' library_names_spec='${libname}_dll.$libext' dynamic_linker='OS/2 ld.exe' shlibpath_var=BEGINLIBPATH sys_lib_search_path_spec="/lib /usr/lib /usr/local/lib" sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec postinstall_cmds='base_file=`basename \$file`~ dlpath=`$SHELL 2>&1 -c '\''. $dir/'\''\$base_file'\''i; $ECHO \$dlname'\''`~ dldir=$destdir/`dirname \$dlpath`~ test -d \$dldir || mkdir -p \$dldir~ $install_prog $dir/$dlname \$dldir/$dlname~ chmod a+x \$dldir/$dlname~ if test -n '\''$stripme'\'' && test -n '\''$striplib'\''; then eval '\''$striplib \$dldir/$dlname'\'' || exit \$?; fi' postuninstall_cmds='dldll=`$SHELL 2>&1 -c '\''. 
$file; $ECHO \$dlname'\''`~ dlpath=$dir/\$dldll~ $RM \$dlpath' ;; osf3* | osf4* | osf5*) version_type=osf need_lib_prefix=no need_version=no soname_spec='$libname$release$shared_ext$major' library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' shlibpath_var=LD_LIBRARY_PATH sys_lib_search_path_spec="/usr/shlib /usr/ccs/lib /usr/lib/cmplrs/cc /usr/lib /usr/local/lib /var/shlib" sys_lib_dlsearch_path_spec=$sys_lib_search_path_spec ;; rdos*) dynamic_linker=no ;; solaris*) version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes # ldd complains unless libraries are executable postinstall_cmds='chmod +x $lib' ;; sunos4*) version_type=sunos library_names_spec='$libname$release$shared_ext$versuffix $libname$shared_ext$versuffix' finish_cmds='PATH="\$PATH:/usr/etc" ldconfig $libdir' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes if test yes = "$with_gnu_ld"; then need_lib_prefix=no fi need_version=yes ;; sysv4 | sysv4.3*) version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH case $host_vendor in sni) shlibpath_overrides_runpath=no need_lib_prefix=no runpath_var=LD_RUN_PATH ;; siemens) need_lib_prefix=no ;; motorola) need_lib_prefix=no need_version=no shlibpath_overrides_runpath=no sys_lib_search_path_spec='/lib /usr/lib /usr/ccs/lib' ;; esac ;; sysv4*MP*) if test -d /usr/nec; then version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='$libname$shared_ext.$versuffix 
$libname$shared_ext.$major $libname$shared_ext' soname_spec='$libname$shared_ext.$major' shlibpath_var=LD_LIBRARY_PATH fi ;; sysv5* | sco3.2v5* | sco5v6* | unixware* | OpenUNIX* | sysv4*uw2*) version_type=sco need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=yes hardcode_into_libs=yes if test yes = "$with_gnu_ld"; then sys_lib_search_path_spec='/usr/local/lib /usr/gnu/lib /usr/ccs/lib /usr/lib /lib' else sys_lib_search_path_spec='/usr/ccs/lib /usr/lib' case $host_os in sco3.2v5*) sys_lib_search_path_spec="$sys_lib_search_path_spec /lib" ;; esac fi sys_lib_dlsearch_path_spec='/usr/lib' ;; tpf*) # TPF is a cross-target only. Preferred cross-host = GNU/Linux. version_type=linux # correct to gnu/linux during the next big refactor need_lib_prefix=no need_version=no library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' shlibpath_var=LD_LIBRARY_PATH shlibpath_overrides_runpath=no hardcode_into_libs=yes ;; uts4*) version_type=linux # correct to gnu/linux during the next big refactor library_names_spec='$libname$release$shared_ext$versuffix $libname$release$shared_ext$major $libname$shared_ext' soname_spec='$libname$release$shared_ext$major' shlibpath_var=LD_LIBRARY_PATH ;; *) dynamic_linker=no ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: result: $dynamic_linker" >&5 $as_echo "$dynamic_linker" >&6; } test no = "$dynamic_linker" && can_build_shared=no variables_saved_for_relink="PATH $shlibpath_var $runpath_var" if test yes = "$GCC"; then variables_saved_for_relink="$variables_saved_for_relink GCC_EXEC_PREFIX COMPILER_PATH LIBRARY_PATH" fi if test set = "${lt_cv_sys_lib_search_path_spec+set}"; then sys_lib_search_path_spec=$lt_cv_sys_lib_search_path_spec fi if test set = "${lt_cv_sys_lib_dlsearch_path_spec+set}"; then 
sys_lib_dlsearch_path_spec=$lt_cv_sys_lib_dlsearch_path_spec fi # remember unaugmented sys_lib_dlsearch_path content for libtool script decls... configure_time_dlsearch_path=$sys_lib_dlsearch_path_spec # ... but it needs LT_SYS_LIBRARY_PATH munging for other configure-time code func_munge_path_list sys_lib_dlsearch_path_spec "$LT_SYS_LIBRARY_PATH" # to be used as default LT_SYS_LIBRARY_PATH value in generated libtool configure_time_lt_sys_library_path=$LT_SYS_LIBRARY_PATH { $as_echo "$as_me:${as_lineno-$LINENO}: checking how to hardcode library paths into programs" >&5 $as_echo_n "checking how to hardcode library paths into programs... " >&6; } hardcode_action_CXX= if test -n "$hardcode_libdir_flag_spec_CXX" || test -n "$runpath_var_CXX" || test yes = "$hardcode_automatic_CXX"; then # We can hardcode non-existent directories. if test no != "$hardcode_direct_CXX" && # If the only mechanism to avoid hardcoding is shlibpath_var, we # have to relink, otherwise we might link with an installed library # when we should be linking with a yet-to-be-installed one ## test no != "$_LT_TAGVAR(hardcode_shlibpath_var, CXX)" && test no != "$hardcode_minus_L_CXX"; then # Linking always hardcodes the temporary library directory. hardcode_action_CXX=relink else # We can link without hardcoding, and we can hardcode nonexisting dirs. hardcode_action_CXX=immediate fi else # We cannot hardcode anything, or else we can only hardcode existing # directories. 
hardcode_action_CXX=unsupported fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $hardcode_action_CXX" >&5 $as_echo "$hardcode_action_CXX" >&6; } if test relink = "$hardcode_action_CXX" || test yes = "$inherit_rpath_CXX"; then # Fast installation is not supported enable_fast_install=no elif test yes = "$shlibpath_overrides_runpath" || test no = "$enable_shared"; then # Fast installation is not necessary enable_fast_install=needless fi fi # test -n "$compiler" CC=$lt_save_CC CFLAGS=$lt_save_CFLAGS LDCXX=$LD LD=$lt_save_LD GCC=$lt_save_GCC with_gnu_ld=$lt_save_with_gnu_ld lt_cv_path_LDCXX=$lt_cv_path_LD lt_cv_path_LD=$lt_save_path_LD lt_cv_prog_gnu_ldcxx=$lt_cv_prog_gnu_ld lt_cv_prog_gnu_ld=$lt_save_with_gnu_ld fi # test yes != "$_lt_caught_CXX_error" ac_ext=cpp ac_cpp='$CXXCPP $CPPFLAGS' ac_compile='$CXX -c $CXXFLAGS $CPPFLAGS conftest.$ac_ext >&5' ac_link='$CXX -o conftest$ac_exeext $CXXFLAGS $CPPFLAGS $LDFLAGS conftest.$ac_ext $LIBS >&5' ac_compiler_gnu=$ac_cv_cxx_compiler_gnu ac_config_commands="$ac_config_commands libtool" # Only expand once: { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether ${MAKE-make} sets \$(MAKE)" >&5 $as_echo_n "checking whether ${MAKE-make} sets \$(MAKE)... " >&6; } set x ${MAKE-make} ac_make=`$as_echo "$2" | sed 's/+/p/g; s/[^a-zA-Z0-9_]/_/g'` if eval \${ac_cv_prog_make_${ac_make}_set+:} false; then : $as_echo_n "(cached) " >&6 else cat >conftest.make <<\_ACEOF SHELL = /bin/sh all: @echo '@@@%%%=$(MAKE)=@@@%%%' _ACEOF # GNU make sometimes prints "make[1]: Entering ...", which would confuse us. 
case `${MAKE-make} -f conftest.make 2>/dev/null` in *@@@%%%=?*=@@@%%%*) eval ac_cv_prog_make_${ac_make}_set=yes;; *) eval ac_cv_prog_make_${ac_make}_set=no;; esac rm -f conftest.make fi if eval test \$ac_cv_prog_make_${ac_make}_set = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } SET_MAKE= else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } SET_MAKE="MAKE=${MAKE-make}" fi if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}ranlib", so it can be a program name with args. set dummy ${ac_tool_prefix}ranlib; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_RANLIB+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$RANLIB"; then ac_cv_prog_RANLIB="$RANLIB" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_RANLIB="${ac_tool_prefix}ranlib" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi RANLIB=$ac_cv_prog_RANLIB if test -n "$RANLIB"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $RANLIB" >&5 $as_echo "$RANLIB" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_prog_RANLIB"; then ac_ct_RANLIB=$RANLIB # Extract the first word of "ranlib", so it can be a program name with args. set dummy ranlib; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_ac_ct_RANLIB+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$ac_ct_RANLIB"; then ac_cv_prog_ac_ct_RANLIB="$ac_ct_RANLIB" # Let the user override the test. 
else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_ac_ct_RANLIB="ranlib" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS fi fi ac_ct_RANLIB=$ac_cv_prog_ac_ct_RANLIB if test -n "$ac_ct_RANLIB"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_ct_RANLIB" >&5 $as_echo "$ac_ct_RANLIB" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_ct_RANLIB" = x; then RANLIB=":" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac RANLIB=$ac_ct_RANLIB fi else RANLIB="$ac_cv_prog_RANLIB" fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for C++ compiler usability" >&5 $as_echo_n "checking for C++ compiler usability... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { class test { public: int test; }; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: result: ok" >&5 $as_echo "ok" >&6; } else as_fn_error $? 
"No C++ compiler found" "$LINENO" 5 fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext # Defining _XOPEN_SOURCE to get extra field in struct stat $as_echo "#define _XOPEN_SOURCE 700" >>confdefs.h # Define _BSD_SOURCE in order to be able to call makedev(), minor() and major() under OpenBSD when _XOPEN_SOURCE is set $as_echo "#define _BSD_SOURCE 1" >>confdefs.h $as_echo "#define _DEFAULT_SOURCE 1" >>confdefs.h ########### ## THE FOLLOWING "DEFINE" USED TO RE-ENABLE FULL LIBC FEATURES ON DIFFERENT OPERATING SYSTEMS ## HAVE BEEN BORROWED FROM PYTHON's configure.in ## ## # The later defininition of _XOPEN_SOURCE disables certain features # on Linux, so we need _GNU_SOURCE to re-enable them (makedev, tm_zone). $as_echo "#define _GNU_SOURCE 1" >>confdefs.h # The later defininition of _XOPEN_SOURCE and _POSIX_C_SOURCE disables # certain features on NetBSD, so we need _NETBSD_SOURCE to re-enable # them. $as_echo "#define _NETBSD_SOURCE 1" >>confdefs.h # The later defininition of _XOPEN_SOURCE and _POSIX_C_SOURCE disables # certain features on FreeBSD, so we need __BSD_VISIBLE to re-enable # them. $as_echo "#define __BSD_VISIBLE 1" >>confdefs.h # The later defininition of _XOPEN_SOURCE and _POSIX_C_SOURCE disables # certain features on Mac OS X, so we need _DARWIN_C_SOURCE to re-enable # them. $as_echo "#define _DARWIN_C_SOURCE 1" >>confdefs.h ## ## ########### # Checks for libraries. { $as_echo "$as_me:${as_lineno-$LINENO}: checking for socket in -lsocket" >&5 $as_echo_n "checking for socket in -lsocket... " >&6; } if ${ac_cv_lib_socket_socket+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lsocket $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char socket (); int main () { return socket (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_lib_socket_socket=yes else ac_cv_lib_socket_socket=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_socket_socket" >&5 $as_echo "$ac_cv_lib_socket_socket" >&6; } if test "x$ac_cv_lib_socket_socket" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBSOCKET 1 _ACEOF LIBS="-lsocket $LIBS" fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for endnetconfig in -lnsl" >&5 $as_echo_n "checking for endnetconfig in -lnsl... " >&6; } if ${ac_cv_lib_nsl_endnetconfig+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lnsl $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char endnetconfig (); int main () { return endnetconfig (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_lib_nsl_endnetconfig=yes else ac_cv_lib_nsl_endnetconfig=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_nsl_endnetconfig" >&5 $as_echo "$ac_cv_lib_nsl_endnetconfig" >&6; } if test "x$ac_cv_lib_nsl_endnetconfig" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBNSL 1 _ACEOF LIBS="-lnsl $LIBS" fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for cap_get_proc in -lcap" >&5 $as_echo_n "checking for cap_get_proc in -lcap... 
" >&6; } if ${ac_cv_lib_cap_cap_get_proc+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lcap $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char cap_get_proc (); int main () { return cap_get_proc (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_lib_cap_cap_get_proc=yes else ac_cv_lib_cap_cap_get_proc=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_cap_cap_get_proc" >&5 $as_echo "$ac_cv_lib_cap_cap_get_proc" >&6; } if test "x$ac_cv_lib_cap_cap_get_proc" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBCAP 1 _ACEOF LIBS="-lcap $LIBS" fi # Check whether --enable-libdl-linking was given. if test "${enable_libdl_linking+set}" = set; then : enableval=$enable_libdl_linking; else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for dlsym in -ldl" >&5 $as_echo_n "checking for dlsym in -ldl... " >&6; } if ${ac_cv_lib_dl_dlsym+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-ldl $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char dlsym (); int main () { return dlsym (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_lib_dl_dlsym=yes else ac_cv_lib_dl_dlsym=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_dl_dlsym" >&5 $as_echo "$ac_cv_lib_dl_dlsym" >&6; } if test "x$ac_cv_lib_dl_dlsym" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBDL 1 _ACEOF LIBS="-ldl $LIBS" fi fi # Checks for header files. ac_header_dirent=no for ac_hdr in dirent.h sys/ndir.h sys/dir.h ndir.h; do as_ac_Header=`$as_echo "ac_cv_header_dirent_$ac_hdr" | $as_tr_sh` { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_hdr that defines DIR" >&5 $as_echo_n "checking for $ac_hdr that defines DIR... " >&6; } if eval \${$as_ac_Header+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include <$ac_hdr> int main () { if ((DIR *) 0) return 0; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : eval "$as_ac_Header=yes" else eval "$as_ac_Header=no" fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi eval ac_res=\$$as_ac_Header { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_res" >&5 $as_echo "$ac_res" >&6; } if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_hdr" | $as_tr_cpp` 1 _ACEOF ac_header_dirent=$ac_hdr; break fi done # Two versions of opendir et al. are in -ldir and -lx on SCO Xenix. if test $ac_header_dirent = dirent.h; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing opendir" >&5 $as_echo_n "checking for library containing opendir... " >&6; } if ${ac_cv_search_opendir+:} false; then : $as_echo_n "(cached) " >&6 else ac_func_search_save_LIBS=$LIBS cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char opendir (); int main () { return opendir (); ; return 0; } _ACEOF for ac_lib in '' dir; do if test -z "$ac_lib"; then ac_res="none required" else ac_res=-l$ac_lib LIBS="-l$ac_lib $ac_func_search_save_LIBS" fi if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_search_opendir=$ac_res fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext if ${ac_cv_search_opendir+:} false; then : break fi done if ${ac_cv_search_opendir+:} false; then : else ac_cv_search_opendir=no fi rm conftest.$ac_ext LIBS=$ac_func_search_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_opendir" >&5 $as_echo "$ac_cv_search_opendir" >&6; } ac_res=$ac_cv_search_opendir if test "$ac_res" != no; then : test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" fi else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for library containing opendir" >&5 $as_echo_n "checking for library containing opendir... " >&6; } if ${ac_cv_search_opendir+:} false; then : $as_echo_n "(cached) " >&6 else ac_func_search_save_LIBS=$LIBS cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char opendir (); int main () { return opendir (); ; return 0; } _ACEOF for ac_lib in '' x; do if test -z "$ac_lib"; then ac_res="none required" else ac_res=-l$ac_lib LIBS="-l$ac_lib $ac_func_search_save_LIBS" fi if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_search_opendir=$ac_res fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext if ${ac_cv_search_opendir+:} false; then : break fi done if ${ac_cv_search_opendir+:} false; then : else ac_cv_search_opendir=no fi rm conftest.$ac_ext LIBS=$ac_func_search_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_search_opendir" >&5 $as_echo "$ac_cv_search_opendir" >&6; } ac_res=$ac_cv_search_opendir if test "$ac_res" != no; then : test "$ac_res" = "none required" || LIBS="$ac_res $LIBS" fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for ANSI C header files" >&5 $as_echo_n "checking for ANSI C header files... " >&6; } if ${ac_cv_header_stdc+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include #include #include int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_header_stdc=yes else ac_cv_header_stdc=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext if test $ac_cv_header_stdc = yes; then # SunOS 4.x string.h does not declare mem*, contrary to ANSI. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include _ACEOF if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | $EGREP "memchr" >/dev/null 2>&1; then : else ac_cv_header_stdc=no fi rm -f conftest* fi if test $ac_cv_header_stdc = yes; then # ISC 2.0.2 stdlib.h does not declare free, contrary to ANSI. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include _ACEOF if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | $EGREP "free" >/dev/null 2>&1; then : else ac_cv_header_stdc=no fi rm -f conftest* fi if test $ac_cv_header_stdc = yes; then # /bin/cc in Irix-4.0.5 gets non-ANSI ctype macros unless using -ansi. if test "$cross_compiling" = yes; then : : else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include #if ((' ' & 0x0FF) == 0x020) # define ISLOWER(c) ('a' <= (c) && (c) <= 'z') # define TOUPPER(c) (ISLOWER(c) ? 'A' + ((c) - 'a') : (c)) #else # define ISLOWER(c) \ (('a' <= (c) && (c) <= 'i') \ || ('j' <= (c) && (c) <= 'r') \ || ('s' <= (c) && (c) <= 'z')) # define TOUPPER(c) (ISLOWER(c) ? ((c) | 0x40) : (c)) #endif #define XOR(e, f) (((e) && !(f)) || (!(e) && (f))) int main () { int i; for (i = 0; i < 256; i++) if (XOR (islower (i), ISLOWER (i)) || toupper (i) != TOUPPER (i)) return 2; return 0; } _ACEOF if ac_fn_cxx_try_run "$LINENO"; then : else ac_cv_header_stdc=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_stdc" >&5 $as_echo "$ac_cv_header_stdc" >&6; } if test $ac_cv_header_stdc = yes; then $as_echo "#define STDC_HEADERS 1" >>confdefs.h fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for sys/wait.h that is POSIX.1 compatible" >&5 $as_echo_n "checking for sys/wait.h that is POSIX.1 compatible... " >&6; } if ${ac_cv_header_sys_wait_h+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include #ifndef WEXITSTATUS # define WEXITSTATUS(stat_val) ((unsigned int) (stat_val) >> 8) #endif #ifndef WIFEXITED # define WIFEXITED(stat_val) (((stat_val) & 255) == 0) #endif int main () { int s; wait (&s); s = WIFEXITED (s) ? 
WEXITSTATUS (s) : 1; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_header_sys_wait_h=yes else ac_cv_header_sys_wait_h=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_sys_wait_h" >&5 $as_echo "$ac_cv_header_sys_wait_h" >&6; } if test $ac_cv_header_sys_wait_h = yes; then $as_echo "#define HAVE_SYS_WAIT_H 1" >>confdefs.h fi for ac_header in fcntl.h netinet/in.h arpa/inet.h stdint.h stdlib.h string.h sys/ioctl.h sys/socket.h termios.h unistd.h utime.h sys/types.h signal.h errno.h sys/un.h sys/stat.h time.h fnmatch.h regex.h pwd.h grp.h stdio.h pthread.h ctype.h getopt.h limits.h stddef.h sys/utsname.h libintl.h sys/capability.h linux/capability.h utimes.h sys/time.h wchar.h wctype.h stddef.h do : as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` ac_fn_cxx_check_header_mongrel "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default" if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 _ACEOF fi done # Check whether --enable-largefile was given. if test "${enable_largefile+set}" = set; then : enableval=$enable_largefile; fi if test "$enable_largefile" != no; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for special C compiler options needed for large files" >&5 $as_echo_n "checking for special C compiler options needed for large files... " >&6; } if ${ac_cv_sys_largefile_CC+:} false; then : $as_echo_n "(cached) " >&6 else ac_cv_sys_largefile_CC=no if test "$GCC" != yes; then ac_save_CC=$CC while :; do # IRIX 6.2 and later do not support large files by default, # so use the C compiler's -n32 option if that helps. cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include /* Check that off_t can represent 2**63 - 1 correctly. 
We can't simply define LARGE_OFF_T to be 9223372036854775807, since some C++ compilers masquerading as C compilers incorrectly reject 9223372036854775807. */ #define LARGE_OFF_T ((((off_t) 1 << 31) << 31) - 1 + (((off_t) 1 << 31) << 31)) int off_t_is_large[(LARGE_OFF_T % 2147483629 == 721 && LARGE_OFF_T % 2147483647 == 1) ? 1 : -1]; int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : break fi rm -f core conftest.err conftest.$ac_objext CC="$CC -n32" if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_sys_largefile_CC=' -n32'; break fi rm -f core conftest.err conftest.$ac_objext break done CC=$ac_save_CC rm -f conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_sys_largefile_CC" >&5 $as_echo "$ac_cv_sys_largefile_CC" >&6; } if test "$ac_cv_sys_largefile_CC" != no; then CC=$CC$ac_cv_sys_largefile_CC fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for _FILE_OFFSET_BITS value needed for large files" >&5 $as_echo_n "checking for _FILE_OFFSET_BITS value needed for large files... " >&6; } if ${ac_cv_sys_file_offset_bits+:} false; then : $as_echo_n "(cached) " >&6 else while :; do cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include /* Check that off_t can represent 2**63 - 1 correctly. We can't simply define LARGE_OFF_T to be 9223372036854775807, since some C++ compilers masquerading as C compilers incorrectly reject 9223372036854775807. */ #define LARGE_OFF_T ((((off_t) 1 << 31) << 31) - 1 + (((off_t) 1 << 31) << 31)) int off_t_is_large[(LARGE_OFF_T % 2147483629 == 721 && LARGE_OFF_T % 2147483647 == 1) ? 1 : -1]; int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_sys_file_offset_bits=no; break fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #define _FILE_OFFSET_BITS 64 #include /* Check that off_t can represent 2**63 - 1 correctly. 
We can't simply define LARGE_OFF_T to be 9223372036854775807, since some C++ compilers masquerading as C compilers incorrectly reject 9223372036854775807. */ #define LARGE_OFF_T ((((off_t) 1 << 31) << 31) - 1 + (((off_t) 1 << 31) << 31)) int off_t_is_large[(LARGE_OFF_T % 2147483629 == 721 && LARGE_OFF_T % 2147483647 == 1) ? 1 : -1]; int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_sys_file_offset_bits=64; break fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_cv_sys_file_offset_bits=unknown break done fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_sys_file_offset_bits" >&5 $as_echo "$ac_cv_sys_file_offset_bits" >&6; } case $ac_cv_sys_file_offset_bits in #( no | unknown) ;; *) cat >>confdefs.h <<_ACEOF #define _FILE_OFFSET_BITS $ac_cv_sys_file_offset_bits _ACEOF ;; esac rm -rf conftest* if test $ac_cv_sys_file_offset_bits = unknown; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for _LARGE_FILES value needed for large files" >&5 $as_echo_n "checking for _LARGE_FILES value needed for large files... " >&6; } if ${ac_cv_sys_large_files+:} false; then : $as_echo_n "(cached) " >&6 else while :; do cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include /* Check that off_t can represent 2**63 - 1 correctly. We can't simply define LARGE_OFF_T to be 9223372036854775807, since some C++ compilers masquerading as C compilers incorrectly reject 9223372036854775807. */ #define LARGE_OFF_T ((((off_t) 1 << 31) << 31) - 1 + (((off_t) 1 << 31) << 31)) int off_t_is_large[(LARGE_OFF_T % 2147483629 == 721 && LARGE_OFF_T % 2147483647 == 1) ? 1 : -1]; int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_sys_large_files=no; break fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #define _LARGE_FILES 1 #include /* Check that off_t can represent 2**63 - 1 correctly. 
We can't simply define LARGE_OFF_T to be 9223372036854775807, since some C++ compilers masquerading as C compilers incorrectly reject 9223372036854775807. */ #define LARGE_OFF_T ((((off_t) 1 << 31) << 31) - 1 + (((off_t) 1 << 31) << 31)) int off_t_is_large[(LARGE_OFF_T % 2147483629 == 721 && LARGE_OFF_T % 2147483647 == 1) ? 1 : -1]; int main () { ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_sys_large_files=1; break fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext ac_cv_sys_large_files=unknown break done fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_sys_large_files" >&5 $as_echo "$ac_cv_sys_large_files" >&6; } case $ac_cv_sys_large_files in #( no | unknown) ;; *) cat >>confdefs.h <<_ACEOF #define _LARGE_FILES $ac_cv_sys_large_files _ACEOF ;; esac rm -rf conftest* fi fi # Checks for typedefs, structures, and compiler characteristics. { $as_echo "$as_me:${as_lineno-$LINENO}: checking for an ANSI C-conforming const" >&5 $as_echo_n "checking for an ANSI C-conforming const... " >&6; } if ${ac_cv_c_const+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { #ifndef __cplusplus /* Ultrix mips cc rejects this sort of thing. */ typedef int charset[2]; const charset cs = { 0, 0 }; /* SunOS 4.1.1 cc rejects this. */ char const *const *pcpcc; char **ppc; /* NEC SVR4.0.2 mips cc rejects this. */ struct point {int x, y;}; static struct point const zero = {0,0}; /* AIX XL C 1.02.0.0 rejects this. It does not let you subtract one const X* pointer from another in an arm of an if-expression whose if-part is not a constant expression */ const char *g = "string"; pcpcc = &g + (g ? g-g : 0); /* HPUX 7.0 cc rejects these. */ ++pcpcc; ppc = (char**) pcpcc; pcpcc = (char const *const *) ppc; { /* SCO 3.2v4 cc rejects this sort of thing. */ char tx; char *t = &tx; char const *s = 0 ? 
(char *) 0 : (char const *) 0; *t++ = 0; if (s) return 0; } { /* Someone thinks the Sun supposedly-ANSI compiler will reject this. */ int x[] = {25, 17}; const int *foo = &x[0]; ++foo; } { /* Sun SC1.0 ANSI compiler rejects this -- but not the above. */ typedef const int *iptr; iptr p = 0; ++p; } { /* AIX XL C 1.02.0.0 rejects this sort of thing, saying "k.c", line 2.27: 1506-025 (S) Operand must be a modifiable lvalue. */ struct s { int j; const int *ap[3]; } bx; struct s *b = &bx; b->j = 5; } { /* ULTRIX-32 V3.1 (Rev 9) vcc rejects this */ const int foo = 10; if (!foo) return 0; } return !cs[0] && !zero.x; #endif ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_c_const=yes else ac_cv_c_const=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_const" >&5 $as_echo "$ac_cv_c_const" >&6; } if test $ac_cv_c_const = no; then $as_echo "#define const /**/" >>confdefs.h fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for inline" >&5 $as_echo_n "checking for inline... " >&6; } if ${ac_cv_c_inline+:} false; then : $as_echo_n "(cached) " >&6 else ac_cv_c_inline=no for ac_kw in inline __inline__ __inline; do cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #ifndef __cplusplus typedef int foo_t; static $ac_kw foo_t static_foo () {return 0; } $ac_kw foo_t foo () {return 0; } #endif _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_c_inline=$ac_kw fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext test "$ac_cv_c_inline" != no && break done fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_c_inline" >&5 $as_echo "$ac_cv_c_inline" >&6; } case $ac_cv_c_inline in inline | yes) ;; *) case $ac_cv_c_inline in no) ac_val=;; *) ac_val=$ac_cv_c_inline;; esac cat >>confdefs.h <<_ACEOF #ifndef __cplusplus #define inline $ac_val #endif _ACEOF ;; esac ac_fn_cxx_check_type "$LINENO" "off_t" "ac_cv_type_off_t" "$ac_includes_default" if test "x$ac_cv_type_off_t" = xyes; then : else cat >>confdefs.h <<_ACEOF #define off_t long int _ACEOF fi ac_fn_cxx_check_type "$LINENO" "pid_t" "ac_cv_type_pid_t" "$ac_includes_default" if test "x$ac_cv_type_pid_t" = xyes; then : else cat >>confdefs.h <<_ACEOF #define pid_t int _ACEOF fi ac_fn_cxx_check_type "$LINENO" "size_t" "ac_cv_type_size_t" "$ac_includes_default" if test "x$ac_cv_type_size_t" = xyes; then : else cat >>confdefs.h <<_ACEOF #define size_t unsigned int _ACEOF fi ac_fn_cxx_check_member "$LINENO" "struct stat" "st_rdev" "ac_cv_member_struct_stat_st_rdev" "$ac_includes_default" if test "x$ac_cv_member_struct_stat_st_rdev" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_STRUCT_STAT_ST_RDEV 1 _ACEOF fi ac_fn_cxx_check_decl "$LINENO" "sys_siglist" "ac_cv_have_decl_sys_siglist" "#include /* NetBSD declares sys_siglist in unistd.h. 
*/ #ifdef HAVE_UNISTD_H # include #endif " if test "x$ac_cv_have_decl_sys_siglist" = xyes; then : ac_have_decl=1 else ac_have_decl=0 fi cat >>confdefs.h <<_ACEOF #define HAVE_DECL_SYS_SIGLIST $ac_have_decl _ACEOF ac_fn_cxx_check_type "$LINENO" "size_t" "ac_cv_type_size_t" "$ac_includes_default" if test "x$ac_cv_type_size_t" = xyes; then : # The cast to long int works around a bug in the HP C Compiler # version HP92453-01 B.11.11.23709.GP, which incorrectly rejects # declarations like `int a3[[(sizeof (unsigned char)) >= 0]];'. # This bug is HP SR number 8606223364. { $as_echo "$as_me:${as_lineno-$LINENO}: checking size of size_t" >&5 $as_echo_n "checking size of size_t... " >&6; } if ${ac_cv_sizeof_size_t+:} false; then : $as_echo_n "(cached) " >&6 else if ac_fn_cxx_compute_int "$LINENO" "(long int) (sizeof (size_t))" "ac_cv_sizeof_size_t" "$ac_includes_default"; then : else if test "$ac_cv_type_size_t" = yes; then { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error 77 "cannot compute sizeof (size_t) See \`config.log' for more details" "$LINENO" 5; } else ac_cv_sizeof_size_t=0 fi fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_sizeof_size_t" >&5 $as_echo "$ac_cv_sizeof_size_t" >&6; } cat >>confdefs.h <<_ACEOF #define SIZEOF_SIZE_T $ac_cv_sizeof_size_t _ACEOF else as_fn_error $? "Cannot find size_t type" "$LINENO" 5 fi ac_fn_cxx_check_type "$LINENO" "time_t" "ac_cv_type_time_t" "$ac_includes_default" if test "x$ac_cv_type_time_t" = xyes; then : # The cast to long int works around a bug in the HP C Compiler # version HP92453-01 B.11.11.23709.GP, which incorrectly rejects # declarations like `int a3[[(sizeof (unsigned char)) >= 0]];'. # This bug is HP SR number 8606223364. { $as_echo "$as_me:${as_lineno-$LINENO}: checking size of time_t" >&5 $as_echo_n "checking size of time_t... 
" >&6; } if ${ac_cv_sizeof_time_t+:} false; then : $as_echo_n "(cached) " >&6 else if ac_fn_cxx_compute_int "$LINENO" "(long int) (sizeof (time_t))" "ac_cv_sizeof_time_t" "$ac_includes_default"; then : else if test "$ac_cv_type_time_t" = yes; then { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error 77 "cannot compute sizeof (time_t) See \`config.log' for more details" "$LINENO" 5; } else ac_cv_sizeof_time_t=0 fi fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_sizeof_time_t" >&5 $as_echo "$ac_cv_sizeof_time_t" >&6; } cat >>confdefs.h <<_ACEOF #define SIZEOF_TIME_T $ac_cv_sizeof_time_t _ACEOF else as_fn_error $? "Cannot find time_t type" "$LINENO" 5 fi ac_fn_cxx_check_type "$LINENO" "off_t" "ac_cv_type_off_t" "$ac_includes_default" if test "x$ac_cv_type_off_t" = xyes; then : # The cast to long int works around a bug in the HP C Compiler # version HP92453-01 B.11.11.23709.GP, which incorrectly rejects # declarations like `int a3[[(sizeof (unsigned char)) >= 0]];'. # This bug is HP SR number 8606223364. { $as_echo "$as_me:${as_lineno-$LINENO}: checking size of off_t" >&5 $as_echo_n "checking size of off_t... " >&6; } if ${ac_cv_sizeof_off_t+:} false; then : $as_echo_n "(cached) " >&6 else if ac_fn_cxx_compute_int "$LINENO" "(long int) (sizeof (off_t))" "ac_cv_sizeof_off_t" "$ac_includes_default"; then : else if test "$ac_cv_type_off_t" = yes; then { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error 77 "cannot compute sizeof (off_t) See \`config.log' for more details" "$LINENO" 5; } else ac_cv_sizeof_off_t=0 fi fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_sizeof_off_t" >&5 $as_echo "$ac_cv_sizeof_off_t" >&6; } cat >>confdefs.h <<_ACEOF #define SIZEOF_OFF_T $ac_cv_sizeof_off_t _ACEOF else as_fn_error $? "Cannot find off_t type" "$LINENO" 5 fi # Checks for library functions. 
{ $as_echo "$as_me:${as_lineno-$LINENO}: checking for working POSIX fnmatch" >&5 $as_echo_n "checking for working POSIX fnmatch... " >&6; } if ${ac_cv_func_fnmatch_works+:} false; then : $as_echo_n "(cached) " >&6 else # Some versions of Solaris, SCO, and the GNU C Library # have a broken or incompatible fnmatch. # So we run a test program. If we are cross-compiling, take no chance. # Thanks to John Oleynick, Franc,ois Pinard, and Paul Eggert for this test. if test "$cross_compiling" = yes; then : ac_cv_func_fnmatch_works=cross else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include # define y(a, b, c) (fnmatch (a, b, c) == 0) # define n(a, b, c) (fnmatch (a, b, c) == FNM_NOMATCH) int main () { return (!(y ("a*", "abc", 0) && n ("d*/*1", "d/s/1", FNM_PATHNAME) && y ("a\\\\bc", "abc", 0) && n ("a\\\\bc", "abc", FNM_NOESCAPE) && y ("*x", ".x", 0) && n ("*x", ".x", FNM_PERIOD) && 1)); ; return 0; } _ACEOF if ac_fn_cxx_try_run "$LINENO"; then : ac_cv_func_fnmatch_works=yes else ac_cv_func_fnmatch_works=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_fnmatch_works" >&5 $as_echo "$ac_cv_func_fnmatch_works" >&6; } if test $ac_cv_func_fnmatch_works = yes; then : $as_echo "#define HAVE_FNMATCH 1" >>confdefs.h fi for ac_header in vfork.h do : ac_fn_cxx_check_header_mongrel "$LINENO" "vfork.h" "ac_cv_header_vfork_h" "$ac_includes_default" if test "x$ac_cv_header_vfork_h" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_VFORK_H 1 _ACEOF fi done for ac_func in fork vfork do : as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh` ac_fn_cxx_check_func "$LINENO" "$ac_func" "$as_ac_var" if eval test \"x\$"$as_ac_var"\" = x"yes"; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1 _ACEOF fi done if test "x$ac_cv_func_fork" = xyes; then { $as_echo "$as_me:${as_lineno-$LINENO}: 
checking for working fork" >&5 $as_echo_n "checking for working fork... " >&6; } if ${ac_cv_func_fork_works+:} false; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : ac_cv_func_fork_works=cross else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $ac_includes_default int main () { /* By Ruediger Kuhlmann. */ return fork () < 0; ; return 0; } _ACEOF if ac_fn_cxx_try_run "$LINENO"; then : ac_cv_func_fork_works=yes else ac_cv_func_fork_works=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_fork_works" >&5 $as_echo "$ac_cv_func_fork_works" >&6; } else ac_cv_func_fork_works=$ac_cv_func_fork fi if test "x$ac_cv_func_fork_works" = xcross; then case $host in *-*-amigaos* | *-*-msdosdjgpp*) # Override, as these systems have only a dummy fork() stub ac_cv_func_fork_works=no ;; *) ac_cv_func_fork_works=yes ;; esac { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: result $ac_cv_func_fork_works guessed because of cross compilation" >&5 $as_echo "$as_me: WARNING: result $ac_cv_func_fork_works guessed because of cross compilation" >&2;} fi ac_cv_func_vfork_works=$ac_cv_func_vfork if test "x$ac_cv_func_vfork" = xyes; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for working vfork" >&5 $as_echo_n "checking for working vfork... " >&6; } if ${ac_cv_func_vfork_works+:} false; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : ac_cv_func_vfork_works=cross else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Thanks to Paul Eggert for this test. */ $ac_includes_default #include #ifdef HAVE_VFORK_H # include #endif /* On some sparc systems, changes by the child to local and incoming argument registers are propagated back to the parent. The compiler is told about this with #include , but some compilers (e.g. gcc -O) don't grok . 
Test for this by using a static variable whose address is put into a register that is clobbered by the vfork. */ static void #ifdef __cplusplus sparc_address_test (int arg) # else sparc_address_test (arg) int arg; #endif { static pid_t child; if (!child) { child = vfork (); if (child < 0) { perror ("vfork"); _exit(2); } if (!child) { arg = getpid(); write(-1, "", 0); _exit (arg); } } } int main () { pid_t parent = getpid (); pid_t child; sparc_address_test (0); child = vfork (); if (child == 0) { /* Here is another test for sparc vfork register problems. This test uses lots of local variables, at least as many local variables as main has allocated so far including compiler temporaries. 4 locals are enough for gcc 1.40.3 on a Solaris 4.1.3 sparc, but we use 8 to be safe. A buggy compiler should reuse the register of parent for one of the local variables, since it will think that parent can't possibly be used any more in this routine. Assigning to the local variable will thus munge parent in the parent process. */ pid_t p = getpid(), p1 = getpid(), p2 = getpid(), p3 = getpid(), p4 = getpid(), p5 = getpid(), p6 = getpid(), p7 = getpid(); /* Convince the compiler that p..p7 are live; otherwise, it might use the same hardware register for all 8 local variables. */ if (p != p1 || p != p2 || p != p3 || p != p4 || p != p5 || p != p6 || p != p7) _exit(1); /* On some systems (e.g. IRIX 3.3), vfork doesn't separate parent from child file descriptors. If the child closes a descriptor before it execs or exits, this munges the parent's descriptor as well. Test for this by closing stdout in the child. */ _exit(close(fileno(stdout)) != 0); } else { int status; struct stat st; while (wait(&status) != child) ; return ( /* Was there some problem with vforking? */ child < 0 /* Did the child fail? (This shouldn't happen.) */ || status /* Did the vfork/compiler bug occur? */ || parent != getpid() /* Did the file descriptor bug occur? 
*/ || fstat(fileno(stdout), &st) != 0 ); } } _ACEOF if ac_fn_cxx_try_run "$LINENO"; then : ac_cv_func_vfork_works=yes else ac_cv_func_vfork_works=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_vfork_works" >&5 $as_echo "$ac_cv_func_vfork_works" >&6; } fi; if test "x$ac_cv_func_fork_works" = xcross; then ac_cv_func_vfork_works=$ac_cv_func_vfork { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: result $ac_cv_func_vfork_works guessed because of cross compilation" >&5 $as_echo "$as_me: WARNING: result $ac_cv_func_vfork_works guessed because of cross compilation" >&2;} fi if test "x$ac_cv_func_vfork_works" = xyes; then $as_echo "#define HAVE_WORKING_VFORK 1" >>confdefs.h else $as_echo "#define vfork fork" >>confdefs.h fi if test "x$ac_cv_func_fork_works" = xyes; then $as_echo "#define HAVE_WORKING_FORK 1" >>confdefs.h fi if test $ac_cv_c_compiler_gnu = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether $CC needs -traditional" >&5 $as_echo_n "checking whether $CC needs -traditional... " >&6; } if ${ac_cv_prog_gcc_traditional+:} false; then : $as_echo_n "(cached) " >&6 else ac_pattern="Autoconf.*'x'" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include Autoconf TIOCGETP _ACEOF if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | $EGREP "$ac_pattern" >/dev/null 2>&1; then : ac_cv_prog_gcc_traditional=yes else ac_cv_prog_gcc_traditional=no fi rm -f conftest* if test $ac_cv_prog_gcc_traditional = no; then cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include Autoconf TCGETA _ACEOF if (eval "$ac_cpp conftest.$ac_ext") 2>&5 | $EGREP "$ac_pattern" >/dev/null 2>&1; then : ac_cv_prog_gcc_traditional=yes fi rm -f conftest* fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_prog_gcc_traditional" >&5 $as_echo "$ac_cv_prog_gcc_traditional" >&6; } if test $ac_cv_prog_gcc_traditional = yes; then CC="$CC -traditional" fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether lstat correctly handles trailing slash" >&5 $as_echo_n "checking whether lstat correctly handles trailing slash... " >&6; } if ${ac_cv_func_lstat_dereferences_slashed_symlink+:} false; then : $as_echo_n "(cached) " >&6 else rm -f conftest.sym conftest.file echo >conftest.file if test "$as_ln_s" = "ln -s" && ln -s conftest.file conftest.sym; then if test "$cross_compiling" = yes; then : ac_cv_func_lstat_dereferences_slashed_symlink=no else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $ac_includes_default int main () { struct stat sbuf; /* Linux will dereference the symlink and fail, as required by POSIX. That is better in the sense that it means we will not have to compile and use the lstat wrapper. */ return lstat ("conftest.sym/", &sbuf) == 0; ; return 0; } _ACEOF if ac_fn_cxx_try_run "$LINENO"; then : ac_cv_func_lstat_dereferences_slashed_symlink=yes else ac_cv_func_lstat_dereferences_slashed_symlink=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi else # If the `ln -s' command failed, then we probably don't even # have an lstat function. 
ac_cv_func_lstat_dereferences_slashed_symlink=no fi rm -f conftest.sym conftest.file fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_lstat_dereferences_slashed_symlink" >&5 $as_echo "$ac_cv_func_lstat_dereferences_slashed_symlink" >&6; } test $ac_cv_func_lstat_dereferences_slashed_symlink = yes && cat >>confdefs.h <<_ACEOF #define LSTAT_FOLLOWS_SLASHED_SYMLINK 1 _ACEOF if test "x$ac_cv_func_lstat_dereferences_slashed_symlink" = xno; then case " $LIBOBJS " in *" lstat.$ac_objext "* ) ;; *) LIBOBJS="$LIBOBJS lstat.$ac_objext" ;; esac fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether lstat accepts an empty string" >&5 $as_echo_n "checking whether lstat accepts an empty string... " >&6; } if ${ac_cv_func_lstat_empty_string_bug+:} false; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : ac_cv_func_lstat_empty_string_bug=yes else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $ac_includes_default int main () { struct stat sbuf; return lstat ("", &sbuf) == 0; ; return 0; } _ACEOF if ac_fn_cxx_try_run "$LINENO"; then : ac_cv_func_lstat_empty_string_bug=no else ac_cv_func_lstat_empty_string_bug=yes fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_lstat_empty_string_bug" >&5 $as_echo "$ac_cv_func_lstat_empty_string_bug" >&6; } if test $ac_cv_func_lstat_empty_string_bug = yes; then case " $LIBOBJS " in *" lstat.$ac_objext "* ) ;; *) LIBOBJS="$LIBOBJS lstat.$ac_objext" ;; esac cat >>confdefs.h <<_ACEOF #define HAVE_LSTAT_EMPTY_STRING_BUG 1 _ACEOF fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether sys/types.h defines makedev" >&5 $as_echo_n "checking whether sys/types.h defines makedev... " >&6; } if ${ac_cv_header_sys_types_h_makedev+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #include int main () { return makedev(0, 0); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_header_sys_types_h_makedev=yes else ac_cv_header_sys_types_h_makedev=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_sys_types_h_makedev" >&5 $as_echo "$ac_cv_header_sys_types_h_makedev" >&6; } if test $ac_cv_header_sys_types_h_makedev = no; then ac_fn_cxx_check_header_mongrel "$LINENO" "sys/mkdev.h" "ac_cv_header_sys_mkdev_h" "$ac_includes_default" if test "x$ac_cv_header_sys_mkdev_h" = xyes; then : $as_echo "#define MAJOR_IN_MKDEV 1" >>confdefs.h fi if test $ac_cv_header_sys_mkdev_h = no; then ac_fn_cxx_check_header_mongrel "$LINENO" "sys/sysmacros.h" "ac_cv_header_sys_sysmacros_h" "$ac_includes_default" if test "x$ac_cv_header_sys_sysmacros_h" = xyes; then : $as_echo "#define MAJOR_IN_SYSMACROS 1" >>confdefs.h fi fi fi for ac_header in stdlib.h do : ac_fn_cxx_check_header_mongrel "$LINENO" "stdlib.h" "ac_cv_header_stdlib_h" "$ac_includes_default" if test "x$ac_cv_header_stdlib_h" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_STDLIB_H 1 _ACEOF fi done { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GNU libc compatible malloc" >&5 $as_echo_n "checking for GNU libc compatible malloc... " >&6; } if ${ac_cv_func_malloc_0_nonnull+:} false; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : ac_cv_func_malloc_0_nonnull=no else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #if defined STDC_HEADERS || defined HAVE_STDLIB_H # include #else char *malloc (); #endif int main () { return ! 
malloc (0); ; return 0; } _ACEOF if ac_fn_cxx_try_run "$LINENO"; then : ac_cv_func_malloc_0_nonnull=yes else ac_cv_func_malloc_0_nonnull=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_malloc_0_nonnull" >&5 $as_echo "$ac_cv_func_malloc_0_nonnull" >&6; } if test $ac_cv_func_malloc_0_nonnull = yes; then : $as_echo "#define HAVE_MALLOC 1" >>confdefs.h else $as_echo "#define HAVE_MALLOC 0" >>confdefs.h case " $LIBOBJS " in *" malloc.$ac_objext "* ) ;; *) LIBOBJS="$LIBOBJS malloc.$ac_objext" ;; esac $as_echo "#define malloc rpl_malloc" >>confdefs.h fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking return type of signal handlers" >&5 $as_echo_n "checking return type of signal handlers... " >&6; } if ${ac_cv_type_signal+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include int main () { return *(signal (0, 0)) (0) == 1; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_type_signal=int else ac_cv_type_signal=void fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_type_signal" >&5 $as_echo "$ac_cv_type_signal" >&6; } cat >>confdefs.h <<_ACEOF #define RETSIGTYPE $ac_cv_type_signal _ACEOF { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether stat accepts an empty string" >&5 $as_echo_n "checking whether stat accepts an empty string... " >&6; } if ${ac_cv_func_stat_empty_string_bug+:} false; then : $as_echo_n "(cached) " >&6 else if test "$cross_compiling" = yes; then : ac_cv_func_stat_empty_string_bug=yes else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ $ac_includes_default int main () { struct stat sbuf; return stat ("", &sbuf) == 0; ; return 0; } _ACEOF if ac_fn_cxx_try_run "$LINENO"; then : ac_cv_func_stat_empty_string_bug=no else ac_cv_func_stat_empty_string_bug=yes fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_stat_empty_string_bug" >&5 $as_echo "$ac_cv_func_stat_empty_string_bug" >&6; } if test $ac_cv_func_stat_empty_string_bug = yes; then case " $LIBOBJS " in *" stat.$ac_objext "* ) ;; *) LIBOBJS="$LIBOBJS stat.$ac_objext" ;; esac cat >>confdefs.h <<_ACEOF #define HAVE_STAT_EMPTY_STRING_BUG 1 _ACEOF fi for ac_header in $ac_header_list do : as_ac_Header=`$as_echo "ac_cv_header_$ac_header" | $as_tr_sh` ac_fn_cxx_check_header_compile "$LINENO" "$ac_header" "$as_ac_Header" "$ac_includes_default " if eval test \"x\$"$as_ac_Header"\" = x"yes"; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_header" | $as_tr_cpp` 1 _ACEOF fi done { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether utime accepts a null argument" >&5 $as_echo_n "checking whether utime accepts a null argument... " >&6; } if ${ac_cv_func_utime_null+:} false; then : $as_echo_n "(cached) " >&6 else rm -f conftest.data; >conftest.data # Sequent interprets utime(file, 0) to mean use start of epoch. Wrong. if test "$cross_compiling" = yes; then : ac_cv_func_utime_null='guessing yes' else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ $ac_includes_default #ifdef HAVE_UTIME_H # include #endif int main () { struct stat s, t; return ! 
(stat ("conftest.data", &s) == 0 && utime ("conftest.data", 0) == 0 && stat ("conftest.data", &t) == 0 && t.st_mtime >= s.st_mtime && t.st_mtime - s.st_mtime < 120); ; return 0; } _ACEOF if ac_fn_cxx_try_run "$LINENO"; then : ac_cv_func_utime_null=yes else ac_cv_func_utime_null=no fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_func_utime_null" >&5 $as_echo "$ac_cv_func_utime_null" >&6; } if test "x$ac_cv_func_utime_null" != xno; then ac_cv_func_utime_null=yes $as_echo "#define HAVE_UTIME_NULL 1" >>confdefs.h fi rm -f conftest.data { $as_echo "$as_me:${as_lineno-$LINENO}: checking whether time.h and sys/time.h may both be included" >&5 $as_echo_n "checking whether time.h and sys/time.h may both be included... " >&6; } if ${ac_cv_header_time+:} false; then : $as_echo_n "(cached) " >&6 else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #include #include #include int main () { if ((struct tm *) 0) return 0; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : ac_cv_header_time=yes else ac_cv_header_time=no fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_header_time" >&5 $as_echo "$ac_cv_header_time" >&6; } if test $ac_cv_header_time = yes; then $as_echo "#define TIME_WITH_SYS_TIME 1" >>confdefs.h fi for ac_func in lchown mkdir regcomp rmdir strerr-or strerror_r utime fdopendir readdir_r ctime_r getgrnam_r getpwnam_r localtime_r do : as_ac_var=`$as_echo "ac_cv_func_$ac_func" | $as_tr_sh` ac_fn_cxx_check_func "$LINENO" "$ac_func" "$as_ac_var" if eval test \"x\$"$as_ac_var"\" = x"yes"; then : cat >>confdefs.h <<_ACEOF #define `$as_echo "HAVE_$ac_func" | $as_tr_cpp` 1 _ACEOF fi done { $as_echo "$as_me:${as_lineno-$LINENO}: checking for c++11 support" >&5 $as_echo_n "checking for c++11 support... 
" >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { thread_local static int test = 0; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: checking for c++ support with -std=c++11 option set" >&5 $as_echo_n "checking for c++ support with -std=c++11 option set... " >&6; } CXXSTDFLAGS="-std=c++11" CXXFLAGS="$CXXFLAGS $CXXSTDFLAGS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ int main () { thread_local static int test = 0; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } as_fn_error $? "C++ compiler lack support for c++11 standard" "$LINENO" 5 fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext { $as_echo "$as_me:${as_lineno-$LINENO}: checking for sed -r/-E option" >&5 $as_echo_n "checking for sed -r/-E option... " >&6; } if sed -r -e 's/(c|o)+/\1/g' > /dev/null < /dev/null ; then local_sed="gnu" { $as_echo "$as_me:${as_lineno-$LINENO}: result: GNU sed, using -r option for regex" >&5 $as_echo "GNU sed, using -r option for regex" >&6; } else if sed -E -e 's/(c|o)+/\1/g' > /dev/null < /dev/null ; then local_sed="bsd" { $as_echo "$as_me:${as_lineno-$LINENO}: result: BSD sed, using -E option for regex" >&5 $as_echo "BSD sed, using -E option for regex" >&6; } else local_sed=unknown as_fn_error $? "unknown switch to use with sed to support regex" "$LINENO" 5 fi fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for getopt() in " >&5 $as_echo_n "checking for getopt() in ... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ extern "C" { #if HAVE_UNISTD_H #include #endif } int main () { getopt(0, 0, 0); ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : $as_echo "#define HAVE_GETOPT_IN_UNISTD_H 1" >>confdefs.h { $as_echo "$as_me:${as_lineno-$LINENO}: result: present" >&5 $as_echo "present" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: absent" >&5 $as_echo "absent" >&6; } fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext { $as_echo "$as_me:${as_lineno-$LINENO}: checking for getopt_long() in " >&5 $as_echo_n "checking for getopt_long() in ... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #if HAVE_UNISTD_H #include #endif } int main () { getopt_long(0, 0, 0, 0, 0); ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : $as_echo "#define HAVE_GETOPT_LONG_IN_UNISTD_H 1" >>confdefs.h { $as_echo "$as_me:${as_lineno-$LINENO}: result: present" >&5 $as_echo "present" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: absent" >&5 $as_echo "absent" >&6; } fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext { $as_echo "$as_me:${as_lineno-$LINENO}: checking for optreset presence" >&5 $as_echo_n "checking for optreset presence... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #if HAVE_STDIO_H #include #endif #if HAVE_GETOPT_H #include #else #if HAVE_UNISTD_H #include #endif #endif } int main () { int x = optreset; return 0; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : $as_echo "#define HAVE_OPTRESET 1" >>confdefs.h { $as_echo "$as_me:${as_lineno-$LINENO}: result: available" >&5 $as_echo "available" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: not available" >&5 $as_echo "not available" >&6; } fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Door file support" >&5 $as_echo_n "checking for Door file support... 
" >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #if HAVE_SYS_STAT_H #include #endif #if HAVE_UNISTD_H #include #endif } int main () { struct stat buf; if(S_ISDOOR(buf.st_mode)) return 0; else return 1; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : $as_echo "#define HAVE_DOOR 1" >>confdefs.h { $as_echo "$as_me:${as_lineno-$LINENO}: result: available" >&5 $as_echo "available" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: not available" >&5 $as_echo "not available" >&6; } fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext { $as_echo "$as_me:${as_lineno-$LINENO}: checking for POSIX.1e capabilities support" >&5 $as_echo_n "checking for POSIX.1e capabilities support... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #if HAVE_SYS_CAPABILITY_H #include #else #if HAVE_LINUX_CAPABILITY_H #include #endif #endif #if HAVE_SYS_TYPES_H #include #endif } int main () { cap_t capaset = cap_get_proc(); (void)cap_free((void *)capset); ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : $as_echo "#define HAVE_CAPABILITIES 1" >>confdefs.h { $as_echo "$as_me:${as_lineno-$LINENO}: result: available" >&5 $as_echo "available" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: not available" >&5 $as_echo "not available" >&6; } fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext { $as_echo "$as_me:${as_lineno-$LINENO}: checking for fdatasync() availability" >&5 $as_echo_n "checking for fdatasync() availability... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ extern "C" { #if HAVE_UNISTD_H #include #endif } int main () { (void)fdatasync(0); ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : $as_echo "#define HAVE_FDATASYNC 1" >>confdefs.h { $as_echo "$as_me:${as_lineno-$LINENO}: result: available" >&5 $as_echo "available" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: not available" >&5 $as_echo "not available" >&6; } fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext { $as_echo "$as_me:${as_lineno-$LINENO}: checking for syncfs() availability" >&5 $as_echo_n "checking for syncfs() availability... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #if HAVE_UNISTD_H #include #endif } int main () { (void)syncfs(0); ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : $as_echo "#define HAVE_SYNCFS 1" >>confdefs.h { $as_echo "$as_me:${as_lineno-$LINENO}: result: available" >&5 $as_echo "available" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: not available" >&5 $as_echo "not available" >&6; } fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext { $as_echo "$as_me:${as_lineno-$LINENO}: checking for the possibility to set timestamps at a microsecond precision" >&5 $as_echo_n "checking for the possibility to set timestamps at a microsecond precision... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ extern "C" { #if HAVE_SYS_TYPES_H #include #endif #if HAVE_SYS_STAT_H #include #endif #if HAVE_UNISTD_H #include #endif #if HAVE_UTIMES_H #include #endif #if HAVE_SYS_TIME_H #include #endif } int main () { struct timeval tv[2]; tv[0].tv_sec = 1000; tv[1].tv_usec = 2000; (void)utimes("/tmp/testfile.tmp", tv); /* note that this test program is only compiled+linked not run */ return 0; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : $as_echo "#define LIBDAR_MICROSECOND_WRITE_ACCURACY 1" >>confdefs.h local_microsecond_write=yes { $as_echo "$as_me:${as_lineno-$LINENO}: result: available" >&5 $as_echo "available" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: not available" >&5 $as_echo "not available" >&6; } fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext { $as_echo "$as_me:${as_lineno-$LINENO}: checking for the possibility to read timestamps at a microsecond precision" >&5 $as_echo_n "checking for the possibility to read timestamps at a microsecond precision... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ extern "C" { #if HAVE_SYS_TYPES_H #include #endif #if HAVE_SYS_STAT_H #include #endif #if HAVE_UNISTD_H #include #endif } int main () { struct stat st; /* tv_nsec is nanosecond, but for now we do not keep more precised timing than microseconds because we have not portable way to restore nanoseconds but only seconds and microsecond */ if(st.st_atim.tv_nsec != 0) return 0; else return 1; /* whatever, this test program is only compiled no run */ ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : $as_echo "#define LIBDAR_MICROSECOND_READ_ACCURACY 1" >>confdefs.h local_microsecond_read=yes { $as_echo "$as_me:${as_lineno-$LINENO}: result: available" >&5 $as_echo "available" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: not available" >&5 $as_echo "not available" >&6; } fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext { $as_echo "$as_me:${as_lineno-$LINENO}: checking for lutimes() availability" >&5 $as_echo_n "checking for lutimes() availability... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #if HAVE_SYS_TIME_H #include #endif } // extern "C" int main () { struct timeval tv[2]; int lu = lutimes("/tmp/noway", tv); ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : $as_echo "#define HAVE_LUTIMES 1" >>confdefs.h local_lutimes=yes { $as_echo "$as_me:${as_lineno-$LINENO}: result: available" >&5 $as_echo "available" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: not available" >&5 $as_echo "not available" >&6; } fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext { $as_echo "$as_me:${as_lineno-$LINENO}: checking for strerror_r flavor" >&5 $as_echo_n "checking for strerror_r flavor... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ extern "C" { #if HAVE_STRING_H #include #endif } // externe "C" int main () { char *ptr = strerror_r(0, 0, 0); ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : $as_echo "#define HAVE_STRERROR_R_CHAR_PTR 1" >>confdefs.h { $as_echo "$as_me:${as_lineno-$LINENO}: result: GNU specific" >&5 $as_echo "GNU specific" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: XSI compliant" >&5 $as_echo "XSI compliant" >&6; } fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext # DAR's features # Check whether --enable-libz-linking was given. if test "${enable_libz_linking+set}" = set; then : enableval=$enable_libz_linking; { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: libz compression support has been disabled by user" >&5 $as_echo "$as_me: WARNING: libz compression support has been disabled by user" >&2;} local_libz="no" else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for deflate in -lz" >&5 $as_echo_n "checking for deflate in -lz... " >&6; } if ${ac_cv_lib_z_deflate+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lz $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char deflate (); int main () { return deflate (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_lib_z_deflate=yes else ac_cv_lib_z_deflate=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_z_deflate" >&5 $as_echo "$ac_cv_lib_z_deflate" >&6; } if test "x$ac_cv_lib_z_deflate" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBZ 1 _ACEOF LIBS="-lz $LIBS" else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: library zlib not found" >&5 $as_echo "$as_me: WARNING: library zlib not found" >&2;} fi ac_fn_cxx_check_header_mongrel "$LINENO" "zlib.h" "ac_cv_header_zlib_h" "$ac_includes_default" if test "x$ac_cv_header_zlib_h" = xyes; then : local_libz="yes" $as_echo "#define HAVE_ZLIB_H 1" >>confdefs.h else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Cannot find zlib.h header file" >&5 $as_echo "$as_me: WARNING: Cannot find zlib.h header file" >&2;} local_libz="no" fi if test "$local_libz" = "yes" ; then cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #if HAVE_ZLIB_H #include #endif } int main () { z_stream *ptr = (z_stream *)0; deflate(ptr, 0); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : $as_echo "#define LIBZ_AVAILABLE 1" >>confdefs.h else local_libz="no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: libz compression support not available" >&5 $as_echo "$as_me: WARNING: libz compression support not available" >&2;} fi fi # Check whether --enable-libbz2-linking was given. 
if test "${enable_libbz2_linking+set}" = set; then : enableval=$enable_libbz2_linking; { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: libbz2 compression support has been disabled by user" >&5 $as_echo "$as_me: WARNING: libbz2 compression support has been disabled by user" >&2;} local_libbz2="no" else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for BZ2_bzCompress in -lbz2" >&5 $as_echo_n "checking for BZ2_bzCompress in -lbz2... " >&6; } if ${ac_cv_lib_bz2_BZ2_bzCompress+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lbz2 $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char BZ2_bzCompress (); int main () { return BZ2_bzCompress (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_lib_bz2_BZ2_bzCompress=yes else ac_cv_lib_bz2_BZ2_bzCompress=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_bz2_BZ2_bzCompress" >&5 $as_echo "$ac_cv_lib_bz2_BZ2_bzCompress" >&6; } if test "x$ac_cv_lib_bz2_BZ2_bzCompress" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBBZ2 1 _ACEOF LIBS="-lbz2 $LIBS" else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: library libbz2 not found" >&5 $as_echo "$as_me: WARNING: library libbz2 not found" >&2;} fi ac_fn_cxx_check_header_mongrel "$LINENO" "bzlib.h" "ac_cv_header_bzlib_h" "$ac_includes_default" if test "x$ac_cv_header_bzlib_h" = xyes; then : local_libbz2="yes" $as_echo "#define HAVE_BZLIB_H 1" >>confdefs.h else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Cannot find bzlib.h header file" >&5 $as_echo "$as_me: WARNING: Cannot find bzlib.h header file" >&2;} local_libbz2="no" fi if test "$local_libbz2" = 
"yes" ; then cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #if HAVE_BZLIB_H #include #endif } int main () { bz_stream *ptr = (bz_stream *)0; BZ2_bzCompress(ptr, 0); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : $as_echo "#define LIBBZ2_AVAILABLE 1" >>confdefs.h else local_libbz2="no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: libbz2 compression support not available" >&5 $as_echo "$as_me: WARNING: libbz2 compression support not available" >&2;} fi fi # Check whether --enable-liblzo2-linking was given. if test "${enable_liblzo2_linking+set}" = set; then : enableval=$enable_liblzo2_linking; { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: lzo compression support has been disabled by user" >&5 $as_echo "$as_me: WARNING: lzo compression support has been disabled by user" >&2;} local_liblzo2="no" else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for lzo1x_1_compress in -llzo2" >&5 $as_echo_n "checking for lzo1x_1_compress in -llzo2... " >&6; } if ${ac_cv_lib_lzo2_lzo1x_1_compress+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-llzo2 $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char lzo1x_1_compress (); int main () { return lzo1x_1_compress (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_lib_lzo2_lzo1x_1_compress=yes else ac_cv_lib_lzo2_lzo1x_1_compress=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_lzo2_lzo1x_1_compress" >&5 $as_echo "$ac_cv_lib_lzo2_lzo1x_1_compress" >&6; } if test "x$ac_cv_lib_lzo2_lzo1x_1_compress" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBLZO2 1 _ACEOF LIBS="-llzo2 $LIBS" else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: library liblzo2 not found" >&5 $as_echo "$as_me: WARNING: library liblzo2 not found" >&2;} fi ac_fn_cxx_check_header_mongrel "$LINENO" "lzo/lzo1x.h" "ac_cv_header_lzo_lzo1x_h" "$ac_includes_default" if test "x$ac_cv_header_lzo_lzo1x_h" = xyes; then : local_liblzo2="yes" $as_echo "#define HAVE_LZO_LZO1X_H 1" >>confdefs.h else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Cannot find lzo/lzo1x.h header file" >&5 $as_echo "$as_me: WARNING: Cannot find lzo/lzo1x.h header file" >&2;} local_liblzo2="no" fi if test "$local_liblzo2" = "yes" ; then cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #if HAVE_LZO_LZO1X_H #include #endif } int main () { (void)lzo1x_1_compress(0, 0, 0, 0, 0); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : $as_echo "#define LIBLZO2_AVAILABLE 1" >>confdefs.h else local_liblzo2="no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: lzo compression support not available" >&5 $as_echo "$as_me: WARNING: lzo compression support not available" >&2;} fi fi # Check whether --enable-libxz-linking was given. 
if test "${enable_libxz_linking+set}" = set; then : enableval=$enable_libxz_linking; { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: libxz compression support has been disabled by user" >&5 $as_echo "$as_me: WARNING: libxz compression support has been disabled by user" >&2;} local_libxz="no" else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for lzma_code in -llzma" >&5 $as_echo_n "checking for lzma_code in -llzma... " >&6; } if ${ac_cv_lib_lzma_lzma_code+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-llzma $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char lzma_code (); int main () { return lzma_code (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_lib_lzma_lzma_code=yes else ac_cv_lib_lzma_lzma_code=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_lzma_lzma_code" >&5 $as_echo "$ac_cv_lib_lzma_lzma_code" >&6; } if test "x$ac_cv_lib_lzma_lzma_code" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBLZMA 1 _ACEOF LIBS="-llzma $LIBS" else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: library liblzma not found" >&5 $as_echo "$as_me: WARNING: library liblzma not found" >&2;} fi ac_fn_cxx_check_header_mongrel "$LINENO" "lzma.h" "ac_cv_header_lzma_h" "$ac_includes_default" if test "x$ac_cv_header_lzma_h" = xyes; then : local_libxz="yes" $as_echo "#define HAVE_LZMA_H 1" >>confdefs.h else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Cannot find lzma.h header file" >&5 $as_echo "$as_me: WARNING: Cannot find lzma.h header file" >&2;} local_libxz="no" fi if test "$local_libxz" = "yes" ; then cat confdefs.h - <<_ACEOF 
>conftest.$ac_ext /* end confdefs.h. */ extern "C" { #if HAVE_LZMA_H #include #endif } int main () { lzma_stream ptr = LZMA_STREAM_INIT; lzma_ret tmp = lzma_easy_encoder(&ptr, 2, LZMA_CHECK_CRC32); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : $as_echo "#define LIBLZMA_AVAILABLE 1" >>confdefs.h else local_libxz="no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: libxz compression support not available" >&5 $as_echo "$as_me: WARNING: libxz compression support not available" >&2;} fi fi # Check whether --enable-libgcrypt-linking was given. if test "${enable_libgcrypt_linking+set}" = set; then : enableval=$enable_libgcrypt_linking; { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: strong encryption support has been disabled by user" >&5 $as_echo "$as_me: WARNING: strong encryption support has been disabled by user" >&2;} local_crypto="no" else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for gpg_err_init in -lgpg-error" >&5 $as_echo_n "checking for gpg_err_init in -lgpg-error... " >&6; } if ${ac_cv_lib_gpg_error_gpg_err_init+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lgpg-error $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char gpg_err_init (); int main () { return gpg_err_init (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_lib_gpg_error_gpg_err_init=yes else ac_cv_lib_gpg_error_gpg_err_init=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_gpg_error_gpg_err_init" >&5 $as_echo "$ac_cv_lib_gpg_error_gpg_err_init" >&6; } if test "x$ac_cv_lib_gpg_error_gpg_err_init" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBGPG_ERROR 1 _ACEOF LIBS="-lgpg-error $LIBS" fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for gcry_check_version in -lgcrypt" >&5 $as_echo_n "checking for gcry_check_version in -lgcrypt... " >&6; } if ${ac_cv_lib_gcrypt_gcry_check_version+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lgcrypt $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char gcry_check_version (); int main () { return gcry_check_version (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_lib_gcrypt_gcry_check_version=yes else ac_cv_lib_gcrypt_gcry_check_version=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_gcrypt_gcry_check_version" >&5 $as_echo "$ac_cv_lib_gcrypt_gcry_check_version" >&6; } if test "x$ac_cv_lib_gcrypt_gcry_check_version" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBGCRYPT 1 _ACEOF LIBS="-lgcrypt $LIBS" fi ac_fn_cxx_check_header_mongrel "$LINENO" "gcrypt.h" "ac_cv_header_gcrypt_h" "$ac_includes_default" if test "x$ac_cv_header_gcrypt_h" = xyes; then : local_crypto="yes" $as_echo "#define HAVE_GCRYPT_H 1" >>confdefs.h else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Cannt find gcrypt.h header file" >&5 $as_echo "$as_me: WARNING: Cannt find gcrypt.h header file" >&2;} local_crypto="no" fi if test "$local_crypto" = "yes" ; then min_version_gcrypt="1.4.0" cat >>confdefs.h <<_ACEOF #define MIN_VERSION_GCRYPT "$min_version_gcrypt" _ACEOF min_version_gcrypt_hash_bug="1.6.0" cat >>confdefs.h <<_ACEOF #define MIN_VERSION_GCRYPT_HASH_BUG "$min_version_gcrypt_hash_bug" _ACEOF { $as_echo "$as_me:${as_lineno-$LINENO}: checking for libgcrypt usability" >&5 $as_echo_n "checking for libgcrypt usability... " >&6; } if test "$cross_compiling" = yes; then : { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "cannot run test program while cross compiling See \`config.log' for more details" "$LINENO" 5; } else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ extern "C" { #if HAVE_GCRYPT_H #include #endif } #include using namespace std; int main () { if(!gcry_check_version(MIN_VERSION_GCRYPT)) { cout << "ligcrypt version too low, minimum version is " << MIN_VERSION_GCRYPT << endl; exit(1); } else exit(0); ; return 0; } _ACEOF if ac_fn_cxx_try_run "$LINENO"; then : $as_echo "#define CRYPTO_AVAILABLE 1" >>confdefs.h { $as_echo "$as_me:${as_lineno-$LINENO}: result: ok" >&5 $as_echo "ok" >&6; } if test "$cross_compiling" = yes; then : { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "cannot run test program while cross compiling See \`config.log' for more details" "$LINENO" 5; } else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #if HAVE_GCRYPT_H #include #endif } int main () { if(!gcry_check_version(MIN_VERSION_GCRYPT_HASH_BUG)) exit(1); else exit(0); ; return 0; } _ACEOF if ac_fn_cxx_try_run "$LINENO"; then : else libgcrypt_hash_bug="yes" fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi else if test "$?" = "1" ; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: failed: need libgcypt >= $min_version_gcrypt, disabling strong encryption support" >&5 $as_echo "failed: need libgcypt >= $min_version_gcrypt, disabling strong encryption support" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: failed: libgcrypt is unusable, cannot even call gcry_check_version(). Disabling strong encryption support" >&5 $as_echo "failed: libgcrypt is unusable, cannot even call gcry_check_version(). 
Disabling strong encryption support" >&6; } fi local_crypto="no" fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: strong encryption support not available" >&5 $as_echo "$as_me: WARNING: strong encryption support not available" >&2;} fi fi # Check whether --enable-ea-support was given. if test "${enable_ea_support+set}" = set; then : enableval=$enable_ea_support; { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Extended Attribute support" >&5 $as_echo_n "checking for Extended Attribute support... " >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: result: disabled" >&5 $as_echo "disabled" >&6; } else for ac_header in attr/xattr.h do : ac_fn_cxx_check_header_mongrel "$LINENO" "attr/xattr.h" "ac_cv_header_attr_xattr_h" "$ac_includes_default" if test "x$ac_cv_header_attr_xattr_h" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_ATTR_XATTR_H 1 _ACEOF fi done { $as_echo "$as_me:${as_lineno-$LINENO}: checking for lgetxattr in -lattr" >&5 $as_echo_n "checking for lgetxattr in -lattr... " >&6; } if ${ac_cv_lib_attr_lgetxattr+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lattr $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char lgetxattr (); int main () { return lgetxattr (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_lib_attr_lgetxattr=yes else ac_cv_lib_attr_lgetxattr=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_attr_lgetxattr" >&5 $as_echo "$ac_cv_lib_attr_lgetxattr" >&6; } if test "x$ac_cv_lib_attr_lgetxattr" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBATTR 1 _ACEOF LIBS="-lattr $LIBS" fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Unix Extended Attribute support" >&5 $as_echo_n "checking for Unix Extended Attribute support... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #if HAVE_SYS_TYPES_H #include #endif #if HAVE_ATTR_XATTR_H #include #endif } int main () { lgetxattr((char *)0, (char *)0, (void *)0, 0); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : $as_echo "#define EA_SUPPORT /**/" >>confdefs.h local_ea_support="yes" { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } for ac_header in sys/xattr.h do : ac_fn_cxx_check_header_mongrel "$LINENO" "sys/xattr.h" "ac_cv_header_sys_xattr_h" "$ac_includes_default" if test "x$ac_cv_header_sys_xattr_h" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_SYS_XATTR_H 1 _ACEOF fi done { $as_echo "$as_me:${as_lineno-$LINENO}: checking for fgetxattr in -lc" >&5 $as_echo_n "checking for fgetxattr in -lc... " >&6; } if ${ac_cv_lib_c_fgetxattr+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lc $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. 
Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char fgetxattr (); int main () { return fgetxattr (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_lib_c_fgetxattr=yes else ac_cv_lib_c_fgetxattr=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_c_fgetxattr" >&5 $as_echo "$ac_cv_lib_c_fgetxattr" >&6; } if test "x$ac_cv_lib_c_fgetxattr" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBC 1 _ACEOF LIBS="-lc $LIBS" fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for Mac OS X Extended Attribute support" >&5 $as_echo_n "checking for Mac OS X Extended Attribute support... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #if HAVE_SYS_XATTR_H #include #endif } int main () { getxattr((char *)0, (char *)0, (void *)0, 0, 0, XATTR_NOFOLLOW); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : $as_echo "#define EA_SUPPORT /**/" >>confdefs.h $as_echo "#define OSX_EA_SUPPORT /**/" >>confdefs.h local_ea_support="yes" { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking ext2fs.h availability" >&5 $as_echo_n "checking ext2fs.h availability... " >&6; } # Check whether --enable-nodump-flag was given. 
if test "${enable_nodump_flag+set}" = set; then : enableval=$enable_nodump_flag; { $as_echo "$as_me:${as_lineno-$LINENO}: result: extX FSA disabled" >&5 $as_echo "extX FSA disabled" >&6; } else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #include #if HAVE_SYS_IOCTL_H #include #endif } int main () { int fd, f; ioctl(fd, EXT2_IOC_GETFLAGS, &f); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : $as_echo "#define LIBDAR_NODUMP_FEATURE NODUMP_EXT2FS" >>confdefs.h local_nodump_feature="yes" { $as_echo "$as_me:${as_lineno-$LINENO}: result: found " >&5 $as_echo "found " >&6; } else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #include #if HAVE_SYS_IOCTL_H #include #endif } int main () { int fd, f; ioctl(fd, EXT2_IOC_GETFLAGS, &f); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : $as_echo "#define LIBDAR_NODUMP_FEATURE NODUMP_LINUX" >>confdefs.h local_nodump_feature="yes" { $as_echo "$as_me:${as_lineno-$LINENO}: result: found " >&5 $as_echo "found " >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: NOT FOUND" >&5 $as_echo "NOT FOUND" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cannot find ext2_fs.h header file, nodump-flag and extX FSA features will not be available" >&5 $as_echo "$as_me: WARNING: cannot find ext2_fs.h header file, nodump-flag and extX FSA features will not be available" >&2;} fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking birth time availability" >&5 $as_echo_n "checking birth time availability... " >&6; } # Check whether --enable-birthtime was given. 
if test "${enable_birthtime+set}" = set; then : enableval=$enable_birthtime; { $as_echo "$as_me:${as_lineno-$LINENO}: result: hfs+ FSA disabled" >&5 $as_echo "hfs+ FSA disabled" >&6; } else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #if HAVE_SYS_TYPE_H #include #endif #if HAVE_SYS_STAT_H #include #endif #if HAVE_UNISTD_H #include #endif } int main () { struct stat tmp; int ret = stat("/", &tmp); time_t birth = tmp.st_birthtime; ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : $as_echo "#define LIBDAR_BIRTHTIME 1" >>confdefs.h local_birthtime="yes" { $as_echo "$as_me:${as_lineno-$LINENO}: result: found" >&5 $as_echo "found" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: NOT FOUND" >&5 $as_echo "NOT FOUND" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Cannot find support for birthtime, HFS+ FSA support will not be available" >&5 $as_echo "$as_me: WARNING: Cannot find support for birthtime, HFS+ FSA support will not be available" >&2;} fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi # Check whether --enable-gnugetopt was given. if test "${enable_gnugetopt+set}" = set; then : enableval=$enable_gnugetopt; else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for getopt_long in -lgnugetopt" >&5 $as_echo_n "checking for getopt_long in -lgnugetopt... " >&6; } if ${ac_cv_lib_gnugetopt_getopt_long+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lgnugetopt $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char getopt_long (); int main () { return getopt_long (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_lib_gnugetopt_getopt_long=yes else ac_cv_lib_gnugetopt_getopt_long=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_gnugetopt_getopt_long" >&5 $as_echo "$ac_cv_lib_gnugetopt_getopt_long" >&6; } if test "x$ac_cv_lib_gnugetopt_getopt_long" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBGNUGETOPT 1 _ACEOF LIBS="-lgnugetopt $LIBS" fi fi # Check whether --enable-librsync-linking was given. if test "${enable_librsync_linking+set}" = set; then : enableval=$enable_librsync_linking; { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: librsync delta-compression support has been disabled by user" >&5 $as_echo "$as_me: WARNING: librsync delta-compression support has been disabled by user" >&2;} local_librsync="no" else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for rs_strerror in -lrsync" >&5 $as_echo_n "checking for rs_strerror in -lrsync... " >&6; } if ${ac_cv_lib_rsync_rs_strerror+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lrsync $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char rs_strerror (); int main () { return rs_strerror (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_lib_rsync_rs_strerror=yes else ac_cv_lib_rsync_rs_strerror=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_rsync_rs_strerror" >&5 $as_echo "$ac_cv_lib_rsync_rs_strerror" >&6; } if test "x$ac_cv_lib_rsync_rs_strerror" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBRSYNC 1 _ACEOF LIBS="-lrsync $LIBS" else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: librsync library not found" >&5 $as_echo "$as_me: WARNING: librsync library not found" >&2;} fi ac_fn_cxx_check_header_mongrel "$LINENO" "librsync.h" "ac_cv_header_librsync_h" "$ac_includes_default" if test "x$ac_cv_header_librsync_h" = xyes; then : local_librsync="yes" $as_echo "#define HAVE_LIBRSYNC_H 1" >>confdefs.h else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Cannot find librsync.h header file" >&5 $as_echo "$as_me: WARNING: Cannot find librsync.h header file" >&2;} local_librsync="no" fi if test "$local_librsync" = "yes" ; then cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #if HAVE_LIBRSYNC_H #include #include #endif } int main () { rs_result err = RS_DONE; (void) rs_strerror(err); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : $as_echo "#define LIBRSYNC_AVAILABLE 1" >>confdefs.h else local_librsync="no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: librsync compression support not available" >&5 $as_echo "$as_me: WARNING: librsync compression support not available" >&2;} fi fi if test "x$ac_cv_env_PKG_CONFIG_set" != "xset"; then if test -n "$ac_tool_prefix"; then # Extract the first word of "${ac_tool_prefix}pkg-config", so it can be a program name with args. 
set dummy ${ac_tool_prefix}pkg-config; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_PKG_CONFIG+:} false; then : $as_echo_n "(cached) " >&6 else case $PKG_CONFIG in [\\/]* | ?:[\\/]*) ac_cv_path_PKG_CONFIG="$PKG_CONFIG" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_PKG_CONFIG="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS ;; esac fi PKG_CONFIG=$ac_cv_path_PKG_CONFIG if test -n "$PKG_CONFIG"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $PKG_CONFIG" >&5 $as_echo "$PKG_CONFIG" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi if test -z "$ac_cv_path_PKG_CONFIG"; then ac_pt_PKG_CONFIG=$PKG_CONFIG # Extract the first word of "pkg-config", so it can be a program name with args. set dummy pkg-config; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_ac_pt_PKG_CONFIG+:} false; then : $as_echo_n "(cached) " >&6 else case $ac_pt_PKG_CONFIG in [\\/]* | ?:[\\/]*) ac_cv_path_ac_pt_PKG_CONFIG="$ac_pt_PKG_CONFIG" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_ac_pt_PKG_CONFIG="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS ;; esac fi ac_pt_PKG_CONFIG=$ac_cv_path_ac_pt_PKG_CONFIG if test -n "$ac_pt_PKG_CONFIG"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_pt_PKG_CONFIG" >&5 $as_echo "$ac_pt_PKG_CONFIG" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "x$ac_pt_PKG_CONFIG" = x; then PKG_CONFIG="" else case $cross_compiling:$ac_tool_warned in yes:) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: using cross tools not prefixed with host triplet" >&5 $as_echo "$as_me: WARNING: using cross tools not prefixed with host triplet" >&2;} ac_tool_warned=yes ;; esac PKG_CONFIG=$ac_pt_PKG_CONFIG fi else PKG_CONFIG="$ac_cv_path_PKG_CONFIG" fi fi if test -n "$PKG_CONFIG"; then _pkg_min_version=0.9.0 { $as_echo "$as_me:${as_lineno-$LINENO}: checking pkg-config is at least version $_pkg_min_version" >&5 $as_echo_n "checking pkg-config is at least version $_pkg_min_version... " >&6; } if $PKG_CONFIG --atleast-pkgconfig-version $_pkg_min_version; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } PKG_CONFIG="" fi fi # Check whether --enable-libcurl-linking was given. if test "${enable_libcurl_linking+set}" = set; then : enableval=$enable_libcurl_linking; local_libcurl="no" else if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"libcurl\""; } >&5 ($PKG_CONFIG --exists --print-errors "libcurl") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? 
= $ac_status" >&5 test $ac_status = 0; }; then pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for LIBCURL" >&5 $as_echo_n "checking for LIBCURL... " >&6; } if test -n "$LIBCURL_CFLAGS"; then pkg_cv_LIBCURL_CFLAGS="$LIBCURL_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"libcurl\""; } >&5 ($PKG_CONFIG --exists --print-errors "libcurl") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_LIBCURL_CFLAGS=`$PKG_CONFIG --cflags "libcurl" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$LIBCURL_LIBS"; then pkg_cv_LIBCURL_LIBS="$LIBCURL_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"libcurl\""; } >&5 ($PKG_CONFIG --exists --print-errors "libcurl") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_LIBCURL_LIBS=`$PKG_CONFIG --libs "libcurl" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then LIBCURL_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "libcurl" 2>&1` else LIBCURL_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "libcurl" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$LIBCURL_PKG_ERRORS" >&5 as_fn_error $? "libcurl not found, but reported to exist !?!" 
"$LINENO" 5 elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } as_fn_error $? "libcurl not found, but reported to exist !?!" "$LINENO" 5 else LIBCURL_CFLAGS=$pkg_cv_LIBCURL_CFLAGS LIBCURL_LIBS=$pkg_cv_LIBCURL_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } fi $as_echo "#define HAVE_LIBCURL 1" >>confdefs.h else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for curl_global_init in -lcurl" >&5 $as_echo_n "checking for curl_global_init in -lcurl... " >&6; } if ${ac_cv_lib_curl_curl_global_init+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lcurl $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char curl_global_init (); int main () { return curl_global_init (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_lib_curl_curl_global_init=yes else ac_cv_lib_curl_curl_global_init=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_curl_curl_global_init" >&5 $as_echo "$ac_cv_lib_curl_curl_global_init" >&6; } if test "x$ac_cv_lib_curl_curl_global_init" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBCURL 1 _ACEOF LIBS="-lcurl $LIBS" else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: libcurl library not found" >&5 $as_echo "$as_me: WARNING: libcurl library not found" >&2;} fi $as_echo "#define HAVE_LIBCURL 1" >>confdefs.h fi CPPFLAGS__cache="$CFLAGS" CPPFLAGS="$LIBCURL_CFLAGS $CPPFLAGS" LIBS___cache="$LIBS" LIBS="$LIBCURL_LIBS $LIBS" ac_fn_cxx_check_header_mongrel "$LINENO" "curl/curl.h" "ac_cv_header_curl_curl_h" "$ac_includes_default" if test 
"x$ac_cv_header_curl_curl_h" = xyes; then : local_libcurl="yes" $as_echo "#define HAVE_CURL_CURL_H 1" >>confdefs.h else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Cannot find libcurl.h header file" >&5 $as_echo "$as_me: WARNING: Cannot find libcurl.h header file" >&2;} local_libcurl="no" fi if test "$local_libcurl" = "yes" ; then cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #if HAVE_CURL_CURL_H #include #endif } int main () { (void) curl_global_init(CURL_GLOBAL_ALL); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : $as_echo "#define LIBCURL_AVAILABLE 1" >>confdefs.h else local_libcurl="no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: remote repository support not available" >&5 $as_echo "$as_me: WARNING: remote repository support not available" >&2;} fi CPPFLAGS="$CPPFLAGS___cache" LIBS="$LIBS___cache" unset CPPFLAGS___cache unset LIBS___cache fi # Check whether --enable-fadvise was given. if test "${enable_fadvise+set}" = set; then : enableval=$enable_fadvise; else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for posix_fadvise support" >&5 $as_echo_n "checking for posix_fadvise support... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #if HAVE_FCNTL_H #include #endif } int main () { (void)posix_fadvise(0,0,1,POSIX_FADV_NORMAL); ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : $as_echo "#define HAVE_POSIX_FADVISE 1" >>confdefs.h local_posix_fadvise="yes" { $as_echo "$as_me:${as_lineno-$LINENO}: result: available" >&5 $as_echo "available" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: not available" >&5 $as_echo "not available" >&6; } fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for getopt() availability" >&5 $as_echo_n "checking for getopt() availability... 
" >&6; }; cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #if HAVE_STDIO_H #include #endif #if HAVE_GETOPT_H #include #else #if HAVE_UNISTD_H #include #endif #endif } int main () { getopt(0, 0, 0); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: result: ok" >&5 $as_echo "ok" >&6; } else as_fn_error $? "absent but required" "$LINENO" 5 fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext { $as_echo "$as_me:${as_lineno-$LINENO}: checking for getopt_long() availability" >&5 $as_echo_n "checking for getopt_long() availability... " >&6; }; cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #if HAVE_STDIO_H #include #endif #if HAVE_GETOPT_H #include #else #if HAVE_UNISTD_H #include #endif #endif } int main () { getopt_long(0, 0, 0, 0, 0); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : local_have_getopt_long="yes" $as_echo "#define HAVE_GETOPT_LONG 1" >>confdefs.h { $as_echo "$as_me:${as_lineno-$LINENO}: result: available" >&5 $as_echo "available" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: NOT AVAILABLE" >&5 $as_echo "NOT AVAILABLE" >&6; } fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext # Check whether --enable-examples was given. if test "${enable_examples+set}" = set; then : enableval=$enable_examples; examples="yes" else examples="false" fi # Check whether --enable-os-bits was given. if test "${enable_os_bits+set}" = set; then : enableval=$enable_os_bits; cat >>confdefs.h <<_ACEOF #define OS_BITS $enableval _ACEOF else ac_fn_cxx_check_header_mongrel "$LINENO" "inttypes.h" "ac_cv_header_inttypes_h" "$ac_includes_default" if test "x$ac_cv_header_inttypes_h" = xyes; then : cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ extern "C" { #include } int main () { uint16_t a = 0; uint32_t b = 0; uint64_t c = 0; int16_t d = 0; int32_t e = 0; int64_t f = 0; return a+b+c+d+e+f; ; return 0; } _ACEOF if ac_fn_cxx_try_compile "$LINENO"; then : $as_echo "#define HAVE_INTTYPES_H 1" >>confdefs.h else as_fn_error $? "Cannot find *int*_t type declarations in headerfile, --enable-os-bits=... option must be used" "$LINENO" 5 fi rm -f core conftest.err conftest.$ac_objext conftest.$ac_ext else as_fn_error $? "Cannot find inttypes.h headerfile, --enable-os-bits=... option must be used" "$LINENO" 5 fi fi # Check whether --enable-mode was given. if test "${enable_mode+set}" = set; then : enableval=$enable_mode; build_mode=$enableval if test "$build_mode" != "32" -a "$build_mode" != "64" -a "$build_mode" != "infinint" ; then as_fn_error $? "Invalid argument given to --enable-mode option" "$LINENO" 5 fi else build_mode=64 fi # Check whether --enable-furtive-read was given. if test "${enable_furtive_read+set}" = set; then : enableval=$enable_furtive_read; local_furtive_read_mode="no" { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Furtive read mode disabled" >&5 $as_echo "$as_me: WARNING: Furtive read mode disabled" >&2;} else { $as_echo "$as_me:${as_lineno-$LINENO}: checking furtive read mode availability" >&5 $as_echo_n "checking furtive read mode availability... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #if HAVE_SYS_TYPE_H #include #endif #if HAVE_SYS_STAT_H #include #endif #if HAVE_FCNTL_H #include #endif #if HAVE_DIRENT_H #include #endif } int main () { int x = O_NOATIME; int fd = open("/",O_RDONLY|O_NOATIME); #if HAVE_FDOPENDIR (void)fdopendir(fd); #else syntaxically incorrect statement here to force compilation to fail! 
#endif ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : $as_echo "#define FURTIVE_READ_MODE_AVAILABLE 1" >>confdefs.h { $as_echo "$as_me:${as_lineno-$LINENO}: result: available" >&5 $as_echo "available" >&6; } local_furtive_read_mode="yes" else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no available" >&5 $as_echo "no available" >&6; } local_furtive_read_mode="no" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi # Check whether --enable-debug was given. if test "${enable_debug+set}" = set; then : enableval=$enable_debug; CXXFLAGS="-g -Wall" CFLAGS="-g -Wall" LDFLAGS="-g -Wall" debug_static="yes" $as_echo "#define LIBDAR_NO_OPTIMIZATION 1" >>confdefs.h else debug_static="no" fi # Check whether --enable-pedantic was given. if test "${enable_pedantic+set}" = set; then : enableval=$enable_pedantic; CXXFLAGS="$CXXFLAGS -pedantic -Wno-long-long" fi # Check whether --enable-build-html was given. if test "${enable_build_html+set}" = set; then : enableval=$enable_build_html; doxygen="no" groff="no" else # Extract the first word of "doxygen", so it can be a program name with args. set dummy doxygen; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_doxygen+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$doxygen"; then ac_cv_prog_doxygen="$doxygen" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_doxygen="yes" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS test -z "$ac_cv_prog_doxygen" && ac_cv_prog_doxygen="no" fi fi doxygen=$ac_cv_prog_doxygen if test -n "$doxygen"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $doxygen" >&5 $as_echo "$doxygen" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for doxygen version" >&5 $as_echo_n "checking for doxygen version... " >&6; } if test "$doxygen" = "yes" ; then n1=`doxygen --version | cut -d '.' -f 1` n2=`doxygen --version | cut -d '.' -f 2` if test $n1 -gt 1 -o $n2 -ge 3 ; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: >= 1.3" >&5 $as_echo " >= 1.3" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: too old (< 1.3) ignoring doxygen" >&5 $as_echo " too old (< 1.3) ignoring doxygen" >&6; } doxygen="no" fi fi # Extract the first word of "dot", so it can be a program name with args. set dummy dot; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_dot+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$dot"; then ac_cv_prog_dot="$dot" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_dot="YES" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS test -z "$ac_cv_prog_dot" && ac_cv_prog_dot="NO" fi fi dot=$ac_cv_prog_dot if test -n "$dot"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $dot" >&5 $as_echo "$dot" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi # Extract the first word of "man", so it can be a program name with args. set dummy man; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_tmp+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$tmp"; then ac_cv_prog_tmp="$tmp" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_tmp="yes" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS test -z "$ac_cv_prog_tmp" && ac_cv_prog_tmp="no" fi fi tmp=$ac_cv_prog_tmp if test -n "$tmp"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $tmp" >&5 $as_echo "$tmp" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "$tmp" = "yes" ; then # Extract the first word of "groff", so it can be a program name with args. set dummy groff; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_groff+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$groff"; then ac_cv_prog_groff="$groff" # Let the user override the test. 
else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_groff="yes" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS test -z "$ac_cv_prog_groff" && ac_cv_prog_groff="no" fi fi groff=$ac_cv_prog_groff if test -n "$groff"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $groff" >&5 $as_echo "$groff" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi else groff = "no"; fi fi # Check whether --enable-upx was given. if test "${enable_upx+set}" = set; then : enableval=$enable_upx; { $as_echo "$as_me:${as_lineno-$LINENO}: ignoring UPX" >&5 $as_echo "$as_me: ignoring UPX" >&6;} upx="no" else # Extract the first word of "upx", so it can be a program name with args. set dummy upx; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_prog_upx+:} false; then : $as_echo_n "(cached) " >&6 else if test -n "$upx"; then ac_cv_prog_upx="$upx" # Let the user override the test. else as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_prog_upx="yes" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS test -z "$ac_cv_prog_upx" && ac_cv_prog_upx="no" fi fi upx=$ac_cv_prog_upx if test -n "$upx"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $upx" >&5 $as_echo "$upx" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi fi # Check whether --enable-fast-dir was given. 
if test "${enable_fast_dir+set}" = set; then : enableval=$enable_fast_dir; else $as_echo "#define LIBDAR_FAST_DIR 1" >>confdefs.h local_fast_dir="yes" fi # Check whether --with-gpgme-prefix was given. if test "${with_gpgme_prefix+set}" = set; then : withval=$with_gpgme_prefix; gpgme_config_prefix="$withval" else gpgme_config_prefix="" fi if test x"${GPGME_CONFIG}" = x ; then if test x"${gpgme_config_prefix}" != x ; then GPGME_CONFIG="${gpgme_config_prefix}/bin/gpgme-config" else case "${SYSROOT}" in /*) if test -x "${SYSROOT}/bin/gpgme-config" ; then GPGME_CONFIG="${SYSROOT}/bin/gpgme-config" fi ;; '') ;; *) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Ignoring \$SYSROOT as it is not an absolute path." >&5 $as_echo "$as_me: WARNING: Ignoring \$SYSROOT as it is not an absolute path." >&2;} ;; esac fi fi # Extract the first word of "gpgme-config", so it can be a program name with args. set dummy gpgme-config; ac_word=$2 { $as_echo "$as_me:${as_lineno-$LINENO}: checking for $ac_word" >&5 $as_echo_n "checking for $ac_word... " >&6; } if ${ac_cv_path_GPGME_CONFIG+:} false; then : $as_echo_n "(cached) " >&6 else case $GPGME_CONFIG in [\\/]* | ?:[\\/]*) ac_cv_path_GPGME_CONFIG="$GPGME_CONFIG" # Let the user override the test with a path. ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. 
for ac_exec_ext in '' $ac_executable_extensions; do if as_fn_executable_p "$as_dir/$ac_word$ac_exec_ext"; then ac_cv_path_GPGME_CONFIG="$as_dir/$ac_word$ac_exec_ext" $as_echo "$as_me:${as_lineno-$LINENO}: found $as_dir/$ac_word$ac_exec_ext" >&5 break 2 fi done done IFS=$as_save_IFS test -z "$ac_cv_path_GPGME_CONFIG" && ac_cv_path_GPGME_CONFIG="no" ;; esac fi GPGME_CONFIG=$ac_cv_path_GPGME_CONFIG if test -n "$GPGME_CONFIG"; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: $GPGME_CONFIG" >&5 $as_echo "$GPGME_CONFIG" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi if test "$GPGME_CONFIG" != "no" ; then gpgme_version=`$GPGME_CONFIG --version` fi gpgme_version_major=`echo $gpgme_version | \ sed 's/\([0-9]*\)\.\([0-9]*\)\.\([0-9]*\).*/\1/'` gpgme_version_minor=`echo $gpgme_version | \ sed 's/\([0-9]*\)\.\([0-9]*\)\.\([0-9]*\).*/\2/'` gpgme_version_micro=`echo $gpgme_version | \ sed 's/\([0-9]*\)\.\([0-9]*\)\.\([0-9]*\).*/\3/'` # Check whether --enable-gpgme-linking was given. if test "${enable_gpgme_linking+set}" = set; then : enableval=$enable_gpgme_linking; { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: asymetrical encryption support has been disabled by user" >&5 $as_echo "$as_me: WARNING: asymetrical encryption support has been disabled by user" >&2;} local_gpgme="no" else if test $local_crypto != no ; then gpgme_min_version="1.2.0" cat >>confdefs.h <<_ACEOF #define GPGME_MIN_VERSION "$gpgme_min_version" _ACEOF tmp=$gpgme_min_version if echo "$tmp" | grep ':' >/dev/null 2>/dev/null ; then req_gpgme_api=`echo "$tmp" | sed 's/\(.*\):\(.*\)/\1/'` min_gpgme_version=`echo "$tmp" | sed 's/\(.*\):\(.*\)/\2/'` else req_gpgme_api=0 min_gpgme_version="$tmp" fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for GPGME - version >= $min_gpgme_version" >&5 $as_echo_n "checking for GPGME - version >= $min_gpgme_version... 
" >&6; } ok=no if test "$GPGME_CONFIG" != "no" ; then req_major=`echo $min_gpgme_version | \ sed 's/\([0-9]*\)\.\([0-9]*\)\.\([0-9]*\)/\1/'` req_minor=`echo $min_gpgme_version | \ sed 's/\([0-9]*\)\.\([0-9]*\)\.\([0-9]*\)/\2/'` req_micro=`echo $min_gpgme_version | \ sed 's/\([0-9]*\)\.\([0-9]*\)\.\([0-9]*\)/\3/'` if test "$gpgme_version_major" -gt "$req_major"; then ok=yes else if test "$gpgme_version_major" -eq "$req_major"; then if test "$gpgme_version_minor" -gt "$req_minor"; then ok=yes else if test "$gpgme_version_minor" -eq "$req_minor"; then if test "$gpgme_version_micro" -ge "$req_micro"; then ok=yes fi fi fi fi fi fi if test $ok = yes; then # If we have a recent GPGME, we should also check that the # API is compatible. if test "$req_gpgme_api" -gt 0 ; then tmp=`$GPGME_CONFIG --api-version 2>/dev/null || echo 0` if test "$tmp" -gt 0 ; then if test "$req_gpgme_api" -ne "$tmp" ; then ok=no fi fi fi fi if test $ok = yes; then GPGME_CFLAGS=`$GPGME_CONFIG --cflags` GPGME_LIBS=`$GPGME_CONFIG --libs` { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } CPPFLAGS___cache="$CPPFLAGS" CPPFLAGS="$GPGME_CFLAGS $CPPFLAGS" LIBS___cache="$LIBS" LIBS="$GPGME_LIBS $LIBS" for ac_header in gpgme.h do : ac_fn_cxx_check_header_mongrel "$LINENO" "gpgme.h" "ac_cv_header_gpgme_h" "$ac_includes_default" if test "x$ac_cv_header_gpgme_h" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_GPGME_H 1 _ACEOF fi done { $as_echo "$as_me:${as_lineno-$LINENO}: checking for gpgme_signers_add in -lgpgme" >&5 $as_echo_n "checking for gpgme_signers_add in -lgpgme... " >&6; } if ${ac_cv_lib_gpgme_gpgme_signers_add+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lgpgme $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char gpgme_signers_add (); int main () { return gpgme_signers_add (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_lib_gpgme_gpgme_signers_add=yes else ac_cv_lib_gpgme_gpgme_signers_add=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_gpgme_gpgme_signers_add" >&5 $as_echo "$ac_cv_lib_gpgme_gpgme_signers_add" >&6; } if test "x$ac_cv_lib_gpgme_gpgme_signers_add" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBGPGME 1 _ACEOF LIBS="-lgpgme $LIBS" fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for libgpgme usability" >&5 $as_echo_n "checking for libgpgme usability... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ #if HAVE_GPGME_H #include #endif int main () { gpgme_ctx_t context; gpgme_error_t err = gpgme_new(&context); gpgme_release(context); return err; ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : local_gpgme="yes" $as_echo "#define GPGME_SUPPORT 1" >>confdefs.h { $as_echo "$as_me:${as_lineno-$LINENO}: result: ok" >&5 $as_echo "ok" >&6; } else local_gpgme="no" { $as_echo "$as_me:${as_lineno-$LINENO}: result: not usable! See config.log for details" >&5 $as_echo "not usable! See config.log for details" >&6; } fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext CPPFLAGS="$CPPFLAGS___cache" unset CPPFLAGS___cache LIBS="$LIBS___cache" unset LIBS___cache gpgme_config_host=`$GPGME_CONFIG --host 2>/dev/null || echo none` if test x"$gpgme_config_host" != xnone ; then if test x"$gpgme_config_host" != x"$host" ; then { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: *** *** The config script $GPGME_CONFIG was *** built for $gpgme_config_host and thus may not match the *** used host $host. 
*** You may want to use the configure option --with-gpgme-prefix *** to specify a matching config script or use \$SYSROOT. ***" >&5 $as_echo "$as_me: WARNING: *** *** The config script $GPGME_CONFIG was *** built for $gpgme_config_host and thus may not match the *** used host $host. *** You may want to use the configure option --with-gpgme-prefix *** to specify a matching config script or use \$SYSROOT. ***" >&2;} gpg_config_script_warn="$gpg_config_script_warn gpgme" fi fi else GPGME_CFLAGS="" GPGME_LIBS="" { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Public key support (GPGME linking) requires version greater than $gpgme_min_version" >&5 $as_echo "$as_me: WARNING: Public key support (GPGME linking) requires version greater than $gpgme_min_version" >&2;} fi else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Public key support (GPGME linking) requires libgcrypt (strong encryption support)" >&5 $as_echo "$as_me: WARNING: Public key support (GPGME linking) requires libgcrypt (strong encryption support)" >&2;} fi fi # Check whether --enable-thread-safe was given. if test "${enable_thread_safe+set}" = set; then : enableval=$enable_thread_safe; { $as_echo "$as_me:${as_lineno-$LINENO}: thread-safe support disabled" >&5 $as_echo "$as_me: thread-safe support disabled" >&6;} else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for pthread_mutex_init in -lpthread" >&5 $as_echo_n "checking for pthread_mutex_init in -lpthread... " >&6; } if ${ac_cv_lib_pthread_pthread_mutex_init+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lpthread $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char pthread_mutex_init (); int main () { return pthread_mutex_init (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_lib_pthread_pthread_mutex_init=yes else ac_cv_lib_pthread_pthread_mutex_init=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_pthread_pthread_mutex_init" >&5 $as_echo "$ac_cv_lib_pthread_pthread_mutex_init" >&6; } if test "x$ac_cv_lib_pthread_pthread_mutex_init" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBPTHREAD 1 _ACEOF LIBS="-lpthread $LIBS" fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for POSIX mutex" >&5 $as_echo_n "checking for POSIX mutex... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #if HAVE_PTHREAD_H #include #endif } int main () { pthread_mutex_t mutex; pthread_mutex_init(&mutex, (const pthread_mutexattr_t*)0); pthread_mutex_lock(&mutex); pthread_mutex_unlock(&mutex); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : $as_echo "#define MUTEX_WORKS 1" >>confdefs.h local_mutex_works="yes" { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext { $as_echo "$as_me:${as_lineno-$LINENO}: checking for reentrant stdlib calls" >&5 $as_echo_n "checking for reentrant stdlib calls... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ extern "C" { #if HAVE_TIME_H #include #endif #if HAVE_SYS_TYPE_H #include #endif #if HAVE_GRP_H #include #endif #if HAVE_PWD_H #include #endif #if HAVE_DIRENT_H #include #endif } int main () { #if HAVE_CTIME_R char *val1 = ctime_r(0, 0); #else error(); // should not compile as expected #endif #if HAVE_GETGRNAM_R int val2 = getgrnam_r(0, 0, 0, 0, 0); #else error(); // should not compile as expected #endif #if HAVE_GETPWNAM_R int val3 = getpwnam_r(0, 0, 0, 0, 0); #else error(); // should not compile as expected #endif #if HAVE_LOCALTIME_R struct tm *val4 = localtime_r(0, 0); #else error(); // should not compile as expected #endif #if HAVE_READDIR_R int val5 = readdir_r(0, 0, 0); #else error(); // should not compile as expected #endif ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: result: all could be found" >&5 $as_echo "all could be found" >&6; } else $as_echo "#define MISSING_REENTRANT_LIBCALL 1" >>confdefs.h local_missing_reentrant_libcal="yes" { $as_echo "$as_me:${as_lineno-$LINENO}: result: some are missing" >&5 $as_echo "some are missing" >&6; } fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi # Check whether --enable-execinfo was given. if test "${enable_execinfo+set}" = set; then : enableval=$enable_execinfo; { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: ignoring execinfo even if available" >&5 $as_echo "$as_me: WARNING: ignoring execinfo even if available" >&2;} else for ac_header in execinfo.h do : ac_fn_cxx_check_header_mongrel "$LINENO" "execinfo.h" "ac_cv_header_execinfo_h" "$ac_includes_default" if test "x$ac_cv_header_execinfo_h" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_EXECINFO_H 1 _ACEOF fi done { $as_echo "$as_me:${as_lineno-$LINENO}: checking for backtrace in -lexecinfo" >&5 $as_echo_n "checking for backtrace in -lexecinfo... 
" >&6; } if ${ac_cv_lib_execinfo_backtrace+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lexecinfo $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. */ #ifdef __cplusplus extern "C" #endif char backtrace (); int main () { return backtrace (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_lib_execinfo_backtrace=yes else ac_cv_lib_execinfo_backtrace=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_execinfo_backtrace" >&5 $as_echo "$ac_cv_lib_execinfo_backtrace" >&6; } if test "x$ac_cv_lib_execinfo_backtrace" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBEXECINFO 1 _ACEOF LIBS="-lexecinfo $LIBS" fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for backtrace() usability" >&5 $as_echo_n "checking for backtrace() usability... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ extern "C" { #if HAVE_EXECINFO_H #include #endif } int main () { const int buf_size = 20; void *buffer[buf_size]; int x = backtrace(buffer, buf_size); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : $as_echo "#define BACKTRACE_AVAILABLE 1" >>confdefs.h { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } else { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext fi # Check whether --enable-profiling was given. if test "${enable_profiling+set}" = set; then : enableval=$enable_profiling; profiling="yes" fi # Check whether --enable-debug-memory was given. 
if test "${enable_debug_memory+set}" = set; then : enableval=$enable_debug_memory; $as_echo "#define LIBDAR_DEBUG_MEMORY 1" >>confdefs.h fi # Check whether --enable-dar-static was given. if test "${enable_dar_static+set}" = set; then : enableval=$enable_dar_static; build_static="no" else build_static="yes" fi # Check whether --enable-threadar was given. if test "${enable_threadar+set}" = set; then : enableval=$enable_threadar; else if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"libthreadar\""; } >&5 ($PKG_CONFIG --exists --print-errors "libthreadar") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_failed=no { $as_echo "$as_me:${as_lineno-$LINENO}: checking for LIBTHREADAR" >&5 $as_echo_n "checking for LIBTHREADAR... " >&6; } if test -n "$LIBTHREADAR_CFLAGS"; then pkg_cv_LIBTHREADAR_CFLAGS="$LIBTHREADAR_CFLAGS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"libthreadar\""; } >&5 ($PKG_CONFIG --exists --print-errors "libthreadar") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_LIBTHREADAR_CFLAGS=`$PKG_CONFIG --cflags "libthreadar" 2>/dev/null` test "x$?" != "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test -n "$LIBTHREADAR_LIBS"; then pkg_cv_LIBTHREADAR_LIBS="$LIBTHREADAR_LIBS" elif test -n "$PKG_CONFIG"; then if test -n "$PKG_CONFIG" && \ { { $as_echo "$as_me:${as_lineno-$LINENO}: \$PKG_CONFIG --exists --print-errors \"libthreadar\""; } >&5 ($PKG_CONFIG --exists --print-errors "libthreadar") 2>&5 ac_status=$? $as_echo "$as_me:${as_lineno-$LINENO}: \$? = $ac_status" >&5 test $ac_status = 0; }; then pkg_cv_LIBTHREADAR_LIBS=`$PKG_CONFIG --libs "libthreadar" 2>/dev/null` test "x$?" 
!= "x0" && pkg_failed=yes else pkg_failed=yes fi else pkg_failed=untried fi if test $pkg_failed = yes; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } if $PKG_CONFIG --atleast-pkgconfig-version 0.20; then _pkg_short_errors_supported=yes else _pkg_short_errors_supported=no fi if test $_pkg_short_errors_supported = yes; then LIBTHREADAR_PKG_ERRORS=`$PKG_CONFIG --short-errors --print-errors --cflags --libs "libthreadar" 2>&1` else LIBTHREADAR_PKG_ERRORS=`$PKG_CONFIG --print-errors --cflags --libs "libthreadar" 2>&1` fi # Put the nasty error message in config.log where it belongs echo "$LIBTHREADAR_PKG_ERRORS" >&5 as_fn_error $? "libthreadar not found, but reported to exist !?!" "$LINENO" 5 elif test $pkg_failed = untried; then { $as_echo "$as_me:${as_lineno-$LINENO}: result: no" >&5 $as_echo "no" >&6; } as_fn_error $? "libthreadar not found, but reported to exist !?!" "$LINENO" 5 else LIBTHREADAR_CFLAGS=$pkg_cv_LIBTHREADAR_CFLAGS LIBTHREADAR_LIBS=$pkg_cv_LIBTHREADAR_LIBS { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes" >&5 $as_echo "yes" >&6; } fi else { $as_echo "$as_me:${as_lineno-$LINENO}: checking for for_autoconf in -lthreadar" >&5 $as_echo_n "checking for for_autoconf in -lthreadar... " >&6; } if ${ac_cv_lib_threadar_for_autoconf+:} false; then : $as_echo_n "(cached) " >&6 else ac_check_lib_save_LIBS=$LIBS LIBS="-lthreadar $LIBS" cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. */ /* Override any GCC internal prototype to avoid an error. Use char because int might match the return type of a GCC builtin and then its argument prototype would still apply. 
*/ #ifdef __cplusplus extern "C" #endif char for_autoconf (); int main () { return for_autoconf (); ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : ac_cv_lib_threadar_for_autoconf=yes else ac_cv_lib_threadar_for_autoconf=no fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext LIBS=$ac_check_lib_save_LIBS fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: $ac_cv_lib_threadar_for_autoconf" >&5 $as_echo "$ac_cv_lib_threadar_for_autoconf" >&6; } if test "x$ac_cv_lib_threadar_for_autoconf" = xyes; then : cat >>confdefs.h <<_ACEOF #define HAVE_LIBTHREADAR 1 _ACEOF LIBS="-lthreadar $LIBS" fi fi CPPFLAGS__cache="$CPPFLAGS" CPPFLAGS="$LIBTHREADAR_CFLAGS $CPPFLAGS" CXXFLAGS__cache="$CXXFLAGS" CXXFLAGS="$LIBTHREADAR_CFLAGS $CXXFLAGS" LIBS__cache="$LIBS" LIBS="$LIBTHREADAR_LIBS $LIBS" ac_fn_cxx_check_header_mongrel "$LINENO" "libthreadar/libthreadar.hpp" "ac_cv_header_libthreadar_libthreadar_hpp" "$ac_includes_default" if test "x$ac_cv_header_libthreadar_libthreadar_hpp" = xyes; then : $as_echo "#define HAVE_LIBTHREADAR_LIBTHREADAR_HPP 1" >>confdefs.h else { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: Cannot find libthreadar.h header file" >&5 $as_echo "$as_me: WARNING: Cannot find libthreadar.h header file" >&2;} fi { $as_echo "$as_me:${as_lineno-$LINENO}: checking for libthreadar() operationability" >&5 $as_echo_n "checking for libthreadar() operationability... " >&6; } if test "$cross_compiling" = yes; then : { { $as_echo "$as_me:${as_lineno-$LINENO}: error: in \`$ac_pwd':" >&5 $as_echo "$as_me: error: in \`$ac_pwd':" >&2;} as_fn_error $? "cannot run test program while cross compiling See \`config.log' for more details" "$LINENO" 5; } else cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ #if HAVE_LIBTHREADAR_LIBTHREADAR_HPP #include #endif #include int main () { class mythread: public libthreadar::thread { public: mythread(int x): myx(x) {}; int getx() const { return myx; }; protected: virtual void inherited_run() { --myx; }; private: int myx; }; mythread toto(10); toto.run(); toto.join(); toto.getx(); std::cout << "ok" << std::endl; ; return 0; } _ACEOF if ac_fn_cxx_try_run "$LINENO"; then : local_threadar=yes $as_echo "#define LIBTHREADAR_AVAILABLE 1" >>confdefs.h { $as_echo "$as_me:${as_lineno-$LINENO}: result: fine" >&5 $as_echo "fine" >&6; } else local_threadar=no { $as_echo "$as_me:${as_lineno-$LINENO}: result: wrong" >&5 $as_echo "wrong" >&6; } fi rm -f core *.core core.conftest.* gmon.out bb.out conftest$ac_exeext \ conftest.$ac_objext conftest.beam conftest.$ac_ext fi CPPFLAGS="$CPPFLAGS__cache" unset CPPFLAGS__cache CXXFLAGS="$CXXFLAGS__cache" unset CXXFLAGS__cache LIBS="$LIBS__cache" unset LIBS__cache fi CPPFLAGS__cache="$CPPFLAGS" CPPFLAGS="$CPPFLAGS $GPGME_CFLAGS $LIBCURL_CFLAGS $LIBTHREADAR_CFLAGS $CXXSTDFLAGS" CXXFLAGS__cache="$CXXFLAGS" CXXFLAGS="$CXXFLAGS" LDFLAGS__cache="$LDFLAGS" LDFLAGS="$LDFLAGS -static" LIBS__cache="$LIBS" LIBS="$LIBS $GPGME_LIBS $LIBCURL_LIBS $LIBTHREADAR_LIBS $CXXSTDFLAGS" { $as_echo "$as_me:${as_lineno-$LINENO}: checking static linking" >&5 $as_echo_n "checking static linking... " >&6; } cat confdefs.h - <<_ACEOF >conftest.$ac_ext /* end confdefs.h. 
*/ extern "C" { #if HAVE_STDIO_H #include #endif #if HAVE_EXECINFO_H #include #endif #if HAVE_STDLIB_H #include #endif #if HAVE_PTHREAD_H #include #endif #if HAVE_LIBRSYNC_H #include #endif #if HAVE_PTHREAD_H #include #endif #if HAVE_LIBRSYNC_H #include #include #endif #if HAVE_ZLIB_H #include #endif #if HAVE_BZLIB_H #include #endif #if HAVE_LZO_LZO1X_H #include #endif #if HAVE_LZMA_H #include #endif #if HAVE_GCRYPT_H #include #endif #if HAVE_CURL_CURL_H #include #endif #if HAVE_GPGME_H #include #endif } int main () { #if BACKTRACE_AVAILABLE const int buf_size = 20; void *buffer[buf_size]; int size = backtrace(buffer, buf_size); char **symbols = backtrace_symbols(buffer, size); if(symbols != 0) free(symbols); printf("testing execinfo info in static linked mode..."); #endif #if MUTEX_WORKS if(1) { pthread_mutex_t test; if(pthread_mutex_init(&test, NULL) == 0) { if(pthread_mutex_lock(&test) == 0) pthread_mutex_unlock(&test); } pthread_mutex_destroy(&test); printf("testing mutex availability in static linked mode..."); } #endif #if LIBRSYNC_AVAILABLE if(1) { rs_result err = RS_DONE; (void) rs_strerror(err); printf("testing librsync availability in static linked mode..."); } #endif #if LIBZ_AVAILABLE if(1) { z_stream *ptr = (z_stream *)0; deflate(ptr, 0); printf("testing libz availability in static linked mode..."); } #endif #if LIBBZ2_AVAILABLE if(1) { bz_stream *ptr = (bz_stream *)0; BZ2_bzCompress(ptr, 0); printf("testing libbz2 availability in static linked mode..."); } #endif #if LIBLZO2_AVAILABLE if(1) { int x; printf("testing liblzo2 availability in static linked mode..."); x = lzo1x_1_compress(0, 0, 0, 0, 0); } #endif #if LIBLZMA_AVAILABLE if(1) { lzma_stream ptr = LZMA_STREAM_INIT; lzma_ret tmp = lzma_easy_encoder(&ptr, 2, LZMA_CHECK_CRC32); printf("testing libxz/lzma availability in static linked mode..."); } #endif #if CRYPTO_AVAILABLE printf("testing gcrypt availability in static linked mode..."); if(!gcry_check_version(MIN_VERSION_GCRYPT)) { 
printf("ligcrypt version too low"); exit(1); } else exit(0); #endif #if LIBCURL_AVAILABLE printf("testing librsync availability in static linked mode..."); (void) curl_global_init(CURL_GLOBAL_ALL); #endif #if GPGME_SUPPORT if(1) { gpgme_ctx_t context; gpgme_error_t err = gpgme_new(&context); gpgme_release(context); } #endif return 0; ; return 0; } _ACEOF if ac_fn_cxx_try_link "$LINENO"; then : { $as_echo "$as_me:${as_lineno-$LINENO}: result: yes, perfect!" >&5 $as_echo "yes, perfect!" >&6; } static_pb="no" else { $as_echo "$as_me:${as_lineno-$LINENO}: result: failed" >&5 $as_echo "failed" >&6; } static_pb="yes" fi rm -f core conftest.err conftest.$ac_objext \ conftest$ac_exeext conftest.$ac_ext CPPFLAGS="$CPPFLAGS__cache" unset CPPFLAGS__cache CXXFLAGS="$CXXFLAGS__cache" unset CXXFLAGS__cache LDFLAGS="$LDFLAGS__cache" unset LDFLAGS__cache LIBS="$LIBS__cache" unset LIBS__cache # Check whether --enable-python-binding was given. if test "${enable_python_binding+set}" = set; then : enableval=$enable_python_binding; local_python="no" else if test "$debug_static" != "yes" ; then { $as_echo "$as_me:${as_lineno-$LINENO}: checking for python binding" >&5 $as_echo_n "checking for python binding... 
" >&6; } pyext="python3-config --extension-suffix" if $pyext 1> /dev/null 2> /dev/null ; then PYEXT="`$pyext`" else local_python="no" fi pyflags="python3 -m pybind11 --includes" if test "$local_python" != "no" && $pyflags 1> /dev/null 2> /dev/null ; then PYFLAGS="`$pyflags`" local_python="yes" { $as_echo "$as_me:${as_lineno-$LINENO}: result: ok" >&5 $as_echo "ok" >&6; } PYEXT=$PYEXT PYFLAGS=$PYFLAGS else local_python="no" { $as_echo "$as_me:${as_lineno-$LINENO}: result: failed" >&5 $as_echo "failed" >&6; } fi else { $as_echo "$as_me:${as_lineno-$LINENO}: result: Cannot build python binding when --enable-debug is set" >&5 $as_echo "Cannot build python binding when --enable-debug is set" >&6; } fi fi if test $examples = "yes"; then MAKE_ALL_DIR_TRUE= MAKE_ALL_DIR_FALSE='#' else MAKE_ALL_DIR_TRUE='#' MAKE_ALL_DIR_FALSE= fi if test $build_static = "yes" -a $static_pb = "no"; then BUILD_DAR_STATIC_TRUE= BUILD_DAR_STATIC_FALSE='#' else BUILD_DAR_STATIC_TRUE='#' BUILD_DAR_STATIC_FALSE= fi if test $debug_static = "yes" -a $static_pb = "no"; then DEBUG_STATIC_TRUE= DEBUG_STATIC_FALSE='#' else DEBUG_STATIC_TRUE='#' DEBUG_STATIC_FALSE= fi if test "$build_mode" = "32"; then BUILD_MODE32_TRUE= BUILD_MODE32_FALSE='#' else BUILD_MODE32_TRUE='#' BUILD_MODE32_FALSE= fi if test "$build_mode" = "64" -o -z "$build_mode"; then BUILD_MODE64_TRUE= BUILD_MODE64_FALSE='#' else BUILD_MODE64_TRUE='#' BUILD_MODE64_FALSE= fi if test "$upx" = "yes"; then USE_UPX_TRUE= USE_UPX_FALSE='#' else USE_UPX_TRUE='#' USE_UPX_FALSE= fi if test "$doxygen" = "yes"; then USE_DOXYGEN_TRUE= USE_DOXYGEN_FALSE='#' else USE_DOXYGEN_TRUE='#' USE_DOXYGEN_FALSE= fi if test "$groff" = "yes"; then USE_GROFF_TRUE= USE_GROFF_FALSE='#' else USE_GROFF_TRUE='#' USE_GROFF_FALSE= fi if test "$profiling" = "yes"; then PROFILING_TRUE= PROFILING_FALSE='#' else PROFILING_TRUE='#' PROFILING_FALSE= fi if test "$local_sed" = "bsd"; then BSD_SED_TRUE= BSD_SED_FALSE='#' else BSD_SED_TRUE='#' BSD_SED_FALSE= fi if test 
"$local_threadar" = "yes"; then WITH_LIBTHREADAR_TRUE= WITH_LIBTHREADAR_FALSE='#' else WITH_LIBTHREADAR_TRUE='#' WITH_LIBTHREADAR_FALSE= fi if test "$local_python" = "yes"; then PYTHON_BINDING_TRUE= PYTHON_BINDING_FALSE='#' else PYTHON_BINDING_TRUE='#' PYTHON_BINDING_FALSE= fi CXXSTDFLAGS=$CXXSTDFLAGS UPX_PROG=upx DOXYGEN_PROG=doxygen HAS_DOT=$dot # defaults # hack from libtool mailing-list to know from source point of view whether we are compiling for dynamic or static way ac_config_commands="$ac_config_commands hack-libtool" ac_config_files="$ac_config_files Makefile man/Makefile src/Makefile src/libdar/Makefile src/dar_suite/Makefile src/testing/Makefile src/examples/Makefile doc/Makefile doc/samples/Makefile misc/Makefile doc/mini-howto/Makefile src/libdar/libdar.pc.tmpl doc/man/Makefile src/check/Makefile src/python/Makefile po/Makefile.in" cat >confcache <<\_ACEOF # This file is a shell script that caches the results of configure # tests run on this system so they can be shared between configure # scripts and configure runs, see configure's option --config-cache. # It is not useful on other systems. If it contains results you don't # want to keep, you may remove or edit it. # # config.status only pays attention to the cache file if you give it # the --recheck option to rerun configure. # # `ac_cv_env_foo' variables (set or unset) will be overridden when # loading this file, other *unset* `ac_cv_foo' will be assigned the # following values. _ACEOF # The following way of writing the cache mishandles newlines in values, # but we know of no workaround that is simple, portable, and efficient. # So, we kill variables containing newlines. # Ultrix sh set writes to stderr and can't be redirected directly, # and sets the high bit in the cache file unless we assign to the vars. 
( for ac_var in `(set) 2>&1 | sed -n 's/^\([a-zA-Z_][a-zA-Z0-9_]*\)=.*/\1/p'`; do eval ac_val=\$$ac_var case $ac_val in #( *${as_nl}*) case $ac_var in #( *_cv_*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: cache variable $ac_var contains a newline" >&5 $as_echo "$as_me: WARNING: cache variable $ac_var contains a newline" >&2;} ;; esac case $ac_var in #( _ | IFS | as_nl) ;; #( BASH_ARGV | BASH_SOURCE) eval $ac_var= ;; #( *) { eval $ac_var=; unset $ac_var;} ;; esac ;; esac done (set) 2>&1 | case $as_nl`(ac_space=' '; set) 2>&1` in #( *${as_nl}ac_space=\ *) # `set' does not quote correctly, so add quotes: double-quote # substitution turns \\\\ into \\, and sed turns \\ into \. sed -n \ "s/'/'\\\\''/g; s/^\\([_$as_cr_alnum]*_cv_[_$as_cr_alnum]*\\)=\\(.*\\)/\\1='\\2'/p" ;; #( *) # `set' quotes correctly as required by POSIX, so do not add quotes. sed -n "/^[_$as_cr_alnum]*_cv_[_$as_cr_alnum]*=/p" ;; esac | sort ) | sed ' /^ac_cv_env_/b end t clear :clear s/^\([^=]*\)=\(.*[{}].*\)$/test "${\1+set}" = set || &/ t end s/^\([^=]*\)=\(.*\)$/\1=${\1=\2}/ :end' >>confcache if diff "$cache_file" confcache >/dev/null 2>&1; then :; else if test -w "$cache_file"; then if test "x$cache_file" != "x/dev/null"; then { $as_echo "$as_me:${as_lineno-$LINENO}: updating cache $cache_file" >&5 $as_echo "$as_me: updating cache $cache_file" >&6;} if test ! -f "$cache_file" || test -h "$cache_file"; then cat confcache >"$cache_file" else case $cache_file in #( */* | ?:*) mv -f confcache "$cache_file"$$ && mv -f "$cache_file"$$ "$cache_file" ;; #( *) mv -f confcache "$cache_file" ;; esac fi fi else { $as_echo "$as_me:${as_lineno-$LINENO}: not updating unwritable cache $cache_file" >&5 $as_echo "$as_me: not updating unwritable cache $cache_file" >&6;} fi fi rm -f confcache test "x$prefix" = xNONE && prefix=$ac_default_prefix # Let make expand exec_prefix. 
test "x$exec_prefix" = xNONE && exec_prefix='${prefix}' DEFS=-DHAVE_CONFIG_H ac_libobjs= ac_ltlibobjs= U= for ac_i in : $LIBOBJS; do test "x$ac_i" = x: && continue # 1. Remove the extension, and $U if already installed. ac_script='s/\$U\././;s/\.o$//;s/\.obj$//' ac_i=`$as_echo "$ac_i" | sed "$ac_script"` # 2. Prepend LIBOBJDIR. When used with automake>=1.10 LIBOBJDIR # will be set to the directory where LIBOBJS objects are built. as_fn_append ac_libobjs " \${LIBOBJDIR}$ac_i\$U.$ac_objext" as_fn_append ac_ltlibobjs " \${LIBOBJDIR}$ac_i"'$U.lo' done LIBOBJS=$ac_libobjs LTLIBOBJS=$ac_ltlibobjs { $as_echo "$as_me:${as_lineno-$LINENO}: checking that generated files are newer than configure" >&5 $as_echo_n "checking that generated files are newer than configure... " >&6; } if test -n "$am_sleep_pid"; then # Hide warnings about reused PIDs. wait $am_sleep_pid 2>/dev/null fi { $as_echo "$as_me:${as_lineno-$LINENO}: result: done" >&5 $as_echo "done" >&6; } if test -n "$EXEEXT"; then am__EXEEXT_TRUE= am__EXEEXT_FALSE='#' else am__EXEEXT_TRUE='#' am__EXEEXT_FALSE= fi if test -z "${AMDEP_TRUE}" && test -z "${AMDEP_FALSE}"; then as_fn_error $? "conditional \"AMDEP\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${am__fastdepCC_TRUE}" && test -z "${am__fastdepCC_FALSE}"; then as_fn_error $? "conditional \"am__fastdepCC\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${am__fastdepCXX_TRUE}" && test -z "${am__fastdepCXX_FALSE}"; then as_fn_error $? "conditional \"am__fastdepCXX\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${am__fastdepCXX_TRUE}" && test -z "${am__fastdepCXX_FALSE}"; then as_fn_error $? "conditional \"am__fastdepCXX\" was never defined. Usually this means the macro was only invoked conditionally." 
"$LINENO" 5 fi if test -z "${am__fastdepCC_TRUE}" && test -z "${am__fastdepCC_FALSE}"; then as_fn_error $? "conditional \"am__fastdepCC\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${MAKE_ALL_DIR_TRUE}" && test -z "${MAKE_ALL_DIR_FALSE}"; then as_fn_error $? "conditional \"MAKE_ALL_DIR\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${BUILD_DAR_STATIC_TRUE}" && test -z "${BUILD_DAR_STATIC_FALSE}"; then as_fn_error $? "conditional \"BUILD_DAR_STATIC\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${DEBUG_STATIC_TRUE}" && test -z "${DEBUG_STATIC_FALSE}"; then as_fn_error $? "conditional \"DEBUG_STATIC\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${BUILD_MODE32_TRUE}" && test -z "${BUILD_MODE32_FALSE}"; then as_fn_error $? "conditional \"BUILD_MODE32\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${BUILD_MODE64_TRUE}" && test -z "${BUILD_MODE64_FALSE}"; then as_fn_error $? "conditional \"BUILD_MODE64\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${USE_UPX_TRUE}" && test -z "${USE_UPX_FALSE}"; then as_fn_error $? "conditional \"USE_UPX\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${USE_DOXYGEN_TRUE}" && test -z "${USE_DOXYGEN_FALSE}"; then as_fn_error $? "conditional \"USE_DOXYGEN\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${USE_GROFF_TRUE}" && test -z "${USE_GROFF_FALSE}"; then as_fn_error $? "conditional \"USE_GROFF\" was never defined. Usually this means the macro was only invoked conditionally." 
"$LINENO" 5 fi if test -z "${PROFILING_TRUE}" && test -z "${PROFILING_FALSE}"; then as_fn_error $? "conditional \"PROFILING\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${BSD_SED_TRUE}" && test -z "${BSD_SED_FALSE}"; then as_fn_error $? "conditional \"BSD_SED\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${WITH_LIBTHREADAR_TRUE}" && test -z "${WITH_LIBTHREADAR_FALSE}"; then as_fn_error $? "conditional \"WITH_LIBTHREADAR\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi if test -z "${PYTHON_BINDING_TRUE}" && test -z "${PYTHON_BINDING_FALSE}"; then as_fn_error $? "conditional \"PYTHON_BINDING\" was never defined. Usually this means the macro was only invoked conditionally." "$LINENO" 5 fi : "${CONFIG_STATUS=./config.status}" ac_write_fail=0 ac_clean_files_save=$ac_clean_files ac_clean_files="$ac_clean_files $CONFIG_STATUS" { $as_echo "$as_me:${as_lineno-$LINENO}: creating $CONFIG_STATUS" >&5 $as_echo "$as_me: creating $CONFIG_STATUS" >&6;} as_write_fail=0 cat >$CONFIG_STATUS <<_ASEOF || as_write_fail=1 #! $SHELL # Generated by $as_me. # Run this file to recreate the current configuration. # Compiler output produced by configure, useful for debugging # configure, is in config.log if it exists. debug=false ac_cs_recheck=false ac_cs_silent=false SHELL=\${CONFIG_SHELL-$SHELL} export SHELL _ASEOF cat >>$CONFIG_STATUS <<\_ASEOF || as_write_fail=1 ## -------------------- ## ## M4sh Initialization. ## ## -------------------- ## # Be more Bourne compatible DUALCASE=1; export DUALCASE # for MKS sh if test -n "${ZSH_VERSION+set}" && (emulate sh) >/dev/null 2>&1; then : emulate sh NULLCMD=: # Pre-4.2 versions of Zsh do word splitting on ${1+"$@"}, which # is contrary to our usage. Disable this feature. 
alias -g '${1+"$@"}'='"$@"' setopt NO_GLOB_SUBST else case `(set -o) 2>/dev/null` in #( *posix*) : set -o posix ;; #( *) : ;; esac fi as_nl=' ' export as_nl # Printing a long string crashes Solaris 7 /usr/bin/printf. as_echo='\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\\' as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo as_echo=$as_echo$as_echo$as_echo$as_echo$as_echo$as_echo # Prefer a ksh shell builtin over an external printf program on Solaris, # but without wasting forks for bash or zsh. if test -z "$BASH_VERSION$ZSH_VERSION" \ && (test "X`print -r -- $as_echo`" = "X$as_echo") 2>/dev/null; then as_echo='print -r --' as_echo_n='print -rn --' elif (test "X`printf %s $as_echo`" = "X$as_echo") 2>/dev/null; then as_echo='printf %s\n' as_echo_n='printf %s' else if test "X`(/usr/ucb/echo -n -n $as_echo) 2>/dev/null`" = "X-n $as_echo"; then as_echo_body='eval /usr/ucb/echo -n "$1$as_nl"' as_echo_n='/usr/ucb/echo -n' else as_echo_body='eval expr "X$1" : "X\\(.*\\)"' as_echo_n_body='eval arg=$1; case $arg in #( *"$as_nl"*) expr "X$arg" : "X\\(.*\\)$as_nl"; arg=`expr "X$arg" : ".*$as_nl\\(.*\\)"`;; esac; expr "X$arg" : "X\\(.*\\)" | tr -d "$as_nl" ' export as_echo_n_body as_echo_n='sh -c $as_echo_n_body as_echo' fi export as_echo_body as_echo='sh -c $as_echo_body as_echo' fi # The user is always right. if test "${PATH_SEPARATOR+set}" != set; then PATH_SEPARATOR=: (PATH='/bin;/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 && { (PATH='/bin:/bin'; FPATH=$PATH; sh -c :) >/dev/null 2>&1 || PATH_SEPARATOR=';' } fi # IFS # We need space, tab and new line, in precisely that order. Quoting is # there to prevent editors from complaining about space-tab. # (If _AS_PATH_WALK were called with IFS unset, it would disable word # splitting by setting IFS to empty value.) IFS=" "" $as_nl" # Find who we are. Look in the path if we contain no directory separator. 
as_myself= case $0 in #(( *[\\/]* ) as_myself=$0 ;; *) as_save_IFS=$IFS; IFS=$PATH_SEPARATOR for as_dir in $PATH do IFS=$as_save_IFS test -z "$as_dir" && as_dir=. test -r "$as_dir/$0" && as_myself=$as_dir/$0 && break done IFS=$as_save_IFS ;; esac # We did not find ourselves, most probably we were run as `sh COMMAND' # in which case we are not to be found in the path. if test "x$as_myself" = x; then as_myself=$0 fi if test ! -f "$as_myself"; then $as_echo "$as_myself: error: cannot find myself; rerun with an absolute file name" >&2 exit 1 fi # Unset variables that we do not need and which cause bugs (e.g. in # pre-3.0 UWIN ksh). But do not cause bugs in bash 2.01; the "|| exit 1" # suppresses any "Segmentation fault" message there. '((' could # trigger a bug in pdksh 5.2.14. for as_var in BASH_ENV ENV MAIL MAILPATH do eval test x\${$as_var+set} = xset \ && ( (unset $as_var) || exit 1) >/dev/null 2>&1 && unset $as_var || : done PS1='$ ' PS2='> ' PS4='+ ' # NLS nuisances. LC_ALL=C export LC_ALL LANGUAGE=C export LANGUAGE # CDPATH. (unset CDPATH) >/dev/null 2>&1 && unset CDPATH # as_fn_error STATUS ERROR [LINENO LOG_FD] # ---------------------------------------- # Output "`basename $0`: error: ERROR" to stderr. If LINENO and LOG_FD are # provided, also output the error to LOG_FD, referencing LINENO. Then exit the # script with STATUS, using 1 if that was 0. as_fn_error () { as_status=$1; test $as_status -eq 0 && as_status=1 if test "$4"; then as_lineno=${as_lineno-"$3"} as_lineno_stack=as_lineno_stack=$as_lineno_stack $as_echo "$as_me:${as_lineno-$LINENO}: error: $2" >&$4 fi $as_echo "$as_me: error: $2" >&2 as_fn_exit $as_status } # as_fn_error # as_fn_set_status STATUS # ----------------------- # Set $? to STATUS, without forking. as_fn_set_status () { return $1 } # as_fn_set_status # as_fn_exit STATUS # ----------------- # Exit the shell with STATUS, even in a "trap 0" or "set -e" context. 
as_fn_exit () { set +e as_fn_set_status $1 exit $1 } # as_fn_exit # as_fn_unset VAR # --------------- # Portably unset VAR. as_fn_unset () { { eval $1=; unset $1;} } as_unset=as_fn_unset # as_fn_append VAR VALUE # ---------------------- # Append the text in VALUE to the end of the definition contained in VAR. Take # advantage of any shell optimizations that allow amortized linear growth over # repeated appends, instead of the typical quadratic growth present in naive # implementations. if (eval "as_var=1; as_var+=2; test x\$as_var = x12") 2>/dev/null; then : eval 'as_fn_append () { eval $1+=\$2 }' else as_fn_append () { eval $1=\$$1\$2 } fi # as_fn_append # as_fn_arith ARG... # ------------------ # Perform arithmetic evaluation on the ARGs, and store the result in the # global $as_val. Take advantage of shells that can avoid forks. The arguments # must be portable across $(()) and expr. if (eval "test \$(( 1 + 1 )) = 2") 2>/dev/null; then : eval 'as_fn_arith () { as_val=$(( $* )) }' else as_fn_arith () { as_val=`expr "$@" || test $? -eq 1` } fi # as_fn_arith if expr a : '\(a\)' >/dev/null 2>&1 && test "X`expr 00001 : '.*\(...\)'`" = X001; then as_expr=expr else as_expr=false fi if (basename -- /) >/dev/null 2>&1 && test "X`basename -- / 2>&1`" = "X/"; then as_basename=basename else as_basename=false fi if (as_dir=`dirname -- /` && test "X$as_dir" = X/) >/dev/null 2>&1; then as_dirname=dirname else as_dirname=false fi as_me=`$as_basename -- "$0" || $as_expr X/"$0" : '.*/\([^/][^/]*\)/*$' \| \ X"$0" : 'X\(//\)$' \| \ X"$0" : 'X\(/\)' \| . 2>/dev/null || $as_echo X/"$0" | sed '/^.*\/\([^/][^/]*\)\/*$/{ s//\1/ q } /^X\/\(\/\/\)$/{ s//\1/ q } /^X\/\(\/\).*/{ s//\1/ q } s/.*/./; q'` # Avoid depending upon Character Ranges. 
as_cr_letters='abcdefghijklmnopqrstuvwxyz' as_cr_LETTERS='ABCDEFGHIJKLMNOPQRSTUVWXYZ' as_cr_Letters=$as_cr_letters$as_cr_LETTERS as_cr_digits='0123456789' as_cr_alnum=$as_cr_Letters$as_cr_digits ECHO_C= ECHO_N= ECHO_T= case `echo -n x` in #((((( -n*) case `echo 'xy\c'` in *c*) ECHO_T=' ';; # ECHO_T is single tab character. xy) ECHO_C='\c';; *) echo `echo ksh88 bug on AIX 6.1` > /dev/null ECHO_T=' ';; esac;; *) ECHO_N='-n';; esac rm -f conf$$ conf$$.exe conf$$.file if test -d conf$$.dir; then rm -f conf$$.dir/conf$$.file else rm -f conf$$.dir mkdir conf$$.dir 2>/dev/null fi if (echo >conf$$.file) 2>/dev/null; then if ln -s conf$$.file conf$$ 2>/dev/null; then as_ln_s='ln -s' # ... but there are two gotchas: # 1) On MSYS, both `ln -s file dir' and `ln file dir' fail. # 2) DJGPP < 2.04 has no symlinks; `ln -s' creates a wrapper executable. # In both cases, we have to default to `cp -pR'. ln -s conf$$.file conf$$.dir 2>/dev/null && test ! -f conf$$.exe || as_ln_s='cp -pR' elif ln conf$$.file conf$$ 2>/dev/null; then as_ln_s=ln else as_ln_s='cp -pR' fi else as_ln_s='cp -pR' fi rm -f conf$$ conf$$.exe conf$$.dir/conf$$.file conf$$.file rmdir conf$$.dir 2>/dev/null # as_fn_mkdir_p # ------------- # Create "$as_dir" as a directory, including parents if necessary. as_fn_mkdir_p () { case $as_dir in #( -*) as_dir=./$as_dir;; esac test -d "$as_dir" || eval $as_mkdir_p || { as_dirs= while :; do case $as_dir in #( *\'*) as_qdir=`$as_echo "$as_dir" | sed "s/'/'\\\\\\\\''/g"`;; #'( *) as_qdir=$as_dir;; esac as_dirs="'$as_qdir' $as_dirs" as_dir=`$as_dirname -- "$as_dir" || $as_expr X"$as_dir" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$as_dir" : 'X\(//\)[^/]' \| \ X"$as_dir" : 'X\(//\)$' \| \ X"$as_dir" : 'X\(/\)' \| . 
2>/dev/null || $as_echo X"$as_dir" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` test -d "$as_dir" && break done test -z "$as_dirs" || eval "mkdir $as_dirs" } || test -d "$as_dir" || as_fn_error $? "cannot create directory $as_dir" } # as_fn_mkdir_p if mkdir -p . 2>/dev/null; then as_mkdir_p='mkdir -p "$as_dir"' else test -d ./-p && rmdir ./-p as_mkdir_p=false fi # as_fn_executable_p FILE # ----------------------- # Test if FILE is an executable regular file. as_fn_executable_p () { test -f "$1" && test -x "$1" } # as_fn_executable_p as_test_x='test -x' as_executable_p=as_fn_executable_p # Sed expression to map a string onto a valid CPP name. as_tr_cpp="eval sed 'y%*$as_cr_letters%P$as_cr_LETTERS%;s%[^_$as_cr_alnum]%_%g'" # Sed expression to map a string onto a valid variable name. as_tr_sh="eval sed 'y%*+%pp%;s%[^_$as_cr_alnum]%_%g'" exec 6>&1 ## ----------------------------------- ## ## Main body of $CONFIG_STATUS script. ## ## ----------------------------------- ## _ASEOF test $as_write_fail = 0 && chmod +x $CONFIG_STATUS || ac_write_fail=1 cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # Save the log message, to keep $0 and so on meaningful, and to # report actual input values of CONFIG_FILES etc. instead of their # values after options handling. ac_log=" This file was extended by DAR $as_me 2.6.8, which was generated by GNU Autoconf 2.69. Invocation command line was CONFIG_FILES = $CONFIG_FILES CONFIG_HEADERS = $CONFIG_HEADERS CONFIG_LINKS = $CONFIG_LINKS CONFIG_COMMANDS = $CONFIG_COMMANDS $ $0 $@ on `(hostname || uname -n) 2>/dev/null | sed 1q` " _ACEOF case $ac_config_files in *" "*) set x $ac_config_files; shift; ac_config_files=$*;; esac case $ac_config_headers in *" "*) set x $ac_config_headers; shift; ac_config_headers=$*;; esac cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 # Files that config.status was made for. 
config_files="$ac_config_files" config_headers="$ac_config_headers" config_commands="$ac_config_commands" _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 ac_cs_usage="\ \`$as_me' instantiates files and other configuration actions from templates according to the current configuration. Unless the files and actions are specified as TAGs, all are instantiated by default. Usage: $0 [OPTION]... [TAG]... -h, --help print this help, then exit -V, --version print version number and configuration settings, then exit --config print configuration, then exit -q, --quiet, --silent do not print progress messages -d, --debug don't remove temporary files --recheck update $as_me by reconfiguring in the same conditions --file=FILE[:TEMPLATE] instantiate the configuration file FILE --header=FILE[:TEMPLATE] instantiate the configuration header FILE Configuration files: $config_files Configuration headers: $config_headers Configuration commands: $config_commands Report bugs to ." _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ac_cs_config="`$as_echo "$ac_configure_args" | sed 's/^ //; s/[\\""\`\$]/\\\\&/g'`" ac_cs_version="\\ DAR config.status 2.6.8 configured by $0, generated by GNU Autoconf 2.69, with options \\"\$ac_cs_config\\" Copyright (C) 2012 Free Software Foundation, Inc. This config.status script is free software; the Free Software Foundation gives unlimited permission to copy, distribute and modify it." ac_pwd='$ac_pwd' srcdir='$srcdir' INSTALL='$INSTALL' MKDIR_P='$MKDIR_P' AWK='$AWK' test -n "\$AWK" || AWK=awk _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # The default lists apply if the user does not specify any file. ac_need_defaults=: while test $# != 0 do case $1 in --*=?*) ac_option=`expr "X$1" : 'X\([^=]*\)='` ac_optarg=`expr "X$1" : 'X[^=]*=\(.*\)'` ac_shift=: ;; --*=) ac_option=`expr "X$1" : 'X\([^=]*\)='` ac_optarg= ac_shift=: ;; *) ac_option=$1 ac_optarg=$2 ac_shift=shift ;; esac case $ac_option in # Handling of the options. 
-recheck | --recheck | --rechec | --reche | --rech | --rec | --re | --r) ac_cs_recheck=: ;; --version | --versio | --versi | --vers | --ver | --ve | --v | -V ) $as_echo "$ac_cs_version"; exit ;; --config | --confi | --conf | --con | --co | --c ) $as_echo "$ac_cs_config"; exit ;; --debug | --debu | --deb | --de | --d | -d ) debug=: ;; --file | --fil | --fi | --f ) $ac_shift case $ac_optarg in *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; '') as_fn_error $? "missing file argument" ;; esac as_fn_append CONFIG_FILES " '$ac_optarg'" ac_need_defaults=false;; --header | --heade | --head | --hea ) $ac_shift case $ac_optarg in *\'*) ac_optarg=`$as_echo "$ac_optarg" | sed "s/'/'\\\\\\\\''/g"` ;; esac as_fn_append CONFIG_HEADERS " '$ac_optarg'" ac_need_defaults=false;; --he | --h) # Conflict between --help and --header as_fn_error $? "ambiguous option: \`$1' Try \`$0 --help' for more information.";; --help | --hel | -h ) $as_echo "$ac_cs_usage"; exit ;; -q | -quiet | --quiet | --quie | --qui | --qu | --q \ | -silent | --silent | --silen | --sile | --sil | --si | --s) ac_cs_silent=: ;; # This is an error. -*) as_fn_error $? "unrecognized option: \`$1' Try \`$0 --help' for more information." ;; *) as_fn_append ac_config_targets " $1" ac_need_defaults=false ;; esac shift done ac_configure_extra_args= if $ac_cs_silent; then exec 6>/dev/null ac_configure_extra_args="$ac_configure_extra_args --silent" fi _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 if \$ac_cs_recheck; then set X $SHELL '$0' $ac_configure_args \$ac_configure_extra_args --no-create --no-recursion shift \$as_echo "running CONFIG_SHELL=$SHELL \$*" >&6 CONFIG_SHELL='$SHELL' export CONFIG_SHELL exec "\$@" fi _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 exec 5>>config.log { echo sed 'h;s/./-/g;s/^.../## /;s/...$/ ##/;p;x;p;x' <<_ASBOX ## Running $as_me. 
## _ASBOX $as_echo "$ac_log" } >&5 _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 # # INIT-COMMANDS # # Capture the value of obsolete ALL_LINGUAS because we need it to compute # POFILES, UPDATEPOFILES, DUMMYPOFILES, GMOFILES, CATALOGS. But hide it # from automake < 1.5. eval 'OBSOLETE_ALL_LINGUAS''="$ALL_LINGUAS"' # Capture the value of LINGUAS because we need it to compute CATALOGS. LINGUAS="${LINGUAS-%UNSET%}" AMDEP_TRUE="$AMDEP_TRUE" ac_aux_dir="$ac_aux_dir" # The HP-UX ksh and POSIX shell print the target directory to stdout # if CDPATH is set. (unset CDPATH) >/dev/null 2>&1 && unset CDPATH sed_quote_subst='$sed_quote_subst' double_quote_subst='$double_quote_subst' delay_variable_subst='$delay_variable_subst' macro_version='`$ECHO "$macro_version" | $SED "$delay_single_quote_subst"`' macro_revision='`$ECHO "$macro_revision" | $SED "$delay_single_quote_subst"`' enable_shared='`$ECHO "$enable_shared" | $SED "$delay_single_quote_subst"`' enable_static='`$ECHO "$enable_static" | $SED "$delay_single_quote_subst"`' pic_mode='`$ECHO "$pic_mode" | $SED "$delay_single_quote_subst"`' enable_fast_install='`$ECHO "$enable_fast_install" | $SED "$delay_single_quote_subst"`' shared_archive_member_spec='`$ECHO "$shared_archive_member_spec" | $SED "$delay_single_quote_subst"`' SHELL='`$ECHO "$SHELL" | $SED "$delay_single_quote_subst"`' ECHO='`$ECHO "$ECHO" | $SED "$delay_single_quote_subst"`' PATH_SEPARATOR='`$ECHO "$PATH_SEPARATOR" | $SED "$delay_single_quote_subst"`' host_alias='`$ECHO "$host_alias" | $SED "$delay_single_quote_subst"`' host='`$ECHO "$host" | $SED "$delay_single_quote_subst"`' host_os='`$ECHO "$host_os" | $SED "$delay_single_quote_subst"`' build_alias='`$ECHO "$build_alias" | $SED "$delay_single_quote_subst"`' build='`$ECHO "$build" | $SED "$delay_single_quote_subst"`' build_os='`$ECHO "$build_os" | $SED "$delay_single_quote_subst"`' SED='`$ECHO "$SED" | $SED "$delay_single_quote_subst"`' Xsed='`$ECHO "$Xsed" | $SED "$delay_single_quote_subst"`' 
GREP='`$ECHO "$GREP" | $SED "$delay_single_quote_subst"`' EGREP='`$ECHO "$EGREP" | $SED "$delay_single_quote_subst"`' FGREP='`$ECHO "$FGREP" | $SED "$delay_single_quote_subst"`' LD='`$ECHO "$LD" | $SED "$delay_single_quote_subst"`' NM='`$ECHO "$NM" | $SED "$delay_single_quote_subst"`' LN_S='`$ECHO "$LN_S" | $SED "$delay_single_quote_subst"`' max_cmd_len='`$ECHO "$max_cmd_len" | $SED "$delay_single_quote_subst"`' ac_objext='`$ECHO "$ac_objext" | $SED "$delay_single_quote_subst"`' exeext='`$ECHO "$exeext" | $SED "$delay_single_quote_subst"`' lt_unset='`$ECHO "$lt_unset" | $SED "$delay_single_quote_subst"`' lt_SP2NL='`$ECHO "$lt_SP2NL" | $SED "$delay_single_quote_subst"`' lt_NL2SP='`$ECHO "$lt_NL2SP" | $SED "$delay_single_quote_subst"`' lt_cv_to_host_file_cmd='`$ECHO "$lt_cv_to_host_file_cmd" | $SED "$delay_single_quote_subst"`' lt_cv_to_tool_file_cmd='`$ECHO "$lt_cv_to_tool_file_cmd" | $SED "$delay_single_quote_subst"`' reload_flag='`$ECHO "$reload_flag" | $SED "$delay_single_quote_subst"`' reload_cmds='`$ECHO "$reload_cmds" | $SED "$delay_single_quote_subst"`' OBJDUMP='`$ECHO "$OBJDUMP" | $SED "$delay_single_quote_subst"`' deplibs_check_method='`$ECHO "$deplibs_check_method" | $SED "$delay_single_quote_subst"`' file_magic_cmd='`$ECHO "$file_magic_cmd" | $SED "$delay_single_quote_subst"`' file_magic_glob='`$ECHO "$file_magic_glob" | $SED "$delay_single_quote_subst"`' want_nocaseglob='`$ECHO "$want_nocaseglob" | $SED "$delay_single_quote_subst"`' DLLTOOL='`$ECHO "$DLLTOOL" | $SED "$delay_single_quote_subst"`' sharedlib_from_linklib_cmd='`$ECHO "$sharedlib_from_linklib_cmd" | $SED "$delay_single_quote_subst"`' AR='`$ECHO "$AR" | $SED "$delay_single_quote_subst"`' AR_FLAGS='`$ECHO "$AR_FLAGS" | $SED "$delay_single_quote_subst"`' archiver_list_spec='`$ECHO "$archiver_list_spec" | $SED "$delay_single_quote_subst"`' STRIP='`$ECHO "$STRIP" | $SED "$delay_single_quote_subst"`' RANLIB='`$ECHO "$RANLIB" | $SED "$delay_single_quote_subst"`' old_postinstall_cmds='`$ECHO 
"$old_postinstall_cmds" | $SED "$delay_single_quote_subst"`' old_postuninstall_cmds='`$ECHO "$old_postuninstall_cmds" | $SED "$delay_single_quote_subst"`' old_archive_cmds='`$ECHO "$old_archive_cmds" | $SED "$delay_single_quote_subst"`' lock_old_archive_extraction='`$ECHO "$lock_old_archive_extraction" | $SED "$delay_single_quote_subst"`' CC='`$ECHO "$CC" | $SED "$delay_single_quote_subst"`' CFLAGS='`$ECHO "$CFLAGS" | $SED "$delay_single_quote_subst"`' compiler='`$ECHO "$compiler" | $SED "$delay_single_quote_subst"`' GCC='`$ECHO "$GCC" | $SED "$delay_single_quote_subst"`' lt_cv_sys_global_symbol_pipe='`$ECHO "$lt_cv_sys_global_symbol_pipe" | $SED "$delay_single_quote_subst"`' lt_cv_sys_global_symbol_to_cdecl='`$ECHO "$lt_cv_sys_global_symbol_to_cdecl" | $SED "$delay_single_quote_subst"`' lt_cv_sys_global_symbol_to_import='`$ECHO "$lt_cv_sys_global_symbol_to_import" | $SED "$delay_single_quote_subst"`' lt_cv_sys_global_symbol_to_c_name_address='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address" | $SED "$delay_single_quote_subst"`' lt_cv_sys_global_symbol_to_c_name_address_lib_prefix='`$ECHO "$lt_cv_sys_global_symbol_to_c_name_address_lib_prefix" | $SED "$delay_single_quote_subst"`' lt_cv_nm_interface='`$ECHO "$lt_cv_nm_interface" | $SED "$delay_single_quote_subst"`' nm_file_list_spec='`$ECHO "$nm_file_list_spec" | $SED "$delay_single_quote_subst"`' lt_sysroot='`$ECHO "$lt_sysroot" | $SED "$delay_single_quote_subst"`' lt_cv_truncate_bin='`$ECHO "$lt_cv_truncate_bin" | $SED "$delay_single_quote_subst"`' objdir='`$ECHO "$objdir" | $SED "$delay_single_quote_subst"`' MAGIC_CMD='`$ECHO "$MAGIC_CMD" | $SED "$delay_single_quote_subst"`' lt_prog_compiler_no_builtin_flag='`$ECHO "$lt_prog_compiler_no_builtin_flag" | $SED "$delay_single_quote_subst"`' lt_prog_compiler_pic='`$ECHO "$lt_prog_compiler_pic" | $SED "$delay_single_quote_subst"`' lt_prog_compiler_wl='`$ECHO "$lt_prog_compiler_wl" | $SED "$delay_single_quote_subst"`' lt_prog_compiler_static='`$ECHO 
"$lt_prog_compiler_static" | $SED "$delay_single_quote_subst"`' lt_cv_prog_compiler_c_o='`$ECHO "$lt_cv_prog_compiler_c_o" | $SED "$delay_single_quote_subst"`' need_locks='`$ECHO "$need_locks" | $SED "$delay_single_quote_subst"`' MANIFEST_TOOL='`$ECHO "$MANIFEST_TOOL" | $SED "$delay_single_quote_subst"`' DSYMUTIL='`$ECHO "$DSYMUTIL" | $SED "$delay_single_quote_subst"`' NMEDIT='`$ECHO "$NMEDIT" | $SED "$delay_single_quote_subst"`' LIPO='`$ECHO "$LIPO" | $SED "$delay_single_quote_subst"`' OTOOL='`$ECHO "$OTOOL" | $SED "$delay_single_quote_subst"`' OTOOL64='`$ECHO "$OTOOL64" | $SED "$delay_single_quote_subst"`' libext='`$ECHO "$libext" | $SED "$delay_single_quote_subst"`' shrext_cmds='`$ECHO "$shrext_cmds" | $SED "$delay_single_quote_subst"`' extract_expsyms_cmds='`$ECHO "$extract_expsyms_cmds" | $SED "$delay_single_quote_subst"`' archive_cmds_need_lc='`$ECHO "$archive_cmds_need_lc" | $SED "$delay_single_quote_subst"`' enable_shared_with_static_runtimes='`$ECHO "$enable_shared_with_static_runtimes" | $SED "$delay_single_quote_subst"`' export_dynamic_flag_spec='`$ECHO "$export_dynamic_flag_spec" | $SED "$delay_single_quote_subst"`' whole_archive_flag_spec='`$ECHO "$whole_archive_flag_spec" | $SED "$delay_single_quote_subst"`' compiler_needs_object='`$ECHO "$compiler_needs_object" | $SED "$delay_single_quote_subst"`' old_archive_from_new_cmds='`$ECHO "$old_archive_from_new_cmds" | $SED "$delay_single_quote_subst"`' old_archive_from_expsyms_cmds='`$ECHO "$old_archive_from_expsyms_cmds" | $SED "$delay_single_quote_subst"`' archive_cmds='`$ECHO "$archive_cmds" | $SED "$delay_single_quote_subst"`' archive_expsym_cmds='`$ECHO "$archive_expsym_cmds" | $SED "$delay_single_quote_subst"`' module_cmds='`$ECHO "$module_cmds" | $SED "$delay_single_quote_subst"`' module_expsym_cmds='`$ECHO "$module_expsym_cmds" | $SED "$delay_single_quote_subst"`' with_gnu_ld='`$ECHO "$with_gnu_ld" | $SED "$delay_single_quote_subst"`' allow_undefined_flag='`$ECHO "$allow_undefined_flag" | $SED 
"$delay_single_quote_subst"`' no_undefined_flag='`$ECHO "$no_undefined_flag" | $SED "$delay_single_quote_subst"`' hardcode_libdir_flag_spec='`$ECHO "$hardcode_libdir_flag_spec" | $SED "$delay_single_quote_subst"`' hardcode_libdir_separator='`$ECHO "$hardcode_libdir_separator" | $SED "$delay_single_quote_subst"`' hardcode_direct='`$ECHO "$hardcode_direct" | $SED "$delay_single_quote_subst"`' hardcode_direct_absolute='`$ECHO "$hardcode_direct_absolute" | $SED "$delay_single_quote_subst"`' hardcode_minus_L='`$ECHO "$hardcode_minus_L" | $SED "$delay_single_quote_subst"`' hardcode_shlibpath_var='`$ECHO "$hardcode_shlibpath_var" | $SED "$delay_single_quote_subst"`' hardcode_automatic='`$ECHO "$hardcode_automatic" | $SED "$delay_single_quote_subst"`' inherit_rpath='`$ECHO "$inherit_rpath" | $SED "$delay_single_quote_subst"`' link_all_deplibs='`$ECHO "$link_all_deplibs" | $SED "$delay_single_quote_subst"`' always_export_symbols='`$ECHO "$always_export_symbols" | $SED "$delay_single_quote_subst"`' export_symbols_cmds='`$ECHO "$export_symbols_cmds" | $SED "$delay_single_quote_subst"`' exclude_expsyms='`$ECHO "$exclude_expsyms" | $SED "$delay_single_quote_subst"`' include_expsyms='`$ECHO "$include_expsyms" | $SED "$delay_single_quote_subst"`' prelink_cmds='`$ECHO "$prelink_cmds" | $SED "$delay_single_quote_subst"`' postlink_cmds='`$ECHO "$postlink_cmds" | $SED "$delay_single_quote_subst"`' file_list_spec='`$ECHO "$file_list_spec" | $SED "$delay_single_quote_subst"`' variables_saved_for_relink='`$ECHO "$variables_saved_for_relink" | $SED "$delay_single_quote_subst"`' need_lib_prefix='`$ECHO "$need_lib_prefix" | $SED "$delay_single_quote_subst"`' need_version='`$ECHO "$need_version" | $SED "$delay_single_quote_subst"`' version_type='`$ECHO "$version_type" | $SED "$delay_single_quote_subst"`' runpath_var='`$ECHO "$runpath_var" | $SED "$delay_single_quote_subst"`' shlibpath_var='`$ECHO "$shlibpath_var" | $SED "$delay_single_quote_subst"`' shlibpath_overrides_runpath='`$ECHO 
"$shlibpath_overrides_runpath" | $SED "$delay_single_quote_subst"`' libname_spec='`$ECHO "$libname_spec" | $SED "$delay_single_quote_subst"`' library_names_spec='`$ECHO "$library_names_spec" | $SED "$delay_single_quote_subst"`' soname_spec='`$ECHO "$soname_spec" | $SED "$delay_single_quote_subst"`' install_override_mode='`$ECHO "$install_override_mode" | $SED "$delay_single_quote_subst"`' postinstall_cmds='`$ECHO "$postinstall_cmds" | $SED "$delay_single_quote_subst"`' postuninstall_cmds='`$ECHO "$postuninstall_cmds" | $SED "$delay_single_quote_subst"`' finish_cmds='`$ECHO "$finish_cmds" | $SED "$delay_single_quote_subst"`' finish_eval='`$ECHO "$finish_eval" | $SED "$delay_single_quote_subst"`' hardcode_into_libs='`$ECHO "$hardcode_into_libs" | $SED "$delay_single_quote_subst"`' sys_lib_search_path_spec='`$ECHO "$sys_lib_search_path_spec" | $SED "$delay_single_quote_subst"`' configure_time_dlsearch_path='`$ECHO "$configure_time_dlsearch_path" | $SED "$delay_single_quote_subst"`' configure_time_lt_sys_library_path='`$ECHO "$configure_time_lt_sys_library_path" | $SED "$delay_single_quote_subst"`' hardcode_action='`$ECHO "$hardcode_action" | $SED "$delay_single_quote_subst"`' enable_dlopen='`$ECHO "$enable_dlopen" | $SED "$delay_single_quote_subst"`' enable_dlopen_self='`$ECHO "$enable_dlopen_self" | $SED "$delay_single_quote_subst"`' enable_dlopen_self_static='`$ECHO "$enable_dlopen_self_static" | $SED "$delay_single_quote_subst"`' old_striplib='`$ECHO "$old_striplib" | $SED "$delay_single_quote_subst"`' striplib='`$ECHO "$striplib" | $SED "$delay_single_quote_subst"`' compiler_lib_search_dirs='`$ECHO "$compiler_lib_search_dirs" | $SED "$delay_single_quote_subst"`' predep_objects='`$ECHO "$predep_objects" | $SED "$delay_single_quote_subst"`' postdep_objects='`$ECHO "$postdep_objects" | $SED "$delay_single_quote_subst"`' predeps='`$ECHO "$predeps" | $SED "$delay_single_quote_subst"`' postdeps='`$ECHO "$postdeps" | $SED "$delay_single_quote_subst"`' 
compiler_lib_search_path='`$ECHO "$compiler_lib_search_path" | $SED "$delay_single_quote_subst"`' LD_CXX='`$ECHO "$LD_CXX" | $SED "$delay_single_quote_subst"`' reload_flag_CXX='`$ECHO "$reload_flag_CXX" | $SED "$delay_single_quote_subst"`' reload_cmds_CXX='`$ECHO "$reload_cmds_CXX" | $SED "$delay_single_quote_subst"`' old_archive_cmds_CXX='`$ECHO "$old_archive_cmds_CXX" | $SED "$delay_single_quote_subst"`' compiler_CXX='`$ECHO "$compiler_CXX" | $SED "$delay_single_quote_subst"`' GCC_CXX='`$ECHO "$GCC_CXX" | $SED "$delay_single_quote_subst"`' lt_prog_compiler_no_builtin_flag_CXX='`$ECHO "$lt_prog_compiler_no_builtin_flag_CXX" | $SED "$delay_single_quote_subst"`' lt_prog_compiler_pic_CXX='`$ECHO "$lt_prog_compiler_pic_CXX" | $SED "$delay_single_quote_subst"`' lt_prog_compiler_wl_CXX='`$ECHO "$lt_prog_compiler_wl_CXX" | $SED "$delay_single_quote_subst"`' lt_prog_compiler_static_CXX='`$ECHO "$lt_prog_compiler_static_CXX" | $SED "$delay_single_quote_subst"`' lt_cv_prog_compiler_c_o_CXX='`$ECHO "$lt_cv_prog_compiler_c_o_CXX" | $SED "$delay_single_quote_subst"`' archive_cmds_need_lc_CXX='`$ECHO "$archive_cmds_need_lc_CXX" | $SED "$delay_single_quote_subst"`' enable_shared_with_static_runtimes_CXX='`$ECHO "$enable_shared_with_static_runtimes_CXX" | $SED "$delay_single_quote_subst"`' export_dynamic_flag_spec_CXX='`$ECHO "$export_dynamic_flag_spec_CXX" | $SED "$delay_single_quote_subst"`' whole_archive_flag_spec_CXX='`$ECHO "$whole_archive_flag_spec_CXX" | $SED "$delay_single_quote_subst"`' compiler_needs_object_CXX='`$ECHO "$compiler_needs_object_CXX" | $SED "$delay_single_quote_subst"`' old_archive_from_new_cmds_CXX='`$ECHO "$old_archive_from_new_cmds_CXX" | $SED "$delay_single_quote_subst"`' old_archive_from_expsyms_cmds_CXX='`$ECHO "$old_archive_from_expsyms_cmds_CXX" | $SED "$delay_single_quote_subst"`' archive_cmds_CXX='`$ECHO "$archive_cmds_CXX" | $SED "$delay_single_quote_subst"`' archive_expsym_cmds_CXX='`$ECHO "$archive_expsym_cmds_CXX" | $SED 
"$delay_single_quote_subst"`' module_cmds_CXX='`$ECHO "$module_cmds_CXX" | $SED "$delay_single_quote_subst"`' module_expsym_cmds_CXX='`$ECHO "$module_expsym_cmds_CXX" | $SED "$delay_single_quote_subst"`' with_gnu_ld_CXX='`$ECHO "$with_gnu_ld_CXX" | $SED "$delay_single_quote_subst"`' allow_undefined_flag_CXX='`$ECHO "$allow_undefined_flag_CXX" | $SED "$delay_single_quote_subst"`' no_undefined_flag_CXX='`$ECHO "$no_undefined_flag_CXX" | $SED "$delay_single_quote_subst"`' hardcode_libdir_flag_spec_CXX='`$ECHO "$hardcode_libdir_flag_spec_CXX" | $SED "$delay_single_quote_subst"`' hardcode_libdir_separator_CXX='`$ECHO "$hardcode_libdir_separator_CXX" | $SED "$delay_single_quote_subst"`' hardcode_direct_CXX='`$ECHO "$hardcode_direct_CXX" | $SED "$delay_single_quote_subst"`' hardcode_direct_absolute_CXX='`$ECHO "$hardcode_direct_absolute_CXX" | $SED "$delay_single_quote_subst"`' hardcode_minus_L_CXX='`$ECHO "$hardcode_minus_L_CXX" | $SED "$delay_single_quote_subst"`' hardcode_shlibpath_var_CXX='`$ECHO "$hardcode_shlibpath_var_CXX" | $SED "$delay_single_quote_subst"`' hardcode_automatic_CXX='`$ECHO "$hardcode_automatic_CXX" | $SED "$delay_single_quote_subst"`' inherit_rpath_CXX='`$ECHO "$inherit_rpath_CXX" | $SED "$delay_single_quote_subst"`' link_all_deplibs_CXX='`$ECHO "$link_all_deplibs_CXX" | $SED "$delay_single_quote_subst"`' always_export_symbols_CXX='`$ECHO "$always_export_symbols_CXX" | $SED "$delay_single_quote_subst"`' export_symbols_cmds_CXX='`$ECHO "$export_symbols_cmds_CXX" | $SED "$delay_single_quote_subst"`' exclude_expsyms_CXX='`$ECHO "$exclude_expsyms_CXX" | $SED "$delay_single_quote_subst"`' include_expsyms_CXX='`$ECHO "$include_expsyms_CXX" | $SED "$delay_single_quote_subst"`' prelink_cmds_CXX='`$ECHO "$prelink_cmds_CXX" | $SED "$delay_single_quote_subst"`' postlink_cmds_CXX='`$ECHO "$postlink_cmds_CXX" | $SED "$delay_single_quote_subst"`' file_list_spec_CXX='`$ECHO "$file_list_spec_CXX" | $SED "$delay_single_quote_subst"`' hardcode_action_CXX='`$ECHO 
"$hardcode_action_CXX" | $SED "$delay_single_quote_subst"`' compiler_lib_search_dirs_CXX='`$ECHO "$compiler_lib_search_dirs_CXX" | $SED "$delay_single_quote_subst"`' predep_objects_CXX='`$ECHO "$predep_objects_CXX" | $SED "$delay_single_quote_subst"`' postdep_objects_CXX='`$ECHO "$postdep_objects_CXX" | $SED "$delay_single_quote_subst"`' predeps_CXX='`$ECHO "$predeps_CXX" | $SED "$delay_single_quote_subst"`' postdeps_CXX='`$ECHO "$postdeps_CXX" | $SED "$delay_single_quote_subst"`' compiler_lib_search_path_CXX='`$ECHO "$compiler_lib_search_path_CXX" | $SED "$delay_single_quote_subst"`' LTCC='$LTCC' LTCFLAGS='$LTCFLAGS' compiler='$compiler_DEFAULT' # A function that is used when there is no print builtin or printf. func_fallback_echo () { eval 'cat <<_LTECHO_EOF \$1 _LTECHO_EOF' } # Quote evaled strings. for var in SHELL \ ECHO \ PATH_SEPARATOR \ SED \ GREP \ EGREP \ FGREP \ LD \ NM \ LN_S \ lt_SP2NL \ lt_NL2SP \ reload_flag \ OBJDUMP \ deplibs_check_method \ file_magic_cmd \ file_magic_glob \ want_nocaseglob \ DLLTOOL \ sharedlib_from_linklib_cmd \ AR \ AR_FLAGS \ archiver_list_spec \ STRIP \ RANLIB \ CC \ CFLAGS \ compiler \ lt_cv_sys_global_symbol_pipe \ lt_cv_sys_global_symbol_to_cdecl \ lt_cv_sys_global_symbol_to_import \ lt_cv_sys_global_symbol_to_c_name_address \ lt_cv_sys_global_symbol_to_c_name_address_lib_prefix \ lt_cv_nm_interface \ nm_file_list_spec \ lt_cv_truncate_bin \ lt_prog_compiler_no_builtin_flag \ lt_prog_compiler_pic \ lt_prog_compiler_wl \ lt_prog_compiler_static \ lt_cv_prog_compiler_c_o \ need_locks \ MANIFEST_TOOL \ DSYMUTIL \ NMEDIT \ LIPO \ OTOOL \ OTOOL64 \ shrext_cmds \ export_dynamic_flag_spec \ whole_archive_flag_spec \ compiler_needs_object \ with_gnu_ld \ allow_undefined_flag \ no_undefined_flag \ hardcode_libdir_flag_spec \ hardcode_libdir_separator \ exclude_expsyms \ include_expsyms \ file_list_spec \ variables_saved_for_relink \ libname_spec \ library_names_spec \ soname_spec \ install_override_mode \ finish_eval \ old_striplib 
\ striplib \ compiler_lib_search_dirs \ predep_objects \ postdep_objects \ predeps \ postdeps \ compiler_lib_search_path \ LD_CXX \ reload_flag_CXX \ compiler_CXX \ lt_prog_compiler_no_builtin_flag_CXX \ lt_prog_compiler_pic_CXX \ lt_prog_compiler_wl_CXX \ lt_prog_compiler_static_CXX \ lt_cv_prog_compiler_c_o_CXX \ export_dynamic_flag_spec_CXX \ whole_archive_flag_spec_CXX \ compiler_needs_object_CXX \ with_gnu_ld_CXX \ allow_undefined_flag_CXX \ no_undefined_flag_CXX \ hardcode_libdir_flag_spec_CXX \ hardcode_libdir_separator_CXX \ exclude_expsyms_CXX \ include_expsyms_CXX \ file_list_spec_CXX \ compiler_lib_search_dirs_CXX \ predep_objects_CXX \ postdep_objects_CXX \ predeps_CXX \ postdeps_CXX \ compiler_lib_search_path_CXX; do case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in *[\\\\\\\`\\"\\\$]*) eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED \\"\\\$sed_quote_subst\\"\\\`\\\\\\"" ## exclude from sc_prohibit_nested_quotes ;; *) eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" ;; esac done # Double-quote double-evaled strings. 
for var in reload_cmds \ old_postinstall_cmds \ old_postuninstall_cmds \ old_archive_cmds \ extract_expsyms_cmds \ old_archive_from_new_cmds \ old_archive_from_expsyms_cmds \ archive_cmds \ archive_expsym_cmds \ module_cmds \ module_expsym_cmds \ export_symbols_cmds \ prelink_cmds \ postlink_cmds \ postinstall_cmds \ postuninstall_cmds \ finish_cmds \ sys_lib_search_path_spec \ configure_time_dlsearch_path \ configure_time_lt_sys_library_path \ reload_cmds_CXX \ old_archive_cmds_CXX \ old_archive_from_new_cmds_CXX \ old_archive_from_expsyms_cmds_CXX \ archive_cmds_CXX \ archive_expsym_cmds_CXX \ module_cmds_CXX \ module_expsym_cmds_CXX \ export_symbols_cmds_CXX \ prelink_cmds_CXX \ postlink_cmds_CXX; do case \`eval \\\\\$ECHO \\\\""\\\\\$\$var"\\\\"\` in *[\\\\\\\`\\"\\\$]*) eval "lt_\$var=\\\\\\"\\\`\\\$ECHO \\"\\\$\$var\\" | \\\$SED -e \\"\\\$double_quote_subst\\" -e \\"\\\$sed_quote_subst\\" -e \\"\\\$delay_variable_subst\\"\\\`\\\\\\"" ## exclude from sc_prohibit_nested_quotes ;; *) eval "lt_\$var=\\\\\\"\\\$\$var\\\\\\"" ;; esac done ac_aux_dir='$ac_aux_dir' # See if we are running on zsh, and set the options that allow our # commands through without removal of \ escapes INIT. if test -n "\${ZSH_VERSION+set}"; then setopt NO_GLOB_SUBST fi PACKAGE='$PACKAGE' VERSION='$VERSION' RM='$RM' ofile='$ofile' _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # Handling of arguments. 
for ac_config_target in $ac_config_targets do case $ac_config_target in "config.h") CONFIG_HEADERS="$CONFIG_HEADERS config.h" ;; "po-directories") CONFIG_COMMANDS="$CONFIG_COMMANDS po-directories" ;; "depfiles") CONFIG_COMMANDS="$CONFIG_COMMANDS depfiles" ;; "libtool") CONFIG_COMMANDS="$CONFIG_COMMANDS libtool" ;; "hack-libtool") CONFIG_COMMANDS="$CONFIG_COMMANDS hack-libtool" ;; "Makefile") CONFIG_FILES="$CONFIG_FILES Makefile" ;; "man/Makefile") CONFIG_FILES="$CONFIG_FILES man/Makefile" ;; "src/Makefile") CONFIG_FILES="$CONFIG_FILES src/Makefile" ;; "src/libdar/Makefile") CONFIG_FILES="$CONFIG_FILES src/libdar/Makefile" ;; "src/dar_suite/Makefile") CONFIG_FILES="$CONFIG_FILES src/dar_suite/Makefile" ;; "src/testing/Makefile") CONFIG_FILES="$CONFIG_FILES src/testing/Makefile" ;; "src/examples/Makefile") CONFIG_FILES="$CONFIG_FILES src/examples/Makefile" ;; "doc/Makefile") CONFIG_FILES="$CONFIG_FILES doc/Makefile" ;; "doc/samples/Makefile") CONFIG_FILES="$CONFIG_FILES doc/samples/Makefile" ;; "misc/Makefile") CONFIG_FILES="$CONFIG_FILES misc/Makefile" ;; "doc/mini-howto/Makefile") CONFIG_FILES="$CONFIG_FILES doc/mini-howto/Makefile" ;; "src/libdar/libdar.pc.tmpl") CONFIG_FILES="$CONFIG_FILES src/libdar/libdar.pc.tmpl" ;; "doc/man/Makefile") CONFIG_FILES="$CONFIG_FILES doc/man/Makefile" ;; "src/check/Makefile") CONFIG_FILES="$CONFIG_FILES src/check/Makefile" ;; "src/python/Makefile") CONFIG_FILES="$CONFIG_FILES src/python/Makefile" ;; "po/Makefile.in") CONFIG_FILES="$CONFIG_FILES po/Makefile.in" ;; *) as_fn_error $? "invalid argument: \`$ac_config_target'" "$LINENO" 5;; esac done # If the user did not use the arguments to specify the items to instantiate, # then the envvar interface is used. Set only those that are not. # We use the long form for the default assignment because of an extremely # bizarre bug on SunOS 4.1.3. 
if $ac_need_defaults; then test "${CONFIG_FILES+set}" = set || CONFIG_FILES=$config_files test "${CONFIG_HEADERS+set}" = set || CONFIG_HEADERS=$config_headers test "${CONFIG_COMMANDS+set}" = set || CONFIG_COMMANDS=$config_commands fi # Have a temporary directory for convenience. Make it in the build tree # simply because there is no reason against having it here, and in addition, # creating and moving files from /tmp can sometimes cause problems. # Hook for its removal unless debugging. # Note that there is a small window in which the directory will not be cleaned: # after its creation but before its name has been assigned to `$tmp'. $debug || { tmp= ac_tmp= trap 'exit_status=$? : "${ac_tmp:=$tmp}" { test ! -d "$ac_tmp" || rm -fr "$ac_tmp"; } && exit $exit_status ' 0 trap 'as_fn_exit 1' 1 2 13 15 } # Create a (secure) tmp directory for tmp files. { tmp=`(umask 077 && mktemp -d "./confXXXXXX") 2>/dev/null` && test -d "$tmp" } || { tmp=./conf$$-$RANDOM (umask 077 && mkdir "$tmp") } || as_fn_error $? "cannot create a temporary directory in ." "$LINENO" 5 ac_tmp=$tmp # Set up the scripts for CONFIG_FILES section. # No need to generate them if there are no CONFIG_FILES. # This happens for instance with `./config.status config.h'. if test -n "$CONFIG_FILES"; then ac_cr=`echo X | tr X '\015'` # On cygwin, bash can eat \r inside `` if the user requested igncr. # But we know of no other shell where ac_cr would be empty at this # point, so we can use a bashism as a fallback. if test "x$ac_cr" = x; then eval ac_cr=\$\'\\r\' fi ac_cs_awk_cr=`$AWK 'BEGIN { print "a\rb" }' /dev/null` if test "$ac_cs_awk_cr" = "a${ac_cr}b"; then ac_cs_awk_cr='\\r' else ac_cs_awk_cr=$ac_cr fi echo 'BEGIN {' >"$ac_tmp/subs1.awk" && _ACEOF { echo "cat >conf$$subs.awk <<_ACEOF" && echo "$ac_subst_vars" | sed 's/.*/&!$&$ac_delim/' && echo "_ACEOF" } >conf$$subs.sh || as_fn_error $? 
"could not make $CONFIG_STATUS" "$LINENO" 5 ac_delim_num=`echo "$ac_subst_vars" | grep -c '^'` ac_delim='%!_!# ' for ac_last_try in false false false false false :; do . ./conf$$subs.sh || as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 ac_delim_n=`sed -n "s/.*$ac_delim\$/X/p" conf$$subs.awk | grep -c X` if test $ac_delim_n = $ac_delim_num; then break elif $ac_last_try; then as_fn_error $? "could not make $CONFIG_STATUS" "$LINENO" 5 else ac_delim="$ac_delim!$ac_delim _$ac_delim!! " fi done rm -f conf$$subs.sh cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 cat >>"\$ac_tmp/subs1.awk" <<\\_ACAWK && _ACEOF sed -n ' h s/^/S["/; s/!.*/"]=/ p g s/^[^!]*!// :repl t repl s/'"$ac_delim"'$// t delim :nl h s/\(.\{148\}\)..*/\1/ t more1 s/["\\]/\\&/g; s/^/"/; s/$/\\n"\\/ p n b repl :more1 s/["\\]/\\&/g; s/^/"/; s/$/"\\/ p g s/.\{148\}// t nl :delim h s/\(.\{148\}\)..*/\1/ t more2 s/["\\]/\\&/g; s/^/"/; s/$/"/ p b :more2 s/["\\]/\\&/g; s/^/"/; s/$/"\\/ p g s/.\{148\}// t delim ' >$CONFIG_STATUS || ac_write_fail=1 rm -f conf$$subs.awk cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 _ACAWK cat >>"\$ac_tmp/subs1.awk" <<_ACAWK && for (key in S) S_is_set[key] = 1 FS = "" } { line = $ 0 nfields = split(line, field, "@") substed = 0 len = length(field[1]) for (i = 2; i < nfields; i++) { key = field[i] keylen = length(key) if (S_is_set[key]) { value = S[key] line = substr(line, 1, len) "" value "" substr(line, len + keylen + 3) len += length(value) + length(field[++i]) substed = 1 } else len += 1 + keylen } print line } _ACAWK _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 if sed "s/$ac_cr//" < /dev/null > /dev/null 2>&1; then sed "s/$ac_cr\$//; s/$ac_cr/$ac_cs_awk_cr/g" else cat fi < "$ac_tmp/subs1.awk" > "$ac_tmp/subs.awk" \ || as_fn_error $? 
"could not setup config files machinery" "$LINENO" 5 _ACEOF # VPATH may cause trouble with some makes, so we remove sole $(srcdir), # ${srcdir} and @srcdir@ entries from VPATH if srcdir is ".", strip leading and # trailing colons and then remove the whole line if VPATH becomes empty # (actually we leave an empty line to preserve line numbers). if test "x$srcdir" = x.; then ac_vpsub='/^[ ]*VPATH[ ]*=[ ]*/{ h s/// s/^/:/ s/[ ]*$/:/ s/:\$(srcdir):/:/g s/:\${srcdir}:/:/g s/:@srcdir@:/:/g s/^:*// s/:*$// x s/\(=[ ]*\).*/\1/ G s/\n// s/^[^=]*=[ ]*$// }' fi cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 fi # test -n "$CONFIG_FILES" # Set up the scripts for CONFIG_HEADERS section. # No need to generate them if there are no CONFIG_HEADERS. # This happens for instance with `./config.status Makefile'. if test -n "$CONFIG_HEADERS"; then cat >"$ac_tmp/defines.awk" <<\_ACAWK || BEGIN { _ACEOF # Transform confdefs.h into an awk script `defines.awk', embedded as # here-document in config.status, that substitutes the proper values into # config.h.in to produce config.h. # Create a delimiter string that does not exist in confdefs.h, to ease # handling of long lines. ac_delim='%!_!# ' for ac_last_try in false false :; do ac_tt=`sed -n "/$ac_delim/p" confdefs.h` if test -z "$ac_tt"; then break elif $ac_last_try; then as_fn_error $? "could not make $CONFIG_HEADERS" "$LINENO" 5 else ac_delim="$ac_delim!$ac_delim _$ac_delim!! " fi done # For the awk script, D is an array of macro values keyed by name, # likewise P contains macro parameters if any. Preserve backslash # newline sequences. 
ac_word_re=[_$as_cr_Letters][_$as_cr_alnum]* sed -n ' s/.\{148\}/&'"$ac_delim"'/g t rset :rset s/^[ ]*#[ ]*define[ ][ ]*/ / t def d :def s/\\$// t bsnl s/["\\]/\\&/g s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\ D["\1"]=" \3"/p s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2"/p d :bsnl s/["\\]/\\&/g s/^ \('"$ac_word_re"'\)\(([^()]*)\)[ ]*\(.*\)/P["\1"]="\2"\ D["\1"]=" \3\\\\\\n"\\/p t cont s/^ \('"$ac_word_re"'\)[ ]*\(.*\)/D["\1"]=" \2\\\\\\n"\\/p t cont d :cont n s/.\{148\}/&'"$ac_delim"'/g t clear :clear s/\\$// t bsnlc s/["\\]/\\&/g; s/^/"/; s/$/"/p d :bsnlc s/["\\]/\\&/g; s/^/"/; s/$/\\\\\\n"\\/p b cont ' >$CONFIG_STATUS || ac_write_fail=1 cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 for (key in D) D_is_set[key] = 1 FS = "" } /^[\t ]*#[\t ]*(define|undef)[\t ]+$ac_word_re([\t (]|\$)/ { line = \$ 0 split(line, arg, " ") if (arg[1] == "#") { defundef = arg[2] mac1 = arg[3] } else { defundef = substr(arg[1], 2) mac1 = arg[2] } split(mac1, mac2, "(") #) macro = mac2[1] prefix = substr(line, 1, index(line, defundef) - 1) if (D_is_set[macro]) { # Preserve the white space surrounding the "#". print prefix "define", macro P[macro] D[macro] next } else { # Replace #undef with comments. This is necessary, for example, # in the case of _POSIX_SOURCE, which is predefined and required # on some systems where configure will not decide to define it. if (defundef == "undef") { print "/*", prefix defundef, macro, "*/" next } } } { print } _ACAWK _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 as_fn_error $? "could not setup config headers machinery" "$LINENO" 5 fi # test -n "$CONFIG_HEADERS" eval set X " :F $CONFIG_FILES :H $CONFIG_HEADERS :C $CONFIG_COMMANDS" shift for ac_tag do case $ac_tag in :[FHLC]) ac_mode=$ac_tag; continue;; esac case $ac_mode$ac_tag in :[FHL]*:*);; :L* | :C*:*) as_fn_error $? 
"invalid tag \`$ac_tag'" "$LINENO" 5;; :[FH]-) ac_tag=-:-;; :[FH]*) ac_tag=$ac_tag:$ac_tag.in;; esac ac_save_IFS=$IFS IFS=: set x $ac_tag IFS=$ac_save_IFS shift ac_file=$1 shift case $ac_mode in :L) ac_source=$1;; :[FH]) ac_file_inputs= for ac_f do case $ac_f in -) ac_f="$ac_tmp/stdin";; *) # Look for the file first in the build tree, then in the source tree # (if the path is not absolute). The absolute path cannot be DOS-style, # because $ac_f cannot contain `:'. test -f "$ac_f" || case $ac_f in [\\/$]*) false;; *) test -f "$srcdir/$ac_f" && ac_f="$srcdir/$ac_f";; esac || as_fn_error 1 "cannot find input file: \`$ac_f'" "$LINENO" 5;; esac case $ac_f in *\'*) ac_f=`$as_echo "$ac_f" | sed "s/'/'\\\\\\\\''/g"`;; esac as_fn_append ac_file_inputs " '$ac_f'" done # Let's still pretend it is `configure' which instantiates (i.e., don't # use $as_me), people would be surprised to read: # /* config.h. Generated by config.status. */ configure_input='Generated from '` $as_echo "$*" | sed 's|^[^:]*/||;s|:[^:]*/|, |g' `' by configure.' if test x"$ac_file" != x-; then configure_input="$ac_file. $configure_input" { $as_echo "$as_me:${as_lineno-$LINENO}: creating $ac_file" >&5 $as_echo "$as_me: creating $ac_file" >&6;} fi # Neutralize special characters interpreted by sed in replacement strings. case $configure_input in #( *\&* | *\|* | *\\* ) ac_sed_conf_input=`$as_echo "$configure_input" | sed 's/[\\\\&|]/\\\\&/g'`;; #( *) ac_sed_conf_input=$configure_input;; esac case $ac_tag in *:-:* | *:-) cat >"$ac_tmp/stdin" \ || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;; esac ;; esac ac_dir=`$as_dirname -- "$ac_file" || $as_expr X"$ac_file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$ac_file" : 'X\(//\)[^/]' \| \ X"$ac_file" : 'X\(//\)$' \| \ X"$ac_file" : 'X\(/\)' \| . 
2>/dev/null || $as_echo X"$ac_file" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` as_dir="$ac_dir"; as_fn_mkdir_p ac_builddir=. case "$ac_dir" in .) ac_dir_suffix= ac_top_builddir_sub=. ac_top_build_prefix= ;; *) ac_dir_suffix=/`$as_echo "$ac_dir" | sed 's|^\.[\\/]||'` # A ".." for each directory in $ac_dir_suffix. ac_top_builddir_sub=`$as_echo "$ac_dir_suffix" | sed 's|/[^\\/]*|/..|g;s|/||'` case $ac_top_builddir_sub in "") ac_top_builddir_sub=. ac_top_build_prefix= ;; *) ac_top_build_prefix=$ac_top_builddir_sub/ ;; esac ;; esac ac_abs_top_builddir=$ac_pwd ac_abs_builddir=$ac_pwd$ac_dir_suffix # for backward compatibility: ac_top_builddir=$ac_top_build_prefix case $srcdir in .) # We are building in place. ac_srcdir=. ac_top_srcdir=$ac_top_builddir_sub ac_abs_top_srcdir=$ac_pwd ;; [\\/]* | ?:[\\/]* ) # Absolute name. ac_srcdir=$srcdir$ac_dir_suffix; ac_top_srcdir=$srcdir ac_abs_top_srcdir=$srcdir ;; *) # Relative name. ac_srcdir=$ac_top_build_prefix$srcdir$ac_dir_suffix ac_top_srcdir=$ac_top_build_prefix$srcdir ac_abs_top_srcdir=$ac_pwd/$srcdir ;; esac ac_abs_srcdir=$ac_abs_top_srcdir$ac_dir_suffix case $ac_mode in :F) # # CONFIG_FILE # case $INSTALL in [\\/$]* | ?:[\\/]* ) ac_INSTALL=$INSTALL ;; *) ac_INSTALL=$ac_top_build_prefix$INSTALL ;; esac ac_MKDIR_P=$MKDIR_P case $MKDIR_P in [\\/$]* | ?:[\\/]* ) ;; */*) ac_MKDIR_P=$ac_top_build_prefix$MKDIR_P ;; esac _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 # If the template does not know about datarootdir, expand it. # FIXME: This hack should be removed a few years after 2.60. 
ac_datarootdir_hack=; ac_datarootdir_seen= ac_sed_dataroot=' /datarootdir/ { p q } /@datadir@/p /@docdir@/p /@infodir@/p /@localedir@/p /@mandir@/p' case `eval "sed -n \"\$ac_sed_dataroot\" $ac_file_inputs"` in *datarootdir*) ac_datarootdir_seen=yes;; *@datadir@*|*@docdir@*|*@infodir@*|*@localedir@*|*@mandir@*) { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&5 $as_echo "$as_me: WARNING: $ac_file_inputs seems to ignore the --datarootdir setting" >&2;} _ACEOF cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ac_datarootdir_hack=' s&@datadir@&$datadir&g s&@docdir@&$docdir&g s&@infodir@&$infodir&g s&@localedir@&$localedir&g s&@mandir@&$mandir&g s&\\\${datarootdir}&$datarootdir&g' ;; esac _ACEOF # Neutralize VPATH when `$srcdir' = `.'. # Shell code in configure.ac might set extrasub. # FIXME: do we really want to maintain this feature? cat >>$CONFIG_STATUS <<_ACEOF || ac_write_fail=1 ac_sed_extra="$ac_vpsub $extrasub _ACEOF cat >>$CONFIG_STATUS <<\_ACEOF || ac_write_fail=1 :t /@[a-zA-Z_][a-zA-Z_0-9]*@/!b s|@configure_input@|$ac_sed_conf_input|;t t s&@top_builddir@&$ac_top_builddir_sub&;t t s&@top_build_prefix@&$ac_top_build_prefix&;t t s&@srcdir@&$ac_srcdir&;t t s&@abs_srcdir@&$ac_abs_srcdir&;t t s&@top_srcdir@&$ac_top_srcdir&;t t s&@abs_top_srcdir@&$ac_abs_top_srcdir&;t t s&@builddir@&$ac_builddir&;t t s&@abs_builddir@&$ac_abs_builddir&;t t s&@abs_top_builddir@&$ac_abs_top_builddir&;t t s&@INSTALL@&$ac_INSTALL&;t t s&@MKDIR_P@&$ac_MKDIR_P&;t t $ac_datarootdir_hack " eval sed \"\$ac_sed_extra\" "$ac_file_inputs" | $AWK -f "$ac_tmp/subs.awk" \ >$ac_tmp/out || as_fn_error $? 
"could not create $ac_file" "$LINENO" 5 test -z "$ac_datarootdir_hack$ac_datarootdir_seen" && { ac_out=`sed -n '/\${datarootdir}/p' "$ac_tmp/out"`; test -n "$ac_out"; } && { ac_out=`sed -n '/^[ ]*datarootdir[ ]*:*=/p' \ "$ac_tmp/out"`; test -z "$ac_out"; } && { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: $ac_file contains a reference to the variable \`datarootdir' which seems to be undefined. Please make sure it is defined" >&5 $as_echo "$as_me: WARNING: $ac_file contains a reference to the variable \`datarootdir' which seems to be undefined. Please make sure it is defined" >&2;} rm -f "$ac_tmp/stdin" case $ac_file in -) cat "$ac_tmp/out" && rm -f "$ac_tmp/out";; *) rm -f "$ac_file" && mv "$ac_tmp/out" "$ac_file";; esac \ || as_fn_error $? "could not create $ac_file" "$LINENO" 5 ;; :H) # # CONFIG_HEADER # if test x"$ac_file" != x-; then { $as_echo "/* $configure_input */" \ && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" } >"$ac_tmp/config.h" \ || as_fn_error $? "could not create $ac_file" "$LINENO" 5 if diff "$ac_file" "$ac_tmp/config.h" >/dev/null 2>&1; then { $as_echo "$as_me:${as_lineno-$LINENO}: $ac_file is unchanged" >&5 $as_echo "$as_me: $ac_file is unchanged" >&6;} else rm -f "$ac_file" mv "$ac_tmp/config.h" "$ac_file" \ || as_fn_error $? "could not create $ac_file" "$LINENO" 5 fi else $as_echo "/* $configure_input */" \ && eval '$AWK -f "$ac_tmp/defines.awk"' "$ac_file_inputs" \ || as_fn_error $? "could not create -" "$LINENO" 5 fi # Compute "$ac_file"'s index in $config_headers. _am_arg="$ac_file" _am_stamp_count=1 for _am_header in $config_headers :; do case $_am_header in $_am_arg | $_am_arg:* ) break ;; * ) _am_stamp_count=`expr $_am_stamp_count + 1` ;; esac done echo "timestamp for $_am_arg" >`$as_dirname -- "$_am_arg" || $as_expr X"$_am_arg" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$_am_arg" : 'X\(//\)[^/]' \| \ X"$_am_arg" : 'X\(//\)$' \| \ X"$_am_arg" : 'X\(/\)' \| . 
2>/dev/null || $as_echo X"$_am_arg" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'`/stamp-h$_am_stamp_count ;; :C) { $as_echo "$as_me:${as_lineno-$LINENO}: executing $ac_file commands" >&5 $as_echo "$as_me: executing $ac_file commands" >&6;} ;; esac case $ac_file$ac_mode in "po-directories":C) for ac_file in $CONFIG_FILES; do # Support "outfile[:infile[:infile...]]" case "$ac_file" in *:*) ac_file=`echo "$ac_file"|sed 's%:.*%%'` ;; esac # PO directories have a Makefile.in generated from Makefile.in.in. case "$ac_file" in */Makefile.in) # Adjust a relative srcdir. ac_dir=`echo "$ac_file"|sed 's%/[^/][^/]*$%%'` ac_dir_suffix=/`echo "$ac_dir"|sed 's%^\./%%'` ac_dots=`echo "$ac_dir_suffix"|sed 's%/[^/]*%../%g'` # In autoconf-2.13 it is called $ac_given_srcdir. # In autoconf-2.50 it is called $srcdir. test -n "$ac_given_srcdir" || ac_given_srcdir="$srcdir" case "$ac_given_srcdir" in .) top_srcdir=`echo $ac_dots|sed 's%/$%%'` ;; /*) top_srcdir="$ac_given_srcdir" ;; *) top_srcdir="$ac_dots$ac_given_srcdir" ;; esac # Treat a directory as a PO directory if and only if it has a # POTFILES.in file. This allows packages to have multiple PO # directories under different names or in different locations. if test -f "$ac_given_srcdir/$ac_dir/POTFILES.in"; then rm -f "$ac_dir/POTFILES" test -n "$as_me" && echo "$as_me: creating $ac_dir/POTFILES" || echo "creating $ac_dir/POTFILES" gt_tab=`printf '\t'` cat "$ac_given_srcdir/$ac_dir/POTFILES.in" | sed -e "/^#/d" -e "/^[ ${gt_tab}]*\$/d" -e "s,.*, $top_srcdir/& \\\\," | sed -e "\$s/\(.*\) \\\\/\1/" > "$ac_dir/POTFILES" POMAKEFILEDEPS="POTFILES.in" # ALL_LINGUAS, POFILES, UPDATEPOFILES, DUMMYPOFILES, GMOFILES depend # on $ac_dir but don't depend on user-specified configuration # parameters. if test -f "$ac_given_srcdir/$ac_dir/LINGUAS"; then # The LINGUAS file contains the set of available languages. 
if test -n "$OBSOLETE_ALL_LINGUAS"; then test -n "$as_me" && echo "$as_me: setting ALL_LINGUAS in configure.in is obsolete" || echo "setting ALL_LINGUAS in configure.in is obsolete" fi ALL_LINGUAS_=`sed -e "/^#/d" -e "s/#.*//" "$ac_given_srcdir/$ac_dir/LINGUAS"` # Hide the ALL_LINGUAS assignment from automake < 1.5. eval 'ALL_LINGUAS''=$ALL_LINGUAS_' POMAKEFILEDEPS="$POMAKEFILEDEPS LINGUAS" else # The set of available languages was given in configure.in. # Hide the ALL_LINGUAS assignment from automake < 1.5. eval 'ALL_LINGUAS''=$OBSOLETE_ALL_LINGUAS' fi # Compute POFILES # as $(foreach lang, $(ALL_LINGUAS), $(srcdir)/$(lang).po) # Compute UPDATEPOFILES # as $(foreach lang, $(ALL_LINGUAS), $(lang).po-update) # Compute DUMMYPOFILES # as $(foreach lang, $(ALL_LINGUAS), $(lang).nop) # Compute GMOFILES # as $(foreach lang, $(ALL_LINGUAS), $(srcdir)/$(lang).gmo) case "$ac_given_srcdir" in .) srcdirpre= ;; *) srcdirpre='$(srcdir)/' ;; esac POFILES= UPDATEPOFILES= DUMMYPOFILES= GMOFILES= for lang in $ALL_LINGUAS; do POFILES="$POFILES $srcdirpre$lang.po" UPDATEPOFILES="$UPDATEPOFILES $lang.po-update" DUMMYPOFILES="$DUMMYPOFILES $lang.nop" GMOFILES="$GMOFILES $srcdirpre$lang.gmo" done # CATALOGS depends on both $ac_dir and the user's LINGUAS # environment variable. INST_LINGUAS= if test -n "$ALL_LINGUAS"; then for presentlang in $ALL_LINGUAS; do useit=no if test "%UNSET%" != "$LINGUAS"; then desiredlanguages="$LINGUAS" else desiredlanguages="$ALL_LINGUAS" fi for desiredlang in $desiredlanguages; do # Use the presentlang catalog if desiredlang is # a. equal to presentlang, or # b. a variant of presentlang (because in this case, # presentlang can be used as a fallback for messages # which are not translated in the desiredlang catalog). 
case "$desiredlang" in "$presentlang"*) useit=yes;; esac done if test $useit = yes; then INST_LINGUAS="$INST_LINGUAS $presentlang" fi done fi CATALOGS= if test -n "$INST_LINGUAS"; then for lang in $INST_LINGUAS; do CATALOGS="$CATALOGS $lang.gmo" done fi test -n "$as_me" && echo "$as_me: creating $ac_dir/Makefile" || echo "creating $ac_dir/Makefile" sed -e "/^POTFILES =/r $ac_dir/POTFILES" -e "/^# Makevars/r $ac_given_srcdir/$ac_dir/Makevars" -e "s|@POFILES@|$POFILES|g" -e "s|@UPDATEPOFILES@|$UPDATEPOFILES|g" -e "s|@DUMMYPOFILES@|$DUMMYPOFILES|g" -e "s|@GMOFILES@|$GMOFILES|g" -e "s|@CATALOGS@|$CATALOGS|g" -e "s|@POMAKEFILEDEPS@|$POMAKEFILEDEPS|g" "$ac_dir/Makefile.in" > "$ac_dir/Makefile" for f in "$ac_given_srcdir/$ac_dir"/Rules-*; do if test -f "$f"; then case "$f" in *.orig | *.bak | *~) ;; *) cat "$f" >> "$ac_dir/Makefile" ;; esac fi done fi ;; esac done ;; "depfiles":C) test x"$AMDEP_TRUE" != x"" || { # Older Autoconf quotes --file arguments for eval, but not when files # are listed without --file. Let's play safe and only enable the eval # if we detect the quoting. case $CONFIG_FILES in *\'*) eval set x "$CONFIG_FILES" ;; *) set x $CONFIG_FILES ;; esac shift for mf do # Strip MF so we end up with the name of the file. mf=`echo "$mf" | sed -e 's/:.*$//'` # Check whether this is an Automake generated Makefile or not. # We used to match only the files named 'Makefile.in', but # some people rename them; so instead we look at the file content. # Grep'ing the first line is not enough: some people post-process # each Makefile.in and add a new line on top of each file to say so. # Grep'ing the whole file is not good either: AIX grep has a line # limit of 2048, but all sed's we know have understand at least 4000. if sed -n 's,^#.*generated by automake.*,X,p' "$mf" | grep X >/dev/null 2>&1; then dirpart=`$as_dirname -- "$mf" || $as_expr X"$mf" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$mf" : 'X\(//\)[^/]' \| \ X"$mf" : 'X\(//\)$' \| \ X"$mf" : 'X\(/\)' \| . 
2>/dev/null || $as_echo X"$mf" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` else continue fi # Extract the definition of DEPDIR, am__include, and am__quote # from the Makefile without running 'make'. DEPDIR=`sed -n 's/^DEPDIR = //p' < "$mf"` test -z "$DEPDIR" && continue am__include=`sed -n 's/^am__include = //p' < "$mf"` test -z "$am__include" && continue am__quote=`sed -n 's/^am__quote = //p' < "$mf"` # Find all dependency output files, they are included files with # $(DEPDIR) in their names. We invoke sed twice because it is the # simplest approach to changing $(DEPDIR) to its actual value in the # expansion. for file in `sed -n " s/^$am__include $am__quote\(.*(DEPDIR).*\)$am__quote"'$/\1/p' <"$mf" | \ sed -e 's/\$(DEPDIR)/'"$DEPDIR"'/g'`; do # Make sure the directory exists. test -f "$dirpart/$file" && continue fdir=`$as_dirname -- "$file" || $as_expr X"$file" : 'X\(.*[^/]\)//*[^/][^/]*/*$' \| \ X"$file" : 'X\(//\)[^/]' \| \ X"$file" : 'X\(//\)$' \| \ X"$file" : 'X\(/\)' \| . 2>/dev/null || $as_echo X"$file" | sed '/^X\(.*[^/]\)\/\/*[^/][^/]*\/*$/{ s//\1/ q } /^X\(\/\/\)[^/].*/{ s//\1/ q } /^X\(\/\/\)$/{ s//\1/ q } /^X\(\/\).*/{ s//\1/ q } s/.*/./; q'` as_dir=$dirpart/$fdir; as_fn_mkdir_p # echo "creating $dirpart/$file" echo '# dummy' > "$dirpart/$file" done done } ;; "libtool":C) # See if we are running on zsh, and set the options that allow our # commands through without removal of \ escapes. if test -n "${ZSH_VERSION+set}"; then setopt NO_GLOB_SUBST fi cfgfile=${ofile}T trap "$RM \"$cfgfile\"; exit 1" 1 2 15 $RM "$cfgfile" cat <<_LT_EOF >> "$cfgfile" #! $SHELL # Generated automatically by $as_me ($PACKAGE) $VERSION # NOTE: Changes made to this file will be lost: look at ltmain.sh. # Provide generalized library-building support services. # Written by Gordon Matzigkeit, 1996 # Copyright (C) 2014 Free Software Foundation, Inc. 
# This is free software; see the source for copying conditions. There is NO # warranty; not even for MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. # GNU Libtool is free software; you can redistribute it and/or modify # it under the terms of the GNU General Public License as published by # the Free Software Foundation; either version 2 of of the License, or # (at your option) any later version. # # As a special exception to the GNU General Public License, if you # distribute this file as part of a program or library that is built # using GNU Libtool, you may include this file under the same # distribution terms that you use for the rest of that program. # # GNU Libtool is distributed in the hope that it will be useful, but # WITHOUT ANY WARRANTY; without even the implied warranty of # MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the # GNU General Public License for more details. # # You should have received a copy of the GNU General Public License # along with this program. If not, see . # The names of the tagged configurations supported by this script. available_tags='CXX ' # Configured defaults for sys_lib_dlsearch_path munging. : \${LT_SYS_LIBRARY_PATH="$configure_time_lt_sys_library_path"} # ### BEGIN LIBTOOL CONFIG # Which release of libtool.m4 was used? macro_version=$macro_version macro_revision=$macro_revision # Whether or not to build shared libraries. build_libtool_libs=$enable_shared # Whether or not to build static libraries. build_old_libs=$enable_static # What type of objects to build. pic_mode=$pic_mode # Whether or not to optimize for fast installation. fast_install=$enable_fast_install # Shared archive member basename,for filename based shared library versioning on AIX. shared_archive_member_spec=$shared_archive_member_spec # Shell to use when invoking shell scripts. SHELL=$lt_SHELL # An echo program that protects backslashes. ECHO=$lt_ECHO # The PATH separator for the build system. 
PATH_SEPARATOR=$lt_PATH_SEPARATOR # The host system. host_alias=$host_alias host=$host host_os=$host_os # The build system. build_alias=$build_alias build=$build build_os=$build_os # A sed program that does not truncate output. SED=$lt_SED # Sed that helps us avoid accidentally triggering echo(1) options like -n. Xsed="\$SED -e 1s/^X//" # A grep program that handles long lines. GREP=$lt_GREP # An ERE matcher. EGREP=$lt_EGREP # A literal string matcher. FGREP=$lt_FGREP # A BSD- or MS-compatible name lister. NM=$lt_NM # Whether we need soft or hard links. LN_S=$lt_LN_S # What is the maximum length of a command? max_cmd_len=$max_cmd_len # Object file suffix (normally "o"). objext=$ac_objext # Executable file suffix (normally ""). exeext=$exeext # whether the shell understands "unset". lt_unset=$lt_unset # turn spaces into newlines. SP2NL=$lt_lt_SP2NL # turn newlines into spaces. NL2SP=$lt_lt_NL2SP # convert \$build file names to \$host format. to_host_file_cmd=$lt_cv_to_host_file_cmd # convert \$build files to toolchain format. to_tool_file_cmd=$lt_cv_to_tool_file_cmd # An object symbol dumper. OBJDUMP=$lt_OBJDUMP # Method to check whether dependent libraries are shared objects. deplibs_check_method=$lt_deplibs_check_method # Command to use when deplibs_check_method = "file_magic". file_magic_cmd=$lt_file_magic_cmd # How to find potential files when deplibs_check_method = "file_magic". file_magic_glob=$lt_file_magic_glob # Find potential files using nocaseglob when deplibs_check_method = "file_magic". want_nocaseglob=$lt_want_nocaseglob # DLL creation program. DLLTOOL=$lt_DLLTOOL # Command to associate shared and link libraries. sharedlib_from_linklib_cmd=$lt_sharedlib_from_linklib_cmd # The archiver. AR=$lt_AR # Flags to create an archive. AR_FLAGS=$lt_AR_FLAGS # How to feed a file listing to the archiver. archiver_list_spec=$lt_archiver_list_spec # A symbol stripping program. STRIP=$lt_STRIP # Commands used to install an old-style archive. 
RANLIB=$lt_RANLIB old_postinstall_cmds=$lt_old_postinstall_cmds old_postuninstall_cmds=$lt_old_postuninstall_cmds # Whether to use a lock for old archive extraction. lock_old_archive_extraction=$lock_old_archive_extraction # A C compiler. LTCC=$lt_CC # LTCC compiler flags. LTCFLAGS=$lt_CFLAGS # Take the output of nm and produce a listing of raw symbols and C names. global_symbol_pipe=$lt_lt_cv_sys_global_symbol_pipe # Transform the output of nm in a proper C declaration. global_symbol_to_cdecl=$lt_lt_cv_sys_global_symbol_to_cdecl # Transform the output of nm into a list of symbols to manually relocate. global_symbol_to_import=$lt_lt_cv_sys_global_symbol_to_import # Transform the output of nm in a C name address pair. global_symbol_to_c_name_address=$lt_lt_cv_sys_global_symbol_to_c_name_address # Transform the output of nm in a C name address pair when lib prefix is needed. global_symbol_to_c_name_address_lib_prefix=$lt_lt_cv_sys_global_symbol_to_c_name_address_lib_prefix # The name lister interface. nm_interface=$lt_lt_cv_nm_interface # Specify filename containing input files for \$NM. nm_file_list_spec=$lt_nm_file_list_spec # The root where to search for dependent libraries,and where our libraries should be installed. lt_sysroot=$lt_sysroot # Command to truncate a binary pipe. lt_truncate_bin=$lt_lt_cv_truncate_bin # The name of the directory that contains temporary libtool files. objdir=$objdir # Used to examine libraries when file_magic_cmd begins with "file". MAGIC_CMD=$MAGIC_CMD # Must we lock files when doing compilation? need_locks=$lt_need_locks # Manifest tool. MANIFEST_TOOL=$lt_MANIFEST_TOOL # Tool to manipulate archived DWARF debug symbol files on Mac OS X. DSYMUTIL=$lt_DSYMUTIL # Tool to change global to local symbols on Mac OS X. NMEDIT=$lt_NMEDIT # Tool to manipulate fat objects and archives on Mac OS X. LIPO=$lt_LIPO # ldd/readelf like tool for Mach-O binaries on Mac OS X. 
OTOOL=$lt_OTOOL # ldd/readelf like tool for 64 bit Mach-O binaries on Mac OS X 10.4. OTOOL64=$lt_OTOOL64 # Old archive suffix (normally "a"). libext=$libext # Shared library suffix (normally ".so"). shrext_cmds=$lt_shrext_cmds # The commands to extract the exported symbol list from a shared archive. extract_expsyms_cmds=$lt_extract_expsyms_cmds # Variables whose values should be saved in libtool wrapper scripts and # restored at link time. variables_saved_for_relink=$lt_variables_saved_for_relink # Do we need the "lib" prefix for modules? need_lib_prefix=$need_lib_prefix # Do we need a version for libraries? need_version=$need_version # Library versioning type. version_type=$version_type # Shared library runtime path variable. runpath_var=$runpath_var # Shared library path variable. shlibpath_var=$shlibpath_var # Is shlibpath searched before the hard-coded library search path? shlibpath_overrides_runpath=$shlibpath_overrides_runpath # Format of library name prefix. libname_spec=$lt_libname_spec # List of archive names. First name is the real one, the rest are links. # The last name is the one that the linker finds with -lNAME library_names_spec=$lt_library_names_spec # The coded name of the library, if different from the real name. soname_spec=$lt_soname_spec # Permission mode override for installation of shared libraries. install_override_mode=$lt_install_override_mode # Command to use after installation of a shared archive. postinstall_cmds=$lt_postinstall_cmds # Command to use after uninstallation of a shared archive. postuninstall_cmds=$lt_postuninstall_cmds # Commands used to finish a libtool library installation in a directory. finish_cmds=$lt_finish_cmds # As "finish_cmds", except a single script fragment to be evaled but # not shown. finish_eval=$lt_finish_eval # Whether we should hardcode library paths into libraries. hardcode_into_libs=$hardcode_into_libs # Compile-time system search path for libraries. 
sys_lib_search_path_spec=$lt_sys_lib_search_path_spec # Detected run-time system search path for libraries. sys_lib_dlsearch_path_spec=$lt_configure_time_dlsearch_path # Explicit LT_SYS_LIBRARY_PATH set during ./configure time. configure_time_lt_sys_library_path=$lt_configure_time_lt_sys_library_path # Whether dlopen is supported. dlopen_support=$enable_dlopen # Whether dlopen of programs is supported. dlopen_self=$enable_dlopen_self # Whether dlopen of statically linked programs is supported. dlopen_self_static=$enable_dlopen_self_static # Commands to strip libraries. old_striplib=$lt_old_striplib striplib=$lt_striplib # The linker used to build libraries. LD=$lt_LD # How to create reloadable object files. reload_flag=$lt_reload_flag reload_cmds=$lt_reload_cmds # Commands used to build an old-style archive. old_archive_cmds=$lt_old_archive_cmds # A language specific compiler. CC=$lt_compiler # Is the compiler the GNU compiler? with_gcc=$GCC # Compiler flag to turn off builtin functions. no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag # Additional compiler flags for building library objects. pic_flag=$lt_lt_prog_compiler_pic # How to pass a linker flag through the compiler. wl=$lt_lt_prog_compiler_wl # Compiler flag to prevent dynamic linking. link_static_flag=$lt_lt_prog_compiler_static # Does compiler simultaneously support -c and -o options? compiler_c_o=$lt_lt_cv_prog_compiler_c_o # Whether or not to add -lc for building shared libraries. build_libtool_need_lc=$archive_cmds_need_lc # Whether or not to disallow shared libs when runtime libs are static. allow_libtool_libs_with_static_runtimes=$enable_shared_with_static_runtimes # Compiler flag to allow reflexive dlopens. export_dynamic_flag_spec=$lt_export_dynamic_flag_spec # Compiler flag to generate shared objects directly from archives. whole_archive_flag_spec=$lt_whole_archive_flag_spec # Whether the compiler copes with passing no objects directly. 
compiler_needs_object=$lt_compiler_needs_object # Create an old-style archive from a shared archive. old_archive_from_new_cmds=$lt_old_archive_from_new_cmds # Create a temporary old-style archive to link instead of a shared archive. old_archive_from_expsyms_cmds=$lt_old_archive_from_expsyms_cmds # Commands used to build a shared archive. archive_cmds=$lt_archive_cmds archive_expsym_cmds=$lt_archive_expsym_cmds # Commands used to build a loadable module if different from building # a shared archive. module_cmds=$lt_module_cmds module_expsym_cmds=$lt_module_expsym_cmds # Whether we are building with GNU ld or not. with_gnu_ld=$lt_with_gnu_ld # Flag that allows shared libraries with undefined symbols to be built. allow_undefined_flag=$lt_allow_undefined_flag # Flag that enforces no undefined symbols. no_undefined_flag=$lt_no_undefined_flag # Flag to hardcode \$libdir into a binary during linking. # This must work even if \$libdir does not exist hardcode_libdir_flag_spec=$lt_hardcode_libdir_flag_spec # Whether we need a single "-rpath" flag with a separated argument. hardcode_libdir_separator=$lt_hardcode_libdir_separator # Set to "yes" if using DIR/libNAME\$shared_ext during linking hardcodes # DIR into the resulting binary. hardcode_direct=$hardcode_direct # Set to "yes" if using DIR/libNAME\$shared_ext during linking hardcodes # DIR into the resulting binary and the resulting library dependency is # "absolute",i.e impossible to change by setting \$shlibpath_var if the # library is relocated. hardcode_direct_absolute=$hardcode_direct_absolute # Set to "yes" if using the -LDIR flag during linking hardcodes DIR # into the resulting binary. hardcode_minus_L=$hardcode_minus_L # Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR # into the resulting binary. 
hardcode_shlibpath_var=$hardcode_shlibpath_var # Set to "yes" if building a shared library automatically hardcodes DIR # into the library and all subsequent libraries and executables linked # against it. hardcode_automatic=$hardcode_automatic # Set to yes if linker adds runtime paths of dependent libraries # to runtime path list. inherit_rpath=$inherit_rpath # Whether libtool must link a program against all its dependency libraries. link_all_deplibs=$link_all_deplibs # Set to "yes" if exported symbols are required. always_export_symbols=$always_export_symbols # The commands to list exported symbols. export_symbols_cmds=$lt_export_symbols_cmds # Symbols that should not be listed in the preloaded symbols. exclude_expsyms=$lt_exclude_expsyms # Symbols that must always be exported. include_expsyms=$lt_include_expsyms # Commands necessary for linking programs (against libraries) with templates. prelink_cmds=$lt_prelink_cmds # Commands necessary for finishing linking programs. postlink_cmds=$lt_postlink_cmds # Specify filename containing input files. file_list_spec=$lt_file_list_spec # How to hardcode a shared library path into an executable. hardcode_action=$hardcode_action # The directories searched by this compiler when creating a shared library. compiler_lib_search_dirs=$lt_compiler_lib_search_dirs # Dependencies to place before and after the objects being linked to # create a shared library. predep_objects=$lt_predep_objects postdep_objects=$lt_postdep_objects predeps=$lt_predeps postdeps=$lt_postdeps # The library search path used internally by the compiler when linking # a shared library. 
compiler_lib_search_path=$lt_compiler_lib_search_path # ### END LIBTOOL CONFIG _LT_EOF cat <<'_LT_EOF' >> "$cfgfile" # ### BEGIN FUNCTIONS SHARED WITH CONFIGURE # func_munge_path_list VARIABLE PATH # ----------------------------------- # VARIABLE is name of variable containing _space_ separated list of # directories to be munged by the contents of PATH, which is string # having a format: # "DIR[:DIR]:" # string "DIR[ DIR]" will be prepended to VARIABLE # ":DIR[:DIR]" # string "DIR[ DIR]" will be appended to VARIABLE # "DIRP[:DIRP]::[DIRA:]DIRA" # string "DIRP[ DIRP]" will be prepended to VARIABLE and string # "DIRA[ DIRA]" will be appended to VARIABLE # "DIR[:DIR]" # VARIABLE will be replaced by "DIR[ DIR]" func_munge_path_list () { case x$2 in x) ;; *:) eval $1=\"`$ECHO $2 | $SED 's/:/ /g'` \$$1\" ;; x:*) eval $1=\"\$$1 `$ECHO $2 | $SED 's/:/ /g'`\" ;; *::*) eval $1=\"\$$1\ `$ECHO $2 | $SED -e 's/.*:://' -e 's/:/ /g'`\" eval $1=\"`$ECHO $2 | $SED -e 's/::.*//' -e 's/:/ /g'`\ \$$1\" ;; *) eval $1=\"`$ECHO $2 | $SED 's/:/ /g'`\" ;; esac } # Calculate cc_basename. Skip known compiler wrappers and cross-prefix. func_cc_basename () { for cc_temp in $*""; do case $cc_temp in compile | *[\\/]compile | ccache | *[\\/]ccache ) ;; distcc | *[\\/]distcc | purify | *[\\/]purify ) ;; \-*) ;; *) break;; esac done func_cc_basename_result=`$ECHO "$cc_temp" | $SED "s%.*/%%; s%^$host_alias-%%"` } # ### END FUNCTIONS SHARED WITH CONFIGURE _LT_EOF case $host_os in aix3*) cat <<\_LT_EOF >> "$cfgfile" # AIX sometimes has problems with the GCC collect2 program. For some # reason, if we set the COLLECT_NAMES environment variable, the problems # vanish in a puff of smoke. if test set != "${COLLECT_NAMES+set}"; then COLLECT_NAMES= export COLLECT_NAMES fi _LT_EOF ;; esac ltmain=$ac_aux_dir/ltmain.sh # We use sed instead of cat because bash on DJGPP gets confused if # if finds mixed CR/LF and LF-only lines. Since sed operates in # text mode, it properly converts lines to CR/LF. 
This bash problem # is reportedly fixed, but why not run on old versions too? sed '$q' "$ltmain" >> "$cfgfile" \ || (rm -f "$cfgfile"; exit 1) mv -f "$cfgfile" "$ofile" || (rm -f "$ofile" && cp "$cfgfile" "$ofile" && rm -f "$cfgfile") chmod +x "$ofile" cat <<_LT_EOF >> "$ofile" # ### BEGIN LIBTOOL TAG CONFIG: CXX # The linker used to build libraries. LD=$lt_LD_CXX # How to create reloadable object files. reload_flag=$lt_reload_flag_CXX reload_cmds=$lt_reload_cmds_CXX # Commands used to build an old-style archive. old_archive_cmds=$lt_old_archive_cmds_CXX # A language specific compiler. CC=$lt_compiler_CXX # Is the compiler the GNU compiler? with_gcc=$GCC_CXX # Compiler flag to turn off builtin functions. no_builtin_flag=$lt_lt_prog_compiler_no_builtin_flag_CXX # Additional compiler flags for building library objects. pic_flag=$lt_lt_prog_compiler_pic_CXX # How to pass a linker flag through the compiler. wl=$lt_lt_prog_compiler_wl_CXX # Compiler flag to prevent dynamic linking. link_static_flag=$lt_lt_prog_compiler_static_CXX # Does compiler simultaneously support -c and -o options? compiler_c_o=$lt_lt_cv_prog_compiler_c_o_CXX # Whether or not to add -lc for building shared libraries. build_libtool_need_lc=$archive_cmds_need_lc_CXX # Whether or not to disallow shared libs when runtime libs are static. allow_libtool_libs_with_static_runtimes=$enable_shared_with_static_runtimes_CXX # Compiler flag to allow reflexive dlopens. export_dynamic_flag_spec=$lt_export_dynamic_flag_spec_CXX # Compiler flag to generate shared objects directly from archives. whole_archive_flag_spec=$lt_whole_archive_flag_spec_CXX # Whether the compiler copes with passing no objects directly. compiler_needs_object=$lt_compiler_needs_object_CXX # Create an old-style archive from a shared archive. old_archive_from_new_cmds=$lt_old_archive_from_new_cmds_CXX # Create a temporary old-style archive to link instead of a shared archive. 
old_archive_from_expsyms_cmds=$lt_old_archive_from_expsyms_cmds_CXX # Commands used to build a shared archive. archive_cmds=$lt_archive_cmds_CXX archive_expsym_cmds=$lt_archive_expsym_cmds_CXX # Commands used to build a loadable module if different from building # a shared archive. module_cmds=$lt_module_cmds_CXX module_expsym_cmds=$lt_module_expsym_cmds_CXX # Whether we are building with GNU ld or not. with_gnu_ld=$lt_with_gnu_ld_CXX # Flag that allows shared libraries with undefined symbols to be built. allow_undefined_flag=$lt_allow_undefined_flag_CXX # Flag that enforces no undefined symbols. no_undefined_flag=$lt_no_undefined_flag_CXX # Flag to hardcode \$libdir into a binary during linking. # This must work even if \$libdir does not exist hardcode_libdir_flag_spec=$lt_hardcode_libdir_flag_spec_CXX # Whether we need a single "-rpath" flag with a separated argument. hardcode_libdir_separator=$lt_hardcode_libdir_separator_CXX # Set to "yes" if using DIR/libNAME\$shared_ext during linking hardcodes # DIR into the resulting binary. hardcode_direct=$hardcode_direct_CXX # Set to "yes" if using DIR/libNAME\$shared_ext during linking hardcodes # DIR into the resulting binary and the resulting library dependency is # "absolute",i.e impossible to change by setting \$shlibpath_var if the # library is relocated. hardcode_direct_absolute=$hardcode_direct_absolute_CXX # Set to "yes" if using the -LDIR flag during linking hardcodes DIR # into the resulting binary. hardcode_minus_L=$hardcode_minus_L_CXX # Set to "yes" if using SHLIBPATH_VAR=DIR during linking hardcodes DIR # into the resulting binary. hardcode_shlibpath_var=$hardcode_shlibpath_var_CXX # Set to "yes" if building a shared library automatically hardcodes DIR # into the library and all subsequent libraries and executables linked # against it. hardcode_automatic=$hardcode_automatic_CXX # Set to yes if linker adds runtime paths of dependent libraries # to runtime path list. 
inherit_rpath=$inherit_rpath_CXX # Whether libtool must link a program against all its dependency libraries. link_all_deplibs=$link_all_deplibs_CXX # Set to "yes" if exported symbols are required. always_export_symbols=$always_export_symbols_CXX # The commands to list exported symbols. export_symbols_cmds=$lt_export_symbols_cmds_CXX # Symbols that should not be listed in the preloaded symbols. exclude_expsyms=$lt_exclude_expsyms_CXX # Symbols that must always be exported. include_expsyms=$lt_include_expsyms_CXX # Commands necessary for linking programs (against libraries) with templates. prelink_cmds=$lt_prelink_cmds_CXX # Commands necessary for finishing linking programs. postlink_cmds=$lt_postlink_cmds_CXX # Specify filename containing input files. file_list_spec=$lt_file_list_spec_CXX # How to hardcode a shared library path into an executable. hardcode_action=$hardcode_action_CXX # The directories searched by this compiler when creating a shared library. compiler_lib_search_dirs=$lt_compiler_lib_search_dirs_CXX # Dependencies to place before and after the objects being linked to # create a shared library. predep_objects=$lt_predep_objects_CXX postdep_objects=$lt_postdep_objects_CXX predeps=$lt_predeps_CXX postdeps=$lt_postdeps_CXX # The library search path used internally by the compiler when linking # a shared library. compiler_lib_search_path=$lt_compiler_lib_search_path_CXX # ### END LIBTOOL TAG CONFIG: CXX _LT_EOF ;; "hack-libtool":C) sed 's,^pic_flag=,pic_flag=" -D__DYNAMIC__ ",' libtool > libtoolT \ && mv -f libtoolT libtool && chmod 755 libtool ;; esac done # for ac_tag as_fn_exit 0 _ACEOF ac_clean_files=$ac_clean_files_save test $ac_write_fail = 0 || as_fn_error $? "write failure creating $CONFIG_STATUS" "$LINENO" 5 # configure is writing to config.log, and then calls config.status. # config.status does its own redirection, appending to config.log. 
# Unfortunately, on DOS this fails, as config.log is still kept open # by configure, so config.status won't be able to write to it; its # output is simply discarded. So we exec the FD to /dev/null, # effectively closing config.log, so it can be properly (re)opened and # appended to by config.status. When coming back to configure, we # need to make the FD available again. if test "$no_create" != yes; then ac_cs_success=: ac_config_status_args= test "$silent" = yes && ac_config_status_args="$ac_config_status_args --quiet" exec 5>/dev/null $SHELL $CONFIG_STATUS $ac_config_status_args || ac_cs_success=false exec 5>>config.log # Use ||, not &&, to avoid exiting from the if with $? = 1, which # would make configure fail if this is the last instruction. $ac_cs_success || as_fn_exit 1 fi if test -n "$ac_unrecognized_opts" && test "$enable_option_checking" != no; then { $as_echo "$as_me:${as_lineno-$LINENO}: WARNING: unrecognized options: $ac_unrecognized_opts" >&5 $as_echo "$as_me: WARNING: unrecognized options: $ac_unrecognized_opts" >&2;} fi echo "" echo "--" echo "dar and libdar have been successfully configured with the following parameters:" echo "" echo " LIBDAR parameters:" printf " Zlib compression (gzip) : " if [ "$local_libz" = "yes" ] ; then echo "YES" else echo "NO" fi printf " Libbz2 compression (bzip2) : " if [ "$local_libbz2" = "yes" ] ; then echo "YES" else echo "NO" fi printf " Liblzo2 compression (lzo) : " if [ "$local_liblzo2" = "yes" ] ; then echo "YES" else echo "NO" fi printf " Liblxz compression (xz) : " if [ "$local_libxz" = "yes" ] ; then echo "YES" else echo "NO" fi printf " Strong encryption support : " if [ "$local_crypto" = "yes" ] ; then echo "YES" else echo "NO" fi printf " Public key cipher support : " if [ "$local_gpgme" = "yes" ] ; then echo "YES" else echo "NO" fi printf " Extended Attributes support: " if [ "$local_ea_support" = "yes" ] ; then echo "YES" else echo "NO" fi printf " Large files support (> 2GB): " if [ ! 
-z "$ac_cv_sys_file_offset_bits" -o ! -z "$ac_cv_sys_large_files" ] ; then echo "YES" else echo "NO" fi printf " extX FSA / nodump support : " if [ "$local_nodump_feature" = "yes" ] ; then echo "YES" else echo "NO" fi printf " HFS+ FSA support : " if [ "$local_birthtime" = "yes" ] ; then echo "YES" else echo "NO" fi printf " Integer size used : " if [ "$build_mode" = "infinint" ] ; then echo "infinint" else if [ -z "$build_mode" ] ; then build_mode=64 fi echo "$build_mode" fi printf " Thread safe support : " if [ "$local_mutex_works" = "yes" -a -z "$local_test_memory" -a "$local_missing_reentrant_libcal" != "yes" ] ; then echo "YES" else echo "NO" fi printf " Furtive read mode : " if [ "$local_furtive_read_mode" = "yes" ]; then echo "YES" else echo "NO" fi printf " Large directory optim. : " if [ "$local_fast_dir" = "yes" ] ; then echo "YES" else echo "NO" fi printf " posix fadvise support : " if [ "$local_posix_fadvise" = "yes" ] ; then echo "YES" else echo "NO" fi printf " microsecond read accuracy : " if [ "$local_microsecond_read" = "yes" ] ; then echo "YES" else echo "NO" fi printf " microsecond write accuracy : " if [ "$local_microsecond_write" = "yes" ] ; then echo "YES" else echo "NO" fi printf " can restore symlink dates : " if [ "$local_lutimes" = "yes" ] ; then echo "YES" else echo "NO" fi printf " can uses multiple threads : " if [ "$local_threadar" = "yes" ] ; then echo "YES" else echo "NO" fi printf " Delta-compression support : " if [ "$local_librsync" = "yes" ] ; then echo "YES" else echo "NO" fi printf " Remote repository support : " if [ "$local_libcurl" = "yes" -a "$local_threadar" = "yes" ] ; then echo "YES" else echo "NO" fi echo "" echo " DAR SUITE command line programs:" printf " Long options available : " if [ "$local_have_getopt_long" = "yes" ] ; then echo "YES" else echo "NO" fi printf " Building examples : " if [ "$examples" = "yes" ] ; then echo "YES" else echo "NO" fi printf " Building dar_static : " if [ "$build_static" = "yes" ]; then 
if [ "$static_pb" = "yes" ]; then echo "NO (system does not support static linking, see note below)" else echo "YES" fi else echo "NO" fi printf " using upx at install : " if [ "$upx" = "yes" ] ; then echo "YES" else echo "NO" fi printf " building documentation : " if [ "$doxygen" = "yes" ] ; then echo "YES" else echo "NO" fi printf " building python binding: " if [ "$local_python" = "yes" ] ; then echo "YES" else echo "NO" fi if [ "$static_pb" = "yes" -a "$build_static" = "yes" ] ; then echo "" echo " Note:" echo "" echo " If you want to know which libraries are not available as static" echo " libraries check the logs in the config.log generated file. the command" echo "" echo " 'grep -e -static -A 2 config.log'" echo "" echo " should bring you to the essentials." echo "" echo "You also might want to speed up the compilation process running ./configure" echo "with the --disable-static option" echo "" fi if [ -z "$build_mode" ] ; then echo "" echo "--------------------- N O T E -------------------------------------------" echo "Note: You are about to build a libdar/dar binary relying on \"infinint\"" echo "integer type. 
You shall also consider using 64 bits integers (whatever" echo "your CPU has 32 or 64 bits registers) for better performances and reduced" echo "memory requirements, at the cost of the limitations explained here:" echo " ./doc/Limitations.html (Paragraph about Integers)" echo "Document which is also available online at:" echo " http://dar.linux.free.fr/doc/Limitations.html#Integers" echo "Unless you are impacted by these limitations, you can rerun ./configure" echo "adding the option --enable-mode=64 for dar/libdar better performances" echo "-------------------------------------------------------------------------" fi if [ "$libgcrypt_hash_bug" = "yes" ] ; then echo "" echo "" echo "#################### W A R N I N G ######################################" echo "" echo "libgcrypt version is lower than $min_version_gcrypt_hash_bug and has a bug" echo "concerning hash calculation for large files. Expect sha1 and md5 hash" echo "results for slices larger than 256 Gio (gibioctet) to be incorrect." echo "" echo "#################### W A R N I N G ######################################" fi echo "" dar-2.6.8/ChangeLog0000444000175000017520000027225013617552305011007 00000000000000from 2.6.7 to 2.6.8 - fixing bug leading binary delta failed to be read from an archive in some quite rare condition. - fixed bug that was not listing file with delta path when filtering out unsaved inodes - updated source package for the python binding tutorial document be installed with the rest of the documentation - adding date_past_N_days helper script to backup only files later than "today minus N days" - incorporated the "args" support built script in dar source package from 2.6.6 to 2.6.7 - fixing shell_interaction_emulator class declaration to avoid compilation errors and warning under MacOS - fixed bug: dar failed creating an archive on its standard output reporting the error message "Skipping backward is not possible on a pipe" - new feature: added python binding to libdar! 
from 2.6.5 to 2.6.6 - fixing script that builds windows binary packages to include missing cygwin libraries - fixing bug: dar_manager batch command (-@ option) contains a inverted test in a sanity check, leading the execution to systematically abort reporting an internal error message. - fixed message error type when asymmetrical encryption is requested and gpgme has not been activated at compilation time - fixed dar/libdar behavior when gpg binary is not available and gpgme has been activated at compilation time. Instead of aborting, dar now signal the gpgme error and proposes to retry initialization without gpgme support. This makes sense for dar_static binary which stays usable in that context when all options have been activated from 2.6.4 to 2.6.5 - fixed bug: dar crashed when the HOME environment variable was not defined (for example running dar from crontab) - removed useless skip() in cache layer - cache layer improvement, flushing write pending data before asking lower layer about skippability - fixed bug met when several consecutive compressed files were asked to be compressed and failed getting reduced in size by libdar. In that situation as expected, libdar tries to skip backward and stores the file uncompressed. However the cache layer was introducing an offset of a few bytes leading the next file to be written over the end of the previous one, which dar reported as data corruption when testing the archive. - updating licensing information with the new address of the FSF - clarifying message about possibly truncated filename returned by the operating system from 2.6.3 to 2.6.4 - fixed display bug indicating delta signatures were about to be calculated even when this was not the case. - updating dar man page about the fact aes256 replaced blowfish as the default strong encryption algorithm - bug fix: -D option used at creation time was not adding escape mark of skipped directories. 
This lead the empty directories that would replace each skipped one to be inaccessible and unable to be restored only in sequential read mode (it worked as expected in direct mode) from 2.6.2 to 2.6.3 - feature enhancement: added option to specify the block size used to create delta signatures. - feature enhancement: added the ability to provide login for sftp/ftp remote access, that contain @ and other special characters. - fixed bug in dar_xform, leading dar not finding source archive if destination was not placed in the same directory as source from 2.6.1 to 2.6.2 - fixed incoherence in documentation - updating in-lined help information (-h option) - fixed unexpected behavior of the dar command-line filtering mechanism met when the provided path to -P or -g options was ending with a slash - renaming 'path operator + (std::string)' as method append() to avoid compiler using it when a std::string need first to be converted to path before adding it to an existing path. - adding check test to detect when path::append() is used to add a path instead of a filename to an existing path object. - adding a warning when restoring a Unix socket if the path to that socket is larger than what the sockaddr_un system structure can handle - fixing bug due to Linux system removing file capabilities (stored as EA) when file ownership is changed. Though restoring EA after ownership may lead to the impossibility to restore them due to lack of permission when dar/libdar is not run as root. Thus we try restoring EA a second time after ownership restoration. This is not efficient but restores the file as close as possible to their original state whatever permission dar has been granted for a restoration operation. 
from 2.6.0 to 2.6.1 - fixed error in man page - fixing bug in the routine removing files for local filesystem, used at archive creation time to remove an existing archive (after user confirmation), or at restoration time used to remove file that had been removed since the archive of reference was done. The file to remove was always removed from the current directory (missing the path part), most of the time this was leading to the error message "Error removing file ...: Not such file or directory". It could also lead to incorrectly removing files (not directory) located in the directory from which dar was run. - fixing bug met while repairing an archive containing delta signature for unsaved files - merging patch from ballsystemlord updating list of file extension not to compress (see compress-exclusion defined in /etc/darrc) - review cat_delta_signature implementation in order to be able to fix memory consumption problem when delta signature are used - fixed missing mark for data CRC when the data is a delta patch, leading sequential reading to fail when a delta patch was encountered - fixed bug in XML output about deleted entries - fixed XML output to be identical to the one of dar 2.5.x for deleted entries. - Adding the deleted date in 'mtime' field for deleted entries in XML output - fixing bug in xz/lzma routine wrongly reporting internal error when corrupted data was met - fixed code for compilation with clang to succeed (it concerns MAC OS X in particular) - fixed inconsistencies in libdar API that avoided gdar to compile with libdar released in 2.6.0 from 2.5.x to 2.6.0 - new feature: support for binary delta in incremental/differential backups (relying on librsync) - new feature: support ftp/sftp to read an archive from a cloud storage. 
(relying on libcurl) reading is optimized to not transfer a whole slice but only the needed part to proceed to the operation (restoration, listing, and so on) - new feature: support ftp/sftp to write an archive eventually with hash files to a remote cloud storage (relying on libcurl) - modified behavior: While creating a single sliced archive, DUC file is now executed unless user interrupted dar/libdar. This to stay coherent with multi sliced archive behavior - new feature: display filters nature (-vmasks option) - new feature: follow some symlinks as defined by the --ignored-as-symlink option - new feature: one can define the compression algorithm a dar_manager database will use. This choice is only available at database creation using the new dar_manager's -z option. In particular "-z none" can be used to avoid using compression at all - repair mode added to re-create a completed archive (suitable for direct access mode and merging) from an interrupted one due to lack of disk space, power outage or other reasons leading to similar problem. - Dar can now only save metadata inode change without re-saving the whole file if its data has not changed. Dar_manager also handle this by restoring the full backup and then the inode metadata only when necessary. 
- In regard to previous point, if you want to keep having dar saving the data when only metadata has changed use --modified-data-detection option - moved dar_slave code into libdar as class libdar::libdar_slave - moved dar_xform code into libdar as class libdar::libdar_xform - added libdar_slave and libdar_xform in libdar API - modified dar_xform and dar_slave to rely on new libdar API - API: simplified user_interface class - API: using std::shared_ptr and std::unique_ptr to explicitly show the ownership of the given pointed objects (C++11 standard) - API: simplified class archive to only require user_interaction at object construction time - API: simplified class database to only require user_interaction at object construction time - API: making enum crypto_algo a C++11 "enum class" type - security refresh: default crypto algo is now AES256. As you do not need anymore since 2.5.0 to specify the -K option when reading an archive this should not bring any backward compatibility issue - security refresh: adding salt per archive (one is still present per block inside an archive) - security refresh/new feature: adding option --kdf-param to define the iteration count for key derivation, which now defaults to 200,000 and hash algorithm used to derive the key, still using sha1 by default - side effect of previous feature due to starvation of free letters to add a new command, the -T option with argument is no longer available, one needs to provide explicitly the desired argument - security refresh: improving seed randomization for the pseudo-random generator used in elastic buffers - feature enhancement: activate needed Linux capabilities in the "effective" set if it is permitted but not effective. This concerns cap_chown at restoration time, cap_fchown for furtive read mode, cap_linux_immutable to restore the immutable flag, and cap_sys_resource to set some linux FSA.
This let one set the capabilities for dar binary only in the "permitted" set, capabilities will then be allowed only for users having them in the "inheritable" set of their calling process (usually a shell), without root privilege need. - the ./configure --enable-mode option now defaults to 64, which will setup a libdar64 in place of infinint based libdar by default. You can still build a infinint based libdar by passing --enable-mode=infinint to the ./configure script. from 2.5.21 to 2.5.22 - removed useless skip() in cache layer - cache layer improvement, flushing write pending data before asking lower layer about skippability - fixed bug met when several consecutive compressed files were asked to be compressed and failed getting reduced in size by libdar. In that situation as expected, libdar tries to skip backward and stores the file uncompressed. However the cache layer was introducing an offset of a few bytes leading the next file to be written over the end of the previous one, which dar reported as data corruption when testing the archive. - updating licensing information with the new address of the FSF - fixing bug met when restoring file having FSA but EA and overwriting an existing file in filesystem - clarifying message about possibly truncated filename returned by the operating system from 2.5.20 to 2.5.21 - bug fix: -D option used at creation time was not adding escape mark of skipped directories. This lead the empty directories that would replace each skipped one to be inaccessible and unable to be restored only in sequential read mode (it worked as expected in direct mode) from 2.5.19 to 2.5.20 - adding a warning when restoring a unix socket if the path to that socket is larger than what the sockaddr_un system structure can handle - fixing bug due to Linux system removing file capabilities (stored as EA) when file ownership is changed. 
Though restoring EA after ownership may lead to the impossibility to restore them due to lack of permission when dar/libdar is not run as root. Thus we try restoring EA a second time after ownership restoration. This is not efficient but restores the files as close as possible to their original state whatever permission dar has been granted for a restoration operation. - fixing compilation problem with recent clang++ compiler from 2.5.18 to 2.5.19 - fixed compilation issue on systems that do not have ENOATTR defined - fixed compilation warning about deprecated dynamic exception specifications in C++11 - fixing bug in xz/lzma routine wrongly reporting internal error when corrupted data was met - fixed compilation warning with gcc about deprecated readdir_r system call from 2.5.17 to 2.5.18 - fixed compilation issue in context where EA are not supported - fixed typo in dar man page (--sequential-mode in place of --sequential-read) - moved the "no EA support warning" trigger when restoring an archive later in the EA restoration process, in order to have the possibility thanks to the -u "*" option to restore an archive containing EA using a dar/libdar without EA support activated at compilation time, - at restoration time, avoiding issuing an "EA are about to be overwritten" warning when the in place file has in fact not any EA set. from 2.5.16 to 2.5.17 - bug fix: dar failed to restore EA when file permission to restore did not include user write access.
Fix consists in temporarily adding user write access in order to restore EA and removing this extra permission afterward if necessary - updated FAQ - fixed typos in dar man page - fixed bug met when writing slices to a read-only filesystem - fixed compilation problem under Solaris - fixed typos in dar man page - bug fix: self reporting bug in filtre.cpp line 2932 or 2925 depending on dar's version (report occurs in a normal but rare condition that was not imagined by developer, leading dar to abort the backup) - bug fix: wrong evaluation of possibility to seek backward in the escape layer (layer managing tape marks) which led to useless but harmless skip trials in some rare conditions. from 2.5.15 to 2.5.16 - bug fix: while rechecking sparse file (-ah option) during a merging operation, dar wrongly reported CRC mismatch for saved plain files - fixed man page about sparse-file handling while merging: To remove sparse file datastructure during a merging operation you need to set --sparse-file-min-size to a value larger than all file sizes contained in the archive (for example 1E for one exabyte) - bug fix: met when using compression and creating the archive to dar's standard output (ssh) and leading files to be corrupted in the archive and reported as such.
- optimisation of escape_sequence skippability (avoids trying skipping and failing for some corner cases, when we can detect it is not even worth trying) from 2.5.14-bis to 2.5.15 - fixing self report bug message met when trying to create an isolated catalogue into a directory that does not exist - adding slice overwriting verification before creating an isolated catalogue, to be coherent with other operations creating an archive (backup and merging) - storage size of compressed files was often wrongly stored in archive (shorter than reality), the only impact took place at archive listing time where the compression ratio displayed was better than reality - fixed auto-detected bug condition triggered when -Tslicing is used with --sequential-read. Both options are not compatible and have been excluded by a nicer message than this auto-detection bug message. from 2.5.14 to 2.5.14-bis - avoiding using the syncfs() system call in dar_split when the platform does not support it (replacing it by sync() in that case for compilation to be successful) from 2.5.13 to 2.5.14 - made libgcrypt built-in memory guard be initialized before obtaining the libgcrypt version, to respect libgcrypt usage (but no problem was seen nor reported about this inconsistency) - fixed syntax error in XML listing output (EA_entry and Attributes tags) - fixed typos in dar man page - Updating Tutorial for restoration - fixed bugs in dar_split: cygwin support, filedescriptors were not explicitly closed at end of execution, allocating buffer on heap rather than in the stack for better size flexibility, avoiding buffer size to be greater than SSIZE_MAX. - added -s option to dar_split in order to disable the by default SYNC write that was used and which caused poor performance. To keep the same behavior as the older dar_split (and its poor performances) you now need to use the -s option.
- dar_split enhancement: added call to syncfs before closing the file descriptor in split_output mode - fixed bug in dar_split that did not lead dar_split to completely fill a device before asking the user to change the media when used in split_output mode, this was sometimes leading dar to report a file as corrupted at a media boundary. - added feature in dar_split to show the amount of data written since the last media change from 2.5.12 to 2.5.13 - added -az option to automatically nullify negative dates returned from the system in the archive under creation (filesystem is not modified) - included the birthtime (HFS FSA) into the negative dates handling - modified behavior: dar now fails upon unknown option instead of warning the option is unknown and thus ignored - bug fix: dar 2.5.12 and below in 2.5.x branch could not read archive generated by dar 2.4.x and below (unless in infinint compilation mode) when the old archive included a file which date(s) was returned by the system as a negative integer at the time of the backup. Note that if dar can now read old archive in that particular case, such dates stay recorded in the dar archive as very far in the future and not in the past, because 2.4.x and below blindly assumed the system would always return a positive integer as number of seconds since 1970. Since 2.5.12 release, when the system provides a negative date the date is assumed as zero (Jan 1970) with user agreement. - fixed missing throw in tools.cpp (exception condition was not reported)
This limitation was only described in doc/Limitations.html - dar now aborts if a line exceeding 20479 bytes is met in a listing file - improved error message issued when a file listing (-[ or -] option) is missing for it provides the missing filename in the error message - improved error message issued when a line of a file listing exceeds 20479 characters for it display the start of that line - fixed bug in file listing (-[ option) leading some directories and their content to be excluded in a somehow rare condition - improved behavior when dar reads a negative date. Instead of aborting it now asks the user if it can substitute such value by zero - improved behavior when dar is asked to read an archive located in a directory that does not exist. DUC file passed to -E option is now properly run in that case too and has the possibility for example to create that directory and download requested file from 2.5.10 to 2.5.11 - minor feature: displays the archive header which is never ciphered and aborts. This feature is activated while listing archive content and adding the -aheader option. This brings the side effect to invert two lines in the archive summary (dar -l archive -q) "catalogue size" and "user comment". - adding date format info for -w option in "dar_manager -h" usage help - fixed several mistakes in tools.cpp leading compilation to fail under certain environments - fixed a typo in filesystem.cpp and portability issue that lead compilation to fail under openbsd 6.0 - fixed bug in the filtering mechanism relying on file listing (-[ and -] options) that could not find an entry in the listing upon certain condition leading a file not being excluded as requested or not included as requested from 2.5.9 to 2.5.10 - fixed bug: -r option (only more recent overwriting policy) was considering a file to be more recent when it had the exact same date as the file in place. 
- updating documentation about requirements for compiling dar from sources - fixed bug: bug met when restoring of a file that has the immutable flag set. Dar/libdar failed restoring such file in the context of differential/incremental backup. The fix consists of the removal of the immutable flag from filesystem before restoring the new version of the file's data, then setting back the immutable flag afterward. - updating FAQ with description of the way dar uses lzo compression compared to the lzop program - fixed bug: aborting an archive was leading to an unreadable archive in direct mode, most of the time when strong encryption was used - minor new feature: added two flavors of lzo algorithm: lzop-1 and lzop-3 in order to match compression levels 1 and 3 of the lzop command from 2.5.8 to 2.5.9 - fixed typos in documentation about dar internal use of symmetric encryption - fixed bug: merging operation could wrongly melt different unrelated hard linked inodes when merging using an archive which results from a previous merging operation. - fixed bug: aborting an archive was sometimes leading to an unreadable archive in direct mode (was readable only in --sequential-read mode) - fixed bug: libgpgme was only present at linking time of final binaries (dar, dar_slave, dar_xform, dar_manager, dar_cp, dar_split), not at linking time of libdar, which caused problem under Linux Rosa distro where the "no-undefined" flag is passed to the linker. - minor new feature: -ay option has been added to display sizes in bytes instead of the default which uses the largest possible unit (Kio, Mio, and so on.) from 2.5.7 to 2.5.8 - fixed double memory release occurring in a particular case of read error - improving robustness of FSA code against data corruption - fixed bug: DAR_DUC_PATH was not used with -F and -~ options - new feature: added -aduc option to combine several -E options using the shell '&&' operator rather than the shell ';' operator. 
The consequence is that with -aduc option a non zero exit status of any script (and not only of the script given to the last -E option) will lead dar to report the error. - man page updated about combination of several -E options - fixed bug: merging partial FSA led to self reported bug in cat_inode.cpp at line 615 from 2.5.6 to 2.5.7 - fixed bug leading dar to not include directories given to -g option nor to exclude directories given to -P option when at the same time the directory given to -R option starts by a dot ("-R ./here" in place of "-R here") - bug fix and speed improvement: under certain circumstances dar was reading several times the data at slice boundary, leading dar to ask for slice N then N-1 then again N, this caused sub-optimal performance and was triggering user script unnecessarily from 2.5.5 to 2.5.6 - added speed optimization when comparing dates with hourshift flexibility (-H option) - fixed bug met when using as reference an archive generated by dar 2.5.4 or older, bug that led dar to save almost all files even those that did not change. from 2.5.4 to 2.5.5 - fixed message displayed when reading old archives - fixed bug that avoided dar-2.5.x code to read old archive format when special allocation was set (by default) at compilation time - disabling special-alloc by default reducing memory footprint - fixed error in FAQ about the way ctime/atime/mtime are modified during normal operating system life. - new implementation of class datetime with better memory footprint - avoiding storing sub-microsecond part of date to preserve limitint ability to store large dates - moving field cat_inode::last_cha from pointer-to-field to plain field of the class, this slightly reduces catalogue memory footprint.
- fixing bug in the returned exit status when dar failed executing DUC command due to system error (now returning the expected code 6 in that case too) from 2.5.3 to 2.5.4 - fixing missing included files for libdar API - removed extra try/catch block introduced by commit 72da5cad5e52f959414b3163a2e2a320c2bc721e - removed sanity check that caused problem when writing an archive to a FUSE based filesystem. - fixing non call of the -E script/command after last slice creation, when encryption or slice hashing was used - fixed bug in dar_manager: archive permutation in database lead libdar to check an archive number out of range under certain circumstances - fixed inversion of the condition triggering a warning about archive date order in a dar_manager database while moving an archive within a database - fixed typos in documentation - catalogue memory optimization, with the drawback to limit the number of entries in an archive to the max integer supported by the libdar flavor (32 bits/64 bits/infinint). - fix configure script to temporarily rely on LIBS rather than LDFLAGS to check for gpgme availability - removed order dependency between -A and -9 options of dar_manager: -9 can now be specified before or after -A option. - resetting to "false" the "inode_wrote" flag of hard link data-structure before testing and merging. Merging a previously tested archive or testing a second time would not include hard linked inode in the operation. This situation does not occur with dar but could succeed with some external tools that keep the catalogue in memory to perform different operations on it. - fixed bug in the routine that detects existing slices to warn the user and/or avoid overwriting, bug that led dar to "bark" when an archive base name started by a + character.
- avoiding to use AM_PATH_GPGME in configure script when gpgme.m4 is not available - adding new methods in libdar API to obtain the archive offset and storage size of saved files (class list_entry) - adding new method in libdar API to translate archive offset to file offset (class archive) - reporting a specific error message when filename returned by the system has the maximum length supported by the system itself, assuming filename has been truncated from 2.5.2 to 2.5.3 - Fixing a 2.5.x build issue met when a 2.4.x libdar library is already installed in an FreeBSD system. - Improving message and behavior of libdar in lax mod when a truncated archive is read - Fixing self reported bug at "tronconneuse.cpp line 561" met while reading truncated/corrupted archive - Fixed not closed filedescriptors, met when saving a filesystem that has not ExtX FSA available - Fixing configure script to be more robust in front of system where gpgme.h is installed in a non standard path and user did not provide coherent CPPFLAGS, LDFLAGS before calling ./configure - Displaying CRC values when listing isolated catalog as XML output - Fixing compilation issue when system does not provide strerror_r() call - Avoiding warning about FSA absence when fsa-scope is set to "none" - Adding --disable-fadvise option to configure script for those that want back full pressure from dar on the system cache (same behavior as 2.4.x) - Fixing bug, fadvise() called a wrong time making it having no effect - updating FAQ about comparative performance from 2.4.x to 2.5.x - optimization: reduced the number of call to dup() at libdar startup - improvement: printing file type on verbose output - new feature: added %t macro reflecting the inode type in dar's --backup-hook-execute option from 2.5.1 to 2.5.2 - fixed bug met when permission is denied while reading or writing slices - fixing bug that avoided creating an archive at the root of the filesystem - fixing bug met in rare situation while reading in 
sequential-read mode an archive encrypted using gnupg encryption. In that situation libdar may fail reading the archive (but succeeds in normal read mode) issuing an obscure message (message has also been fixed). - code simplification, removing field reading_verion from class crypto_sym as its parent class tronconneuse already have such information - removed extra newline displayed by dar at end of execution - fixed bug avoiding dar to properly read an entry (reporting CRC error) when specific sequence of character (start of escape sequence) fall at end of the read buffer of the escape layer. - speed optimization for datetime class - fixed bug that avoided dar reading archives in sequential read mode while reading from a pipe - fixed bug in non regression test routine provided beside dar/libdar - fixing display message showing not always in the correct context - fixing case inversion leading the cache layer not to be used when necessary and used when useless while reading an archive - improved heuristic in dar_manager to determine the date a file has been deleted. from 2.5.0 to 2.5.1 - fixed display bug in dar_manager met when using -o option and adding options for dar that does not exist for dar_manager (like -R option) - reactivating disabled (by mistake) optimization for some read-only dar manager database operations - fixing compilation issue with dar against gcc 4.9.2 - fixing syntax error in dar_manager message - fixed bug that forbade dar_manager to write down modified database when only database header was modified (-o, -b, -p switches). 
- adding dar_manager database format version information with -l option - fixed libdar inability to read dar_manager's database format version 4 - adapting code to build under cygwin environment, where thread_local seems broken - fixed output to stderr in place of stdout for Licensing information - fixed bug met when permission is denied while reading or writing slices - fixing bug that avoided creating an archive at the root of the filesystem from 2.4.x to 2.5.0 - added support for posix_fadvise() - added entrepot class hierarchy to support in the future other storage types than local filesystem for slices - API: added access to the entrepot through the API - modified class hash_fichier for it becomes entrepot independent - API: extended libdar API with an additional and more simple way to read an archive: archive::get_children_in_table() method, see doc/API_tutorial.html for details - added support for extX (see lsattr(1)) and HFS+ (birthtime date) Filesystem Specific Attributes (FSA). - dar is now able to skip backward when a file is found to be "dirty" at backup time. This avoids wasting space in archive but is only possible if the backward position is located in the current slice and no slice hashing nor strong encryption is used. Of course if the archive is written to a pipe or to stdout, skipping back to retry saving data at the same place is neither possible, --retry-on-change option stays possible in that cases at the cost of data duplication (wasted bytes amount, see --retry-on-change man page). - by default dar now performs up to 3 retries but do not allow for wasting bytes if file has changed at the time it was read for backup, this can be modied using --retry-on-change option. - With the same constraints as for a changing file, if a file is saved compressed but its compressed data uses more space than uncompressed, the file's data is resaved as uncompressed. However, if skipping backward is not possible, data is kept compressed. 
- if the system provides it, dar uses "Linux capabilities" to check for the ability to set file ownership when dar is not run as root. This allows dar to restore ownership when allowed even when it is not run as superuser. - removing dar-help tool used to build dar -h messages. That tool became useless for a long time now. - added several more specific verbosity options: -vm, -vf and -vt - added support for microsecond timestamps (atime, mtime, ctime, birthtime) - Using lutime() to restore atime/mtimes of symlink on systems that support it. - API: removed backward compatible API for old libdar 4.4.x - API: simplified implementation of archive isolation thanks to isolation evolution features brought by release 2.4.0. Memory requirement is now divided by two compared to releases of previous branch (2.4.x). - dar has been updated to use this new API for archive isolation - added exclude-by-ea feature to avoid saving inodes that have a particular user defined EA set. - added comparison of an isolated catalogue with a filesystem, relying on embedded data CRC and inode metadata in absence of the saved data. - The new archive format (version 9) holds the ciphering algorithm used at creation time, only the passphrase is now required at reading time and -K option may be ignored which will lead dar to prompt for passphrase. - Adding support for public key encryption (GnuPG) supporting several encryption keys/recipients for a given archive - Adding support for public key signature when public key encryption is used - While listing archive contents, directories now show the size and average compression ratio of the data they contain - Archive summary (-l with -q options) now reports the global compression ratio - added the -vd switch to only display current directory under process for creation, diff, test, extraction and merging operations - added xz/lzma compression support - added -Tslicing listing option to show slice location of files inside an archive.
- isolated catalogues now keep a record of the slicing layout of their archive of reference in order to provide -Tslicing feature when used on the isolated catalogue alone. - However if an archive has been resliced (using dar_xform) after its isolated catalogue has been generated, using -Tslicing option with the isolated catalogue would give wrong information. To overcome that, it is possible to specify what is the new slicing of the archive of reference by using the -s and -S options in conjunction with -Tslicing - added dar_split command to provide on-fly multi-volume archive support for tape media - experimental feature to have libdar using several threads (not activated by default due to poor performance gain) - dar now aborts when a given user target cannot be found in included file - added sha512 hashing algorithm beside already available md5 and sha1, the generated hash file can be used with 'sha512sum -c ' command - removed useless --jog option for memory management - removed previously deprecated -y/--bzip2 command, bzip2 compression remains available using -z option (-zbzip2 or --compression=bzip2) - replaced SHA1 by SHA224 to generate IV for encryption blocks, this slightly improves randomness of IV and stay available when libgcrypt is run in FIPS mode from 2.4.23 to 2.4.24 - fixed bug: merging operation could wrongly melt different unrelated hard linked inodes when merging using an archive which results from a previous merging operation. 
from 2.4.22 to 2.4.23 - fixed bug leading dar to not include directories given to -g option nor to exclude directories given to -P option when at the same time the directory given to -R option starts by a dot ("-R ./here" in place of "-R here") from 2.4.21 to 2.4.22 - fixing bug in the returned exit status when dar failed executing DUC command due to system error (now returning the expected code 6 in that case too) from 2.4.20 to 2.4.21 - removed sanity check that caused problem when writing an archive to a FUSE based filesystem. - fixed bug in dar_manager: archive permutation in database lead libdar to check an archive number out of range under certain circumstances - fixed inversion of the condition triggering a warning about archive date order in a dar_manager database while moving an archive within a database - removed order dependency between -A and -9 options of dar_manager: -9 can now be specified before or after -A option. - resetting to "false" the "inode_wrote" flag of hard link datastructure before testing and merging. Merging a previously tested archive or testing a second time would not include hard linked inode in the operation. This situation does not occurs with dar but could succeed with some external tools that keep the catalogue in memory to perform different operations on it. - fixed bug in the routine that detects existing slices to warn the user and/or avoid overwriting, bug that lead dar to "bark" when an archive base name started by a + character. 
from 2.4.19 to 2.4.20 - fixed display bug in dar_manager met when using -o option and adding options for dar that does not exist for dar_manager (like -R option) - reactivating disabled (by mistake) optimization for some read-only dar manager database operations - fixing compilation issue with dar against gcc 4.9.2 - fixing syntax error in dar_manager message - fixing bug that avoided creating an archive at the root of the filesystem from 2.4.18 to 2.4.19 - fixed missing quote in dar_par.dcf which is called by the par2 directive - fixed bug in dar_manager's -u option, not displaying most recent files of an archive when they have been marked as removed in a more recent archive of the same dar_manager database. - fixed bug met while restoring in sequential read mode a file having several copies (was modified at the time it was saved and retry-on-change was set). from 2.4.17 to 2.4.18 - Initial Vector used for strong encryption was set with pseudo-random data generated using SHA1 message digest and blowfish cipher, which are not available when libgcrypt is running in FIPS mode. Since 2.4.18 we now use SHA256 and AES256 for IV assignment in order to have libdar compatible with FIPS mode. For data encryption nothing changes: the cipher specified (-K, -J, -$ options on CLI) are used as before. - fixing bug met when performing archive isolation in sequential-read mode. If an archive corruption or truncated archive leads an inode to not have its CRC readable, dar aborts and issues a BUG report. - updating list of projects relying on dar/libdar from 2.4.16 to 2.4.17 - fixing issue when case insensitive comparison was requested and invalid wide char for the current locale was met in a filename. In such situation the corresponding file was never saved before (considering a filesystem error for that file), while now the ASCII case insensitivity is used as fallback.
from 2.4.15 to 2.4.16 - fixing archive listing displayed information for catalogue size when archive is read in --sequential-read mode - fixing bug that avoided dar releases 2.4.x up to 2.4.15 to read encrypted archive generated by dar release 2.3.x and below - adding informational note at the end of ./configure script execution when --enable-mode has not been used. - adding support for case sensitivity in filename comparison (-an option) for other character sets than POSIX/C locale like Cyrillic for example. - fixing bashisms in doc/samples scripts from 2.4.14 to 2.4.15 - fixing bug met when reading an encrypted archive in sequential mode - fixing bug met when reading an encrypted archive in sequential mode from an anonymous pipe - changed option '-;' to -9 as '-;' does not work on all systems with getopt (only long option equivalent --min-digits worked) for dar, dar_cp, dar_manager, dar_xform and dar_slave commands. - fixing bug met when restoring deleted files in sequential read mode and some directory where they should be "restored" are not readable or could not be restored earlier - adding extra buffer to handle sequential read of encrypted archive when the last crypto block contains some but not all clear data after encrypted one (the archive trailer). - fixing compilation issue using clang - fixing bug that prevents using -~ option with on-fly catalogue isolation in order to execute an user command once on-fly isolation has completed - added some autoconf magic to determine the correct (BSD/GNU) flag to use with sed in order to activate regular expression parsing - new implementation of mask_list class which is compatible with libc++ - fixed bug met on FreeBSD with dar_xform where the system provides a standard input file descriptor in read-write instead of read-only mode. 
from 2.4.13 to 2.4.14 - limiting memory consumption of the cache layer to stay below 10 MiB, under certain circumstances (very large archive), it could grow up to an insane value like 50% of the available RAM. reducing to 10 MiB does not impact performance in a noticeable manner while it avoids the system swapping out due to the libdar cache layer becoming huge. - added --with-pkgconfigdir to define an alternative path for libdar pkgconfig file (to ease portability to FreeBSD) - modified some Makefile.am for better FreeBSD support - fixed display bug in XML listing output concerning hard linked inodes - fixing typo in man page - fixing bug met while isolating a catalogue in --sequential-read mode. Using such isolated catalogue led dar to report an error about inaccessible EA. - displaying compression rate for sparse files even when they are uncompressed, sparse file detection also leads to compress files - fixing bug that led libdar to fail comparing an inode having EA when comparison is done in --sequential-read mode - fixing display bug in the libgcrypt check of configure script for minimum required version - fixing 'make clean' to remove some forgotten files generated by 'make' from 2.4.12 to 2.4.13 - adding initialization value for two variables to avoid inappropriate warning when compiling with -Wall option - reducing UNIX_PATH_MAX, when not defined by the system, from 108 to 104 bytes to accommodate BSD systems - fixing assignment operator of class criterium that was not returning any value as it should - removing useless boolean expression that always succeeds in logical AND expression - adding support for back-slash of quoting characters in DCF files - fixed compilation issues with clang / FreeBSD, Thanks to Neil Darlow's server ;-) - fixed compilation warning due to deprecated symbols in libgcrypt header files - replaced gnu make specific rules by legacy ones to avoid automake warning about them - removed old unused stuff from misc sub-directory - adding warning at
compilation time if libgcrypt used is older than 1.6.0 - adding warning at execution time if hash computation is requested with slices greater than 256 Gio and ligbcrypt dynamically or statically linked is older than 1.6.0 - adding alternative methods in list_entry API class to return dates as number of seconds - fixed bug in hour-shift (-H option) when comparing dates from an old extracted catalogue (archive format 7 or older). - fixed documentation bug about the meaning of the compression ratio - fixed a display bug about the "compression flag" wrongly displayed for uncompressed files - fixed unhandled exception when giving non number argument to -1 option from 2.4.11 to 2.4.12 - for correctness fixed delete vs delete[] on vector of char (not incidence reported) - fixed out of range access in routine used to read very old archive format - fixed error in logical expression leading a sanity test to be useless - removed duplicated variable assignment - updated FAQ - fixed typo and spelling errors - fixed bug (reported by Torsten Bronger) in the escape layer leading libdar to wrongly reporting a file as corrupted at reading time - fixed bug in the sparse file detection mechanism that lead the minimum size hole detection to become a multiple of the default value or specified one. This implied a less efficient reduction of sparse files because smaller holes in files were ignored - fixed and updated man page about --go-into option - updated full-from-diff target in /etc/darrc default file - added a debug option in hash_file class (option only used from testing tools) to troubleshoot sha1/md5 hash problem on slices larger than (2**38)+63 bytes, bug reported by Mike Lenzen and understood by Yuriy Kaminskiy at libgcrypt. Note: This bug is still open due to an integer overflow in libgcrypt. - backported from current development code an additional and more simple way to read an archive using the libdar API. This API extension is not used by dar command-line tools for now. 
- Fixing installation of libdar header files on Darwin, where "DARwin" macros were not filtered out from the generated libdar header files. - Fixing self reported bug 'generic_file.cpp line 309' met while comparing an archive with a filesystem - Update code in order to compile with gcc-4.8.2 in g++11 mode (partial implementation and adaptation of Fabian Stanke's patch) - Fixing bug met while performing a verbose archive listing in sequential read mode - Added Ryan Schmidt's Patch to properly display status at end of ./configure script under BSD systems (in particular Mac OS X) - Updating configure.ac script to fix warning reported by autoconf when generating the ./configure script - Addressed portability problem with BSD systems that do not provide a -d option to the 'cp' command, preventing proper installation of the Doxygen documentation. Fix based on patch provided by Jan Gosmann. from 2.4.10 to 2.4.11 - Modified behavior of 'dar -h' and 'dar -V', both now return 0 as exit status instead of 1 (which means syntax error). - Fixed bug: -Q is now available with -V under the collapsed form -QV or -VQ - fixed typo in documentation - fixed memory leakage met when dar fails a merging operation because the resulting archive is specified in a directory that does not exist. - fixed bug met when isolating a differential backup in sequential read mode - fixed bug about slice file permission not taking care about umask variable when the --hash feature is used. - fixed performance issue when reading an archive over a pair of pipes using dar_slave (possibly over ssh) when the archive makes use of escape marks and when no encryption is used - added target "full-from-diff" in /etc/darrc default file - fixed bug avoiding reading a truncated archive in direct access mode with the help of an external catalogue.
- new and better implementation of archive extraction in sequential read mode - fixing bug (segfault) met when hitting CTRL-C while reading an archive in sequential mode - fixing libdar.pc for pkg-config for the cflags given to external applications - fixed memory allocation/deallocation mismatches (delete vs delete [] ) concerning four vectors of chars. - fixed error in logical expression leading a sanity test to be useless from 2.4.9 to 2.4.10 - fixing libdar about dar_manager database corruption that occurred when deleting the first archive of a base containing a plain file only existing in that first archive. - Added code to cleanup databases instead of aborting and reporting that previously described type of database corruption. - Added feature when comparing archive with filesystem in order to report the offset of the first difference found in a file. This was necessary to help solving the following bug: - fixed bug in sparse file detection mechanism that could lead in some very particular (and rare) situations to the loss of one byte from a file being saved. In that case testing the archive reported a CRC error for that file. So if you keep testing archives in your backup process and have not detected any problem, you can then keep relying on your old backups. This bug also showed up when merging archives: dar aborted and reported that a merged file had a different CRC than the one stored in the archive of reference. from 2.4.8 to 2.4.9 - fixed bug: during differential backup dar saved unchanged hard linked inode when a hard link on that inode was out of the -R root directory. This also has the effect to always save files with long names on NTFS filesystems (!) - Adapted patch provided by Kevin Wormington (new messages displayed) - Fixed syntax error in configure script about execinfo detection - Removed unused AM_ICONV macro from configure script - fixed bug met under Cygwin when auxiliary test command failed to link when libgcrypt was not available.
- updated mini-howto by Grzegorz Adam Hankiewicz - updating French message translations - restricted security warning for plain files and hard linked plain files - fixed display bug in dar_cp when manipulating files larger than 2 GB - fixed SEGFAULT met when adding to a dar_manager database an archive which base name is an empty string - improved error message, reporting the -B included file in which a syntax error has been met - modified dar_manager database to consider both ctime and mtime as timestamp value for data of saved files. This suppresses the warning about badly ordered archives in database when at some files have been restores from a old backup. from 2.4.7 to 2.4.8 - documentation fixes and updates - improved database listing efficiency - reduced memory usage of the caching layer in libdar - fixed self reported bug caused by memory allocation failure - fixed a SIGSEGV caused by double free in dar_xform when syntax error is met on command-line - dar_xform was not able to properly transform archive generated by dar older than release 2.4.0 - fixed bug that lead dar be unable to remove a directory at restoration time - replaced old remaining "bcopy" occurrence by a call to memcpy - fixed compilation warning under ArchLinux - fixed crash met while creating a backup with on-fly isolation - fixed libdar behavior when reading a strongly corrupted encrypted archive from 2.4.6 to 2.4.7 - fixing memory allocation bug in crc class, that lead glibc aborting dar - reviewed code and replaced some remaining occurences of bzero/bcopy by their recommended replacement version - fixed compilation problem under Solaris - fixed bug that could lead a file to be wrongly reported as different from the one on filesystem, when that file has been changed while it was saved, then saved a second time but has its size modified since the first time it was saved. 
from 2.4.5 to 2.4.6 - fixed bug met while interrupting compressed archive creation, the resulting archive was only readable in --sequential-read mode - fixed bug met while reading an interrupted archive in sequential reading mode. It lead dar to not release some objects from memory at the end of the operation, which displayed an ugly error message from libdar selfcheck routine. - fixed message reporting unknown system group when converting gid to name (was reporting unknow "user" instead of unknown "group") - removing the $Id:$ macro from file as we moved from CVS to GIT - updating package to distribute Patrick Nagel's scripts and documentation - updated URL pointing to Patrick Nagel's web site - updating documentation describing how to get source code from GIT (no more from CVS) - fixed typo in configure.ac - added info on how to build a brand-new dar tarball from source in GIT - modifies the end of messages shown by -h option to point to man page for more _options_ rather than _details_ - replaced − in the HTML generated documentation by a standard ASCII dash - fixed alignement bug in CRC calculation that lead libdar based application to crash on sparc-based systems. 
from 2.4.4 to 2.4.5 - updated sample scripts to be compatible with dar's --min-digit option - added missing included file to be able to compile with gcc-4.7.0 - removing an unused variable in filtre.cpp - fixed a display bug when comparing archive with filesystem, leading to a segmentation fault (%S in place of %i in mask) - fixed bug leading dar to not restore some directories from differential backups when they are absent in the filesystem - fixed bug that show a "uncaught exception" message at the end of archive listing for dar shared binaries only, compiled in infinint mode, under ArchLinux - updated the configure script to link with libexecinfo when available - added possibility to disable the use of execinfo in libdar thanks to the new --disable-execinfo option for the ./configure script - added Andreas Wolff patch to fix bug under Cygwin (segfault on program termination). from 2.4.3 to 2.4.4 - fixed man pages in the NAME section: added whatis entry - fixed segfault: in the internal error reporting code (delete[] in place of free()) - fixed bug: dar_manager was not able to read properly the latest generated databases version when having Extended Attributes recorded for some files - avoided reporting unreleased memory block when compilation optimization have been used (dar, dar_manager, dar_cp, dar_slave, dar_xform do all reported unreleased memory when gcc optimization was used in "infinint" mode) from 2.4.2 to 2.4.3 - fixed absurd compilation warning about possibly uninitialized variable - added -ai switch to dar_manager to disable warning about improper file order in database. - fixed bug met while changing order of archives in a dar_manager database - avoiding concurrent use of -p and -Q options, error message shown in that situation. 
- modified slice overwriting detection code to use a single atomic system call to create a new slice - replaced delete by delete[] for conversion routine of user/group to uid/gid - added the possibility to disable speed optimization for large directories - added memory troubleshooting option --enable-debug-memory - simplified class CRC implementation - fixed failed memory release upon exception thrown in class deci - modified tlv, tlv_list classes and ea_filesystem routines to not require any corresponding temporary objects in libdar (saves a few new/delete calls) - fixed silent bug in tlv class: due to the absence of copy constructor and destructor, some memory was not released and referred after the corresponding object's destruction - modified generic_file class to avoid temporary crc objects - fixed bug in header class that lead unreleased field (this class lacked a destructor), memory impact was however little: 10 bytes per slice - fixing bug in class tlv: unreleased memory - added protection code in class deci to properly release memory against exception thrown from called routines when user interrupts the operation. - replace previous internal stack report code by backtrace()/backtrace_symbols() - complete change of the implementation of the 'special-alloc' feature: the old code eat too much memory not to be adapted to new features added in release 2.4.0. This new implementation also bring some speed improvement from 2.4.1 to 2.4.2 - fixing bug met when reading an archive in sequential-read mode - fixing bug while filtering in sequential-read mode - fixing backward compatibility in dar_manager with old archives (wrong dates for deleted files). 
- fixing compilation problem on certain systems (missing #include statement) - fixing documentation syntax and spelling from 2.4.0 to 2.4.1 - adding information about "Cache Directory Tagging Standard" in doc/Feature.html - fixing typo in doc/presentation.html - fixing incomplete information in doc/usage_notes.html - rewriting sample scripts from tcsh to bash in doc/usage_notes.html - updating Swedish translation with the last version from Peter Landgren which has been forgotten for 2.4.0, sorry. - fixing installation problem, where src/libdar/nls_swap.hpp was not installed - fixing version returned by libdar_4_4::get_version to let kdar (or other external program relying on the backward compatible API) working as expected - fixed bug in the code determining whether a directory is a subdirectory of another. This bug could lead dar to restore more files that just the one that were specified with -g option. - added -k option to dar_manager for backward compatible behavior of dar_manager - fixed bug in dar_manager, was recording the wrong date of EA removal (when an inode has dropped all its EA since the archive of reference was done). - adapted dar_par_test.duc sample script to dar-2.4.x new behavior - adapted libdar to MacOS X to restore mtime date after EA, as on this system, modifying some system specific EA implies updating the mtime. But dar cannot yet store and restore the "creation date", it needs specific MacOS X code, as this value is not available through Posix EA. - fixed backward compatibility bug where dar 2.4.0 was not able to read archive containing only a catalogue (differential backup when no change occurred, snapshot backup, extracted catalogue) generated by dar 2.3.x or older. - fixed self reported internal error met when dar is merging archives generated by dar 2.3.x versions. 
from 2.3.x to 2.4.0 - hard links support for pipes, soft links, char and block devices has been added (so far, only hard links on plain files were supported) - added rich overwriting feature for merging archives (-/ option) - changed default behavior of dar: it no longer tries to preserve the atime of read files, which had as side effect to modify the ctime. See man page for -aa and -ac options for details - simplified the use of the "sed" command in Makefiles.am files - integrated Wiebe Cazemier's patch for man page - -E option has been extended to work also when generating a single sliced archive (no need to have -s option to be able to use -E option). - slice header has been extended to store additional information (slice layout is now redundant in each slice and may be used as backup from a slice to another in case of corruption). - dar no longer needs to read the first then the last slice of an archive to get its contents, it now instead only needs the last slice. - an isolated catalogue can now be used as backup of the original archive's internal catalogue (-A option in conjunction with -x option for example) - added directory look-up optimization (adaptation of Erik Wasser's patch) - added -e option support (aka dry-run) to archive testing - added the possibility to set permission and ownership of generated slices - re-designed the libdar API to have all optional parameters carried by a class object in a single argument, the aim being to not break backward compatibility of the API upon each new feature addition. The libdar_4_4 namespace can be used for backward compatibility with older applications (see API documentation) - added retry on change feature (-_ option). - changed storage for UID and GID from U_16 to infinint to support arbitrarily larger UID and GID - added lzo compression support - dar_manager now uses an anonymous pipe to send configuration to dar, this solves the problem due to command-line limitation.
- dar now stores a "removal date" when a file disappeared since the archive of reference was done (so far only the information that a file was removed was stored). This is needed for dar_manager (see next new feature) - dar_manager can now better restore the status of a set of files exactly as it was at any given time from a set of full and differential backups. In particular, it does no more restore files that were removed at the requested date. - added check in dar_manager to detect conditions where a file has a modification date that has been set to the past. Two objectives are at the root of this feature: proper restoration of files and detection of possible rootkit - added mode for restoration that avoid restoring directory tree which do not contain any saved files (in particular when restoring a differential backup) see man page for -D option for more details. - reviewed implementation of code managing Extended Attributes (much faster now) - added batch feature (-@ option) to dar_manager - added Furtive Read Mode support (O_NOATIME + fdopendir): when the system supports it, while reading data, dar does not modify any date (ctime or atime) - added the possibility to have sequential reading of archives (ala tar) see option --sequential-read - added the possibility to read from a pipe (single pipe, without dar_slave) (use '-' as filename in conjunction with --sequential-read) - added -P -g -[ and -] options to archive listing (-l option) - added sparse file detection mechanism (can save and restore sparse files) - added dirty flag in archive for file that changed while being saved. By default a warning is issued when the user is about to restore a dirty file, this can be changed thanks to the --dirty-behavior option - -R option can receive an arbitrary string (still is excepted an empty string) In particular dar will no more complain if the given path contains // or \\ however it must in a way or another point to something that exists! 
- added a short listing feature (listing only the summary), (using both -l and -q options) - extended conditional statements in included files (DCF) with user defined targets (see user target paragraph at the end of dar man page) User targets let the user add a set of options using a single keyword on command-line. - a sample /etc/darrc is now proposed with some user targets for common operation like compression without compressing already compressed files. - dar now releases filedescriptors of archive of reference(s) before proceeding to the operation (differential backup, archive isolation, etc.) - user can add a comment in archive header some macro are provided for common options (see --user-comment in man page). This comment can be seen when listing an archive in verbose mode (-l -v) or when displaying the archive's summary (-l -v -q). - added a "security warning" feature if ctime has changed in filesystem while inode has not changed at all (-asecu disables this feature). This is to target possible rootkit files. Note that this may toggle false positive, if for example you change EA of a file. - added feature: DAR_DUC_PATH environment variable, which let dar look for a DUC file (see -E and -F options) in the given path. - added feature: DAR_DCF_PATH environment variable, same as previously but for DCF files (see -B option). - added two targets for conditional syntax: "reference:" and "auxiliary:" - weak blowfish implementation has been removed (no backward compatibility as it suffered of a weak Initial Vector (IV) initialization), but the normal blowfish encryption stays in place. - Due to openssh licensing, replaced openssh by libgcrypt dependancy (which stays optional). - added new cyphers aes256, twofish256, serpent256 and camellia256 - added the hash feature (--hash option), supporting md5 and sha1 algorithms. The hash is calculated on the fly for each slice, before its data is even written to disk. 
This let one to check for media corruption even before a multi-sliced archive is finished. However this does not prevent an archive to be corrupted due to a software bug (in dar, libdar or in a external library), so it is always recommended to test the archive using dar's -t option. - -G option (on-fly isolation) has been replaced by -@ when creating an archive, to reduce the number of letter used for options. This also let available the usual switches associated to -@ option to define an encryption algorithm and passphrase for the on-fly isolated catalogue. - slices number may be padded with zeros (--min-digits option) Note that if using this option when creating an archive, this same option is required for any operation on this archive - added -konly feature to only remove files recorded as suppressed at differential backup restoration time. - dar and libdar now store keys in secure memory (with the exception that a DCF is parsed in unlocked memory, having a key in a DCF file is not as secure as having dar asking for password at execution time using the "-K :" syntax) - added hook for backup: a user command or script can be run before saving and after backing up files that match a given mask all along the backup process (see -<, -> and -= options). - added feature: -alist-ea let the user see the Extended Attributes of files while listing an archive contents. - dar_manager can receive negative numbers to point to archive counting by the end of the database. - dar and libdar stay released under GPL 2.1 (not under GPL 3, and not lesser GPL, neither) - setting the "little/big endian" to usual meaning (it was inverted in the code) this does not change the dar's behavior nor its compatibility with different systems or older libdar versions. 
- added -ai option to avoid warning for unknown inode types - added support for Solaris's Door files - added feature: decremental backup from 2.3.11 to 2.3.12 - avoiding concurrent use of -p and -Q options, error message shown in that situation. version 2.3.10 to 2.3.11 - fixed bug in the detection code of an existing archive of the same name when creating a new archive (improperly considered some files sharing a part of the archive basename as old slices of an archive of the same base name). - fixed a display bug. When using -v to see which arguments get passed to dar by mean of configuration file (DCF file, ~/.darrc or /etc/darrc) the last argument found in file was not displayed. - fixed two bugs (one in decompression routine, the other in decryption routine) that lead dar to segfault or run into an endless loop when reading a very corrupted archive. - added -H option with -d option - fixed bug leading Dar to report some files to be removed at restoration time to be of different type than the expected one when the reference used for that archive (difference backup) was an extracted catalogue. - fixed bug in dar's command_line parsing leading dar to free twice the same block of memory when an argument containing a double slash was given to -G [SF 3162716]. - probable fix (problem difficult to reproduce) for double memory release in the storage class [SF 3163389] version 2.3.9 to 2.3.10 - added patch by Jan-Pascal van Best to have -[ and -] options working with archive merging - fixed bug in displaying dates [SF 2922417] - enhanced pseudo random number generation used in dar - added an error message when an include/exclude file listing does not contains an invalid path (instead of a self reported bug message). 
- modified message displayed when some slice of an old archive having the same name are present in the destination directory (backup, isolation, merging, dar_xform) from 2.3.8 to 2.3.9 - fixed bashism in doc/examples/pause_every_n_slice.duc sample script [SF 2020090] - added Jason Lewis's script "dar_backups.sh" which is an enhanced version of n the script done by Roi Rodriguez Mendez & Mauro Silvosa Rivera. - added message asking software upgrade to handle case when new archive format (used by dar >= 2.4.0) is provided to dar - very little optimization of the reading process of EA - updated FAQ - replaced "Catalogue" by "Archive Contents" in output message (-l -v). - added Sergey Feo's patch to dar_par.dcf - added check against stddef.h header file presence in configure script - fixed spelling - added Charles's Script in doc/sample - added -q option to dar - added licensing exception to allow distribution of dar beside OpenSSL library - Bug fix: during archive diff (only), dar restore atime of file in the backup instead of file in the system before opening it for reading. - tested dar with valgrind from 2.3.7 to 2.3.8 - fixed bug in libdar met when user supply an empty file as a list of file to include or exclude ( -[ and -] options ) - fixed bug concerning elastic buffers used beside strong encryption. No security issue here, just in some almost rare situations the generated archive was not readable (testing your archive prevents you loosing data in this situation) - added some speed optimizations - avoided warning to appear without -v option set, when an error is met while fetching value of nodump flag (flag not supported on filesystem for example). 
from 2.3.6 to 2.3.7 - fixed bug in dar_manager about the localization of the archive in which to find the latest EA - fixed bug in configure script to properly report full blowfish encryption support - fixed a bug in the statistics calculus of dar_manager for most recent files per archive - removed inappropriate internal error check - added --disable-libdl-linking option - fixed mistake in API tutorial - updated Swedish translation by Peter Landgren - fixed bug in the file filtering based on listing file ( -[ option ) - fixed typo and spelling errors in documentation - updated code for clean compilation with gcc-4.2.3 - updated code for clean compilation with gcc-4.3 20080208 (experimental gcc) from 2.3.5 to 2.3.6 - fixed Makefile.am in src/dar_suite (removed "/" after $(DESTDIR)) - fixed bug in regex mask building when not using ordered masks - fixing bug that led dar_manager to report no error while some files failed to be restored due to command-line for dar being too large. - fixed bug met when user aborts operation while dar is finalizing archive creation [SF #1800507] - fixed problem with execvp when dar_manager launches dar from 2.3.4 to 2.3.5 - changed displayed message when adding a hard link to an archive while performing a differential backup - added back the possibility to use old blowfish implementation (bfw cipher) - integrated optimization patch from Sonni Norlov - updated Swedish translation by Peter Landgren - updated French translation - fixed broken Native Language Support in 2.3.x (where x<5) from 2.3.3 to 2.3.4 - fixed behavior when differential backup is interrupted (no more store file that would have been read if no interruption had been done as "deleted" since the archive of reference) [SF #1669091]. - added official method to access catalogue's statistics through the API (for kdar next version). - Fixed syntax error in dar_par_create.duc and dar_par_test.duc files (Parchive integration with dar). 
- minor spelling fix in error message (compressor.cpp) - added Wiebe Cazemier's two patches for dar man page - integrated patch from Dwayne C. Litzenberger to fix weakness in dar's implementation of the blowfish encryption. - improved the returned message when an invalid path is given as argument - updated doc/sample/sample1.txt script file from 2.3.2 to 2.3.3 - avoid using getpwuid() and getgrgid() for static linking. - fixed typo in dar's man page - update FAQ - fixed bug: uncaught exception thrown when CTRL-C was hit while dar waits for an answer from the user [SF #1612205] - fixed bug: unusable archive generated when CTRL-C was hit and blowfish encryption used [SF #1632273] - added a check to verify that the libdar used is compatible with the current dar suite programs [SF #1587643] - fixed bug: added workaround for the right arithmetic shift operator (the binary produced by gcc-3.4.2 computes "v>>s" equal to "v" when v is an integer field composed of exactly s bits. It should rather compute it to zero...). this problem leads 32-bit generated archives to be incompatible with 64-bit generated archives only when blowfish is used. - fixed bug met when the inode space is exhausted, thanks to "Jo - Ex-Bart" for this new feedback. [SF #1632738] - replaced &, <, >, ' and " in XML listing by &...; corresponding sequence. [SF #1597403] - dar_manager can receive arguments after stick to -o options (it is an error in regard to documentation, but no warning was issued in that case, leading to confusion for some users) [SF #1598138] - updated Veysel Ozer's automatic_backup script - fixed hard link detection problem [SF #1667400] - verbose output did not display hard links information - merged patch on dar_cp by Andrea Palazzi to have it return EXIT_DATA_ERROR when some data have been reported [SF #1622913] from 2.3.2 to 2.3.3 - avoid using getpwuid() and getgrgid() for static linking.
- fixed typo in dar's man page - update FAQ from 2.3.1 to 2.3.2 - fixed bug in Native Language Support when --enable-locale-dir was not set (Thomas Jacob's patch) - updated Swedish translation by Peter Landgren - --verbose=skipped was not available (only the short -vs form was available) - reviewed regex with ordered mask for the feature to better fits user's need (Dave Vasilevsky's feedback) - fixed bug where compression algorithm was changed to maximum (fixed with Richard Fish's adequate patch) - fixed tutorial with command line evolution (dar's -g option in particular) - latest version of Grzegorz Adam Hankiewicz's mini-howto - fixed bug concerning restoration of only more recent files from 2.3.0 to 2.3.1 - set back Nick Alcock's patch which has been dropped from 2.2.x to 2.3.x (patch name is "Do not moan about every single file on a non-ext2 filesystem") - fixed compilation problem when thread-safe code is disabled - integrated Wiebe Cazemier's patch for dar's man page - fixed bug in listing: -as option also listed files that had EA even when these were not saved in the archive - file permission of installed sample scripts lacked the executable bit - fixed a bug that appeared when a file is removed while at the time it is saved by dar - avoid having an unnecessary warning appearing when restoring a file in a directory that has default EA set - Cygwin has changed and does not support anymore the path in the form "c:/some/where", you have to use "/cygdrive/c/some/where" instead. Documentation has been updated in consequence. 
from 2.2.x to 2.3.0 - added user_interaction::pause2() method - added the snapshot feature - added the Cache Directory Tagging detection feature - adapted Wesley's patch for a pkgconfig for libdar - added -[ and -] options (file selection from file listing) Important consequence for libdar user programs: the fs_root argument is now expanded to full absolute path inside libdar, thus the mask you will give for path inclusion/exclusion (the "subtree" argument)will be used against full absolute path of files under consideration for the operation. Assuming you have fs_root=tmp/A and the current directory is /tmp, your mask will be used against strings like /var/tmp/A/some/file. (instead of tmp/A/some/file as in the previous API version). Things are equal if the fs_root is given an absolute path. - changed archive format to "05". Due to complete review of EA management. - upon some signal reception, dar aborts the backup nicely, producing a completely formatted archive will all the file saved so far. This archive can be take as reference for a further backup to continue the operation at a later time. - dar_manager aborts properly upon certain signal reception (do not let the database partially updated). - dar_slave and dar_xform now recognize when a slicename is given in place of a basename - reviewed thread_cancellation (API change) for it be possible to cancel several thread at the same time - prevent some dead-lock situation that can occur when a signal is received inside a critical section - dar_cp, dar_xform and dar_slave also abort nicely upon signal reception - dar_manager can now restore files based on a given date (not always the most recent version) - dar_manager now has an interactive mode (-i option) - change in API, the warning() method need not be overwritten, but the new protected method inherited_warning() must be inherited in its place (same function, same prototype as the original warning() method). - dar_manager features are now part of libdar. 
API has been completed with these new features - added the "last_slice" context (%c with -E option) when creating an archive - dar now check a file has not been modified while it was reading it, if so it reports a warning and returns a specific exit code - remove included gettext from the package (it is more a source of conflict with external gettext and if somebody needs internationalization better is to install libintl/gettext on its own). - added George Foot feedback about the good backup practice sort guide. - added -e option to dar_manager - added the progressive_report feature in the API - dar can now pause every N slice where N >= 1 - integrated Dave Vasilevsky's patch to support Extended Attributes and file forks under MacOS X - added method in API for external program be able to list dar_manager databases, their file contents and the statistics - added the merge/sub-archive feature - remove [list of path] from command line (-g option is now mandatory) - added regex expression in filters (-ar/-ag options) - added -ak option - added the --comparison-field option (extension of the --ignore-owner option aka -O option) - added the -af option (backup files more recent than a given date, others are keept as already saved) - dar now take care that an escape character can be sent when pressing the arrow keys and avoid considering them in this situation - dar will no refuse to abort if user presses escape when dar asks the user to be ready to write to a new slice - adapted Wesley Legette's patch for an xml archive listing - added 'InRef' status for EA (same meaning as the one for file's data) from 2.2.6 to 2.2.7 - updated Swedish translation by Peter Landgren - fixed bug #37 - added old function (modified in 2.2.6) for backward compatibility - added German translation by Markus Kamp from 2.2.5 to 2.2.6 - fixed bug #36 - avoid removing slices when creating archive in dry-run mode (-e option) - fixed display problem in dar_cp that lead uncaught exception just before 
exiting from 2.2.4 to 2.2.5 - limited size of internal buffers allocated on the stack to not be greater than SSIZE_MAX when this macro is defined. This comes from feedback from "Steffe" at sourceforge after he ported dar to HPnonStop. - integrated Andrey Yasniy's patch: fixed differential backup problem with ru_RU.koi8-r locale. - integrated Nick Alcock's patch: no warning shown when not on EXT2 filesystem and nodump feature has been activated. - avoid having arrow key be interpreted as escape key (while they remains an escape key + one char, as received from the tty). - added part of Kyler Klein's patch for OSX (Tiger) (only concerns included gettext) from 2.2.3 to 2.2.4 - fixed #35 - added in doc/samples the backup Script of Rodriguez Mendez & Mauro Silvosa Rivera - updated Swedish translation by Peter Landgren from 2.2.2 to 2.2.3 - error in TUTORIAL (-P only receives relative paths) - updated FAQ with memory requirement questions/problem - added Bob Barry's script for determining memory requirement - added documentation about NLS in doc/NOTES - fixed bug concerning the << operator of infinint class. This has no impact as this operator is not used in dar/libdar. - added Jakub Holy's script to doc/samples - fixed bug with patch transmitted from Debian (Brian May) about the detection of the ext2_fs.h header. - added warning in libdar when user asks the nodump flags to be checked against while the nodump feature has not been activated at compilation time. - fixed dar man page about --gzip option usage when using an argument - now counting as errors the file with too long filename - now counting the file excluded due to nodump flag as ignored due filter selection from 2.2.1 to 2.2.2 - fixed typo in dar man page (flowfish ;-) ) - -Q option now forces the non terminal mode even when dar is run from a terminal (tty) this makes dar possible to run in background without having the shell stopping it upon text output. 
- removed unused control code for dar's command line syntax - spelling fix of the tutorial by Ralph Slooten - added the pertinent part of the memory leak patch from Wesley Leggette (there is no bug here as the function where could occur the memory leak is not used in dar (!) ). - updated FAQ - updated man page information about optional argument's syntax to options like -z9 or --zip 9 - avoid calls to textdomain() when --disable-nls is set - updates doc/NOTES - fixed potential memory leakage on some system (to a "new[]" was corresponding a "delete" in place of a "delete[]" (Wesley's patch)) In consequences, for API users, note that the following calls - tools_str2charptr - tools_extract_basename - libdar_str2charptr_noexcept all return a char * pointer which area must be released by the caller using the delete[] operator - partially integrated Wesley's api_tutorial patch (added explanations) - Fixed installation problem of header files, problem reported by Juergen Menden - updated the examples programs for they properly initialize libdar - the gettext.h file was not installed with libdar headers - fixed typo error reported by Peter Landgren - updated api_tutorial with compilation & linking informations - fixed pedantic warning about some classes inherited from "mask" (the parent copy constructor is not called from inherited copy constructor; note that the parent class is a pure virtual class) - added Swedish translation (by Peter Landgren) - fixed typo in French translation - added a const_cast statment to avoid compilation warning under some systems - fixed problem on solaris where the TIME&MIN non canonical parameters for terminal are not set by default to 1 and 0 (but to 4 and 0), leading keyboard interaction to be impossible when dar needs user interaction. - added O_BINARY to open() mode in dar_cp, without this will cause some problem under Cygwin. 
from 2.2.0 to 2.2.1 - fixed execution problem for testing programs - added control code to avoid the "endless loop" warning when -M is used and root and archive are not located on the same filesystem - replaced an internal bug report by a more appropriate exception (elastic.cpp line 191) - fixed bug #31 - fixed bug #32 - fixed bug #33 - changed exception type when dar_manager's -D option does not receive an integer as argument - fixed bug #34 - added Wesley Leggette's patch to API tutorial - fixed inconsistencies concerning Native Language Support in Dar - added gettext NLS domain switch when incoming and exiting from libdar - fixed bug #30 - changed the way ctermid() system call is used - updated FAQ from 2.1.x to 2.2.0 - caching read/write for catalogue to drop the number of Context Switches. - added -aSI and -abinary options - added -Q option - added -G option - fixed a display bug about archive size, present when listing with -v option - added -aa / -ac options - added -M option - thread safe support for libdar - added -g option - added -am option - added -acase / -an options - user_interaction can now be based on customized C++ class - user_interaction_callback now have a context argument - added feature: dar_manager now restores directory tree recursively - added feature: dar_manager can receive a range of archive number with -D option - added summary at the end of configure script - added -j option (--jog) change behavior when virtual memory is exhausted - added Native Language Support - added feature that proposes removal of slices of an older archive of same basename - libz is now optional - libbz2 is now optional - added openssh's libcrypto dependency - added blowfish strong encryption - changed archive format number (version "04"), difference occures only when encryption is used - moved libdar operations (archive creation, listing, testing ...) 
as method of the C++ archive class - added thread cancelation routine - added feature : password can be read out of command-line (interactively at execution time). - added programming documentation (thanks to Doxygen) - optimize CRC computation for speed - added warning telling [list of path] is deprecated (better use -g option) - added Todd Vierling's patch for dar to compile under Interix from 2.1.5 to 2.1.6 - fixed compilation problem with gcc-3.x for dar64 - updated libtool to generate the configure script - fixed old info in dar's man page from 2.1.4 to 2.1.5 - added protection code against bad_alloc exception - new configure option to bypass libdl test - removed expected exception list in deci, limitint, real_infinint and storage modules to improve global robustness - remove the #pragma implementation/interface directives which tend today to become obsolete, and seems to be the cause of compilation problem on (recent) Linux kernel 2.6.7 for example. - added protection code to report bug conditions - code simplification for filesystem reading (while performing backup) - fixed bug #29 - fixed code syntax to support gcc-3.4.x from 2.1.3 to 2.1.4 - fixed bug #27 - improved limitint detection overflow - fixed bug #28 from 2.1.2 to 2.1.3 - fixed namespace visibility inconsistency for several call to open() - added "list:" key in conditionnal syntax, to stay coherent with man page - optimized dar_cp algorithm for speed in case I/O error - made dar_cp more talkative about events that succeed while copying data - fixed bug #25 - fixed bug #26 from 2.1.1 to 2.1.2 - fixed bug #24 - added "-w d" option which is equivalent to -w but necessary when dar is not compiled with GNU getopt - updated documentation about GNU getopt() vs non GNU getopt() - update configure script to have libgnugetopt auto-detection from 2.1.0 to 2.1.1 - fixed configure script warning when an include file is "present but cannot be compiled" - fixed bug #21 - fixed bug #22 - dar_xform and dar_slave 
now send their help usage on stdout (instead of stderr) - fixed typo in error message from 2.0.x to 2.1.0 - fixed bug #17 - API version 2 documentation - API version 2 implementation - -E and -F can now be present several time on command line and/or included files (dar, dar_slave and dar_xform) - context (%c in -E and -F) is now transmitted in the pipes from dar to dar_slave - added -wa option - added -as option - added -e option - updated the API to be able to add new encryption protocol later - root (-R argument) can now be a symbolic link pointing to a directory - fixed bug #17bis - added information returned by the system when error during read() to the message returned to the user - fixed bug #18 - documentation about filter mechanism added - fixed bug #19 - don't fail for a file if permission could not be restored - fixed bug #20 - configure script does not mess with CXXFLAGS or CFLAGS execpt when using debugging options. from 2.0.3 to 2.0.4 - updated autoconf version used to generate configure script (2.57 -> 2.59) The large file support is back with gcc-3 (was only working with gcc-2) from 2.0.2 to 2.0.3 - fixed bug #20 from 2.0.1 to 2.0.2 - fixed bug #18 - fixed bug #17bis - documentation about filter mechanism added - fixed bug #19 from 2.0.0 to 2.0.1 - fixed bug #17 from version 1.3.0 to 2.0.0 - using configure script (built with automake & autoconf) - creation of the libdar library - API for libdar (version 1) - updating TUTORIAL - added chapter in NOTES for ssh / netcat use with dar - added -H option - making documentation for API : DOC_API - speed optimization for dar_manager - enclosed libdar sources in libdar namespace - added libdar dynamic library support (using libtool) - fixed bug in ui_printf. 
Bug appeared with the shell_interaction split from user_interaction (for libdar) - fixed bug in dar_manager when creating empty database - changed hourshift implementation (no static variable used anymore) - changed code not to have dynamic allocation to take place before main() was called - added compilation time option to replace infinint by 32 bits or 64 bits integers - added special memory allocation (--enable-special-alloc) to better handle many small dynamic objects (in the meaning of OOP). - fix. Dar_manager does no more send all its output to stderr, just interactive messages are sent there. - changed "dar_manager -u" do not display anymore files present in the archive which have not saved data or EA in the asked archive. - removed displaying of command-line used for backup ("dar -v -l ...") as it is no more becoming inaccurate due to include files and as it would consume too much space if it has to be developed. - added sample scripts for using dar with Parchive - now displaying option read from configuration files when using -v option - added %e and %c for user script parameters - using UPX to compress binary if available at compilation time - removed comments put by mistake in 1.3.0 around warning when try to backup the archive itself. This revealed a bug, which made the warning be issued in some wrong cases. - removed this previous warning when creating archive on the stdout - fixed bug #15 - fixed error in libdar sanity checks, where exceptions were not raised (due to the lack of the "throw" keyword) - fixed bug #16 - changed order of argument passed to dar by dar_manager, for the -x be before any other option (in particular -B options). 
from version 1.2.1 to 1.3.0 - added parenthesis for a warning to be able to show, when opening a scrambled archive - fixed bug #10 - added feature : --flat option - improved slice name detection when given in place of archive basename - added feature : comments in the configuration file given to -B (see man page for more). - added feature : --mincompr option - fixed a display error when listing a hard link (the name of the first hard link seen on an inode was displayed in place of the name of each hard link). This did not concern the tree (-T option) listing. - added standard config files ~/.darrrc and /etc/darrc config files - conditional statements in included files (using make-like targets) - added feature : --noconf option - fixed a bug : warning message issued when th user asks for dar to backup the archive in itself, was not displayed in some cases. - fixed bug #11 - added total files counter in each archive while listing dar_manager database - fixed bug #12 - improved slicename versus basename substitution warning and replacement. - changed internal name generation to avoid using std::sstream class - bzip2 compression implemented (need libbz2 library at compilation time) - added the --nodump feature - fixed bug #13 - configuration file can have DOS or UNIX text formating - now closing files before asking for the last slice, this allow un-mounting filesystem in that case. from version 1.2.0 to version 1.2.1 - minor change to have backward compatibility with old archive (dar < 1.2.0) generated on 64 bits OS (have to use OS_BITS=32 in Makefile on 64 bits OS). - adapted Axel Kohlmeyer's patch for RPMS - adapted Dietrich Rothe's patch for compression level : -z has an optional argument which is compression level to use. - I and -X now available while listing archive contents (-l) - based on Brian May's patch, dar with EA_SUPPORT avoids complaining when reading a filesystem that do not supports EA. 
- based on Brian May's other patch, dar now uses by default the integers. - dar is now built with dynamic linking, and a special version named dar_static which is statically linked is also available - fixed problem on Windows NT & 2000 (fixed by first change above) from version 1.1.0 to version 1.2.0 - -P option can now accept wild cards - changed dar output format when listing archive contents to have something more similar to the output of tar. -T is provided to get the previous tree listing - fixed bug #6 - user interaction is now possible even if standard input is used (for pipe) - fixed bug #7 - added some missing #include files for compilation under Windows using Cygwin - added feature to display name of user and group (when possible) in place of uid and gid while listing archive contents. - added the possibility to launch command between slices (-E and -F options) for dar, dar_xform and dar_slave. - when saving or comparing a directory tree, DAR goes transparently in subdirectory not modifying the last_access date of each directory. - usage text (displayed by -h option) is now generated from xml file thanks to Chris Martin's little software named dar-help - fixed bug concerning the uninstallation of man pages - changed the place where man pages and documentation go /usr/share/doc usr/share/man in place of /usr/doc and /usr/man for the RPM package (conform to Filesystem Hierarchy Standard) - changed the place where documentation goes for /usr/local/doc to /usr/local/share/doc by default. (Thanks to Jerome Zago) (conform to Filesystem Hierarchy Standard) - added scrambling features (-J and -K options) - added selective compression (-Y and -Z options) - added third state for saved data to keep trace in an extracted catalogue of what is saved in the reference archive (this opens the door to the archive manager) - added the ability to read configuration file (-B option, -B like "batch"). 
- if a slice name is given in place of a base name, dar proposes to change to the correct base name (strips the extension number and dots). - fixed bug #8 - added dar_manager command-line program - replaced integer types by macro that can be adapted to have correct behavior on 64 bits platform (in particular to read archive from other platforms). from version 1.0.0 to version 1.1.0 - added feature: now ignored directory are not stored at all in the archive unless -D option is used, in which case ignored directory are recorded as empty directory (as it was in 1.0.x) - added support for hard links. Generated archive format version is now 02, but format 01 can still be read, and use as reference. - fixed bug #1 - fixed bug #2 - fixed bug #3 - added feature: restore only more recent file than existing one (-r option) - added feature: support for Extended Attributes (activated at compilation) - added feature: verbose option (-v) with -l (add archive contents) - modified behavior: -l option without -v is no more interactive - added feature: archive integrity test (option -t). CRC have been added in the archive (format 02), thus even without compression Dar is able to detect errors. - added feature: comparison with filesystem (difference) (option -d) - modified behavior: non interactive messages goes to stdout, while those asking user, goes to stderr (all goes to stderr if stdout is used for producing the archive, or for sending orders do dar_slave. - added feature: DAR automatically goes in non interactive mode if no terminal is found on standard input (for example when run from crontab). In that case any question make DAR to abort. 
- added feature: catalogue extraction to small file: "isolation" (-C option) - added feature: archive produced on stdout when using -c or -C with "-" as filename - added feature: -V option summarizes version of the binary - added feature: additional command "dar_xform" to "re-slice" an archive - added feature: read archive trough a pair of pipes with the help of dar_slave - added feature: long option are now available (see man page) - fixed bug #5 - a lot of speed optimization in algorithm - changed exit codes to positive values in case of error - dar returns an new error code when an operation is partially successful (some filed missed to be saved / restored / tested / compared). - replace the use of vform() method by a customized simple implementation in the ui_printf() routine, this should now allow compilation with gcc-3 - changed long option that used an underscore character (`_') by a dash ('-') - added -O option to have a better behavior when used with non root user - added 'make doc' option in the makefile from version 1.0.2 to version 1.0.3 - bug #5 fixed from version 1.0.1 to version 1.0.2 - bug #2 fixed - bug #3 fixed from version 1.0.0 to version 1.0.1 - correction of few mistakes that lead the compilation to fail with certain C++ compilers - bug #1 fixed. 
dar-2.6.8/doc/0000755000175000017520000000000013617552352010056 500000000000000dar-2.6.8/doc/dar_key.txt0000644000175000017520000000627713476543220012166 00000000000000-----BEGIN PGP PUBLIC KEY BLOCK----- Version: GnuPG v1.4.10 (GNU/Linux) mQINBFBRzaMBEADLGrs4IyWuwqlgvp+OyzoMMlGk2r+PHjZC74vB8CoCgx1pybX8 U4OH7+9xZJVuOJTBOvi4xFtfDLZQlsXYFUTvsUI7nUuJfmzk5OkN/GdRkIIIagXL orUMDXsI7M89hrPxMQTZLC8im0PRF3VBNibcvIA4XIFM+L37jQpfZsztshrL6QHu GISxQHoFV48ojl+K/hb5qDDq6mzb2H7TUHoQX4RStsNO0l7X++RwZ4C3feAzGGCN rM7Tm8+PgapAzycyKZ4lx/PlzyJKWS/Q+Cu6aLuqHcO7TbHRfibvBBIfyhdAlZQx ALEcFflc8BZpDVsIc6f5DQDj28zgGGY44Mz8ZgfOYrjW9fm2aSPcp/im3nBlqsal HP5FFbJhJpDfZMsnBPdaMMtIzR6R5iJAswYkraHRrkkhGX9OkV85dRkAset5e0V/ mtzQWdDNjoIK42vb0tDMUusjCOQODKLC4l8Xn7VGZLRAm6X+s5qug4fZy/YQEL3m SlRzeoTwM/Ri1SeLLx2WxtylXczJ1qEwHV7lQIZYg2iAvyP1bkO0rpNXEAUTGvCl LM8vXXMDXl4LtN6H9O/tE/jOtY/w3u8HVPnkX/2NflqkeQGM6gqsmOGBuoPvaeWY SPQIoe4vOGlkcnzNxEr24i7VKb4QrMM7pdvX8WPSgoBweU2SHP7HCqDPGQARAQAB tDxEZW5pcyBDb3JiaW4gKGh0dHA6Ly9kYXIubGludXguZnJlZS5mci8pIDxkYXIu bGludXhAZnJlZS5mcj6JAj4EEwECACgFAlBRzaMCGwMFCRH1qD0GCwkIBwMCBhUI AgkKCwQWAgMBAh4BAheAAAoJEAgxsL0D2LGC4ZEP/2+9kmhGSNLXsweDyJqhH4mY F2PsqoVw2fFbUyOCDtF69mGT5fy47OJ1U9e1TRpiMg6ojlrb1n0WaHtFT4byLQPa dbKLO22XiEv4EuWS5rPvZQNXW4hkcgA75ECnpJSChWRoHKHdx6HylP1X/3+lbVJ7 AFVXG39CUiaqriRcYgGJ28TE7zfxqaGZAsY0EPOBpj+7qXqqAS3tBDiFL7CJSnhN R6YF3nBvjqgnbG+/hR10UXgvfUZHjMelnsSlRE59J4jHYnSnpDFqyeHZvULDNCJx lBb9yW0351fY6MGtLGdCw58qGzqUiD/t6aXZdL0U90R1qUTrY1CteeBgmpwtFXY7 j3jOTXeySGj+/X6jlkJI+oauKJ1t4KHyldKznepqhblgdL3KegfAWgjd2MdCLiJe x5KCV3n9Q/GrsHMK0zRJH7NtI8UrPdcNLdif3HazeVcfQoeZON/yQ3FM5TGmOB88 xLgNFIbspT0peq3U/1hvgV/bR+hKYhdB/dxKo6A/Hlb4H9247prsd5/K7sjmz/T8 DFE6kAE+OB87eZIL7uwvIFhJuh6jGGJTLkQosoFHIkJ7b51rKlGXsfxl4KmXJKP+ 3qdzRBp1FzchQYbWMQA104GAfD6zjqoJL2hEAvNtuVb5pHuOj7FNn6oLzmroiC83 ItprEJz0gwUGYh+7TbcgiEoEEBECAAoFAlBRz2kDBQJ4AAoJEKQuQiPIGBpS5r0A niu5gBzPPyZ4hLsA2o+23bWmimcaAJ4oTlg737i9yM2ea888L3DCy8l6SLkCDQRQ Uc2jARAAwGv3DMUijz4AgoSFoi5mfLxa/Ilg6OogFdl8UzjkJ9fQ9aBFdwCqhrg+ 
m75DAGcsn7S+e62nX0W9lPY3sy2zNq98Hh1wMuHI1cKCw3ricdWxbxvpnMui2gPI 3vMP33kU55bokSvBIOZc0wBbZg4BFrEcz7JpIsK46lflxPYvtnjFIiA646mtW/xH 8JKNHpVefHfdYBgCvHUW0lL/wchdm5snfXCe7FWZRqTljSVo+RfykKygp1zOiBPl OT8ePuIb0wzF7f3K0OFH1K2wOdipykuUgmzvwXVXP4FaIxEXwtlKSQEu9ScjxcVo UCvYVNcjD3RBcmUeh2vWnJ0saSSGKnwjTKoEEQnKW53soxGTr5KueFv7Op1Rmyp9 Oli+sDpaJIKnk149FBlMVRvBe7OyFB5e8imvJNUbq4MDZ6N187FKsQK887Xv7oCY jCvpcJ376v0YVAtAedw5kMlQhwVsCRWtq87mBCvEbVga66n3wZ6SPVXJGfBSqBgk mp3Oid4U4noO7yvo3E2yU6ejLb8Vq4gff+2Aaivu2IwgnKn/7OiPLoUhUN9iyFgy TKjqgSU0W5qMxa29P7qVHZu7nrZ87QMRY83bcG/D0IJPBHGVUoVovoYozRBz2wVp xcoGXSUcV6mxxj274dkIFZ31hhQBgQOs+KN+JxHEZ8Q1KbCNewMAEQEAAYkCJQQY AQIADwUCUFHNowIbDAUJEfWoPQAKCRAIMbC9A9ixgqZCD/0bfAbIc3Fyq0q9e4Ch i7ayssNUzq70nvpHaIf5JTL9BTVeeb4mR4ALfR9ncwuorgMbQjkD34aiTULa+xCc sEU4giJC/voj0Dae2Zw3kXMR1jexQFWho4GbzYtmJ9QiwLQDMidACDsY75V0a1zc g+MXnwlcRuZj04v0MTdV+kFGF5qJ+H6JOaoILSAhOlury9XDyHP8D2fV+nMxplI9 nU4g49La62jP1ZJOMwitPBQ333FaSrdHa3DTrJh3UX8+A5aiOYRJM7C3FfJZERaS 0iSYf0Vcq0XTr3ORQ0qykJfwipA4ukA8zT+b7CkWDP0IsxMlSUZaDm4nCE498pt8 QhT/v/mU4WMD5PkB+8olJr9nEJYimakoJguxFiH2QPuQbEGHE8hfbHiLVfTVY9ak jTm2BHOJrFFJINCtcX2j8Dzf6SFuo+iPmwRraIQ9d83AruBCSesv8Po2Oe2//zV9 fW+WpbgJVxxgr58LvtrRUOK+HNbNgmPkgKSmq7weSXkDxr5jtVACLhKtN5WWZhnY p/qwGK4J60P/RJQxBrTI660becfGw+ngNG8uAeJ/R87+EDr5BD/nHqCHqcsl1WZE W+i2hbcYSp8BMl3x8Gsup4vWqcORZAhgssdHQW7aJ6msuGVsj5u3VZ9ZSO8/gXLn nQ/zwsG46MiZyLiMbligccsRhw== =49T8 -----END PGP PUBLIC KEY BLOCK----- dar-2.6.8/doc/mini-howto/0000755000175000017520000000000013617552352012150 500000000000000dar-2.6.8/doc/mini-howto/Makefile.am0000644000175000017520000000055613476543220014127 00000000000000dist_pkgdata_DATA = dar-differential-backup-mini-howto.en.html dar-differential-backup-mini-howto.it.html dar-differential-backup-mini-howto.es.html index.html install-data-hook: $(INSTALL) -d $(DESTDIR)$(pkgdatadir)/mini-howto $(INSTALL) -m 0644 $(dist_pkgdata_DATA) $(DESTDIR)$(pkgdatadir)/mini-howto uninstall-hook: rm -rf $(DESTDIR)$(pkgdatadir)/mini-howto 
dar-2.6.8/doc/mini-howto/dar-differential-backup-mini-howto.en.html0000644000175000017520000012605413476543220022117 00000000000000 DAR differential backup mini-howto -EN-

DAR differential backup mini-howto -EN-

Author: Grzegorz Adam Hankiewicz
Contact: dar@gradha.imap.cc
Date: 2012-12-19
Web site:http://gradha.github.com/dar-differential-backup-mini-howto/
Copyright: This document has been placed in the public domain.
Translations:From the web site you can get this document in English, Italian and Spanish.

Introduction

We all should make backups of our important data. This omnipresent advice is usually ignored by most people. I ignored it too, until I lost a good deal of important data. As if that weren't enough, I managed to continue losing data in a few subsequent incidents, until I decided that it was enough. Then I browsed Freshmeat for backup solutions allowing differential backup and found DAR.

A complete backup means that all the files falling under your backup policy will be saved. A differential or incremental backup will contain only the files whose contents have changed since the previous backup, either full or differential.

DAR allows you to create easily a set of differential backups. The solution I've developed helps me have an automatic backup solution which runs every night. The first day of the month, a full backup is made. The rest of the month, only differential backups are made. In my situation, very few files change from day to day, sometimes the source code of the project I'm hacking on, and always my mailboxes.

The result is that I can restore the contents of my computer to a specific day with ease, if I ever need to. DAR is a command line program, and it can get slightly complex with a few options. This little mini-howto will explain my custom solution, which is very crude, but works fine for me. Yes, I've actually tested restoring the data from the backup. In fact, during the end of the year 2003 I moved to another country and I took just one CD ROM with me plus a bootable Knoppix, and I recovered the exact state of my Debian installation in a few hours. No customizing, no long installations, no missing files.

This document was written using version 1.3.0 of DAR. When I updated to DAR 2.0.3, everything kept working, I didn't even have to update my backup archives. So it looks like the interface and backup format are pretty stable, or at least backwards compatible. However, don't take everything said here for granted. Verify that the version of DAR you have installed works as expected and you can restore from the generated backup before you have to rely on it.

This version of the text uses reStructuredText (that's what the weird markup in the text version is for). See http://docutils.sourceforge.net/ for more information.

Simple DAR usage

DAR is very similar to tar in the number of options it has: there's plenty for every need, but way too much for beginners to handle. As usual, you can always get help from the program typing dar -h or man dar after you have installed it. Like tar, there's a set of mandatory switches which define the type of operation you are doing (create, extract, list, etc), and a set of switches which affect the selected option. Just for the sake of it, imagine that you want to backup one folder of your home directory. You would write something like this:

dar -c backup_file_without_extension -g file1 -g file2 ... -g fileN

The output should be similar to the following:

$ dar -c my_backup_file -g safecopy.py/ -g translate_chars.py/


 --------------------------------------------
 15 inode(s) saved
 with 0 hard link(s) recorded
 0 inode(s) not saved (no file change)
 0 inode(s) failed to save (filesystem error)
 4 files(s) ignored (excluded by filters)
 0 files(s) recorded as deleted from reference backup
 --------------------------------------------
 Total number of file considered: 19
$ ls
mailbox_date_trimmer/  my_backup_file.1.dar  sdb.py/
mailbox_reader/        safecopy.py/          translate_chars.py/

As you will notice, DAR will add a number and extension to your name. The purpose of the extension is clear, it helps to know visually that the file is a DAR backup. The number is called a slice, and this is related to DAR's built-in feature of splitting a backup over several media. If for example you wanted to make a backup to CD ROM, but your directories are bigger than the capacity of one CD ROM, you can tell DAR to split the archive across as many files as needed, which you can later burn to several units.

Would you like to recover that backup? Pretty easy, type the following:

$ mkdir temp
$ cd temp
$ dar -x ../my_backup_file
file ownership will not be restored as dar is not run as root.
to avoid this message use -O option [return = OK | esc = cancel]
Continuing...


 --------------------------------------------
 15 file(s) restored
 0 file(s) not restored (not saved in archive)
 0 file(s) ignored (excluded by filters)
 0 file(s) less recent than the one on filesystem
 0 file(s) failed to restore (filesystem error)
 0 file(s) deleted
 --------------------------------------------
 Total number of file considered: 15
$ ls
safecopy.py/  translate_chars.py/

The backup strategy

The first step to create a good backup is to determine what parts of your system need one. This doesn't necessarily mean that you can't create a full backup, but most likely splitting it into at least two parts is going to help DAR (or any backup tool) a lot.

My home system consists of two hard disks. The first hard disk is split into a 3.8 GB partition where my complete system lives, and another partition of 11 GB where all my music and other temporary files are stored, like a local Debian package repository I make for myself. The second hard disk has a 9.4 GB partition and its only purpose is to serve as backup of the primary disk. I have no interest in backing up my music, because I have all the original CDs lying around and have scripts to re-ogg them.

From the 3.8 GB I want to backup, usually between 1.3 and 1.5 GB are always empty. I will split logically the used 2.3 GB into system and home directories (at the moment of writing this my home is 588 MB). The reason for this split is that as a normal user, I can only change my home directory and other files from the partitions I won't be backing up. Meanwhile the system part of the partition remains pretty stable and unmodified because I rarely (un)install software. In fact, from my home directory the only things changing usually will be my Mail folder and projects, where I put documents like this one and other software I write/hack.

The basic distinction between home directories and system can be useful in organizations too. If you work for a university, usually all machines will have the same system configuration but depending on the machine their homes will have different data. You can make a system backup of a single machine, and home backups of each computer. Another common configuration is having a centralized server which exports home directories with NFS. Here you only have to backup the server. If you have users with high privileges, leave them the task of doing the system backup of their own machines, the exported home is something they can ignore because it will be done at the server machine.

Once you've decided what to backup, you want to decide how to configure DAR for the backups. You can use switches or configuration files. Switches are OK when you don't have many options. Configuration files are better when you want to make different complex inclusion/exclusion rules of what files you want to backup, and more importantly, you can use comments to document the switch, stating for example the reason why you included this or that directory. This can be useful if you come back several months later and you wonder why all those options are there.

For my setup, I'll be running the DAR commands inside shell scripts called periodically by cron (Setting up some scripts to automate the process), so I don't mind having long command lines, and this very same document serves for the purpose of documenting the scripts. If you prefer configuration files, read DAR's documentation to find out how to use them and the format they use.

Making a full backup with DAR

Here is the full command line I'll be using for my system backup, running as root. Don't worry about the high number of switches, I'll go on describing the purpose of each of them:

dar -m 256 -y -s 600M -D -R / -c `date -I`_data -Z "*.gz" \
   -Z "*.bz2" -Z "*.zip" -Z "*.png" -P home/gradha -P tmp \
   -P mnt -P dev/pts -P proc -P floppy -P burner -P cdrom
  • -m 256

    DAR can compress your backup. The compression is applied to individual files, and it can be bad for small files. By default files with 100 bytes or less won't be compressed. With the -m switch I increase this to 256, which seems to work better for all those little configuration files lying under /etc/ and /home. As you see this is a totally optional switch, basically for tuning freaks like me.

  • -y [level]

    This option activates Bzip2 archive compression, which by default is turned off. You can even specify a numeric compression level, which goes from 0 (no compression) to 9 (best compression, slow processing). Bzip2 by default uses 6, which is the best speed/compression ratio for most files. I don't specify compression level, 6 is fine for me.

  • -s 600M

    Here comes DAR's slice feature. The specified size of 600 Megabytes is the maximum file size DAR will create. If your backup is bigger, you will end up with different backup files each with a slice number before the file extension, so you can save each file to a different unit of your backup media (floppies, zip, CDROM, etc). My backups are much smaller than this size, and I keep this switch just to be safe if I happen to create a big file in my home directory and forget to delete it. If this switch is useful for you, check DAR's manual for the -S switch too.

  • -D

    Stores directories excluded by the -P option or absent from the command line path list as empty directories. This is helpful when you are recovering a backup from scratch, so you don't have to create manually all the excluded directories.

  • -R /

    Specifies the root directory for saving or restoring files. By default this points to the current working directory. We are doing a system backup here, so it will be the root directory.

  • -c `date -I`_data

    This is the mandatory switch I talked of before, and it means to create a backup archive. For those who don't understand what follows, `date -I` is the shell's back tick expansion. In short, date -I will provide a date as YYYY-MM-DD format. With back ticks and used as a parameter, the output of the command will be used as a string of the parent command. This way you can create backup archives with the creation date embedded in the name. If you still don't understand what I'm talking about, try to run the following from the command line:

    echo "Today's date is `date -I`"
    
  • -Z file_pattern

    Using normal file name globing you can specify patterns of files you want to store in your archive without compression. This only has sense if you use the -y switch. Compressing compressed files only yields bigger files and wasted CPU time.

  • -P relative_path

    With this switch you tell DAR which paths you don't want to store in your backup archive. Here you want to put the home directory (I'm the only user on this machine, there are a few more, but they are for testing/system purpose), system directories which aren't really physical files like proc, other drives you may have mounted under mnt (most notably the drive you are putting the backup file), etc, etc. Note that the paths you specify must be relative to the path specified by the -R switch.

That wasn't so hard. Check DAR's manual page for more useful switches you might want to use. And here's the command line I'll be running as a plain user inside my home directory:

dar -m 256 -y -s 600M -D -R /home/gradha -c `date -I`_data \
   -Z "*.gz" -Z "*.bz2" -Z "*.zip" -Z "*.png" \
   -P instalacion_manual -P Mail/mail_pa_leer

Nothing new under the sun. As you see, most of the command line is identical to the other one, I only change the name of the directories I want to exclude with -P and the root directory with the -R switch.

Making differential backups with DAR

Once you have a full backup you can create a differential backup. The first differential backup has to be done using the full backup as reference. The following differential backups use the latest differential backup as reference. Here's the command line for a system differential backup:

dar -m 256 -y -s 600M -D -R / -c `date -I`_diff -Z "*.gz" \
   -Z "*.bz2" -Z "*.zip" -Z "*.png" -P home/gradha -P tmp \
   -P mnt -P dev/pts -P proc -P floppy -P burner -P cdrom \
   -A previous_backup
  • -c `date -I`_diff

    I only change the name of the file, cosmetic purpose.

  • -A previous_backup

    This new switch is used to tell DAR where the previous backup can be found, so it can create a differential backup instead of a full backup. The only thing you have to take care of is to specify neither the slice number nor the extension in the file name, otherwise DAR will ask you an interactive question at the command line.

The user command line is exactly the same. Here it is for completeness:

dar -m 256 -y -s 600M -D -R /home/gradha -c `date -I`_diff \
   -Z "*.gz" -Z "*.bz2" -Z "*.zip" -Z "*.png" \
   -P instalacion_manual -P Mail/mail_pa_leer -A previous_backup

DAR has another nice feature we don't use here: catalogues. When you create a backup archive with DAR, internally it contains the data plus a catalogue. This catalogue contains information about what files were saved, their dates, their compressed size, etc. You can extract the catalogue and store it separately. Why would you want to do this? To set up networked differential backups.

In order to create a differential backup, you need to provide the previous backup so DAR can decide which files have changed or not. Doing this can be expensive in bandwidth if you work with a network. Instead, after you create a backup, you can extract the catalogue and send it to the machine doing the backups. Next time, you can use this file with the -A switch, and it will all work as if the complete file was there.

This can be also useful if you use slices, because the catalogue is created from the first and last slice. It's more comfortable to pass a single file to the backup command rather than having to carry the disks of your previous backup with you.

Setting up some scripts to automate the process

As said before, now it's the time to put our backup solution under cron. Place the following executable script for system backup under /root/dar_backup.sh:

#!/bin/bash
# Monthly full system backup, meant to be run from root's crontab.

# Destination directory for the backup archives.
DIR=/var/backups/system
# Today's archive base name, e.g. /var/backups/system/2003-10-01_data
# (no extension: dar appends the slice number and .dar itself).
FILE=${DIR}/`/bin/date -I`_data
# Commands
# Create the archive; stdout is silenced so cron only mails on errors.
/usr/local/bin/dar -m 256 -y -s 600M -D -R / -c $FILE -Z "*.gz" \
   -Z "*.bz2" -Z "*.zip" -Z "*.png" -P home/gradha -P tmp \
   -P mnt -P dev/pts -P proc -P floppy -P burner \
   -P cdrom -P var/backups > /dev/null
# Test the archive's integrity right after creating it.
/usr/local/bin/dar -t $FILE > /dev/null
# Hand the archives over to user gradha, read-only (440).
/usr/bin/find $DIR -type f -exec chown .gradha \{\} \;
/usr/bin/find $DIR -type f -exec chmod 440 \{\} \;

Some things to notice:

  • DIR is the variable which holds the destination directory.
  • FILE will hold the path to today's backup file.
  • I use full paths for the commands because my root account doesn't have all of them included in the default environment. This is potentially a security risk. Ideally you would like to compile DAR as root and keep your binaries where you make them so nobody can touch them. And run Tripwire over them too.
  • DAR generates statistics after each run. We don't want them in our cron because it will generate unnecessary mail. Only stdout is redirected to /dev/null. Errors will be reported and a mail generated if something goes wrong.
  • The last two find commands are optional. I use them to change file ownership to a normal user, which will later create the backup. Again, another security risk. root should backup that from root, and users should backup their stuff. But with a mono user system, I don't care. If some intruder is good enough to go through my firewall and account passwords to take a look at my backups, I'm already screwed.

Now place the following nearly identical script for differential backups under /root/dar_diff.sh:

#!/bin/bash
# Daily differential system backup, meant to be run from root's crontab.

# Destination directory for the backup archives.
DIR=/var/backups/system
# Today's archive base name, e.g. /var/backups/system/2003-10-02_diff
FILE=${DIR}/`/bin/date -I`_diff
# Most recent existing archive: ls sorts alphabetically, and the
# YYYY-MM-DD prefix makes that chronological; tail -n 1 keeps the last.
PREV=`/bin/ls $DIR/*.dar|/usr/bin/tail -n 1`
# ${PREV%%.*} strips the slice number and .dar extension, since -A
# wants the archive base name only (otherwise dar asks interactively).
/usr/local/bin/dar -m 256 -y -s 600M -D -R / -c $FILE -Z "*.gz" \
   -Z "*.bz2" -Z "*.zip" -Z "*.png" -P home/gradha -P tmp -P mnt \
   -P dev/pts -P proc -P floppy -P burner -P cdrom \
   -P var/backups -A ${PREV%%.*} > /dev/null
# Test the archive's integrity right after creating it.
/usr/local/bin/dar -t $FILE > /dev/null
# Hand the archives over to user gradha, read-only (440).
/usr/bin/find $DIR -type f -exec chown .gradha \{\} \;
/usr/bin/find $DIR -type f -exec chmod 440 \{\} \;

The only two changes are the addition of the -A switch and the generation of the PREV variable with a complicated command line. Let's see what this command line does:

  • First the ls command creates a list of the files with .dar extension in the backup directory. This output is piped to the next command.
  • By default ls displays files alphabetically. tail is used to get the last file with the -n 1 switch, which says to display only the last line.
  • DAR wants to operate on filenames without slice number and extension. This means that if we don't get rid of the tail, DAR will stop the operation and ask an interactive question to the user, defeating the purpose of automation. We separate the complete filename with a Bash feature called parameter expansion. There are several possible expansions, you can type man bash to see all of them. The one using %% will remove the longest tailing pattern that matches whatever goes after the %%. The result is the base name we want to pass DAR.

We only have to put these two scripts under cron control. This is what we have to type after crontab -e:

15 0 2-31 * * ./dar_diff.sh
15 0 1    * * ./dar_backup.sh

Look up in man -S 5 crontab the syntax of the command. In short, those two lines tell cron to run the scripts 15 minutes past midnight. dar_backup.sh will be run only the first day of the month. The other script will be run all the other days.

Here are the backup scripts for your users. They are the same, changing only switches to the DAR command and paths:

#!/bin/bash
# dar_backup.sh
# Monthly full backup of the user's home directory, run from the
# user's own crontab.

# Destination directory for the backup archives.
DIR=/var/backups/gradha
# Today's archive base name, e.g. /var/backups/gradha/2003-10-01_data
FILE=${DIR}/`/bin/date -I`_data
# Commands
# Create the archive; stdout is silenced so cron only mails on errors.
/usr/local/bin/dar -m 256 -y -s 600M -D -R /home/gradha -c $FILE \
   -Z "*.gz" -Z "*.bz2" -Z "*.zip" -Z "*.png" \
   -P instalacion_manual -P Mail/mail_pa_leer > /dev/null
# Test the archive's integrity right after creating it.
/usr/local/bin/dar -t $FILE > /dev/null
# Make the archives read-only for the owner (400).
/usr/bin/find $DIR -type f -exec chmod 400 \{\} \;

#!/bin/bash
# dar_diff.sh
# Daily differential backup of the user's home directory, run from the
# user's own crontab.

# Destination directory for the backup archives.
DIR=/var/backups/gradha
# Today's archive base name, e.g. /var/backups/gradha/2003-10-02_diff
FILE=${DIR}/`/bin/date -I`_diff
# Most recent existing archive: ls sorts alphabetically, and the
# YYYY-MM-DD prefix makes that chronological; tail -n 1 keeps the last.
PREV=`/bin/ls $DIR/*.dar|/usr/bin/tail -n 1`
# NOTE: the second "*.zip" was a typo; it now reads "*.png" so that PNG
# files are excluded from compression like in every other command here.
# ${PREV%%.*} strips the slice number and .dar extension, since -A
# wants the archive base name only (otherwise dar asks interactively).
/usr/local/bin/dar -m 256 -y -s 600M -D -R /home/gradha -c $FILE \
   -Z "*.gz" -Z "*.bz2" -Z "*.zip" -Z "*.png" \
   -P instalacion_manual -P Mail/mail_pa_leer \
   -A ${PREV%%.*} > /dev/null
# Test the archive's integrity right after creating it.
/usr/local/bin/dar -t $FILE > /dev/null
# Make the archives read-only for the owner (400).
/usr/bin/find $DIR -type f -exec chmod 400 \{\} \;

Don't forget to add the required crontab entries for your user pointing to the appropriate path.

Recovering your backup to a clean machine

When the time comes to restore your backup, depending on what you saved you will have a full backup of one month plus differential backups up to the last time you managed to make. The restoration process is very simple, it's the same as described on the first chapter (Simple DAR usage), only you have to do it first for the full backup, and then for the differential ones. This can be boring, so here's another shell script you can save with your backup files:

#!/bin/bash
# Restore a full backup plus every differential backup that follows it.
# Usage: script dar_location base_full_backup directory

if [ -n "$3" ]; then
   CMD="$1"        # path to the dar binary
   INPUT="$2_data" # full-backup base name, e.g. "2003-10-01_data"
   FS_ROOT="$3"    # directory to restore into (passed to dar -R)
   # Restore the full backup first; -w overwrites existing files,
   # which the differential restores below rely on.
   $CMD -x "$INPUT" -w -R "$FS_ROOT"
   # ${INPUT:0:8} is the "YYYY-MM-" prefix, so the glob matches every
   # diff archive slice of that month, in chronological (alphabetical)
   # order.
   # NOTE(review): with multi-slice archives this glob yields one
   # iteration per slice file, re-running dar on the same archive —
   # harmless but redundant; works as intended for single-slice backups.
   for file in ${INPUT:0:8}*_diff*; do
      # ${file:0:15} trims "YYYY-MM-DD_diff.N.dar" down to the
      # base name "YYYY-MM-DD_diff" that dar -x expects.
      $CMD -x "${file:0:15}" -w -R "$FS_ROOT"
   done
   echo "All done."
else
   echo "Not enough parameters.

Usage: script dar_location base_full_backup directory

Where dar_location is a path to a working dar binary, base_full_backup
is a date in the format 'YYYY-MM-DD', and directory is the place where
you want to put the restored data, usually '/' when run as root."
fi

The script is pretty self-explanatory. The only thing you need to pay attention to is the -w switch, which tells DAR to overwrite found files. This is necessary for differential backups. Oh, and place the script in the same directory where you put your backup files. Here's a usage example:

./recover.sh /usr/local/bin/dar 2003-10-01 /tmp/temp_path/

Try to run that as a normal user with a few of your backup files. You can put the result in a temporary directory, so the nice thing is you don't have to wipe your hard disk to test it.

Adding checks to the backup scripts

Denis Corbin suggests that the scripts creating the backups could verify the exit status of the DAR command. For the purpose of these very simple scripts this is not critical because DAR itself will bail out with an error message, and cron will report any output through mail (something which doesn't happen if everything goes right).

However, testing the exit status can be useful if you are testing the scripts interactively and want to know which commands are executed:

#!/bin/bash
# Variant of the system backup script that checks dar's exit status,
# useful when running interactively to see which step failed.

# Destination directory for the backup archives.
DIR=/var/backups/system
# Today's archive base name, e.g. /var/backups/system/2003-10-01_data
FILE=${DIR}/`/bin/date -I`_data
# Commands
# Only test the archive if creating it succeeded; report each outcome.
if /usr/local/bin/dar -m 256 -y -s 600M -D -R / -c $FILE -Z "*.gz" \
      -Z "*.bz2" -Z "*.zip" -Z "*.png" -P home/gradha -P tmp \
      -P mnt -P dev/pts -P proc -P floppy -P burner \
      -P cdrom -P var/backups > /dev/null ; then
   if /usr/local/bin/dar -t $FILE > /dev/null ; then
      echo "Archive created and successfully tested."
   else
      echo "Archive created but test FAILED."
   fi
else
   echo "Archive creating FAILED."
fi
# Hand the archives over to user gradha, read-only (440).
/usr/bin/find $DIR -type f -exec chown .gradha \{\} \;
/usr/bin/find $DIR -type f -exec chmod 440 \{\} \;

You can test this version easily running the script and killing the DAR process from another terminal or console with killall dar. That will force the termination of the DAR process and you will see that one of the failure branches is reached in the backup script.

Another possible use of testing the status code could be to remove incomplete archives from the hard disk if something went wrong, trigger additional external commands when something fails, or avoid testing the created archive when you know that the first command already failed. The latter can be done easily concatenating both the creation and testing commands with && in a single line. That will tell the shell to run both commands as a sequence and avoid running the second if the first failed.

However, if a power failure happens in the middle of a backup, this version of the script would still leave dangling invalid archives. To prevent this you could enhance the script to do a positive verification. This means creating the backup in a temporary directory along with a *.valid file if the successful branch of the script is reached.

With this strategy, another cron script monitoring the directory where the temporary backups are placed would move to the final backup directory those archives which have a *.valid file, deleting all other whose last modification timestamp is older than one hour.

Ideas for the future

I'm not going to implement these soon, because I'm very lazy, but if you are one of those hyperactive hackers, here are some things which would be nice:

  • Unify both the main and differential scripts into a single one, so if the script is run and there is no main backup for the current month, the main backup will be created. Useful if your machine happens to be down during the time the monthly backup is done.

  • Upgrade the scripts to generate daily a CDROM image with cdrecord and burn it automatically to a rewritable disc placed in your machine. So if your whole hard disk is trashed, you still have the last backup on removable media. Of course, this is limited and cannot be automated if your backup spans more than one CDROM. Do the same for ZIP/JAZZ/whatever you have.

  • Integration of generated backups with a mini Knoppix bootable distribution. Or any other floppy distribution which can be booted from CDROM. So you have a recovery CDROM with tools to format your hard disk, and near it you have a fresh backup to restore a working machine.

  • Synchronisation of backup directories through Internet with remote hosts. Even if the whole machine is burnt physically along with your house, you have up to date backups somewhere else. Could be done easily with programs like rsync through ssh running in a cron job.

  • Factor common parameters into a separate file and include it from your scripts using DAR's -B switch. For instance:

    $ cat > /var/backups/system/common.dcf
    -m 256 -y -s 600M -D -R / -Z "*.gz" -Z "*.bz2" -Z "*.zip" \
    -Z "*.png" -P home/gradha -P tmp -P mnt -P dev/pts \
    -P proc -P floppy -P burner -P cdrom -P var/backups
    

    Later on you could use this in the script:

    DIR=/var/backups/system
    FILE=${DIR}/`/bin/date -I`_data
    # Commands
    /usr/local/bin/dar -B ${DIR}/common.dcf -c $FILE > /dev/null
    /usr/local/bin/dar -t $FILE > /dev/null
    /usr/bin/find $DIR -type f -exec chown .gradha \{\} \;
    

    Which you can reuse in the differential version too!

In fact, clever people out there have already started making such scripts for themselves and are not afraid to share them. To avoid cluttering this mini-howto I'm going to store them as-is at my web page: https://github.com/gradha/dar-differential-backup-mini-howto/tree/master/contrib.

Feel free to send me your own improvement and I'll add it to the directory. Whether you are sending a single script file or .tar.gz with a whole backup suite, please add a simple .txt file which I'll put near the file, so people can read what the file does before downloading. Please use English in your description, and don't forget to put your name and email so people can send you bugfixes or improvements!

The end

And that's the whole magic. If you have problems, something is unclear or wrong (which is worse), drop me an email. If you find this document useful and want to translate it, send me a translation of the file source.en.txt so I can distribute it along this version and users can find easily their localized version. Talking about locations, you should be able to get the source of this document from my personal home page (link at the beginning of the document).
Enjoy!
dar-2.6.8/doc/mini-howto/README0000644000175000017520000000046013476543220012745 00000000000000 Mini Howto What you will find here is a mini howto compiled by Grzegorz Adam Hankiewicz. David Gervasoni has made a italian translation. An up to date version of these howto can be find on the web site as well as pdf formated versions. http://gradha.github.com/dar-differential-backup-mini-howto/ dar-2.6.8/doc/mini-howto/dar-differential-backup-mini-howto.it.html0000644000175000017520000013434213476543220022130 00000000000000 DAR differential backup mini-howto -IT-

DAR differential backup mini-howto -IT-

Author: Grzegorz Adam Hankiewicz
Contact: dar@gradha.imap.cc
Translator:David Gervasoni
Contact: davidgerva@gmail.com
Date: 2012-12-19
Web site:http://gradha.github.com/dar-differential-backup-mini-howto/
Copyright: This document has been placed in the public domain.
Translations:From the web site you can get this document in English, Italian and Spanish.

Introduzione

"Chiunque dovrebbe fare le copie di backup dei suoi dati importanti". Questo avviso presente ovunque è generalmente ignorato da molta gente. Anche io l'ho ignorato, fino al giorno in cui ho perso una considerevole mole di dati. Non abbastanza contento ho fatto in modo di perderne ancora in una serie di successivi incidenti, per poi decidere che ne avevo abbastanza. Ho cercato quindi su Freshmeat qualche programma per la creazione di backup che supportasse anche la creazione di backup differenziali e ho trovato DAR.

Fare un backup completo (o base) significa salvare tutti i files che ricadono sotto le cartelle interessate dalla politica di backup. Un backup differenziale o incrementale conterrà invece solo i files il cui contenuto è cambiato rispetto al precedente backup, fosse esso completo o differenziale.

DAR permette di creare facilmente una serie di backup differenziali. Una soluzione che ho sviluppato esegue ogni notte dei backup automatici. Il primo giorno del mese viene fatto un backup completo. Il resto del mese vengono fatti solo backup differenziali. Per quanto mi riguarda i files che cambiano giornalmente non sono molti: il codice sorgente del progetto a cui sto lavorando e, più spesso, le e-mail.

Così, quando mi serve, posso recuperare con facilità il contenuto che presentava il mio computer uno specifico giorno. DAR si presenta come un programma semplice ed essenziale eseguibile da linea di comando, ma si può rendere un po' più complicato con poche opzioni. Questo piccolo mini-howto vi illustrerà la mia specifica configurazione, molto grossolana, ma, nel mio caso, funzionale. Ho già sperimentato il recupero dei dati dalle copie di backup. Infatti verso la fine del 2003 mi sono trasferito in un altro paese e ho portato con me giusto un CD ROM e una Knoppix bootable e ho recuperato l'esatto stato della mia vecchia installazione Debian in poche ore. Senza modifiche, senza alcuna ulteriore installazione e senza perdere alcun file.

Questo documento è stato scritto usando la versione 1.3.0 di DAR. Quando sono passato alla 2.0.3 tutto funzionava. Non ho nemmeno dovuto aggiornare i miei backup. Quindi sembra che l'interfaccia e i formati di backup siano stabili o al limite compatibili con le versioni precedenti. Comunque non prendete tutto ciò che dico (scrivo) quì come garantito. Verificate prima che la versione di DAR che avete installato funzioni come dovrebbe e potrete, in futuro, recuperare i files dai backup senza problemi.

Per questa versione del testo ho usato reStructuredText (ecco spiegato il misterioso markup nella versione txt). Vedi http://docutils.sourceforge.net/ per maggiori informazioni.

Utilizzo essenziale di DAR

DAR è molto simile a tar nel numero di opzioni che ha: ce n'è una per ogni necessità, ma questo comporta una maggiore difficoltà iniziale per il nuovo utente. Come sempre, in qualsiasi momento, potete avere degli aiuti relativamente ai comandi disponibili scrivendo dar -h o man dar dopo che l'avete installato. Come nel programma tar, esistono una serie di opzioni obbligatorie che definiscono il tipo di operazione che intendete fare (creare, estrarre, listare etc) e un'ulteriore serie di opzioni che modificano la scelta prima effettuata. Giusto per esempio immaginate di voler fare un backup di una cartella della vostra directory /home. Dovrete scrivere qualcosa di simile a questo:

dar -c backup_file_without_extension -g file1 -g file2 ... -g fileN

L'output dovrebbe essere simile al seguente:

$ dar -c my_backup_file -g safecopy.py/ -g translate_chars.py/


 --------------------------------------------
 15 inode(s) saved
 with 0 hard link(s) recorded
 0 inode(s) not saved (no file change)
 0 inode(s) failed to save (filesystem error)
 4 file(s) ignored (excluded by filters)
 0 file(s) recorded as deleted from reference backup
 --------------------------------------------
 Total number of file considered: 19
$ ls
mailbox_date_trimmer/  my_backup_file.1.dar  sdb.py/
mailbox_reader/        safecopy.py/          translate_chars.py/

Come avrete notato DAR aggiunge al nome del file un numero e un'estensione. Il motivo dell'estensione è chiaro, aiutare a capire che il file è un backup fatto con DAR. Il numero è chiamato slice ed è connesso alla possibilità di DAR di dividere il file di backup in base a grandezze specificate, in modo da poterle memorizzare su diversi supporti. Se per esempio voleste avere i backup su CD ROM, ma i backup delle vostre directory sono più grandi della capacità del CD ROM, potete chiedere a DAR di dividere l'archivio in tanti files che potrete poi memorizzare su diverse unità.

Volete recuperare questo backup? Scrivete semplicemente i seguenti comandi:

$ mkdir temp
$ cd temp
$ dar -x ../my_backup_file
file ownership will not be restored as dar is not run as root.
to avoid this message use -O option [return = OK | esc = cancel]
Continuing...


 --------------------------------------------
 15 file(s) restored
 0 file(s) not restored (not saved in archive)
 0 file(s) ignored (excluded by filters)
 0 file(s) less recent than the one on filesystem
 0 file(s) failed to restore (filesystem error)
 0 file(s) deleted
 --------------------------------------------
 Total number of file considered: 15
$ ls
safecopy.py/  translate_chars.py/

La politica di backup

Il primo passo per creare backup funzionali è determinare quali parti del vostro sistema necessitano di essere archiviate. Questo non sta a significare che non potete semplicemente fare un backup del vostro intero sistema, ma dividerlo in almeno due parti aiuterà molto DAR (o qualsiasi altro tool di backup) nel suo lavoro.

Il sistema inplementato in casa mia conta di due hard disk. Il primo hard disk è diviso in una partizione da 3.8 GB, dove risiede il mio intero sistema, e un'altra partizione da 11 GB dove sono memorizzati tutta la mia musica e altri file temporanei, ad esempio alcuni pacchetti Debian fatti da me. Il secondo hard disk ha una partizione da 9.4 GB e il suo unico scopo è di servire come backup del disco primario. Non mi interessa fare il backup dei file musicali perchè ho tutti i cd originali e uno script per estrarre di nuovo le tracce e riconvertirle in ogg.

Della partizione da 3.8 GB di cui voglio fare il backup generalmente sono liberi all'incirca 1.3 - 1.5 Gb. Ho diviso "logicamente" i 2.3 GB occupati in system e home directories (mentre scrivo, la mia home è di 588 MB). La ragione di questa divisione è che, come un normale utente, posso esclusivamente modificare il contenuto della mia home directory e alcuni file della partizione di cui non ho intenzione di fare il backup. Contemporaneamente il settore della partizione in cui risiede il sistema rimane abbastanza stabile e immutato perchè raramente (dis)installo software. Infatti anche nella mia home directory le sole cose che cambiano sono abitualmente la mia cartella Mail e progetti, dove metto documenti come questo e altri software che scrivo/modifico.

La distinzione di base fra home directories e system può essere anche utile nella normale organizzazione. Se lavori per una università spesso tutte le macchine hanno la stessa configurazione di base, ma ogni macchina avrà i suoi dati memorizzati. Puoi fare un singolo system backup di una singola macchina e più home backup per ogni computer. Un'altra configurazione comune è l'esistenza di un server centrale che condivide le home directories in NFS. In questo modo dovete solo fare il backup del server. Se vi sono utenti con privilegi di alto livello permettete loro di fare il backup del sistema delle loro proprie macchine, il backup delle home lo possono ignorare visto che se ne occuperà il server.

Una volta deciso di cosa fare il backup, dovete decidere come configurare DAR. Potete usare le opzioni o i file di configurazione. Le opzioni sono utili quando non ne avete troppe da specificare. I file di configurazione sono invece meglio quando volete fare backup differenti, complessi, con inclusioni/esclusioni; inoltre potete usare commenti per documentare le opzioni specificate spiegando per esempio perchè includete/escludete questa o quella directory. Può essere utile ciò se tornate ad utilizzare il computer dopo molto tempo e volete sapere il perchè di ogni opzione.

La mia configurazione fa partire il programma DAR con una script shell richiamato periodicamente da cron (Qualche script per automatizzare i processi), così non devo digitare ogni volta lunghe stringhe di comando. Questo breve documento vuole anche introdurre brevemente alla creazione di tali scripts. Se preferite utilizzare i file di configurazione leggete la documentazione allegata a DAR per sapere come e quale sintassi utilizzare.

Eseguire backup di base (full backup) con DAR

Ecco qua sotto l'intera linea di comando che, da root, devo utilizzare per il backup del mio sistema. Non dovete preoccuparvi vedendo il gran numero di opzioni inserite, successivamente descriverò il motivo di ognuna di esse:

dar -m 256 -y -s 600M -D -R / -c `date -I`_data -Z "*.gz" \
   -Z "*.bz2" -Z "*.zip" -Z "*.png" -P home/gradha -P tmp \
   -P mnt -P dev/pts -P proc -P floppy -P burner -P cdrom
  • -m 256

    DAR può comprimere i backup. La compressione è applicata a ogni file e può essere anche inutile per file di ridotte dimensioni. Di default, file di 100 bytes o meno non vengono compressi. Con l'opzione -m si porta questo limite a 256, cosa che sembra funzionare meglio per tutti quei piccoli file di configurazione che stanno sotto /etc/ e /home. Come potete notare questa è un'opzione assolutamente facoltativa, quasi un "capriccio".

  • -y [level]

    Questa opzione attiva la compressione Bzip2 che di default non è attiva. Potete anche specificare un livello di compressione tramite un numero che può andare da 0 (nessuna compressione, processo veloce) a 9 (miglior compressione, processo lento). Bzip2 di default usa il livello 6 che è il rapporto migliore velocità/compressione per la maggior parte dei file. Personalmente non specifico il livello di compressione, 6 mi va più che bene.

  • -s 600M

    Ecco quà l'opzione di DAR che vi permette di definire la dimensione dei file di backup o, meglio, delle slice. La grandezza specificata, in questo caso di 600 MB, sarà il massimo spazio occupato dai file creati. Se il vostro backup è più grande, ritroverete differenti file di backup con un numero di progressione inserito appena prima dell'estensione, cosìcchè potrete salvare ogni file su differenti supporti (floppies, zip, CDROM, etc). I miei backup sono molto più piccoli di questa dimensione e mantengo questa opzione giusto per tranquillità, nel caso i file diventassero più grandi. Se pensate che questa opzione possa esservi utile potete leggere il manuale di dar per saperne di più.

  • -D

    Memorizza il nome e il percorso delle directory escluse dall'opzione -P o che non ci sono fra quelle specificate alla linea di comando. Questa è un'opzione utile quando state recuperando un backup dal nulla; in questo modo non dovete creare manualmente tutte le directory escluse.

  • -R /

    Specifica la directory di root (directory radice) in cui salvare o dalla quale 'leggere' i file interessati dal backup. Di default questa è la directory in cui si sta lavorando (./). Se stiamo facendo un backup di sistema dalla cartella x, ecco che questa sarà la directory di root.

  • -c `date -I`_data

    Questa è l'opzione obbligatoria di cui vi ho parlato prima e definisce la creazione del backup. Per chi non capisce ciò che segue `date -I` è un trucchetto della shell. Brevemente, date -I restituisce una data con formato YYYY-MM-DD. L'output del comando fra gli apici singoli sarà usato come input dell'opzione -c. In questo modo potete creare backup con la data di creazione direttamente nel nome del file. Se ancora non capite di cosa sto parlando, provate la seguente istruzione dalla linea di comando:

    echo "La data di oggi è `date -I`"
    
  • -Z file_pattern

    Usando come argomento normali estensioni di file potete decidere quali file volete memorizzare nel vostro backup senza che siano compressi. Questo ha senso solo se usate anche l'opzione -y. Comprimendo file compressi otterrete al massimo file più grandi, nonchè spreco di risorse e occupazione della CPU.

  • -P relative_path

    Con questa opzione dite a DAR quali directory non volete memorizzare nel vostro backup. Quì potreste mettere ad esempio la /home (Sono l'unico utilizzatore di questa macchina, ce ne sono pochi altri, ma solo per testare alcune funzioni), directory di sistema che non sono realmente dei file, come proc, altri file che potreste aver montati sotto mnt (come, ovviamente, il drive in cui metterete i file di backup) etc, etc. Notate che i percorsi che inserite devono essere relativi a quello specificato con l'opzione -R.

Tutto ciò non è poi così difficile. Controllate le pagine di manuale di DAR per maggiori informazioni sulle opzioni che vi interessa usare. Ed ecco quì il comando che uso all'interno della mia home:

dar -m 256 -y -s 600M -D -R /home/gradha -c `date -I`_data \
   -Z "*.gz" -Z "*.bz2" -Z "*.zip" -Z "*.png" \
   -P instalacion_manual -P Mail/mail_pa_leer

Nulla di nuovo sotto il sole. Come potete vedere molti dei comandi sono identici a quelli 'di cui sopra', ho solo cambiato il nome delle directories che voglio escludere utilizzando l'opzione -P e la directory radice con l'opzione -R.

Eseguire backup differenziali con DAR

Una volta che avete creato un backup base, potete creare quelli differenziali. Il primo backup differenziale deve essere creato usando quello di base come riferimento. I backup differenziali successivi useranno come riferimento l'ultimo backup differenziale disponibile. Ecco quì il comando per un backup differenziale del sistema:

dar -m 256 -y -s 600M -D -R / -c `date -I`_diff -Z "*.gz" \
   -Z "*.bz2" -Z "*.zip" -Z "*.png" -P home/gradha -P tmp \
   -P mnt -P dev/pts -P proc -P floppy -P burner -P cdrom \
   -A previous_backup
  • -c `date -I`_diff

    Ho solo cambiato il nome del file, per un motivo... "pratico".

  • -A previous_backup

    Questa nuova opzione viene usata per dire a DAR dove trova il file di backup precedente in modo da creare un backup differenziale invece di uno base. L'unica cosa alla quale fare attenzione è che voi non dovete specificare nè il numero progressivo nè l'estensione, diversamente DAR porrebbe una richiesta alla linea di comando.

La linea di comando dell'utente è esattamente la stessa. Ecco quà per completezza:

dar -m 256 -y -s 600M -D -R /home/gradha -c `date -I`_diff \
   -Z "*.gz" -Z "*.bz2" -Z "*.zip" -Z "*.png" \
   -P instalacion_manual -P Mail/mail_pa_leer -A previous_backup

DAR ha un'altra interessante caratteristica che quì non usiamo: i cataloghi. Quando create un backup con DAR questo contiene i dati e un catalogo. Questo catalogo contiene informazioni inerenti i file che sono stati salvati: la loro data, la loro dimensione dopo la compressione, etc. Potete estrarre il catalogo e memorizzarlo separatamente. Perchè dovreste farlo? Per implementare backup differenziali in rete, ad esempio.

Al fine di creare un backup differenziale dovete procurare a DAR il backup precedente in modo che il programma possa decidere quali file sono stati modificati e quali no. Facendo questo lavoro su di una rete ciò può occupare molta banda. Invece, dopo aver creato il backup, potete estrarre il catalogo e inviarlo alla macchina designata alla creazione dei backup. Successivamente potete usare questo file con l'opzione -A, in questo modo DAR lavorerà come se il file del backup base fosse quello.

Questo può essere anche utile se usate le slices perchè il catalogo è creato per la prima e l'ultima slice. E' più semplice passare al comando un singolo file piuttosto che dover utilizzare tutti i dischi del vostro precedente backup.

Qualche script per automatizzare i processi

Come ho detto prima è venuto il momento di mettere la nostra procedura di backup sotto cron. Mettendo il seguente script eseguibile per il backup del sistema sotto /root/dar_backup.sh:

#!/bin/bash
# Monthly FULL system backup with DAR, intended to be run from root's cron.
# Creates /var/backups/system/YYYY-MM-DD_data.<slice>.dar, tests the archive,
# then makes the files readable by the group 'gradha' only.

# Destination directory for the backup slices.
DIR=/var/backups/system
# Archive basename: today's ISO date (YYYY-MM-DD) plus the "_data" suffix.
FILE=${DIR}/`/bin/date -I`_data
# Commands
# Create the archive; stdout is discarded so cron only mails stderr (errors).
/usr/local/bin/dar -m 256 -y -s 600M -D -R / -c $FILE -Z "*.gz" \
   -Z "*.bz2" -Z "*.zip" -Z "*.png" -P home/gradha -P tmp \
   -P mnt -P dev/pts -P proc -P floppy -P burner \
   -P cdrom -P var/backups > /dev/null
# Verify the archive just created (-t = test).
/usr/local/bin/dar -t $FILE > /dev/null
# Hand the slices to group 'gradha', read-only for owner and group.
# NOTE(review): '.gradha' changes only the group; GNU chown documents
# ':gradha' as the preferred spelling — confirm on the target system.
/usr/bin/find $DIR -type f -exec chown .gradha \{\} \;
/usr/bin/find $DIR -type f -exec chmod 440 \{\} \;

Alcune cose da notare:

  • DIR è la variabile che rappresenta la directory di destinazione.
  • FILE rappresenta il percorso del file di backup di oggi.
  • Uso percorsi assoluti nei comandi perchè il mio account di root non li ha tutti inclusi nell'ambiente di default. Questo è potenzialmente un rischio in ambito di sicurezza. Idealmente dovreste compilare DAR come root e mantenere i binari dove li avete creati, così nessuno potrà toccarli o eseguirvi Tripwire.
  • DAR genera statistiche dopo ogni esecuzione. A noi non servono se eseguite in cron perchè produrrebbero solo mail inutili. Lo stdout è rediretto a /dev/null. Gli errori saranno invece riportati in una mail nel caso qualcosa andasse storto.
  • Gli ultimi due comandi find sono opzionali. Li uso per cambiare i permessi dei file per un normale utente che creerà successivamente i backup. Un ulteriore rischio in fatto di sicurezza. Root dovrebbe eseguire il backup dei file da root e gli utenti i loro. Ma con un sistema mono-user questo non è importante. Se un ipotetico intruso è capace di passare attraverso il mio firewall, inserire la mia password e quindi guardare tutti i miei backup: sono fregato.

Ora ponete il seguente script per i backup differenziali, quasi identico al precedente, sotto /root/dar_diff.sh:

#!/bin/bash
# Daily DIFFERENTIAL system backup with DAR, run from root's cron.
# Uses the most recent existing archive as the reference (-A).

# Destination directory for the backup slices.
DIR=/var/backups/system
# Archive basename: today's ISO date plus the "_diff" suffix.
FILE=${DIR}/`/bin/date -I`_diff
# Most recent previous archive: 'ls' sorts alphabetically, which for
# YYYY-MM-DD names is also chronological; 'tail -n 1' keeps the last one.
PREV=`/bin/ls $DIR/*.dar|/usr/bin/tail -n 1`
# Create the differential archive. ${PREV%%.*} strips slice number and
# extension (dar wants the bare basename for -A); stdout discarded for cron.
/usr/local/bin/dar -m 256 -y -s 600M -D -R / -c $FILE -Z "*.gz" \
   -Z "*.bz2" -Z "*.zip" -Z "*.png" -P home/gradha -P tmp -P mnt \
   -P dev/pts -P proc -P floppy -P burner -P cdrom \
   -P var/backups -A ${PREV%%.*} > /dev/null
# Verify the archive just created.
/usr/local/bin/dar -t $FILE > /dev/null
# Hand the slices to group 'gradha', read-only for owner and group.
/usr/bin/find $DIR -type f -exec chown .gradha \{\} \;
/usr/bin/find $DIR -type f -exec chmod 440 \{\} \;

Gli unici due cambiamenti sono le aggiunte dell'opzione -A e la generazione della variabile PREV con una linea di comando un po' complicata. Vediamo cosa fa questa linea di comando:

  • Prima di tutto, il comando ls crea una lista dei file con estensione .dar presenti nella directory di backup; questo output è rediretto al comando successivo.
  • Di default ls elenca i file in ordine alfabetico. tail è usato per ottenere l'ultimo file con l'opzione -n 1 che ordina di mostrare solo l'ultima riga.
  • DAR necessita di lavorare con filenames senza il numero di slice e senza estensione. Se non correggiamo noi il nome del file, DAR fermerà il processo e chiederà all'utente se effettuare l'operazione in modo automatico o meno. Separiamo quindi il nome del file con una feature Bash, chiamata parametro d'espansione. Ci sono diverse possibili espansioni, potete digitare man bash per vederle tutte. Usando %% rimuoviamo la più lunga "coda" di caratteri che si trova dopo il %%. Il risultato è il nome base che vogliamo passare a DAR.

Ora dobbiamo solo mettere questi due script sotto il controllo di cron. Questo è ciò che dobbiamo scrivere dopo il comando crontab -e:

15 0 2-31 * * ./dar_diff.sh
15 0 1    * * ./dar_backup.sh

Controllate in man -S 5 crontab la sintassi del comando. In breve queste due linee dicono a cron di far partire i processi 15 minuti dopo la mezzanotte. dar_backup.sh verrà eseguito solo il primo giorno del mese. L'altro script verrà eseguito tutti gli altri giorni.

Ecco quì gli scripts di backup per i vostri utenti. Essi sono identici, cambiano solo alcune opzioni di DAR e i percorsi:

#!/bin/bash
# dar_backup.sh
# Monthly FULL backup of the user's home directory with DAR (user cron job).
# Same structure as the system script, with user paths and no chown step.

# Destination directory for the backup slices.
DIR=/var/backups/gradha
# Archive basename: today's ISO date plus the "_data" suffix.
FILE=${DIR}/`/bin/date -I`_data
# Commands
# Create the archive; stdout discarded so cron only mails errors.
/usr/local/bin/dar -m 256 -y -s 600M -D -R /home/gradha -c $FILE \
   -Z "*.gz" -Z "*.bz2" -Z "*.zip" -Z "*.png" \
   -P instalacion_manual -P Mail/mail_pa_leer > /dev/null
# Verify the archive just created.
/usr/local/bin/dar -t $FILE > /dev/null
# Make the slices read-only for the owner.
/usr/bin/find $DIR -type f -exec chmod 400 \{\} \;

#!/bin/bash
# dar_diff.sh
# Daily DIFFERENTIAL backup of the user's home directory with DAR (user cron).
# Uses the most recent existing archive as the reference (-A).

# Destination directory for the backup slices.
DIR=/var/backups/gradha
# Archive basename: today's ISO date plus the "_diff" suffix.
FILE=${DIR}/`/bin/date -I`_diff
# Most recent previous archive: alphabetical 'ls' order is chronological
# for YYYY-MM-DD names; 'tail -n 1' keeps the last one.
PREV=`/bin/ls $DIR/*.dar|/usr/bin/tail -n 1`
# Create the differential archive. ${PREV%%.*} strips slice number and
# extension, as dar's -A option expects the bare basename.
# Fix: the original listed -Z "*.zip" twice and omitted -Z "*.png",
# unlike every other dar invocation in this document.
/usr/local/bin/dar -m 256 -y -s 600M -D -R /home/gradha -c $FILE \
   -Z "*.gz" -Z "*.bz2" -Z "*.zip" -Z "*.png" \
   -P instalacion_manual -P Mail/mail_pa_leer \
   -A ${PREV%%.*} > /dev/null
# Verify the archive just created.
/usr/local/bin/dar -t $FILE > /dev/null
# Make the slices read-only for the owner.
/usr/bin/find $DIR -type f -exec chmod 400 \{\} \;

Non dimenticate di aggiungere a crontab le stringhe richieste per i vostri utenti.

Estrarre i backup su macchine vuote

Venuto il momento di recuperare i vostri backup, in base a quello che avete salvato, avrete il backup completo del mese e tanti backup differenziali quanti quelli che avete fatto. Il processo di recupero dei dati è molto semplice: è uguale a quello descritto nel primo paragrafo (Utilizzo essenziale di DAR), l'importante è che prima recuperiate il backup base e solo successivamente quelli differenziali. Questo può essere noioso, così ecco quà un'altro script che potete salvare fra i vostri file di backup:

#!/bin/bash
# recover.sh — restore a full DAR backup and then replay, in order, every
# differential backup of the same month on top of it.
# Usage: ./recover.sh dar_binary YYYY-MM-DD target_directory
# Must be run from the directory holding the backup files.

if [ -n "$3" ]; then
   # $1: path to the dar binary.
   CMD="$1"
   # $2: date of the base full backup; archives are named YYYY-MM-DD_data.
   INPUT="$2_data"
   # $3: filesystem root to restore into (usually / when run as root).
   FS_ROOT="$3"
   # Restore the full backup first; -w overwrites existing files.
   $CMD -x "$INPUT" -w -R "$FS_ROOT"
   # ${INPUT:0:8} is "YYYY-MM-" (first 8 chars of the date), so the glob
   # matches every differential archive of the same month, in date order.
   for file in ${INPUT:0:8}*_diff*; do
      # ${file:0:15} is "YYYY-MM-DD_diff": slice number and .dar extension
      # stripped, as dar expects the bare archive basename.
      $CMD -x "${file:0:15}" -w -R "$FS_ROOT"
   done
   echo "All done."
else
   # Not enough arguments: print usage help (kept in Italian, as the
   # document's audience expects).
   echo "Not enough parameters.

Usa: script dar_location base_full_backup directory

Dove dar_location è un percorso alla directory con i binari di dar,
base_full_backup è una data in formato 'YYYY-MM-DD' e directory è
il posto dove volete mettere i file recuperati, solitamente '/'
quando eseguito come root."
fi

Lo script si spiega da solo. L'unica cosa alla quale dovete fare attenzione è l'opzione -w che dice a DAR di sovrascrivere i file trovati. Questo è obbligatorio per i backup differenziali. Ricordate di mettere lo script nella stessa directory dove mettete i file di backup. Ecco un'utilizzo di esempio:

./recover.sh /usr/local/bin/dar 2003-10-01 /tmp/temp_path/

Provate ad utilizzare questo come utente normale con pochi file di backup. Potete mettere i file recuperati in una directory temporanea, così non dovete svuotare il vostro hard disk per provarlo.

Aggiungere dei controlli allo script di backup

Denis Corbin suggerisce che lo script di creazione dei backup verifichi anche l'exit status dei comandi di DAR. Per quanto riguarda questo script così semplice, ciò non è di importanza critica perchè DAR stesso stamperebbe a schermo un messaggio d'errore e cron lo riporterebbe via mail (cosa che normalmente non succede se tutto va per il verso giusto)

Comunque testare l'exit status può essere utile se state verificando il funzionamento dello script e volete sapere quali comandi sono eseguiti:

#!/bin/bash
# Variant of the monthly full system backup that checks the exit status of
# each dar command and reports which step (creation/test) failed.

# Destination directory for the backup slices.
DIR=/var/backups/system
# Archive basename: today's ISO date plus the "_data" suffix.
FILE=${DIR}/`/bin/date -I`_data
# Commands
# Create the archive; only run the test if creation succeeded.
if /usr/local/bin/dar -m 256 -y -s 600M -D -R / -c $FILE -Z "*.gz" \
      -Z "*.bz2" -Z "*.zip" -Z "*.png" -P home/gradha -P tmp \
      -P mnt -P dev/pts -P proc -P floppy -P burner \
      -P cdrom -P var/backups > /dev/null ; then
   if /usr/local/bin/dar -t $FILE > /dev/null ; then
      echo "Archive created and successfully tested."
   else
      echo "Archive created but test FAILED."
   fi
else
   echo "Archive creating FAILED."
fi
# Permissions are adjusted even on failure (same as the unchecked version).
/usr/bin/find $DIR -type f -exec chown .gradha \{\} \;
/usr/bin/find $DIR -type f -exec chmod 440 \{\} \;

Potete testare facilmente questa versione facendo partire lo script e killando i processi di DAR manualmente da un'altro terminale o un'altra console con killall dar, che forzerà la fine dei processi DAR e vedrete che uno dei rami di fallimento sarà raggiunto nello script di backup.

Un'ulteriore possibile utilizzo per testare il codice può essere la rimozione di archivi incompleti dall'hard disk se qualcosa andasse male o evitare di testare l'archivio creato quando sapete che il primo comando è già fallito. Successivamente si possono facilmente concatenare i comandi di creazione e di test con && in una singola linea di testo. Ciò indica alla shell di eseguire entrambi i comandi in sequenza e impedisce l'esecuzione del secondo se il primo è fallito.

Notate comunque che, se DAR fallisce durante una procedura di backup, questa versione dello script lascerà archivi errati vaganti. Per prevenire ciò potete fare in modo che lo script esegua una positive verification. Ciò creerà il backup in una directory temporanea insieme con un file *.valid.

Così un'altro script monitora la directory dove i file temporanei sono messi e sposta in una directory definitiva i file con *.valid eliminando quelli la cui ultima modifica è precedente a un'ora.

Idee per il futuro

Non ho programmato di aggiornare questo testo presto perchè sono molto pigro, ma se voi siete fra quegli hackers iperattivi, ecco qua qualcosa che mi piacerebbe inserire:

  • Unificare gli script dei backup di base e differenziali in uno unico, cosicché se all'esecuzione dello script non esistono backup base per il mese corrente questo venga creato. Utile per macchine che rimangono spente molto tempo dopo che il backup mensile è stato fatto.

  • Aggiornare lo script in modo che crei giornalmente un'immagine per CD ROM con cdrecord e la masterizzi automaticamente su un cd riscrivibile presente nel drive. Così nel caso l'intero hard disk si guasti sarebbe disponibile l'ultimo backup su un media rimovibile. Certo la cosa è limitata e non può essere automatica nel caso i backup occupino più spazio di un CDROM. La stessa cosa vale per ZIP/JAZZ/qualsiasi cosa vogliate.

  • Integrazione dei backup generati con una mini Knoppix bootable o qualsiasi altra distribuzione che possa essere avviata da CDROM. Così avreste un CDROM per recuperare i dati che può partire automaticamente e formattare il vostro hard disk.

  • Sincronizzazione delle directory di backup attraverso internet con hosts remoti. In questo modo se l'intera macchina è bruciata fisicamente, ad esempio con la vostra casa, voi avete i vostri backup in qualche altro posto. Potrebbe essere fatto facilmente con programmi come rsync attraverso ssh eseguiti tramite cron.

  • Inserimento dei parametri comuni in un file separato da includere nello script utilizzando l'opzione di DAR, -B. Per esempio:

    $ cat > /var/backups/system/common.dcf
    -m 256 -y -s 600M -D -R / -Z "*.gz" -Z "*.bz2" -Z "*.zip" \
    -Z "*.png" -P home/gradha -P tmp -P mnt -P dev/pts \
    -P proc -P floppy -P burner -P cdrom -P var/backups
    

    Successivamente si può utilizzare questo nello script:

    DIR=/var/backups/system
    FILE=${DIR}/`/bin/date -I`_data
    # Commands
    /usr/local/bin/dar -B ${DIR}/common.dcf -c $FILE > /dev/null
    /usr/local/bin/dar -t $FILE > /dev/null
    /usr/bin/find $DIR -type f -exec chown .gradha \{\} \;
    

    Che può essere riutilizzato anche nella versione differenziale!

In effetti, qualcuno ha già iniziato a creare qualche script a proprio uso e consumo e non ha problemi a condividerli. Per evitare di "disordinare" questo mini-howto ho intenzione di archiviarli come sono nel mio spazio web: https://github.com/gradha/dar-differential-backup-mini-howto/tree/master/contrib.

Sentitevi liberi di inviare i vostri lavori e i vostri aggiornamenti e li aggiungerò alla directory. Se avete intenzione di inviare un singolo file di script o un .tar.gz con una intera suite di backup, inserite un semplice file .txt descrittivo che metterò assieme agli altri files, così la gente potrà leggere cosa sono e cosa fanno i files prima di scaricarli. Usate l'inglese nella vostra descrizione e non dimenticate di mettere nome e e-mail così la gente potrà inviarvi bugfixes o miglioramenti.

The end

And that's the whole magic. Se avete qualche problema, qualcosa non è chiaro o sbagliato (il che è peggio) inviatemi un'e-mail. Se trovi questo documento utile e lo vuoi tradurre inviami una traduzione del file source.en.txt così posso distribuirla assieme a questa versione e gli utenti troveranno più semplice la versione nella loro lingua. Dovreste raggiungere facilmente il codice di questo documento alla mia home page (link at the beginning of the document).
Enjoy!

Per finire

Versione un po' corretta, con un italiano un po' più scorrevole. Ecco lo scopo che mi ero prefissato per questa revisione. Non so se l'obbiettivo è stato raggiunto, ma non immaginavo che tradurre dall'inglese fosse così difficile. Termini che in italiano non sai come rendere, plurali di nomi inglesi che in italiano rimangono singolari, modi di dire che, una volta tradotti letteralmente, non ti escono più dalla testa. Spero, comunque, che riusciate a capire in modo più agevole questa correzione rimanendo, come sempre, a disposizione. David (link at the beginning of the document)
dar-2.6.8/doc/mini-howto/dar-differential-backup-mini-howto.es.html0000644000175000017520000013426413476543220022126 00000000000000 DAR differential backup mini-howto -ES-

DAR differential backup mini-howto -ES-

Author: Grzegorz Adam Hankiewicz
Contact: dar@gradha.imap.cc
Translator:Grzegorz Adam Hankiewicz
Date: 2012-12-19
Web site:http://gradha.github.com/dar-differential-backup-mini-howto/
Copyright: Este documento está bajo dominio público.
Translations:De la página web puede obtener este documento en inglés, italiano y español.

Introducción

Todos deberíamos hacer copias de seguridad de nuestros datos importantes. Este consejo omnipresente es habitualmente ignorado por la mayoría de las personas. Yo lo ignoré también, hasta que perdí una buena cantidad de datos importantes. Insatisfecho, continué perdiendo datos en algunos incidentes posteriores, hasta que decidí que era bastante. Entonces busqué programas de copias de seguridad en Freshmeat que permitiesen hacer copias de seguridad diferenciales y encontré DAR.

Una copia de seguridad completa significa que todos los ficheros bajo su política de seguridad serán guardados. Una copia de seguridad diferencial o incremental, sólo contendrá aquellos ficheros cuyos contenidos han cambiado desde la copia de seguridad anterior, ya sea esta completa o diferencial.

DAR le permite crear de forma sencilla un conjunto de copias de seguridad diferenciales. El método que he desarrollado me ayuda a tener copias de seguridad automáticas que se ejecutan cada noche. El primer día del mes, se realiza una copia de seguridad completa. El resto del mes, sólo se realizan copias de seguridad diferenciales. En mi situación, muy pocos ficheros cambian de un día a otro, algunas veces el código fuente del proyecto en el que estoy trabajando, y siempre mis buzones de correo.

El resultado es que puedo recuperar el contenido de mi ordenador a un día específico con facilidad, en caso de necesitarlo. DAR es un programa de línea de comando, y puede hacerse ligeramente complejo con algunas opciones. Este pequeño mini-howto le explicará mi solución personal, que es muy cruda, pero me da buenos resultados. Sí, he verificado que puedo recuperar datos de las copias de seguridad. De hecho, a finales del año 2003 me trasladé a otro país y solamente llevé conmigo un CD ROM con una Knoppix autoarrancable, y recuperé el estado exacto de mi instalación Debian en cuestión de horas. Sin personalizaciones, sin largas instalaciones, sin ficheros perdidos.

Este documento fue escrito usando la versión 1.3.0 de DAR. Cuando me actualicé a DAR 2.0.3, todo seguía funcionando, ni si quiera tuve que actualizar mis archivos de copias de seguridad. Así que parece que la interfaz y el formato de copias de seguridad son bastante estables, o al menos compatibles hacia atrás. No obstante, no confíe a ciegas en este documento. Verifique que la versión de DAR que tiene instalada funciona como espera y que puede recuperar una copia de seguridad generada antes de tener que depender de ella.

Esta versión del texto usa reStructuredText (para eso son las marcas extrañas en la versión en modo texto). Lea más sobre esto en http://docutils.sourceforge.net/.

Uso simple de DAR

DAR es muy similar a tar en el número de opciones que tiene: hay suficiente para cada necesidad, pero demasiadas para un novato. Como es habitual, siempre puede obtener ayuda del programa tecleando dar -h o man dar tras su instalación. Al igual que tar, hay un conjunto de parámetros obligatorios que definen el tipo de operación que va a realizar (crear, extraer, listar, etc), y un conjunto de parámetros que afectan la opción seleccionada. Simplemente por probar, imagínese que quiere realizar una copia de seguridad de su directorio home. Escribiría algo así:

dar -c fichero_sin_extension -g file1 -g file2 ... -g fileN

La salida debería ser similar a esto:

$ dar -c mi_copia -g safecopy.py/ -g translate_chars.py/


 --------------------------------------------
 15 inode(s) saved
 with 0 hard link(s) recorded
 0 inode(s) not saved (no file change)
 0 inode(s) failed to save (filesystem error)
 4 files(s) ignored (excluded by filters)
 0 files(s) recorded as deleted from reference backup
 --------------------------------------------
 Total number of file considered: 19
$ ls
mailbox_date_trimmer/  mi_copia.1.dar        sdb.py/
mailbox_reader/        safecopy.py/          translate_chars.py/

Tal y como se habrá dado cuenta, DAR añade un número y extensión a su nombre. El propósito de la extensión es claro, ayuda a saber visualmente que el fichero es una copia de seguridad de DAR. El número es un trozo, y está relacionada con la característica de DAR de repartir la copia de seguridad en varios dispositivos de almacenamiento. Si por ejemplo quisiese hacer una copia de seguridad en CD ROM, pero sus directorios son mayores que la capacidad de uno, puede decirle a DAR que reparta el archivo en tantos ficheros como sea necesario, que luego puede grabar en varios CD ROMs.

¿Quiere recuperar su copia de seguridad? Muy sencillo, teclee lo siguiente:

$ mkdir temp
$ cd temp
$ dar -x ../mi_copia
file ownership will not be restored as dar is not run as root.
to avoid this message use -O option [return = OK | esc = cancel]
Continuing...


 --------------------------------------------
 15 file(s) restored
 0 file(s) not restored (not saved in archive)
 0 file(s) ignored (excluded by filters)
 0 file(s) less recent than the one on filesystem
 0 file(s) failed to restore (filesystem error)
 0 file(s) deleted
 --------------------------------------------
 Total number of file considered: 15
$ ls
safecopy.py/  translate_chars.py/

La estrategia de copias de seguridad

El primer paso para crear una buena copia de seguridad es determinar qué partes de su sistema necesitan una. Esto no significa necesariamente que no puede crear una copia de seguridad completa, sólo que repartir la copia en al menos dos partes puede ayudar mucho a DAR (y cualquier otra herramienta de copias de seguridad).

Mi sistema en casa se compone de dos discos duros. El primero está partido en una partición de 3.8 GB donde vive mi sistema completo, y otra partición de 11 GB donde almaceno mi música y otros ficheros temporales, como un repositorio local de paquetes Debian que hago para mí mismo. El segundo disco duro tiene una partición de 9.4 GB cuyo único propósito es servir de copia de seguridad del disco primario. No tengo interés en realizar copias de seguridad de mi música, porque tengo todos los CDs originales y scripts para recomprimirlos en formato ogg.

De las 3.8 GB que quiero hacer copia de seguridad, normalmente entre 1.3 y 1.5 GB están vacías. Repartiré las 2.3 GB usadas a nivel lógico entre directorios de sistema y home (en el momento de escribir esto, mi home ocupa 588 MB). La razón de esta separación es que como usuario normal sólo puedo cambiar cosas en mi directorio home y otros ficheros de las particiones que no hago copias de seguridad. Mientras, la parte sistema de la partición es bastante estable y no se modifica porque (des)instalo software muy de vez en cuando. De hecho, de mi directorio home las únicas cosas que cambian normalmente son mis directorios Mail y projects, donde pongo este documento y otro software que escribo/hackeo.

La diferenciación básica entre directorios home y de sistema también puede ser útil en organizaciones. Si trabaja para una universidad, normalmente todas las máquinas tendrán la misma configuración de sistema, pero dependiendo de la máquina sus directorios home contendrán datos diferentes. Puede hacer una copia de seguridad de sistema de una sola máquina, y copias de seguridad del home de cada máquina. Otra configuración común es tener un servidor central que exporta los directorios home por NFS. Aquí sólo tiene que hacer copia de seguridad del servidor. Si tiene usuarios con privilegios altos, déjeles la tarea de hacer una copia de seguridad de sistema de sus propias máquinas, el directorio home exportado es algo que pueden ignorar dado que será realizado en el servidor.

Una vez haya decidido qué quiere guardar en su copia de seguridad, debe decidir cómo configurar DAR. Puede usar parámetros o ficheros de configuración. Los parámetros están bien cuando no tiene muchas opciones. Los ficheros de configuración son mejores cuando quiere añadir complejas reglas de inclusión/exclusión de ficheros, y además, puede usar comentarios para documentar los parámetros, indicando por ejemplo la razón por la que incluye tal o cual directorio. Esto puede ser útil si vuelve dentro de unos meses y se pregunta qué hacen todas estas opciones.

Con mi configuración, ejecutaré comandos DAR desde scripts shell llamados periódicamente por cron (Configurando algunos scripts para automatizar el proceso), así que no me importa tener largas líneas de comando, y este mismo documento tiene doble propósito para documentar esos scripts. Si prefiere ficheros de configuración, lea la documentación de DAR para aprender su formato y cómo usarlos.

Copia de seguridad completa con DAR

Aquí está la línea de comando completa que usaré para mi copia de seguridad de sistema, ejecutada como root. No se preocupe por el gran número de parámetros, iré describiendo su propósito uno a uno:

dar -m 256 -y -s 600M -D -R / -c `date -I`_data -Z "*.gz" \
   -Z "*.bz2" -Z "*.zip" -Z "*.png" -P home/gradha -P tmp \
   -P mnt -P dev/pts -P proc -P floppy -P burner -P cdrom
  • -m 256

    DAR puede comprimir su copia de seguridad. La compresión se aplica a ficheros individuales, y puede ser perjudicial para pequeños ficheros. Por defecto los ficheros con 100 bytes o menos no serán comprimidos. Con el parámetro -m incremento este valor a 256, el cual parece funcionar mejor para esos pequeños ficheros de configuración que se almacenan en /etc/ y /home. Como puede ver, esta opción es completamente opcional, básicamente para fanáticos del ajuste como yo.

  • -y [nivel]

    Esta opción activa la compresión Bzip2 del archivo, que por defecto está desactivada. Incluso puede especificar un nivel numérico de compresión, que va de 0 (no compresión) hasta 9 (mejor compresión, procesado lento). Bzip2 por defecto usa 6, que es la mejor relación velocidad/compresión para la mayoría de los ficheros. Yo no uso nivel de compresión, el 6 me va bien.

  • -s 600M

    Aquí está la característica de DAR de trocear. El tamaño especificado de 600 Megabytes es el tamaño máximo de fichero que DAR creará. Si su copia de seguridad es mayor, obtendrá varios ficheros de copia de seguridad, cada uno con su número de trozo antes de la extensión del fichero, para que pueda salvar cada uno en una unidad diferente de almacenamiento (disquetes, zip, CDROM, etc). Mis copias de seguridad son mucho más pequeñas que este tamaño, y mantengo este parámetro sólo por si acaso se me ocurre crear un fichero grande en mi directorio home y olvido borrarlo. Si este parámetro le resulta útil, lea también en el manual de DAR sobre el parámetro -S.

  • -D

    Almacena directorios como directorios vacíos aquellos excluidos por la opción -P o aquellos ausentes en la línea de comando como parámetros. Esto es útil cuando recupera una copia de seguridad desde cero, para que no tenga que crear manualmente todos los directorios que fueron excluidos.

  • -R /

    Especifica el directorio raíz para salvar o recuperar ficheros. Por defecto esto apunta al directorio de trabajo actual. Estamos realizando una copia de seguridad de sistema, así que apuntará al directorio raíz.

  • -c `date -I`_data

    Este es uno de los parámetros obligatorios de los que hablé antes, y significa crear una copia de seguridad. Para aquellos que no entienden lo que sigue, `date -I` es la expansión de comillas de la shell de línea de comando. En pocas palabras, date -I proporcionará la fecha en formato AAAA-MM-DD. Con comillas y usado como parámetro, la salida del comando será usada como cadena del comando padre. De este modo puede crear copias de seguridad con la fecha de creación empotrada en el nombre. Si todavía no sabe de lo que hablo, intente ejecutar lo siguiente desde la línea de comando:

    echo "La fecha de hoy es `date -I`"
    
  • -Z patrón_fichero

    Usando las reglas normales de meta caracteres en ficheros puede especificar patrones de ficheros que quiere almacenar en la copia de seguridad sin compresión. Esto sólo tiene sentido si usa el parámetro -y. Comprimir ficheros comprimidos únicamente crea ficheros mayores y malgasta tiempo de la CPU.

  • -P ruta_relativa

    Con este parámetro le dice a DAR qué rutas no quiere almacenar en su copia de seguridad. Aquí posiblemente quiere poner el directorio home (soy el único usuario de la máquina, hay algunos más, pero con el propósito de pruebas/sistema), directorios de sistema que no son realmente ficheros físicos como proc, otras unidades que pueda tener montadas bajo mnt (destacando la unidad donde va a poner la copia de seguridad), etc, etc. Tenga en cuenta que las rutas que especifique aquí deben ser relativas a la ruta especificada por el parámetro -R.

Eso no fue tan difícil. En el manual de DAR puede leer sobre más parámetros que pueda querer usar. Y aquí está la linea de comando que ejecutaré como usuario dentro de mi directorio home:

dar -m 256 -y -s 600M -D -R /home/gradha -c `date -I`_data \
   -Z "*.gz" -Z "*.bz2" -Z "*.zip" -Z "*.png" \
   -P instalacion_manual -P Mail/mail_pa_leer

Nada nuevo bajo el sol. Como puede ver, la mayoría de la línea de comando es idéntica a la anterior, únicamente cambio el nombre de los directorios que quiero excluir con -P y el directorio raíz con el parámetro -R.

Haciendo copias de seguridad diferenciales con DAR

Un vez tenga una copia de seguridad completa puede crear una copia de seguridad diferencial. La primera copia de seguridad diferencial debe ser realizada usando la copia de seguridad completa como referencia. Las siguientes copias de seguridad diferenciales usan la última copia de seguridad diferencial como referencia. Aquí está la línea de comando para una copia de seguridad diferencial de sistema:

dar -m 256 -y -s 600M -D -R / -c `date -I`_diff -Z "*.gz" \
   -Z "*.bz2" -Z "*.zip" -Z "*.png" -P home/gradha -P tmp \
   -P mnt -P dev/pts -P proc -P floppy -P burner -P cdrom \
   -A copia_previa
  • -c `date -I`_diff

    Sólo cambio el nombre del fichero, por razones cosméticas.

  • -A copia_previa

    Este nuevo parámetro se usa para decirle a DAR dónde puede encontrar la copia de seguridad anterior para que pueda crear una copia de seguridad diferencial en lugar de una completa. La única cosa con la que debe tener cuidado es no especificar ni trozo ni extensión en el nombre del fichero, de lo contrario DAR le realizará una pregunta interactiva en la línea de comando.

La línea de comando de usuario es exactamente igual. Aquí está:

dar -m 256 -y -s 600M -D -R /home/gradha -c `date -I`_diff \
   -Z "*.gz" -Z "*.bz2" -Z "*.zip" -Z "*.png" \
   -P instalacion_manual -P Mail/mail_pa_leer -A copia_previa

DAR tiene otra buena característica que no usamos: catálogos. Cuando crea una copia de seguridad con DAR, internamente contiene todos los datos más un catálogo. Este catálogo contiene información sobre qué ficheros fueron guardados, sus fechas, su tamaño comprimido, etc. Puede extraer un catálogo y almacenarlo por separado. ¿Para qué querría hacer esto? Para configurar copias de seguridad diferenciales por red.

Para poder crear una copia de seguridad diferencial, necesita proporcionar a DAR la copia de seguridad previa para que pueda decidir qué ficheros han cambiado. Realizar esto puede consumir mucho ancho de banda en una red. En su lugar, tras crear la copia de seguridad, puede extraer el catálogo y enviarlo a la máquina que realiza las copias de seguridad. La siguiente vez, puede usar este fichero con el parámetro -A, y funcionará como si el fichero completo estuviese ahí.

Esto también puede ser útil si usa trozos, porque el catálogo se crea a partir del primer y último trozo. Es mucho más cómodo usar un solo fichero con el comando de copia de seguridad en lugar de tener que llevar consigo los discos de la copia de seguridad anterior.

Configurando algunos scripts para automatizar el proceso

Tal y como se mencionó anteriormente, es hora de configurar las copias de seguridad bajo cron. Ponga el siguiente script ejecutable para copias de seguridad de sistema bajo /root/dar_backup.sh:

#!/bin/bash
# dar_backup.sh -- monthly full system backup with DAR, meant to run from
# root's crontab. Creates the archive, tests it, then hands the slices
# over to a regular user.

# Destination directory for the backup slices.
DIR=/var/backups/system
# Base name of today's archive, e.g. /var/backups/system/2003-10-01_data.
FILE=${DIR}/$(/bin/date -I)_data
# Commands
# stdout is discarded so cron only sends mail when something fails.
/usr/local/bin/dar -m 256 -y -s 600M -D -R / -c "$FILE" -Z "*.gz" \
   -Z "*.bz2" -Z "*.zip" -Z "*.png" -P home/gradha -P tmp \
   -P mnt -P dev/pts -P proc -P floppy -P burner \
   -P cdrom -P var/backups > /dev/null
# Verify the freshly created archive.
/usr/local/bin/dar -t "$FILE" > /dev/null
# Give the slices to the normal user (":group" is the portable chown form;
# the historical "." separator is deprecated).
/usr/bin/find "$DIR" -type f -exec chown :gradha {} \;
/usr/bin/find "$DIR" -type f -exec chmod 440 {} \;

Algunas cosas a destacar:

  • DIR es la variable que contiene el directorio destino.
  • FILE contendrá la ruta a la copia de seguridad del día.
  • Uso rutas completas para los comandos porque mi cuenta root no las tiene incluidas en el entorno por defecto. Esto es un riesgo de seguridad potencial. Idealmente querría compilar DAR como root y guardar los binarios donde los cree para que nadie pueda tocarlos. Y también ejecutar Tripwire sobre ellos.
  • DAR genera estadísticas tras cada ejecución. No las queremos en nuestro cron porque generarían emails innecesarios. Sólo stdout (la salida estándar) es redireccionada a /dev/null. Los errores serán mostrados y un email enviado si algo va mal.
  • Los últimos dos comandos find son opcionales. Los uso para cambiar el propietario a un usuario normal, quien creará posteriormente las copias de seguridad. De nuevo, otro riesgo de seguridad. El usuario root debería hacer copias de seguridad como root, y los usuarios deberían realizar sus propias copias. Pero en un sistema monousuario me da igual. Si algún intruso es lo suficientemente bueno para atravesar el cortafuegos y las palabras claves de mis cuentas de usuarios para poder leer las copias de seguridad, ya la he fastidiado.

Ahora ponga el siguiente script casi idéntico para copias de seguridad diferenciales en /root/dar_diff.sh:

#!/bin/bash
# dar_diff.sh -- daily differential system backup with DAR, meant to run
# from root's crontab. Uses the newest existing archive as the reference.

DIR=/var/backups/system
FILE=${DIR}/$(/bin/date -I)_diff
# Newest archive slice: ls sorts alphabetically and ISO dates sort
# chronologically, so tail -n 1 yields the most recent backup.
PREV=$(/bin/ls "$DIR"/*.dar | /usr/bin/tail -n 1)
# ${PREV%%.*} strips slice number and extension: dar -A wants the base name,
# otherwise it stops to ask an interactive question.
/usr/local/bin/dar -m 256 -y -s 600M -D -R / -c "$FILE" -Z "*.gz" \
   -Z "*.bz2" -Z "*.zip" -Z "*.png" -P home/gradha -P tmp -P mnt \
   -P dev/pts -P proc -P floppy -P burner -P cdrom \
   -P var/backups -A "${PREV%%.*}" > /dev/null
# Verify the freshly created archive.
/usr/local/bin/dar -t "$FILE" > /dev/null
# Give the slices to the normal user (":group" is the portable chown form).
/usr/bin/find "$DIR" -type f -exec chown :gradha {} \;
/usr/bin/find "$DIR" -type f -exec chmod 440 {} \;

Los únicos dos cambios son la adición del parámetro -A y la generación de la variable PREV con una complicada línea de comando. Veamos qué es lo que hace esta línea de comando:

  • Primero el comando ls crea un listado de los ficheros con la extensión .dar en el directorio de copias de seguridad. La salida se pasa por una tubería al siguiente comando.
  • Por defecto ls muestra los ficheros en orden alfabético. Usamos tail para obtener el último fichero con el parámetro -n 1, el cual hace que sólo se muestre la última línea.
  • DAR quiere operar siempre con nombres de fichero sin número de trozo o extensión. Esto significa que si no nos deshacemos de éstas, DAR detendrá la operación para realizar una pregunta interactiva al usuario, fastidiando toda la automatización. Separamos el nombre completo del fichero con una característica de Bash llamada expansión de parámetros. Hay varios tipos de expansiones posibles, puede teclear man bash para verlas todas. Aquella que usa %% eliminará el patrón final más largo que coincida con lo que va tras %%. El resultado es el nombre base que queremos pasar a DAR.

Ahora sólo tenemos que poner estos dos scripts bajo cron. Esto es lo que tenemos que teclear tras crontab -e:

15 0 2-31 * * ./dar_diff.sh
15 0 1    * * ./dar_backup.sh

Puede informarse sobre la sintaxis con man -S 5 crontab. En pocas palabras, estas dos líneas le dicen a cron que ejecute los scripts 15 minutos tras medianoche. dar_backup.sh se ejecutará sólo el primer día del mes. El otro script se ejecutará el resto de los días.

Aquí están los scripts de copia de seguridad para sus usuarios. Son iguales, cambiando únicamente los parámetros del comando DAR y algunas rutas:

#!/bin/bash
# dar_backup.sh -- monthly full backup of the user's home directory.

DIR=/var/backups/gradha
FILE=${DIR}/$(/bin/date -I)_data
# Commands
# Archive the home directory, skipping compression for already-compressed
# file types; stdout discarded so cron only mails on errors.
/usr/local/bin/dar -m 256 -y -s 600M -D -R /home/gradha -c "$FILE" \
   -Z "*.gz" -Z "*.bz2" -Z "*.zip" -Z "*.png" \
   -P instalacion_manual -P Mail/mail_pa_leer > /dev/null
# Verify the freshly created archive.
/usr/local/bin/dar -t "$FILE" > /dev/null
/usr/bin/find "$DIR" -type f -exec chmod 400 {} \;

#!/bin/bash
# dar_diff.sh -- daily differential backup of the user's home directory.
# Uses the newest existing archive as the reference.

DIR=/var/backups/gradha
FILE=${DIR}/$(/bin/date -I)_diff
# Newest archive slice: alphabetical order of ISO dates is chronological.
PREV=$(/bin/ls "$DIR"/*.dar | /usr/bin/tail -n 1)
# Fix: the original repeated -Z "*.zip" twice; every sibling script
# excludes "*.png" from compression here instead.
/usr/local/bin/dar -m 256 -y -s 600M -D -R /home/gradha -c "$FILE" \
   -Z "*.gz" -Z "*.bz2" -Z "*.zip" -Z "*.png" \
   -P instalacion_manual -P Mail/mail_pa_leer \
   -A "${PREV%%.*}" > /dev/null
# Verify the freshly created archive.
/usr/local/bin/dar -t "$FILE" > /dev/null
/usr/bin/find "$DIR" -type f -exec chmod 400 {} \;

No olvide añadir las entradas crontab requeridas por su usuario apuntando a la ruta adecuada.

Recuperando su copia de seguridad desde cero

Cuando llegue el momento de recuperar su copia de seguridad, dependiendo de lo que haya guardado tendrá una copia de seguridad completa del mes más copias de seguridad diferenciales hasta la última vez que las pudo realizar. El proceso de recuperación es muy simple, es el mismo descrito en el primer capítulo (Uso simple de DAR), sólo que debe hacerlo primero con la copia de seguridad completa, y entonces con las copias de seguridad diferenciales. Esto puede ser muy aburrido, así que aquí tiene otro script que puede guardar junto con sus ficheros de copia de seguridad:

#!/bin/bash
# recover.sh -- restore a full backup plus all its differential backups.
#
# Usage: recover.sh dar_location base_full_backup directory
# The full archive is restored first, then every differential archive of
# the same month in alphabetical (= chronological) order, overwriting
# previously restored files (-w).

if [ -n "$3" ]; then
   CMD="$1"              # path to a working dar binary
   INPUT="$2_data"       # full backup base name, e.g. 2003-10-01_data
   FS_ROOT="$3"          # directory where files will be restored
   $CMD -x "$INPUT" -w -R "$FS_ROOT"
   # ${INPUT:0:8} is the "YYYY-MM-" prefix: match that month's differentials.
   for file in ${INPUT:0:8}*_diff*; do
      # If no differential backup exists the glob stays as a literal
      # pattern; skip it instead of calling dar on a nonexistent name.
      [ -e "$file" ] || continue
      # ${file:0:15} keeps "YYYY-MM-DD_diff", stripping slice number and
      # extension, which is the base name dar expects.
      $CMD -x "${file:0:15}" -w -R "$FS_ROOT"
   done
   echo "All done."
else
   echo "Not enough parameters.

Usage: script dar_location base_full_backup directory

Where dar_location is a path to a working dar binary, base_full_backup
is a date in the format 'YYYY-MM-DD', and directory is the place where
you want to put the restored data, usually '/' when run as root."
fi

Este script es auto explicativo. La única cosa por la que debe preocuparse es el parámetro -w, que le dice a DAR que sobreescriba los ficheros que encuentre. Esto es necesario para copias de seguridad diferenciales. Oh, y ponga el script en el mismo directorio que sus ficheros de copia de seguridad. Aquí tiene un ejemplo de uso:

./recover.sh /usr/local/bin/dar 2003-10-01 /tmp/temp_path/

Pruebe ejecutar eso como un usuario normal con algunos ficheros de copias de seguridad. Puede poner el resultado en un directorio temporal, así que lo bueno es que no necesita borrar su disco duro para probarlo.

Añadiendo verificaciones a los scripts

Denis Corbin sugiere que los scripts que crean las copias de seguridad podrían verificar el código de salida del comando DAR. Para el propósito de estos scripts tan simples esto no es crítico porque el propio DAR abortará la operación con un mensaje de error, y cron informará de cualquier salida de error por email (algo que no ocurre si todo va bien).

No obstante, verificar el código de salida puede ser útil si está probando los scripts de forma interactiva y quiere saber qué comandos están siendo ejecutados:

#!/bin/bash
# dar_backup.sh (verbose variant) -- full system backup that checks dar's
# exit codes, so interactive runs report whether creation and testing of
# the archive succeeded.

DIR=/var/backups/system
FILE=${DIR}/$(/bin/date -I)_data
# Commands
# Only test the archive if creation succeeded; report each outcome.
if /usr/local/bin/dar -m 256 -y -s 600M -D -R / -c "$FILE" -Z "*.gz" \
      -Z "*.bz2" -Z "*.zip" -Z "*.png" -P home/gradha -P tmp \
      -P mnt -P dev/pts -P proc -P floppy -P burner \
      -P cdrom -P var/backups > /dev/null ; then
   if /usr/local/bin/dar -t "$FILE" > /dev/null ; then
      echo "Archive created and successfully tested."
   else
      echo "Archive created but test FAILED."
   fi
else
   echo "Archive creating FAILED."
fi
# Give the slices to the normal user (":group" is the portable chown form).
/usr/bin/find "$DIR" -type f -exec chown :gradha {} \;
/usr/bin/find "$DIR" -type f -exec chmod 440 {} \;

Puede probar esta versión fácilmente ejecutando el script y matando el proceso DAR desde otra terminal o consola con killall dar. Esto forzará la terminación del proceso DAR y verá que una de las ramas de error es alcanzada en el script.

Otro posible uso de la verificación del código de retorno del comando sería borrar archivos incompletos del disco duro si algo falla, ejecutar comandos externos adicionales si algo falla, o evitar verificar el archivo creado cuando sabe que el primer comando falló. Esto último se puede hacer fácilmente concatenando los comandos de creación y verificación con && en una sola línea. Esto le dice a la shell que ejecute ambos comandos como una secuencia para evitar ejecutar el segundo si el primero falla.

No obstante, si falla la corriente eléctrica durante una copia de seguridad, esta versión del script todavía dejaría a medio escribir archivos inválidos. Para prevenir esto podría mejorar el script para realizar una verificación positiva. Esto significa crear el fichero de copia de seguridad en un directorio temporal junto con un fichero *.valid si se alcanza la rama adecuada del script con éxito.

Continuando esta estrategia, otro script cron monitorizando el directorio donde se crean los ficheros temporales de copias de seguridad movería al directorio final aquellos archivos con un fichero *.valid correspondiente, borrando todos los demás cuya última fecha de modificación fuese mayor que una hora.

Ideas para el futuro

No voy a implementar estas pronto, porque soy muy vago, pero si usted es uno de esos hackers hiper activos, aquí tiene algunas cosas que estaría bien tener:

  • Unificar tanto el script principal como el diferencial en uno, por lo que si el script se ejecuta y no hay fichero de copia de seguridad principal para el mes actual, será creado, y de lo contrario se creará una diferencial. Útil si su máquina está apagada por alguna razón durante el día del mes que realiza la copia de seguridad no diferencial.

  • Mejorar los scripts para generar una imagen CDROM diaria con cdrecord y grabarla automáticamente en un disco regrabable colocado en su máquina. Por lo que si su disco duro entero resulta dañado, todavía tiene la última copia de seguridad en otro medio de almacenamiento. Por supuesto, esto es limitado y no puede ser automático si su copia de seguridad necesita más de un CDROM. Haga lo mismo para ZIP/JAZZ/loquesea.

  • Integrar las copias de seguridad generadas con una mini distribución Knoppix autoarrancable. O cualquier otra distribución basada en disquetes que puede arrancar desde CDROM. Así tendría un CDROM de rescate con las herramientas para formatear su disco duro, y justo al lado una copia de seguridad fresca con la cual restablecer su máquina a un estado funcional.

  • Sincronización de los directorios con copias de seguridad a través de Internet con máquinas remotas. Así, si su máquina acaba quemándose físicamente junto con su casa, todavía tiene copias de seguridad seguras en alguna otra parte. Podría hacerse de forma sencilla con programas como rsync funcionando por ssh como tarea del cron.

  • Extraer parámetros comunes en un fichero separado e incluirlo en sus scripts usando el parámetro -B de DAR. Por ejemplo:

    $ cat > /var/backups/system/common.dcf
    -m 256 -y -s 600M -D -R / -Z "*.gz" -Z "*.bz2" -Z "*.zip" \
    -Z "*.png" -P home/gradha -P tmp -P mnt -P dev/pts \
    -P proc -P floppy -P burner -P cdrom -P var/backups
    

    Más tarde puede usar esto en el script:

    DIR=/var/backups/system
    FILE=${DIR}/`/bin/date -I`_data
    # Commands
    /usr/local/bin/dar -B ${DIR}/common.dcf -c $FILE > /dev/null
    /usr/local/bin/dar -t $FILE > /dev/null
    /usr/bin/find $DIR -type f -exec chown .gradha \{\} \;
    

    ¡Que también puede reusar en la versión diferencial!

De hecho, hay personas listas que han comenzado a hacer scripts de este estilo para sí mismas y no les asusta compartirlos. Para evitar engordar este mini-howto, voy a guardarlos tal y como son en mi página web: https://github.com/gradha/dar-differential-backup-mini-howto/tree/master/contrib.

Siéntase libre de enviarme sus propias mejoras y las añadiré al directorio. Ya sea un fichero único o un .tar.gz con una suite de copias de seguridad completa, por favor añada un fichero simple .txt que pondré al lado del fichero. Por favor use inglés en su descripción, ¡y no olvide poner su nombre y dirección de correo para que la gente pueda enviarle correcciones o mejoras!

El fin

Y esa es toda la magia. Si tiene problemas, algo no está claro o es incorrecto (lo cual es peor), mándeme un email. Si encuentra este documento útil y quiere traducirlo, mándeme una traducción del fichero source.en.txt para que pueda distribuirla junto con esta versión y otros usuarios puedan encontrar fácilmente su versión traducida. Hablando de localizar, debería ser capaz de obtener el código fuente de este documento de mi página personal (enlace al comienzo del documento).
¡Disfrute!
dar-2.6.8/doc/mini-howto/Makefile.in0000644000175000017520000003720613617552304014142 00000000000000# Makefile.in generated by automake 1.15 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2014 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = { \ if test -z '$(MAKELEVEL)'; then \ false; \ elif test -n '$(MAKE_HOST)'; then \ true; \ elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \ true; \ else \ false; \ fi; \ } am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; 
$(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) pkgdatadir = $(datadir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ pkglibdir = $(libdir)/@PACKAGE@ pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = doc/mini-howto ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/iconv.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/nls.m4 $(top_srcdir)/m4/po.m4 \ $(top_srcdir)/m4/progtest.m4 $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) DIST_COMMON = $(srcdir)/Makefile.am $(dist_pkgdata_DATA) \ $(am__DIST_COMMON) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac am__vpath_adj_setup = srcdirstrip=`echo "$(srcdir)" | sed 's|.|.|g'`; am__vpath_adj = case $$p in \ $(srcdir)/*) f=`echo "$$p" | sed "s|^$$srcdirstrip/||"`;; \ *) f=$$p;; \ esac; am__strip_dir = f=`echo $$p | sed -e 's|^.*/||'`; am__install_max = 40 
am__nobase_strip_setup = \ srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*|]/\\\\&/g'` am__nobase_strip = \ for p in $$list; do echo "$$p"; done | sed -e "s|$$srcdirstrip/||" am__nobase_list = $(am__nobase_strip_setup); \ for p in $$list; do echo "$$p $$p"; done | \ sed "s| $$srcdirstrip/| |;"' / .*\//!s/ .*/ ./; s,\( .*\)/[^/]*$$,\1,' | \ $(AWK) 'BEGIN { files["."] = "" } { files[$$2] = files[$$2] " " $$1; \ if (++n[$$2] == $(am__install_max)) \ { print $$2, files[$$2]; n[$$2] = 0; files[$$2] = "" } } \ END { for (dir in files) print dir, files[dir] }' am__base_list = \ sed '$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;$$!N;s/\n/ /g' | \ sed '$$!N;$$!N;$$!N;$$!N;s/\n/ /g' am__uninstall_files_from_dir = { \ test -z "$$files" \ || { test ! -d "$$dir" && test ! -f "$$dir" && test ! -r "$$dir"; } \ || { echo " ( cd '$$dir' && rm -f" $$files ")"; \ $(am__cd) "$$dir" && rm -f $$files; }; \ } am__installdirs = "$(DESTDIR)$(pkgdatadir)" DATA = $(dist_pkgdata_DATA) am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) am__DIST_COMMON = $(srcdir)/Makefile.in README DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) ACLOCAL = @ACLOCAL@ AMTAR = @AMTAR@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CXXSTDFLAGS = @CXXSTDFLAGS@ CYGPATH_W = @CYGPATH_W@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DOXYGEN_PROG = @DOXYGEN_PROG@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPGME_CFLAGS = @GPGME_CFLAGS@ GPGME_CONFIG = @GPGME_CONFIG@ GPGME_LIBS = @GPGME_LIBS@ GREP = @GREP@ HAS_DOT = @HAS_DOT@ INSTALL = @INSTALL@ 
INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LD = @LD@ LDFLAGS = @LDFLAGS@ LIBCURL_CFLAGS = @LIBCURL_CFLAGS@ LIBCURL_LIBS = @LIBCURL_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBS = @LIBS@ LIBTHREADAR_CFLAGS = @LIBTHREADAR_CFLAGS@ LIBTHREADAR_LIBS = @LIBTHREADAR_LIBS@ LIBTOOL = @LIBTOOL@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ LT_SYS_LIBRARY_PATH = @LT_SYS_LIBRARY_PATH@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ NM = @NM@ NMEDIT = @NMEDIT@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYEXT = @PYEXT@ PYFLAGS = @PYFLAGS@ RANLIB = @RANLIB@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ STRIP = @STRIP@ UPX_PROG = @UPX_PROG@ USE_NLS = @USE_NLS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ bindir = @bindir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ 
build_vendor = @build_vendor@ builddir = @builddir@ datadir = @datadir@ datarootdir = @datarootdir@ docdir = @docdir@ dot = @dot@ doxygen = @doxygen@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ groff = @groff@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ prefix = @prefix@ program_transform_name = @program_transform_name@ psdir = @psdir@ runstatedir = @runstatedir@ sbindir = @sbindir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp = @tmp@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ upx = @upx@ dist_pkgdata_DATA = dar-differential-backup-mini-howto.en.html dar-differential-backup-mini-howto.it.html dar-differential-backup-mini-howto.es.html index.html all: all-am .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --gnu doc/mini-howto/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --gnu doc/mini-howto/Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs install-dist_pkgdataDATA: $(dist_pkgdata_DATA) @$(NORMAL_INSTALL) @list='$(dist_pkgdata_DATA)'; test -n "$(pkgdatadir)" || list=; \ if test -n "$$list"; then \ echo " $(MKDIR_P) '$(DESTDIR)$(pkgdatadir)'"; \ $(MKDIR_P) "$(DESTDIR)$(pkgdatadir)" || exit 1; \ fi; \ for p in $$list; do \ if test -f "$$p"; then d=; else d="$(srcdir)/"; fi; \ echo "$$d$$p"; \ done | $(am__base_list) | \ while read files; do \ echo " $(INSTALL_DATA) $$files '$(DESTDIR)$(pkgdatadir)'"; \ $(INSTALL_DATA) $$files "$(DESTDIR)$(pkgdatadir)" || exit $$?; \ done uninstall-dist_pkgdataDATA: @$(NORMAL_UNINSTALL) @list='$(dist_pkgdata_DATA)'; test -n "$(pkgdatadir)" || list=; \ files=`for p in $$list; do echo $$p; done | sed -e 's|^.*/||'`; \ dir='$(DESTDIR)$(pkgdatadir)'; $(am__uninstall_files_from_dir) tags TAGS: ctags CTAGS: cscope cscopelist: distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for 
file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(DATA) installdirs: for dir in "$(DESTDIR)$(pkgdatadir)"; do \ test -z "$$dir" || $(MKDIR_P) "$$dir"; \ done install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-am clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-am -rm -f Makefile distclean-am: clean-am distclean-generic dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-dist_pkgdataDATA @$(NORMAL_INSTALL) $(MAKE) $(AM_MAKEFLAGS) install-data-hook install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-dist_pkgdataDATA @$(NORMAL_INSTALL) $(MAKE) $(AM_MAKEFLAGS) uninstall-hook .MAKE: install-am install-data-am install-strip uninstall-am .PHONY: all all-am check check-am clean clean-generic clean-libtool \ cscopelist-am ctags-am distclean distclean-generic \ distclean-libtool distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am \ install-data-hook install-dist_pkgdataDATA install-dvi \ install-dvi-am install-exec install-exec-am install-html \ install-html-am install-info install-info-am install-man \ install-pdf install-pdf-am install-ps install-ps-am \ install-strip installcheck installcheck-am installdirs \ maintainer-clean maintainer-clean-generic mostlyclean \ mostlyclean-generic mostlyclean-libtool pdf pdf-am ps ps-am \ tags-am uninstall uninstall-am uninstall-dist_pkgdataDATA \ uninstall-hook .PRECIOUS: Makefile install-data-hook: $(INSTALL) -d $(DESTDIR)$(pkgdatadir)/mini-howto $(INSTALL) -m 0644 $(dist_pkgdata_DATA) $(DESTDIR)$(pkgdatadir)/mini-howto uninstall-hook: rm -rf $(DESTDIR)$(pkgdatadir)/mini-howto # Tell versions [3.59,3.63) of GNU make to not export all variables. 
# Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: dar-2.6.8/doc/mini-howto/index.html0000644000175000017520000000353013476543220014063 00000000000000 Dar - Mini-Howto
Dar Documentation


Mini Howto



This Mini-Howto has been written by Grzegorz Adam Hankiewicz. For convenience you can find bellow a local copy of his work but can also fetch updated version from the official site.

This Mini-Howto has been translated in several languages:


dar-2.6.8/doc/old_dar_key.txt0000644000175000017520000000270113476543220013010 00000000000000-----BEGIN PGP PUBLIC KEY BLOCK----- Version: GnuPG v1.0.6 (GNU/Linux) Comment: For info see http://www.gnupg.org mQGiBDyl5ZwRBACkz8Pj10JsE5sLlyB0wlhsWa1TKVtpcj1d7R0S+OifT4jw4E9c 36c+zGIH4bIXhk/kwADhWH6caFOcAN5YWcauUCUMcdKHf1s3Pr0V4XNwuWa3jaIJ IAvTNxPYgMvTYvK2MrE1DnYuIZkjrpR/XOgCSSQpIM8qENUhhHIgcTkrswCgzAhM oq+QQk29gQ6K+YuZp7Qz8bMEAJJHkCt3nGHfU+vtFiNQstJ6usu62qv2AJD5Cmkk lYW5HsiE26w7w2JDrCAxs24YYIGAZKkV/05dJxWsiF/c87/XtZNsdQASSFAsKvr+ yNSxRPYvpfbLP5IX3KmKGdgX/3H1e/tZb6iLwpeKGnuMo3xOI900Vzloi8cXMz0x iy6eBACP6bjjgYjQ1E5olJKl4C4dkDVVArH7Gpa0N+bH1idESA+VkqNVz2Ydfarp k35SQ0UZJ9j432QeUrPsU/2JHqII/WyLTANFqmiUWkYFzmjS5bI/AlVyUo07mRCG qAkjBHNpd1clwifrrr15UykN3v9zgJHSEkEBD97jPmDmEGhnE7Q8RGVuaXMgQ29y YmluIChodHRwOi8vZGFyLmxpbnV4LmZyZWUuZnIvKSA8ZGFyLmxpbnV4QGZyZWUu ZnI+iFcEExECABcFAjyl5ZwFCwcKAwQDFQMCAxYCAQIXgAAKCRCkLkIjyBgaUsFT AJ4w/Lz7BSsiO5Bm+YpTnQhbTV8avwCeL4xD+0u0xhIgjNqW4FQNj9D2g8y5AUUE PKXlnhAE4Kgg3USjhKvjPL7BUbpw5/hI45vC8SWQKAfYCSjQ30i8DWa+GabVMiCy 8mwgpwTJm/9FPx0JRlS5h6fCMfhth6zSRdK0AFToThTFPXtAp+DliAy1k6e2LDwk bh6AH4rbaix+NoOxhvwVeJ0ivL4Evua9ZCJlqcb4kWTO+jwAI3nHMC5AVmB3AcpB nHT+2wcDmJpKzuOLnd2w8z2EJwADBQTfdNskxTuLvBXBqDgtHWTRnZZIRAfoLHwU zgbwVvbZpYpVXjMd+oyGKA1mz21bhZLZUDusN7TRMQFDE3KdG3hvsaVqOvuMlCEY EyHqF11DcYl4beZ1nj2tBlzT0Gc5GUx2d2lCa3xltDTULbnoxqxzxM9rLII3CBvz VRpnprCZ5x8lI9FsVXgUVQSNTJ3KFDzLXPOn7rHAo121tii/iEYEGBECAAYFAjyl 5Z4ACgkQpC5CI8gYGlIwWwCeLkW80F06uS4PljUD1ctugOHteygAn0UwSHxTeSFd Bo0ZNpPxfhXbIQKq =k+H0 -----END PGP PUBLIC KEY BLOCK----- dar-2.6.8/doc/dar_doc.jpg0000644000175000017520000002055013476543220012072 00000000000000ÿØÿàJFIFHHÿÛC  !"$"$ÿÛCÿÂð@"ÿÄÿÄÿÚ ý(ðõ•Œ}o¿­ÖãŽøcküOY<ó‹’A ÷›¿÷}t89ø¿µù£¾Ô´ÊoÚÉ—8ý óÑmäÀƒ½ŽUÖÊ·álïäþß3e_÷ßœïúT}ËÉ9Îó*üyëAÐÂ/,¢dÆwžÕ¤zšyxùÿ©‡ªÖlËV® µ_¨ts´¥\pqòšü©§›«Çúß~on6NƒÊuZA>/ Ú¯==>|UÎö~_èçùŸ¤¢Î»†¦mºþå^ÍêYãŸ—Óæ}¬ ÌÞ„W(Û«\”íÒñéP³f:ÿ#õ8{||šº”}_;rKõì’¯™þ³så¾§¨õ€žÂ*“a{-W˜¼õ|»[™ø½²aÌåÛŸK±D!üïc'e?Iö_övÙÖ˧R¥šÔú0Ù£jÿ+Î=¯Þræ )—6¤×Óbþ}Õ­™¶ƒ 
´°ëñêüÆÎo¹óµº—rÎÖhgaßðz{Ed!I‘‡Uö,ŸºqINYbê(Nžo³Ã|»;WùçÍ“UlÞ’ìù4¾–…Wæ{rÏ.—C.Õm<÷7¤€ <ù¾ŸÌÁÔýKwF7Å*Ü´qÍŽaUEç;™-Éc¯Ž-Ô§W¶³4/Á^Õ+4ß^ì[æÕ̹›/WbÖn¥9yóÕ˜)Ë[#ž¹ ä¸v<{ÓµðìàW°zr>ºw•éê'VÛéC3ÍDè§4žÕ¢œñÍv:7iÙ³4±É™ ´<›6hçhÅ<Þó^Æ"¥è;æ›(fóþ˜#0qÙÏœ¥k´êÉÑ•Ö6É3ÿÄ*!1 "02P#@A`3ÿÚÿ¤ÏéIÀ¿T©ˆE×F°§#ôZëö-ö–h˜›ö·¿rþ…¼qGùrUl6g |2øýx×!kŸáE_ȵV[XSÃοÇôÆ´wL² 騏ütù)9¯ô:ÑßJ;FœAIm9Úú+w×ú`éÔãå*§3ˆé¾7e†^Â&Yv™´ÃÛû˜› šÚÝ.ÒÓüb¡•\MB‡M&ŸOÑ «‘2ÇŠ;rÏß¼N¤êMðû–j«•þ<­ü8cÓ™ªfZiÕj5¯û7W­±%V¥Õ“3ˆŸ¶Ó‚¨H®­c€¶g˜—ZµV‘äPØçª8§† ´™aŠ›æ&y<7r–|Bð[‡úÏ`f©²q+$>#~' %‘X'¿3…Ü,Ѭ°bÅ9UÝ4½£óQU…Ú—éß5iбcâÆT¡íúÜË›ee²s*îêLÕÙŠ…ÄD=G¯²êmZjwg~ Çbgt¡¹_)üŒ0‚U-µ#ZbUm§MBÒ>¦8[fçå¥í‰Þ¾£¨'§j Æ5îEœ+ãjò_‹Jÿôåm‹XëW-¸m¥âŒÊº*1õ±ÉÔ?N¢y ÜÁp.³£KuKpÝ5—\ªË£ µz¥:jÒ/+N%ºÍ5Û¥î·32÷]Ÿ'&Éx€w–:5*ÿSžÓVw¹Hµ–4éÅk5Uõ£ÑRÓUޱ‰•)Ípw õ zhÑbղåǺ&W`›DÔR€TªÃ¢³¤±ß§9'ǧÓ,(%–µB#ªcX ÚÄ¢ÃXˆ‘VZåJ·&øYþY?Ë{¦”æž}=§šö€û­<ûÎð¢Æ{"iÌéâm›¢c0'-Pø«b!ÈÖӨ/‹üÛ8qÊcØG13ö”dÛ1109Øøm±z¬Á.xµ"û-JW¨â0ÊéÆ+³ñOÇQÇ8»–9ØvÁìéfÚP ýJW€Ç 1-BÁ©yUV%ÞÛ°¬:žXä ÜÇŠ“ågTØîûik0öm^³dZIf ÁÖ ϰ9«úv‰¶bbbcÝp.­ª7EŸCaW%e§ùœ¬¹Æi;…•™`~“«ôÓ+WWtOäÓföºÝ‡OÔõ p£0ºˆ?ÑÀŽŠð(«É³JÄz[„¢»-G7N¶¾Êi;«6a•²®äðDQ^ëk[.ÀÞ,©£"7p•÷cû,ªÐvB9T ƒNÁ«B¥¼)´A5vtØwR1˜ƒÅgç`øÔr¿ eV`K4ᢌ-´Z/±w \.ÿ¿ÿÄ3 !1A"2 Q$@Baq0CRS`±áðÿÚ?þ…&GÜëa¤eÇO @…ËçÄ+ø¿br”;Á˜Ÿ¬žÈò΂UÚõñ©ôÎ!Lop•â˹ö™GÕÝ"\ÄŸLKCk¸|ÙïÙ]ƒ¼vIÒ±,Þ9ľêéÄjGª1ÇÌUõºëCÚ2ÛjLCw(Æ—æ.¤Ec\þ–ïMZ‡y‹] ü·°¥w”órlfØ…ì^¦÷ÆÚbän_é<ÍŒ™gT™}¤½àµ™ÈJcéÎÑ+¥ºŸ/ÈýØf¦*»ŒkÅD®F±T,æc©AÚWJôÛQï7İstÜyË?NQ`úm@¡Šâ4'K@uéâö…St¾¶$™ðÔZÝ+*†i¾â VyÜùE­íè áæµÝcÊ`Svêý`YSl DÈÃ`&Àr'™¯_ lÛ¤éÚ)ÕhÖ¾ UYÊÂÃ+øer±ÁÛS21ÃÆÒ`áËxÊ_´x¨Ò;ö<¦;TÏPå8†)±ßA¶Ts>ØÜe7Ü#xGòÌbÞ‹ézòðK죭f6e\??ÛÚ©;zÏ%´ ‘(ÇÖÛ,O9\¶ŽU´øì€}Ò¼Aæûôôý"3rmp.10rÓmã”Gó[åšé1¸‘cM-_ÓýËfý +þþÑÙ¨M¶^Úg.¼É”½n5©×Àã(ö‡ '´*ÇɤÙ]uÒ e‚[VíÕ@9\jÖâðÒ' ª½½\ŒÆÂjZ-¤ÈóPýô® é8‚…m_¬ÏH«CÈÔO4²šŸâ,ÛméÞ.ûê-÷PF†Wtè!ÇY®Ý%((4xÿÄ)1! 
@AQ"#20BaÿÚ?÷Øö§Ù5ȼű[g¨ÓØ^itÅ<ž˜›LØßáì¿Ôì?×ULͳ`Ê4í`È•èØË(zùXnc©SƒêÓ¦NLJAeÕ솰ñôÄJiÞÁe‹´â8ùŠ2cÞii"-ˆ2íBºí²öÜ} dâiéÉ 6ÍA•¶\ÀøIVÕÏÜÕ¦Û ÆW¿íy€L™¸üK,a ϧNŸí4À*Ën"Ç̹•GˆÍÇÇ1Én`c¦Ë%€LcÌýc©³1ôçãÑZolM¸âîZ¬~ ÂgˆÖ¢Á~æÄq.à4È*!þ°X8è:YPhÊWžŠÅxçû×?3µc Ä}‡3òäkYºƒ˜oS,±JàJlm0:‘Ì·˜®Wˆº¸Œ¬¾FPÜË*+Ö yŠêÙÄÚ¬¹Äì«y"5 õ?vÆ©1àËkUÊ6 æ=\ˆ)þ=Ñjf&G=;;­;­éÜq‰Ý¹ßxÚ¦c ™\G²²ܶÄdÆec¸˜Ì çõ2‡ñ¶mÚs8hË´ãÙ5¬ÜÎëg1›qϸÿÄ2!1 "0AQq2@aPp‘¡#BR`bráÿÚ?ü‹µ/ýë*ô·ÑcõR®®h>(}êB‚€ÙL wZ „ÿªáhë€HÆáFª,õE”»iû/YPÇüC­kaµ.£› M,i#t½ØêaÈ©D|Ô·²ÒÕŽÀŒ(Å;M±W_ 5ä­Ÿp"Õ„jt´Çuf«ÙIá ú÷Vj“d/Í%M#ºÊ¥B/=s²S™ó4;ŸmÛ f|+Y`ù+¹êyºFuhËMnõ:Ÿ€ ›Ÿ4ò7¬¸¯XVº¹²ÂôU¹¤õéXPŸú"âd•¨ŽÖeÄŸ•ÿP _ÍuT ™Xpû)¹òT鬂¡Ö<Èè)IWÍŽ¢`䫬 Fà3Y¸QËÊʱ4ÒÙ^•1L,.«­¬÷Þ–Ç.7nV–4¸·VµdtCt·Ùåb¶cÙ[ge‡9q»OÀ]üî¿ÄP…;±í@ŒòàoYa§{„¨#šâ¥Ùì¬ô4Ÿ*I‘OB<Ë{MP§¢sˆ¹R<ÐîËfϺøRWÙ|!¤À›¨ÙååÅ•© š舕 g€©Q¹V>ˉYL®§taâ:)Ú.ÈÏTÆL­N R‰¸R¸H¬€%C‚Ò0(4:+ñð¯îx€4›ÝiV}–d+eq0Í‹æåtGÐáÀ™… ¯ÙuBÍ 1ãòÿÄ+!1AQa q0P‘¡@±ÁÑá`pðÿÚ?!ÿ‘Ù)Üû(XÎCÞfå˜öP‹pî±ÙÓ˜¿2üÌá™Wn~Ŷ&=˜ÂÖ¦P•9Ëí4ýˆqY ïÕn%¨¦!‹©pÒæ¼È¿aÍJÛÄ{DX€k˜T%ñ=ˆÕOk.bËV„™«ÍäŠIZþÒ㲇Pö"%Àò|3F[/3W˜‚¤‚M®‹Á-ƒË¡D¥Ôj˜\ÃéF"DPͨ»„P}‚¢Œ"Ô¢Îã,€úËEôéÚ{'½Yè a¨áEW¾ÐÑ1]‚쨩Áûª bŸ7ü‘Ö½ÊG8¦µý ÉAq;3Ø„í¾>­8NÄó4C)A`âéèÈ‹ÑÛ*M !J¹Á+®3´Ÿ"£›§àå†lcy® ‡)Aüá„&þ¢É2naz%8™˜c+ˆÖ,&ã-™všDÌÎãý2õä~"•O31ÐÉ«àé]™88C2\N?ÊUÞrÊh—µ¸¡±[—xó]sõ.j;â"'q7¿M^çG¤–6óüÌ}­íž2ý‘¨žäfZWM#§ £HÍÌ€ ¦ØÍBœOÙ˜EL0¿T íþYqfîL¨«X5óæsÁBÂüà]@|dGfQÅÇpŠŸ€‹[P]`¾¨áìžþl{Âü„o)^>¬|"„UvËŠF,ì>LɵèEîöèÙÖåšÊ•ç.iNm0ƒ «Ú]„g‘›[1‡ T+ØRÃä ~ Õj£Áp^Q5k¹Jš¿Ô­ÜNH¾cE¿’ Ù.Ph©ÜÍ/tfø^'“ŽaWðG®…íƒÞ-ƒ" Ø% 4†Öøí~SdÃ.öLOl}†´ÞɃæ¥öJWœgÌÅSPi0Ë)ƒOýÿÚ óÏ<óÏ<óÏ<óÏ<óÏ<óÏ<óÏ<⯼óÏ<óÏ<óÏ<óÏ<óæœóÏ<óÏ<óÏ<óÏPS±Üb”Ÿ8#Tó‚%Ÿšlߦ¾p[WØ™c‹ÒH±R°ÒæõUò“£Â(¾¹hXéÃþëòz?–Pϲ ÐE~dFÞ="gÞ^ËYR¼“©Î!;Ä»?~ìr¢P`*îs-Jí}Ä„Ƶø¨ÐMÅ:™La¼ÆK*Tk˜eˆf~ÄÄ»™`:Mã–#2ʶìžíÀ~7=<¿P+à„w¨<¨úŠf ;¥Ù3M‘ñ¨;©LRžÂXÿp¬vª^ð¨eƒï~,yGõÛGYnÒ[ Á2V"°’ÙñY€ö; Ë»&ñP[æõ,LôK-}–‚m½OÜ̓Æß{€È_C €Î9ï€Pã»>pÜ=„Ë™bÌ·TÔ?qpýÁ¶•ƒƽtù·à¿gÔzûéÙæ|Ç–ZUvª]¬¨SØNܱµ­c=:÷ùÀDiŽ ‰§dSƒæ0Õ‰Wv]x:ÔZ*ÞN¥b£lk¦Ç]N"2¬5ŽWË©I0i±-âÚ{ q'ø1ãKL’‡Q³Ò ”ÍÉ›iNŠ9g¾±2;ÃS/'‰ÓØD Ç›ŠñC²®’³M³UºÖ`G’ÏÄã˜8,óëïß- ‚ ú%é¡6qgSÄø€g'ðpÆEcÄzþ}÷Å/néÄoäÿÄ%!1A@Q qa‘±Áá0ÿÚ?ïj}‘à×d#´¦õv)c»_ •-ÑÙþpHD¸Diéºê4hˆwX‘ ML Ç“P*µž>%ÿyt 
$O•ιö޾ ûéRLèH4?©ž÷Œ2Üâ+=.C™PhÜU|A‹7à&g¿Â{ã0(¦mÆZ: ±Dˆ­ôÖ[âX³fË2v9•÷k-ŽÞ|±kwÉhH9^ËpîâÇ0EÊ#OT‡BR×XAj€fÓ‰‡‰„wl磬Oi™æ)] [®™yE©¨\¨Ù?nŒ‰“XÍ"Ÿ‚Ui¬Ù'ƒ-Ù%Àê¾:‹ñˆ£í/ÄýIJsæÜ•/|~âm¢øÿ%éüFÖˆµÅ(Tº€s>¬¸¥ô NS'GÑn+¨ð†Ž‰õˆ”ÄAž"ìoó+wŒûÆwÇ`)¨(Ù1ï’=ÎãÿÄ,!1AQaq0‘¡±ÁÑð P@áñ`pÿÚ?ÿÑ¡´9‚é½¥dÔU¬ºWëJèÜá´j—Q²—™˜<ÏôgN-[ÙÏÒ,<œȵ%õœ`øÂøÅA&=ôÞúÿD©‘hä@§ô€ naŒÃ.B߈+'£õˆhoú.úÃ’¡ÈdøÎ6‡!V¯;•è[6‡î£ØV©òÜyžês=ñÕ.kH~bÈo?ЊIvu‰•kŠéˆ^¤¯{yßýLTXæ¾qÍ;9o¿ÂÈjRî¨31¬efÀ×'ô.£ê4Ñ“©ùŠK»S) q©’¦¨[¹°Ç«•½9®ÿ½`A¼]îß'ôTaªÅ©þ˜õºCáŸÞ%8R_(dlÌ…¤!®²ôªê©ÂÞ²ßÍ|eP¸)¸'#ÌëÓÖ9¢ˆk¿Êà DLñ1Qb÷ô˜6Á»ë/…䚟Ê9¬]L´à¥YÛYsh`øJn³œ” ¢õÊΰ:ñ”sK<Ç…F5ÃaÈBïcü1Æ¥[ù&.¦Dsð5;ó5Û̓±;K°¯¥Þ,G°z–'Î_€ú ›ï)H %êJ±Ü„2ùùKÊJ#„6' W zFÌ«I5‰=í+³Ì^”&ªñâñàÜ4â# ›Î%0©:b`øöp oˆ‘,î\@Sa/'gY‹öUèÐY–Óo£“ýJMË6‹¢z5Xköã16¬w]ÿÔ±3‰jl,ô`Uî//ëstÛ\†ûçÞ.&zrÁl»üÄÂ? ‘͘¼k–îd×g§˜ê†ºLüã¯(•h¦Â½À7¢vžc‡£êü"Rõç´Â#‰¡5§¿î!öVQs©à¸Ô„VEKС‡PžF ‰Fù+ò†6VŽ#ÝØÝß´:ì 1¨¬¨À‚#AËŠ÷c_Fãð¡Þ\V³/XFÞÔ0êƒÁ¸â7|2Îÿ"˜åóꂞ®órñVeÀye¬Í[ö•%•௷ÎËñ1aKí­ìoþJö­}It#¢?’öÔk¹ÆíÊ9ZØŽ=Wí/â0ºÄÍÔE6¦X|އ»¹yuÛeŒ s "Ô&ØžFe’Ñ_/Øô˜—ÆYLF–xYCÇ2&¦J0_cæ½% ·ëŽ™šÎÏ¥J=™¥ÀÀ½Aí5i}àÜÒ<ß·°H+=eèPµBÕÆÎJ±2f<Ë9¤çïîa¥¼x{ËÚÑ‚h"‡«»KX¨víËÐŒâŠ&Z®ˆì ¸Úæ¥J®¸‡~}!Bª®‘‚Ö±/öR¦ÕÄ"`´„ü‚O¼ÃW;¤”]Ÿ¤, *Û~ Èç]*ÝÀ䎘Ì[„%eát—‘¸°x¨üß´µ•UWè-ùI5^ð«Ô­E-J˜Ì:£’ðÀQéáK=Î(æ)Ì0xáß–hŠØ( Wðmàös™f“…ƒ£U Qj½L¬©“£”ê<£ãpõ.U -uš!ß)+ Æ©k õ— õ¿ì§HŸ=ârÓ+Rê_›Ì²@YAÚLj°ìyŠ* µ îñ0ˆ¸œô¿(jÿ™"h -½K P³¬âÐŒ¹jº¢ÔÊÞUK‹ ³}7¨ò£Xb°À`x™"áºkÖ:@íGÞX ìúKM›Æ\>ï*ÊUu‹€\ºÏÆ6Zo¬xndHg®ñlUÇJm¦b·”¸-zÁ¶Ó€ö½‰™ G˜¤ònºFQr°e±Òèÿ,6w€NX.©q nؽ²¡råðEëH°³ñÚ°·JŠBS“!‹;Ë Ã¼NE^®;<’·¤‚[prøHjŒ+Y8ñؽ#g#¹¢ Fye›ÚË©l“Ö¿µ,ÎI~a2&ìE]˜`á·ñVî[€»| þQâ/‹ï?â [”¶hö6Ó¬üËE‹V+-¶”B[‹tòqõìb•­ = çwÃ2’Í–»¿x…0¨1TeL“Ä} ¼GszæÙríå+¿±d‚Ùå•[¬•’É­žÁ­G Ïpòê„ÂZ;žèZ:!ÔÞ:ÂhWÌ󀥸䩕ÈÁÒcj%œâñ/–±…Ã_Á[!` ysˆ ¾’„³ˆô1(În ä ´±Ö@_0/åè>±Ð#`@å)©VñÞ8*«RŸ¼ÝX`zBW@UYzBô ›ªðBKAmÁu{JÎy=#X¡r»‚%žÕi½=Š“Ð›ªžáÁœWJİKò`ÃÒt\ˆ³0ȹ—wŸõ¯ÌJÀ[Yz¡ö”¸»SªëóÜRÙDî4ª‰WÏ‚U;zB`-ß”©ð( ˜ª›ºõ¶cL6À?DvDÑÍ}¥Ò{|Xnw “‰†|+0’ÐU|a%k%/r²ë˜¤89–qßw,l€­Ste°‡oðP±=ˆF¨lÍB(JÔwÔåÖ=#_y–1æ8 \½øërœ*Âö\<:9q}.<¶ãs€Ûëð%—˜´SSdëÛè¹MRƒ qù°5Ý]¼ÕvÍf(—tn [*оÌLÅZF.ªm´/W LŽfÓ 9=HÇ‹)£/Ä•'0˜a.³Ó¤6-ÃþBÈL¥°Búæ `PHv¨.«|ËÜè–³ ÛØüÊ:©¢äg‡Î€¶ëQÂÖ,tÀ6è 
e01™^Gå4+äI\Z^âé]ö,zP­« :›uæ\¼å+VþWô<Þœ¢A€Ð$O@Ý = ²Æ‰Œ‡dºsfµˆùt2½HZ;³üÿÙdar-2.6.8/doc/Makefile.am0000644000175000017520000000255313617551261012035 00000000000000SUBDIRS = samples mini-howto man dist_noinst_DATA = COMMAND_LINE Doxyfile portable_cp dist_pkgdata_DATA = README Features.html Limitations.html Notes.html Tutorial.html Good_Backup_Practice.html FAQ.html api_tutorial.html dar_doc.jpg dar_s_doc.jpg index.html dar-catalog.dtd Known_Bugs.html authentification.html dar_key.txt old_dar_key.txt from_sources.html downloading.html presentation.html usage_notes.html python/libdar_test.py INSTALL_HTML_MAN = $(INSTALL) -d $(DESTDIR)$(pkgdatadir)/man ; $(INSTALL) -m 0644 man/*.html $(DESTDIR)$(pkgdatadir)/man UNINSTALL_HTML_MAN = rm -rf $(DESTDIR)$(pkgdatadir)/man if USE_DOXYGEN DOXYGEN = @DOXYGEN_PROG@ all-local: Doxyfile.tmp Doxyfile.tmp: sed -e "s%##VERSION##%@PACKAGE_VERSION@%g" -e "s%##HAS_DOT##%@HAS_DOT@%g" Doxyfile > Doxyfile.tmp cd .. ; $(DOXYGEN) doc/Doxyfile.tmp if [ -d html/search ]; then chmod u+x html/search ; fi clean-local: rm -rf html Doxyfile.tmp doxygen_sqlite3.db install-data-hook: ./portable_cp html $(DESTDIR)$(pkgdatadir) $(INSTALL_HTML_MAN) $(INSTALL) -d $(DESTDIR)$(pkgdatadir)/python $(INSTALL) -m 0644 python/libdar_test.py $(DESTDIR)$(pkgdatadir)/python uninstall-hook: rm -rf $(DESTDIR)$(pkgdatadir)/html $(UNINSTALL_HTML_MAN) rmdir $(DESTDIR)$(pkgdatadir) || true else all-local: clean-local: install-data-hook: $(INSTALL_HTML_MAN) uninstall-hook: $(UNINSTALL_HTML_MAN) endif dar-2.6.8/doc/COMMAND_LINE0000644000175000017520000001426313552573442011615 00000000000000Status flags: --------------- ! 
: not used - : used without arg + : used with argument * : used with optional argument # : obsolete --------------- letters usage: --------------- a * alteration of operation --alter argument: a[time] binary[-unit[s]] b[lind-to-signatures] c[time] case d[ecremental] do-not-compare-symlink-mtime duc e[rase_ea] f[ixed-date] file-auth[entication] g[lob] h[oles-recheck] header i[gnore-unknown-inode-type] k[eep-compressed] l[axist] list-ea m[ask] n[o-case] r[egex] s[aved] secu SI[-unit[s]] t[ape-marks] test-self-reported-bug y abyte[s] z[eroing-negative-dates] b - terminal bell --bell c + create archive --create d + difference with filesystem --diff e - simulate the operation --empty (aka dry-run) f - do not restore directory structure --flat g + recurse in this directory --go-into h - help usage --help i + path of the input pipe --input j + retry delay for networking errors --network-retry-delay k + do not deleted files dropped between two backups --no-delete / --deleted { ignore | only } l + list archive contents --list m + minimum size below which no compression will occur. --mincompr a default value is used. use -m 0 if you want to compress in any case. 
n - do not allow overwriting --no-overwrite o + path of the output pipe --output p + pause before creating new slice --pause q - suppress final statistics report --quiet r - do not overwrite more recent file --recent [=date] (to implement) s + size of slice --slice t + test archive structure --test u + exclude EA from operation (mask) --exclude-ea v + verbose output --verbose --verbose={skipped|treated|messages|dir|masks|all} w * do not warn before overwriting --no-warn x + extract archive --extract y + repair archive --add-missing-catalogue z * compression --gzip --compress A + make a differential backup --ref B + filename taken as command-line extension --config C + extract catalogue in separated file --isolate D - store excluded dir as empty dir --empty-dir E + shell command to launch with slices --execute F + shell command to launch with slices of archive of reference --execute-ref G - forbid the use of multi threads when compiled against libthreadar --single-thread H * if a file for differential backup or diff differs from exactly one hour it is assumed as identical (no change). I + include mask --include J + key for unscrambling the reference catalogue --key-ref K + key for un/scrambling --key L - license information M - only consider what is under the current mounting point. 
--no-mount-points N - do not read any configuration file ~/.darrc or /etc/darrc O + ignore user Id and group Id --ignore-owner P + prune a directory tree --prune Q + quite on stderr at startup no long option equivalent R + set the root directory --root S + size of the first slice --first-slice T + (listing) tree listing format --tree-format, --list-format= T + (create/merge/isolate) interation count for key derirvation --kdf-param U + include EA for the operation (mask) --include-ea V - version information --version W - warranty disclosure information (POSIX RESERVED, sorry) X + exclude mask --exclude Y + included only filename for compression --include-compression Z + excluded filename from compression --exclude-compression - don't save files with nodump flag set --nodump # + encryption block size --crypto-block * + encryption block size for the archive of reference --crypto-block-ref , - cache directory tagging standard --cache-directory-tagging [ + include files listed in given file --include-from-file ] + exclude files listed in given file --exclude-from-file - x (forbidden by getopt) ? x (forbidden by getopt) : x (forbidden by getopt) + + merging operation --merge @ + second reference archive for merging --aux-ref $ + key for auxilliary refence archive --aux-key ~ + command to execute between slices --aux-execute % + encryption block size for the auxilliary archive of reference --aux-crypto-block / + policy to solve overwriting conflict --overwriting-policy ^ + ownership and permission of generated slices --slice-mode _ + retry on change --retry-on-change { + include files for delta signature --include-delta-sig } + exclude files for delta signature --exclude-delta-sig 0 * sequential read of the archive --sequential-read 1 + sparse-file detection tunning --sparse-file-min-size 2 + dirty file behavior --dirty-behavior {ignore|no-warn} 3 + create a hash algorithm (+algo) --hash 4 + filesystem specific attr. 
family --fsa-scope 5 + exclude file having a given EA set --exclude-by-ea [] 6 + minimum size for delta signatures --delta-sig-min-size 7 + define keys used to sign the archive --sign 8 + delta binary diff --delta sig, --delta patch 9 + min_digits --min-digits archive[,ref[,aux]]; " + anonymous pipe descriptor to read conf from. --pipe-fd ' + how to detect modified date in diff backup --modified-data-detection= {any-change | crc-comparison} . + user comment --user-comment ; x (forbidden by getopt) < + backup hook mask --backup-hook-include > + backup hook mask --backup-hook-exclude = + backup hook execute --backup-hook-execute \ + ignored as a symlinks --ignored-as-symlinks [: dar-2.6.8/doc/Features.html0000644000175000017520000015535213552573442012456 00000000000000 DAR's FEATURES
Dar Documentation


DAR's FEATURES






Here follow the main features of dar/libdar tool. For each feature an overview is presented with some pointers you are welcome to follow for a more detailed information.



HARD LINK CONSIDERATION


hard links are properly saved in any case and properly restored if possible. For example, if restoring across a mounted file system, hard linking will fail, but dar will then duplicate the inode and file contents, issuing a warning. Hard link support includes the following inode types: plain files, char devices, block devices, symlinks (Yes, you can hard link symbolic links! Thanks to Wesley Leggette for the info ;-) )


SPARSE FILES
references: man dar

--sparse-file-min-size, -ah
By default Dar takes care of sparse files, even if the underlying filesystem does not support sparse files(!). When a long sequence of zeroed bytes is met in a file during backup, those are not stored into the archive but the number of zeroed bytes is stored instead (structure known as a "hole"). When comes the time to restore that file, dar restores the normal data but when a hole is met in the archive dar directly skips at the position of the data following that hole. If the underlying filesystem supports sparse files, this will (re)create a hole in the restored file, making a sparse file. Sparse files can report to be several hundred gigabytes large while they need only a few bytes of disk space, being able to properly save and restore them avoids wasting disk space at restoration time and in archives.


EXTENDED ATTRIBUTES (EA)
references: man dar
MacOS X FILE FORKS / ACL
keywords: -u -U -am -ae --alter=list-ea
Dar is able to save and restore EA, all or just those matching a given pattern.

File Forks (MacOS X) are implemented over EA as well as Linux's ACL, they are thus transparently saved, tested, compared and restored by dar. Note that ACL under MacOS seem to not rely on EA, thus while they are marginally used they are ignored by dar.


FILESYSTEM SPECIFIC ATTRIBUTES (FSA)
references: man dar
MacOSX/FreeBSD Birthdate, Linux FS attributes
keyword: --fsa-family
Since release 2.5.0 dar is able to take care of filesystem specific attributes. Those are grouped by family strongly linked to the filesystem they have been read from, but perpendicularly each FSA is designated also by a function. This way it is possible to translate FSA from a filesystem into another filesystem when there is a equivalency in role.

currently two families are present:
  • HFS+ family contains only one function : the birthtime. In addition to ctime, mtime and atime, dar can backup, compare and restore all four dates of a given inode (well, ctime is not possible to restore).
  • extX family contains 12 functions (append_only, compressed, no_dump, immutable, journaling, secure_deletion, no_tail_merging, undeletable, noatime_update, synchronous_directory, synchronous_update, top_of_dir_hierarchy) found on ext2/3/4 and some other Linux filesystems. Dar can thus save and restore all of those for each file depending on the capabilities or permissions dar has at restoration time.


DIRTY FILES
references: man dar

keywords: --dirty-behavior , --retry-on-change
At backup time, dar checks that each saved file had not changed at the time it was read. If a file has changed in that situation, dar retries saving it up to three times (by default) and if it is still changing, is flagged as "dirty" in the archive, and handled differently from other files at restoration time. The dirty file handling is either to warn the user before restoring, to ignore and avoid restoring them, or to ignore the dirty flag and restore them normally.

Note that dar precision when readng/writing inode dates (atime, ctime, mtime, birthtime) is the microsecond. Thus a file is seen as having changed even if a very small modification occurres in it very frequently.


FILTERS
references: man darcommand line usage notes

keywords: -I -X -P -g -[ -] -am --exclude-by-ea
dar  is able to backup from a total file system to a single file, thanks to its filter mechanism. This one is dual headed: The first head let one decide which part of a directory tree to consider for the operation (backup, restoration, etc.) while the second head defines which type of file to consider (filter only based on filename, like for example the extension of the file).

For backup operation, files and directories can also be filtered out if they have been set with a given user defined EA.


NODUMP FLAG references: man dar

keywords: --nodump
Many filesystems, like ext2/3/4 filesystems provide for each inodes a set of flags, among which is the "nodump" flag. You can instruct dar to avoid saving files that have this flag set, as does the so-called dump backup program.


ONE FILESYSTEM references: man dar

keywords: -M
By default dar does not stop at filesystems boundaries unless the filtering mechanism described above exclude such directory that matches another mounted filesystem. But you can also ask dar to avoid changing of filesystem without the burden of finding and listing the directories to be excluded from the backup: dar will manage alone to only save files of the current filesystem.


CACHE DIRECTORY TAGGING STANDARD
references: man dar

keywords: --cache-directory-tagging
Many software use cache directories (mozilla web browser for example), directories where is stored temporaneous data that is not interesting to backup. The Cache Directory Tagging Standard provides a standard way for software applications to identify this type of data, which let dar (like some other backup softwares) able to take into account and avoid saving them.


DIFFERENTIAL BACKUP references: man dar/TUTORIAL

keywords: -A
When making a backup with dar, you have the possibility to make a full backup or a differential backup. A full backup, as expected, makes backup of all files as specified on the command line (with or without filters). Instead, a differential backup, (over filter mechanism), saves only files that have changed since a given reference backup. Additionally, files that existed in the reference backup and which do no more exist at the time of the differential backup are recorded in the backup as "been removed". At recovery time, (unless you deactivate it), restoring a differential backup will update changed files and new files, but also remove files that have been recorded as "been removed". Note that the reference backup can be a full backup or another differential backup (this second method is usually designed as incremental backup). This way you can make a first full backup, then many incremental backups, each taking as reference the last backup made, for example.


DECREMENTAL BACKUP references: man dar / Decremental backup

keywords: -+ -ad
As opposed to incremental backups, where the older one is a full backup and each subsequent backup contains only the changes from the previous backup, decremental backup let the full backup be the more recent while the older ones only contain changes compared to the just more recent one. This has the advantage of providing a single archive to use to restore a whole system in its latest known state, while reducing the overall amount of data to retain older versions of files (same amount required as with differential backup). It has also the advantage to not have to keep several set of backup as you just need to delete the oldest backup when you need storage space. However it has the default to require at each new cycle the creation of a full backup, then the transformation of the previous full backup into a so-called decremental backup. Yes, everything has a cost!


DELTA BINARY
references: man dar

keywords: --delta sig, --include-delta-sig, --exclude-delta-sig, --delta-sig-min-size, --delta no-patch
Since release 2.6.0, for incremental and decremental backups, instead of saving an entire whole file when it has changed, dar/libdar provides the ability to save only the part that has changed in it. This feature called binary delta relies on librsync library. It is not activated by default considering the non null probability of collision between two different versions of a file. This is also the choice of the dar user community.


PREVENTING ROOTKITS AND OTHER MALWARES
references: man dar

keywords: -asecu
At backup time when a differential, incremental or decremental backup is done, dar compares the status of inode on the filesystem to the status they had at the time of the last backup. If the ctime of a file has changed while no other inode field changed dar issues a warning considering that file as suspicious. This does not mean that your system has been compromised but you are strongly advised to check whether this concerned file has been recently updated (Some package manager may lead to that situation) or has its Extended Attributes changed since last backup was made. In normal situation this type of warning does not show often (false positive are rare but possible). However in case your system has been infected by a virus or compromised by a rootkit, dar will signal the problem if the intruder tried to hid its forfait.


DIRECTORY TREE SNAPSHOT references: man dar

keywords: -A +
Dar can make a snapshot of a directory tree and files recording the inode status of files. This may be used to detect changes in filesystem, by "diffing" the resulting archive with the filesystem at a later time. The resulting archive can also be used as reference to save file that have changed since the snapshot has been done. A snapshot archive is very small compared to the corresponding full backup but it cannot be used to restore any data.


SLICES references: man dar/TUTORIAL

keywords: -s -S -p -aSI -abinary
Dar stands for Disk ARchive. From the beginning it was designed to be able to split an archive over several removable media whatever their number is and whatever their size is. To restore from such a splitted archive, dar will directly fetch the requested data in the correct slice(s). Thus dar is able to save and restore using old floppy disk, CD-R, DVD-R, CD-RW, DVD-RW, Zip, Jazz, etc... However, Dar will not un/mount removable media because it is independent of hardware. Given the size, it will split the archive in several files (called SLICES), eventually pausing before creating the next one, allowing this way the user to un/mount a medium, burn the file on CD-R, send it by email (if your mail system does not allow huge file in emails, dar can help you here also.. but OK, this is bad doing so :-)). By default, (no size specified), dar will make one slice whatever its size is. Additionally, the size of the first slice can be specified separately, if for example you want first to fulfill a partially filled disk before starting using empty ones. Last, at restoration time, dar will just pause and prompt the user asking a slice only if it is missing, so you can choose to have more than one slice per medium without penalty from dar. Note that all these operation can be automatized using the "user command between slices" feature (presented below), that let dar do all you want it to do once a slice is created or before reading a slice.


COMPRESSION references: man dar

keywords: -z
dar can use compression. By default no compression is used. Actually gzip, bzip2, lzo, xz/lzma algorithms are available, and there is still room available for any other compression algorithm. Note that, compression is made before slicing, which means that using compression together with slices, will not make slices smaller, but will probably make less slices in the backup.


SELECTIVE COMPRESSION references: man dar/samples

keywords: -Y -Z -m -am
dar can be given a special filter that determines which files will be compressed or not. This way you can speed up the backup operation by not trying to compress *.mp3, *.mpg, *.zip, *.gz and other already compressed files, for example. Moreover another mechanism allow you to say that files under a given size (whatever their name is) will not be compressed.


STRONG ENCRYPTION references: man dar

keywords: -K -J -# -* blowfish, twofish, aes256, serpent256, camellia256
Dar can use blowfish, twofish, aes256, serpent256 and camellia256 algorithms to encrypt the whole archive. Two "elastic buffers" are inserted and encrypted with the rest of the data, one at the beginning and one at the end of the archive to prevent a clear text attack or codebook attack.


PUBLIC KEY ENCRYPTION
references: man dar

keywords: -K, --key-length
Encryption based on GPG public key is available. A given archive can be encrypted for a recipient (or several recipients without visible overhead) using its public key. Only the recipient(s) will be able to read such encrypted archive.


PRIVATE KEY SIGNATURE
references: man dar

keywords: --sign
When using encryption with public key it is possible in addition to sign an archive with your own private key(s). Your recipients can then be sure the archive has been generated by you, dar will check the signature validity against the corresponding public key(s) each time the archive is used (restoration, testing, etc.) and a warning is issued if signature does not match or key is missing to verify the signature. You can also have the list of signatories of the archive while listing the archive content.


SLICE HASHING
references: man dar

--hash, md5, sha1, sha512
When creating an archive dar can compute an md5, sha1 or sha512 hash before the archive is written to disk and produce a small file compatible with md5sum, sha1sum or sha512sum that let verify that the medium has not corrupted the archive slices.


DATA PROTECTION references: man dar/Parchive integration

keywords: -al
Dar is able to detect corruption in any part of a dar archive, but it cannot fix it.

Dar relies on the Parchive program for data protection against media errors. Thanks to dar's ability to run user command or script and thanks to the ad hoc provided scripts, dar can use Parchive as simply as adding a word (par2) on command-line. Depending on the context (archive creation, archive testing, ...), dar will by this mean create parity data for each slice, verify and if necessary repair the archive slices.

Without Parchive, dar can workaround a corruption by not restoring the concerned file. For some more vital part of the archive, like the "catalog" which is the table of contents, dar has the ability to use an isolated catalog as backup of the internal catalog of an archive. It can also make use of tape marks that are used inside the archive for sequential reading as a way to overcome catalog corruption. The other vital information is the slice layout which is replicated in each slice and let dar overcome data corruption of that part too. As a last resort, Dar also proposes a "lax" mode in which the user is asked questions (like the compression algorithm used, ...) to help dar recover very corrupted archives and in which, many sanity checks are turned into warnings instead of aborting the operation. However this does not replace using Parchive. This "lax" mode has to be considered as the last resort option.


TRUNCATED ARCHIVE REPARATION
reference: man dar

keyword: -y
Since version 2.6.0 an truncated archive (due to lack of disk space, power outage, or any other reason) can be repaired. A truncated archive lacks a table of content which is located at the end of the archive, without it you cannot know what file is saved and where to fetch its data from, unless you use the sequential reading mode which is slow as it implies reading the whole archive even for restoring just one file. To allow sequential reading of an archive, which is suitable for tape media, some metadata is by default inserted all along the archive. This metadata is globally the same information that should contain the missing table fo content, but spread by pieces all along the archive. Reparing an archive consists of gathering this inlined metadata and adding it at the end of the repaired archive to allow direct access mode (default mode) which is fast and efficient.



DIRECT ACCESS


even using compression and/or encryption dar has not to read the whole backup to extract one file. This way if you just want to restore one file from a huge backup, the process will be much faster than using tar. Dar first reads the catalogue (i.e. the contents of the backup), then it goes directly to the location of the saved file(s) you want to restore and then proceeds to restoration. In particular using slices, dar will ask only for the slice(s) containing the file(s) to restore.

Since version 2.6.0 dar can also read an archive from a remote host by mean of FTP or SFTP. Here too dar can leverage its direct access ability to only download the necessary stuff in order to restore some files from a large archive, or list the archive content or even compare a set of file with live filesystem.



SEQUENTIAL ACCESS
references: man dar
(suitable for tapes)
--sequential-read, -at
The direct access feature seen above is well adapted to random access media like disks, but not for tapes. Since release 2.4.0, dar provides a sequential mode in which dar sequentially read and write archives. It has the advantage to be efficient with tape but suffers from the same drawback as tar archive: it is slow to restore a single file from a huge archive. The second advantage is to be able to repair a truncated archive (lack of disk space, power outage, ...) as described above.



MULTI-VOLUME TAPES
references: man dar_split

keywords: --sequential-read
The independant dar_split program provides a mean to output dar but also tar archives to several tapes. If takes care of splitting the archive when writing to tapes and gather pieces of archive from several tapes for dar/tar to work as if it was a single pieced archive.



ARCHIVE TESTING references: man dar/TUTORIAL/ Good Backup Practice

keywords: -t
thanks to CRC (cyclic redundancy checks), dar is able to detect data corruption in an archive. Only the file where data corruption occurred will not be possible to restore, but dar will restore the others even when compression or encryption (or both) is used.



ISOLATION references: man dar

keywords: -C -A -@
the catalogue (i.e.: the contents of an archive), can be copied (this operation is called isolation) to a small file, that can in turn be used as reference for differential archive. There is then no need to provide an archive to be able to create a differential backup based on it, just its catalogue is can be used instead. Such an isolated catalogue can also be used to rescue the archive it has been isolated from in the case the archive's internal catalogue has been corrupted. Such isolated catalogue can be created at the same time as the archive (operation called on-fly isolation) or as a separate operation (called isolation).



FLAT RESTORATION references: man dar

keywords: -f
It is possible to restore any file without restoring the directories and subdirectories it was in at the time of the backup. If this option is activated, all files will be restored in the (-R) root directory whatever their real position is recorded inside the archive.



USER COMMAND BETWEEN SLICES references: man dar dar_slave dar_xform/command line usage notes

keywords: -E -F -~
several hooks are provided for dar to call a given command once a slice has been written or before reading a slice. Several macros allow the user command or script to know the requested slice number, path and archive basename.



USER COMMAND BEFORE AND AFTER SAVING A DIRECTORY OR A FILE
references: man dar/command line usage notes

keywords: -< -> -=
It is possible to define a set of file that will have a command executed before dar start saving them and once dar has completed saving them. This is especially intended for saving live database backup. Before entering a directory dar will call the specified user command, then it will proceed to the backup of that directory. Once the whole directory has been saved, dar will call again the same user command (with slightly different arguments) and then continue the backup process. Such user command may have for action to stop the database and to reactivate it afterward for example.



CONFIGURATION FILE references: man dar, conditional syntax and user targets

keywords: -B
dar can read parameter from file. This is a way to extends the command-line limited length input. A configuration file can ask dar to read (or to include) other configuration files. A simple but efficient mechanism forbids a file to include itself directly or not, and there is no limitation in the degree of recursion for the inclusion of configuration files.

Two special configuration files $HOME/.darrc and /etc/darrc are read if they exist. They share the same syntax as any configuration file which is the syntax used on the command-line, eventually completed by newlines and comments.

Any configuration file can also receive conditional statements, which describe which options are to be used in different conditions. Conditions are: "extract", "listing", "test", "diff", "create", "isolate", "merge", "reference", "auxiliary", "all", "default" (which may be useful in case or recursive inclusion of files) ... more about their meaning and use cases in dar man page.



REMOTE OPERATIONS references: command line usage notes, man dar/dar_slave/dar_xform

keywords: -i -o - -afile-auth
dar is able to read and write an archive to a remote server in three different ways:

1 - dar is able to produce an archive to its standard output or to a named pipe and is able to read an archive from its standard input or from a named pipe

2 - if the previous approach is fine to write down an archive over the network (through an ssh session for example), reading an archive from a remote server that way (using a single pipe) requires dar to read the whole archive, which may be inefficient to just restore a single file. For that reason, dar is also able to read an archive through a pair of pipes (or named pipes) using dar_slave at the other side of the pipes. Of the pair of pipes, one pipe lets dar ask dar_slave which portion of the archive it has to send through the other pipe. This makes a remote restoration much more efficient and still allows these bidirectional exchanges to be encrypted over the network, simply by running dar_slave through an ssh session.

3 - last, since release 2.6.0 dar can make use of the FTP or SFTP protocols to read or write an archive from or to a remote server. This method does not rely on anonymous or named pipes, is as efficient as option 2 for reading a remote archive and is compatible with slicing and slice hashing. However, this option is restricted to these two network protocols: FTP (low CPU usage but insecure) and SFTP (secure)



DAR MANAGER references: man dar_manager


The advantage of differential backup is that it takes much less space to store and time to complete than always making full backups. But, on the other hand, it may lead you to having a lot of them due to the reduced space requirements. Then if you want to restore a particular file, you may spend time figuring out in which backup the most recent version is located. To solve this, dar_manager gathers contents information of all your backups. At restoration time, it will call dar for you to restore the asked file(s) from the proper backup.


RE-SHAPE SLICES OF AN EXISTING ARCHIVE references: man dar_xform


the provided program named "dar_xform" is able to change the size of slices of a given archive. The resulting archive is totally identical to archives directly created by dar. The source archive can be taken from a set of slices, from standard input or even a named pipe. Note that dar_xform can work on encrypted and/or compressed data without having to decompress or even decrypt it.



ARCHIVE MERGING references: man dar

keywords: -+ -ak -A -@
From version 2.3.0, dar supports the merging of two existing archives into a single one. This merging operation is assorted with the same filtering mechanism used for archive creation. This lets the user define which files will be part of the resulting archive.

By extension, archive merging can also take a single source archive as input. This may sound a bit strange at first, but it lets you make a subset of a given archive without having to extract any file to disk. In particular, if your filesystem does not support Extended Attributes (EA), thanks to this feature you can still clean up an archive from files you do not want to keep anymore without losing any EA or performing any change to standard file attributes (like modification dates for example) of files that will stay in the resulting archive.

Last, this merging feature also gives you the opportunity to change the compression level or algorithm used, as well as the encryption algorithm and passphrase. Of course, from a pair of source archives you can use all these sub-features at the same time: filtering out files you do not want in the resulting archive, using a different compression level and algorithm or encryption password and algorithm than the source archive(s); you may also have a different archive slicing or no slicing at all (well, dar_xform is more efficient for this feature alone, see above "RE-SHAPE SLICES OF AN EXISTING ARCHIVE" for details).



ARCHIVE SUBSETTING
references: man dar

keywords: -+ -ak
As seen above under the "archive merging" feature description, it is possible to define a subset of files from an archive and put them into a new archive without having to really extract these files to disk. To speed up the process, it is also possible to avoid uncompressing/recompressing files that are kept in the resulting archive or change their compression, as well change the encryption scheme used. Last, you may manipulate this way files and their EA while you don't have EA support available on your system.



DRY-RUN EXECUTION
references: man dar

keywords: -e
You can run any feature without effectively performing the action. Dar will report any problem but will not create, remove or modify any file.




ARCHIVE USER COMMENTS
references: man dar

keywords: --user-comment, -l -v, -l -q
The archive header can encompass a message from the user. This message is never ciphered nor compressed and is always available to anyone listing the archive summary (-l and -q options). Several macros are available to add more comfort when using this option, like the current date, uid and gid used for archive creation, hostname, and the command-line used for the archive creation.



PADDED ZEROS TO SLICE NUMBER
references: man dar

keywords: --min-digits
Dar slices are numbered by integers starting from 1, which makes filenames of the following form: archive.1.dar, archive.2.dar, ..., archive.10.dar, etc. However, the lexicographical order used by many directory listing tools is not adapted to show the slices in order. For that reason, dar lets the user define how many zeros to add to the slice numbers to have usual file browsers list the slices as expected. For example, with 3 as the minimum number of digits, the slice names would become: archive.001.dar, archive.002.dar, ... archive.010.dar.

dar-2.6.8/doc/Notes.html0000644000175000017520000066667613617551261012005 00000000000000 DAR/LIBDAR Internals - Notes
Dar Documentation


Dar/Libdar Internals - Notes





Introduction

Here takes place a collection of notes. These have been created after the implementation of a given feature, mainly for further reference but also for user information. The ideas behind these notes are to record some implementation choices and the arguments that led to these choices on one side, and on the other side to let the user be informed of the choices made and be able to bring his remarks without having to look deeply into the code to learn dar's internals.

Contents



EA & differential backup

Brief presentation of EA:

EA stands for Extended Attributes. In Unix filesystem a regular file is composed of a set of byte (the data) and an inode. The inode add properties to the file, such as owner, group, permission, dates (last modification date of the data [mtime], last access date to data [atime], and last inode change date [ctime]), etc). Last, the name of the file is not contained in the inode, but in the directory(ies) it is linked to. When a file is linked more than once in the directory tree, we speak about "hard links". This way the same data and associated inode appears several time in the same or different directories. This is not the same as a symbolic links, which is a file that contains the path to another file (which may or may not exist). A symbolic link has its own inode. OK, now let's talk about EA:

Extended attributes are a recent feature of Unix file systems (at the time of this writing, year 2002). They extend the attributes provided by the inode and associated to a data. They are not part of the inode, nor part of the data, nor part of a given directory. They are stored beside the inode and are a set of pairs of key and value. The owner of the file can add or define any key and optionally associate data to it. He can also list and remove a particular key. What are they used for? A way to associate information to a file.

One particularly interesting use of EA is ACL: Access Control Lists. ACL can be implemented using EA and add a finer grain in assigning access permissions to files. For more information on EA and ACL, see the site of Andreas Grunbacher:

EA & Differential Backup

to determine that an EA has changed dar looks at the ctime value. if ctime has changed, (due to EA change, but also to permission or owner change) dar saves the EA. ctime also changes, if atime or mtime changes. So if you access a file or modify it, dar will consider that the EA have changed also. This is not really fair, I admit.

Something better would be to compare EA one by one, and record those that have changed or have been deleted. But to be able to compare all EA and their value reference EA must reside in memory. As EA can grow up to 64 KB by file, this can lead to a quick saturation of the virtual memory, which is already enough solicited by the catalogue.

These two schemes implies a different pattern for saving EA in archive. In the first case (no EA in memory except at time of operation on it), to avoid skipping in the archive (and ask the user to change of disks too often), EA must be stored beside the data of the file (if present). Thus they must be distributed all along the archive (except at the end that only contains the catalogue).

In the second case (EA are loaded in memory for comparison), EA must reside beside or within the catalogue, in any case at the end of the archive, so that the user does not need all the disks just to take an archive as reference.

As the catalogue, grows already fast with the number of file to save (from a few bytes for hard_link to 400 bytes around per directory inode), the memory saving option has been adopted.

Thus, EA changes are based on the ctime change. Unfortunately, no system call permits to restore ctime. Thus, restoring a differential backup after its reference has been restored, will present restored inode as more recent than those in the differential archive, thus the -r option would prevent any EA restoration. In consequence, -r has been disabled for EA, it does only concern data contents. If you don't want to restore any EA but just more recent data, you can use the following : -r -u "*"



Archive structure in brief


The Slice Level

A slice is composed of a header, data and trailer (the trailer appeared with archive format version 8)

+--------+-------------------------------------------+-------+
| header |  Data                                     |Trailer|
|        |                                           |       |
+--------+-------------------------------------------+-------+

the slice header is composed of
  • a magic number that tells this is a dar slice
  • a internal_name which is unique to a given archive and shared by all slices
  • a flag that tells whether the slice is the last of the archive or whether a trailer is present that contains this info.
  • a extension flag, that was used in older archive but which now always set to 'T' telling that a TLV list follows
  • A TLV (Type Length Value) list of item, it contains:
    • the slice size
    • first slice size
    • the data_name. 
The TLV list will receive any future new field related to slice header.

+-------+----------+------+-----------+-------+
| Magic | internal | flag | extension | TLV   |
| Num.  | name     | byte | byte      | list  |
+-------+----------+------+-----------+-------+

The header is the first thing to be written, and if the current slice is not the last slice (all data to write could not fit in it), before format 8, the flag field was changed indicating that another slice follows. Since archive format 8, the flag is set to a specific value indicating that the information telling whether the slice is the last or not is placed in a slice trailer, a new "structure" that appeared with that format and which is located at the end of each slice.

The header is also the first part to be read.

A TLV list is of course a list of TLV:

+-------+----------+------+-----------+- ...-----+-------+
| Number| TLV 1    | TLV 2| TLV 3     |          | TLV n |
| of TLV|          |      |           |          |       |
+-------+----------+------+-----------+--...-----+-------+

Each TLV item is, as commonly, defined as set of three fields:

+---------+-------------------------+-------------------------+
| Type    | Length                  | Value                   |
|(2 bytes)| (arbitrary large value) | (arbitrary large data)  |

+---------+-------------------------+-------------------------+

The 2 bytes type is large enough for today's need (65535 different types while only three used), however TLV 65535 is reserved for future use and will signal a new format for the type field.

To know in which slice and at which position to find a particular data, dar needs to know each file's size. This is the reason why each slice contains the slice size information, in particular the last slice. In older version, dar had to read the first slice first to get this slicing information. Then it could read the archive contents at the end of the last slice. Today, reading the last slice, dar can fetch the slicing scheme from the slice header (what we just detailed) and fetch the archive contents at the end of this same last slice.

The trailer (which is one byte long) is new since archive format version 8 (released with 2.4.0). It contains the value that was located in the header flag field in older archive formats, telling whether the slice is the last of the archive or not. When writing down a single-sliced archive (no -s option provided), both the header and the trailer tell that the slice is the last of the archive (duplicated information). However, when doing a multi-sliced archive, it is not possible to know whether a slice is the last before reaching the requested amount of data per slice (which depends on the amount of bytes to save, compression ratio, encryption overhead, etc.). Thus the header flag contains a value telling that, to know whether the slice is the last or not, one must read the trailer.

In older format, it was necessary to seek back to update the header with the correct information when a new slice had to be created. But, keeping this behavior, it would not have been possible to make a digest "on the fly" (see --hash option). The addition of the trailer was required for that feature: to compute a md5 or sha1, ... hash for each slice. But, this costs one byte per slice, yes.

Data Name

As seen above in the header fields, we have among others the three following identifiers:
  • magic number
  • internal name
  • data name
as already said, the magic number is constant and lets dar be (almost) sure a given file is a dar slice file; it is also based in particular on that field that the common unix 'file' command identifies a dar archive. Also briefly explained, the internal_name is an identifier that lets dar be almost sure that several slices are from the same archive (a problem can arise if two archives of the same basename have their slices mixed together: dar will see that and report it to the user).

The new and not yet described field is the "data_name". The data_name field is also present in the archive catalogue (the table of content) of each archive. It may be the same value as the one in the slice headers (normal archives) or another value if the archive results from a catalogue isolation process.

Why this field? A new feature with release 2.4.0 is the ability to use an extracted catalogue as a backup of the internal catalogue of a given archive. Comparing the data_name value of the catalogue resulting from the isolation operation to the data_name value present in the slices of an archive to rescue, dar can be (almost) sure that the extracted catalogue matches the data present in the archive the user is trying to use it with.

In brief:

Fields Normal Archive
Resliced Using dar_xform
Resulting From Isolation
 isolated archive
resliced with dar_xform

internal_name (slice header)
A
B
C
D
data_name (slice header)
A
A
C
C
data_name (archive catalogue)
A
A
A
A

Archive Level

The archive level describes the structure of the slices' data fields (removing the header and trailer of each slice), when they are all concatenated from slice to slice:

+---------+----------------------------+-----------+--------+---------+--------+
| version |   Data                     | catalogue | term 1 | version | term 2 |
| header 
|                            |           |        | trailer |        |
+---------+----------------------------+-----------+--------+---------+--------+

The version header is a short version of the version trailer. It is used when reading an archive in sequential mode, to be able to prepare the proper compression layer, and to know whether escape sequence marks are present in the archive.

the version trailer (which may still be called "version header" in some part of the documentation because it was only located at the beginning of the archive in previous archive format) is composed of:
  • edition version of the archive
  • compression algorithm used
  • command line used for creating the archive, now known as "user comment"
  • flag, telling:
    • whether the archive is encrypted,
    • whether it has escape sequence marks,
    • whether the header/trailer contains an encrypted key
    • whether the header/trailer contains the initial offset field
    • whether the archive is signed
    • whether the header/trailer contains the slice layout of the archive of reference
    • whether the archive has salt+iteration count+hash algo for key derivation fonction (KDF)
  • initial offset (telling where starts the data in the archive, is only present in the trailer)
  • crypto algorithm used (present only if flag tells that the archive is encrypted)
  • size of the crypted key that follows (present only if the flag tells an encrypted key is present)
  • encrypted key (encrypted by mean of GPG asymetric algorithm, present only if flag says so)
  • eventually, salt, iteration_count and hash algo for key derivation fonction (KDF) (used since version 2.6.0 when strong encryption is set)
  • CRC (Cyclic Redundancy Check) computed on the whole version header or trailer
+---------+------+---------------+------+--------+-------+----------+---------------+------------+------+------+-----------+-----+-------+
| edition | algo | command line  | flag | initial| crypto| crypted  | gnupg crypted |  reference | salt | salt | iteration |hash | CRC   |
|         |      |               |      | offset | algo  | key size | sym. key      |  slicing   | size | (KDF)| count(KDF)|(KDF)|       |
+---------+------+---------------+------+--------+-------+----------+---------------+------------+------+------+-----------+-----+-------+

The trailer is used when reading an archive in direct access mode, to build the proper compression layer, escape layer (it is needed if mark have been inserted in the archive to un-escape data that could else be taken as an escape sequence mark) and encryption layer.

The data is a suite of file contents, with EA and FSA if present. When tape marks are used, a copy of the CRC is placed after a file's Data and a file's EA, to be used when reading the archive in sequential mode. This CRC is also dropped into the catalogue, which takes place at the end of the archive, to be used when reading the archive in direct access mode (the default). Last, when delta binary is used, a file signature may follow the file's data:

  ....--+---------------------+-------+----+----+------------+------------+----+-----------+-----+-....
        |  file1 data         | delta | EA | FSA| file2 data | file3 data | EA | file4     | FSA |
        | (may be compressed) | sig   |    |    |(no EA/FSA) |            |    | delta sig |     |
  ....--+---------------------+-------+----+----+------------+------------+----+-----------+-----+-....
in the previous archive example, we find:
  • for file1: his data, a delta signature of this data, Extended Attributes, and File Specific Attributes
  • for file2: only his data, he has no delta signature, no EA no FSA
  • for file3: data and EA
  • for file4: no data, only a delta signature and FSA
file1 shows all fields that can be associated with an inode, but none is mandatory, though if present they always follow this order:
  • Data or delta patch
  • followed by data/delta patch CRC when tape marks as set
  • Delta signature
  • followed by delta signature CRC when tape marks are set
  • Extended Attributes
  • followed by EA CRC when tape marks are set
  • File Specific Attributes
  • followed by FSA CRC when tape marks are set
More precisely about delta signature combined with tape marks, there is additional fields present than just the delta sig and its CRC:
+------+------+---------------+----------+--------+
| base | sig  | sig data      | data CRC | result |
| CRC  | size | (if size > 0) |    if    |  CRC   |
|      |      |               | size > 0 |        |
+------+------+---------------+----------+--------+
  • base CRC is the CRC of the file that delta signature has been based on, used at restoration time before applying a patch
  • sig size gives the size of the next field; it may be zero if the file does not have a signature associated but the file's data is a delta patch and thus base and result CRCs are needed
  • sig data is the delta signature data. This field and the following are the only one present outside the ending catalogue about delta signatures when tape marks are not set.
  • data CRC is the CRC on the previous field "sig data"
  • result CRC is the CRC of the resulting file once patched  (to check the patching was successful)
the catalogue, contains all inode, directory structure and hard_links information as well as data and EA CRC. The directory structure is stored in a simple way: the inode of a directory comes, then the inode of the files it contains, then a special entry named "EOD" for End of Directory. Considering the following tree:

 - toto
    | titi
    | tutu
    | tata
    |   | blup
    |   +--
    | boum
    | coucou
    +---

it would generate the following sequence for catalogue storage:

+-------+------+------+------+------+-----+------+--------+-----+
|  toto | titi | tutu | tata | blup | EOD | boum | coucou | EOD |
|       |      |      |      |      |     |      |        |     |
+-------+------+------+------+------+-----+------+--------+-----+

EOD takes one byte, and this way there is no need to store the full path of each file, just the filename is recorded.

the terminator stores the position of the beginning of the catalogue, it is the last thing to be written. Thus dar first reads the terminator, then the catalogue. Well, there is now two terminators, both are meant to be read backward. The second terminator points to the beginning of the "trailer version" which is read first in direct access mode. The first terminator points to the start of the catalogue, which is read once the adhoc compression and encryption layers has been built based on the information found on the "trailer version"

All  Together

Here is an example of how data can be structured in a four slice archive:

+--------+--------+------------------------+--+
| slice  | version|  file data + EA        |Tr|
| header | header |                        |  |
+--------+--------+------------------------+--+

the first slice (just above) has been defined smaller using the -S option

+--------+-----------------------------------------------------------------+--+
| slice  |           file data + EA                                        |Tr|
| header |                                                                 |  |
+--------+-----------------------------------------------------------------+--+

+--------+-----------------------------------------------------------------+--+
| slice  |           file data + EA                                        |Tr|
| header |                                                                 |  |
+--------+-----------------------------------------------------------------+--+

+--------+---------------------+-----------+------ +---------+--------+--+
| slice  |   file data + EA    | catalogue | term 1| version | term 2 |Tr|
| header |                     |           |       | trailer |        |  |
+--------+---------------------+-----------+-------+---------+--------+--+

the last slice is smaller because there was not enough data to make it full.

The archive is written sequentially this way.


Other Levels

Things get a bit more complicated if we consider compression and encryption. The way the problem is addressed in dar's code is a bit like networks are designed in computer science, using the notion of layers. Here, there is an additional constraint: a given layer may or may not be present (encryption, compression, slicing for example). So all layers must have the same interface for serving the layer above them. This interface is defined by the pure virtual class "generic_file", which provides generic methods for reading, writing, skipping, and getting the current offset when writing or reading data to a "generic_file". This way the compressor class acts like a file which compresses data written to it and writes compressed data to another "generic_file" below it. The strong encryption and scramble classes act the same but in place of compressing/uncompressing they encrypt/decrypt the data to/from another generic_file object. The slicing we have seen above follows the same principle; this is a "sar" object that transfers data written to it to several fichier [=file] objects. Class fichier [=file] also inherits from the generic_file class, and is a wrapper for the plain file system calls. Some new classes have been added with format 8, in particular the escape class, which inserts escape sequence marks at requested positions, and modifies data written to it so that it never looks like an escape sequence mark. To reduce the level of context switching when reading the catalogue (which makes a ton of small reads), a cache class is also present; it gathers small writes made to it into larger writes, and pre-reads a large amount of data to answer the many small reads when building the catalogue in memory from the archive.

Here are now all currently possible layers together:

              +----+--+----+-...........+---------+
archive       |file|EA|file|            |catalogue|
layout        |data|  |data|            |         |
              +----+--+----+-...........+---------+
                |   |    |      |              |
            +-----+ | +-------+ |              |
sparse      |spars| | |sparse | |              |
file        |file | | |file   | |              |
detection   |detec| | |detect.| |              |
layer       +-----+ | +-------+ |              |
(optional)      |   |    |      |              |
                V   V    V      V              V
              +-----------------------------------+
compression   |         (compressed)  data        |
              +-----------------------------------+
                    |                      |
                    |                      |
                    V                      V
              +-----------------------------------+
escape layer  |   escaped data / escape sequences |
(optional)    +-----------------------------------+
                    |                      |          / First Terminateur
                    |                      |          |
                    |                      |          V
elastic  +---+      |                      |       +----+---+
buffers  |EEE|      |                      |       | T1 |EEE|
         +---+      |                      |       +----+---+
           |        |                      |              |           Second
           V        V                      V              V         Terminator
         +--------------------------------------------------+              |
cipher   |        (encrypted) data / cache if no encryption |              |
         +--------------------------------------------------+              V
                    |                         |               +---------+----+
+-------+           |                         |               | trailer | T2 |
| header|           |                         |               +---------+----+
+-------+           |                         |                    |      |
    |               |                         |                    |      |
    V               V                         V                    V      v
+-----------------------------------------------------------------------------+
|                  data                                                       |
+-----------------------------------------------------------------------------+
        |         |  |         |   |        |   |        |  |    |  |        |
slice   |         |  |         |   |        |   |        |  |    |  |        |
headers |         |  |         |   |        |   |        |  |    |  |        |
 |  |   |         |  |         |   |        |   |        |  |    |  |        |
 |  +---|------\  |  |         |   |        |   |        |  |    |  |        |
 V      V      V  V  V         V   V        V   V        V  V    V  V        V
+---------+  +---------+  +---------+  +---------+  +-------+  +-------+  +----+
|HH| data |  |HH| data |  |HH| data |  |HH| data |  |HH|data|  |HH|data|  |HH| |
+---------+  +---------+  +---------+  +---------+  +-------+  +-------+  +----+
  slice 1      slice 2      slice 3      slice 4      slice 5


The elastic buffers are here to prevent plain text attacks, where one knows which data is expected at a given place, and tries to guess the cipher by comparing the expected data and the encrypted one. As dar generates structured archives, there would be some possibility that one uses this attack to crack an archive's encryption. To overcome this problem, elastic buffers have been added at the beginning and at the end of the encrypted data. This way it is not possible to know where a given archive structure is located within the encrypted data. The elastic buffers are random data that contain at a random place a pattern that tells the overall size of the buffer (which size is randomly chosen during archive creation). The pattern is of the form ">###<" where the hash field (###) contains the elastic buffer size in binary. A small elastic buffer can be "><" for two bytes or "X" for one byte, but as it is encrypted beside archive data, it is not possible to determine its size for one who does not hold the archive encryption key. Elastic buffers are usually several kilobytes long. Here follows an example of an elastic buffer:

972037219>20<8172839


For clarity, the size field between '>' and '<' has been written in decimal instead of binary, as well as the random data inside the elastic buffer. The location of the size field '>20<' is also randomly chosen at creation time.

A Terminateur is a short structure that is intended to be read backward. It gives the absolute position of a given item within the archive: the second terminateur lets dar skip to the beginning of the archive trailer. The first terminateur (possibly encrypted) lets dar skip to the beginning of the catalogue.





Scrambling


Before strong encryption was implemented, dar had only a very simple and weak encryption mechanism. It remains available in the current release under the "scram" algorithm name. Its main advantage is that it does not rely on any external library; it is completely part of libdar.

How does it work?

Consider the pass phrase as a string, thus a sequence of bytes, thus a sequence of integer each one between 0 and 255 (including 0 and 255). The data to "scramble" is also a sequence of byte, usually much longer than the pass phrase. The principle is to add byte by byte the pass phrase to the data, modulo 256. The pass phrase is repeated all along the archive. Let's take an example:

the pass phrase is "he\220lo" (where \220 is the character which decimal value is 220). the data is "example"

taken from ASCII standard:
h = 104
l = 108
o = 111
e = 101
x = 120
a = 97
m = 109
p = 112

        e       x       a       m       p       l       e
        101     120     97      109     112     108     101

+       h       e       \220    l       o       h       e
        104     101     220     108     111     104     101

---------------------------------------------------------------

        205     221     317     217     223     212     202

---------------------------------------------------------------
modulo
256 :   205     221     61      217     223     212     202
        \205    \201    =       \217    \223    \212    \202


thus the data "example" will be written in the archive "\205\201=\217\223\212\202"

This method allows decoding any portion without knowing the rest of the data. It does not consume much resources to compute, but it is terribly weak and easy to crack. Of course, the data is more difficult to retrieve without the key when the key is long. Today dar can also use strong encryption (blowfish and few others) and thanks to encryption blocks can still avoid reading the whole archive to restore any single file.



Asymmetrical Encryption and Signature


dar relies on the gpgme library (GPG Made Easy) to provide strong asymmetrical encryption and signature of an archive. Asymmetrical encryption is what you do when you use a public key to cypher data (=encrypt data) and a private key to uncypher it (=decrypt it). However, dar does not encrypt the whole archive that way, nor does it sign it that way.

Instead, dar relies on the symmetrical strong encryption algorithm it is aware of for some time (blowfish, twofish, camellia, aes, etc.) to cipher the archive. The key used to cipher the archive is chosen randomly, encrypted and eventually signed using the provided recipient email of the user keyring. This encrypted key is then put in the archive header and trailer.

Well, to be more precise about the symmetrical key that is encrypted in the archive: its length is user defined and defaults to 512 bytes (4096 bits). A random variation of +0 to +255 bytes is added by libdar to this size. Then the key value itself is chosen randomly. The random generator used here is the one provided by libgcrypt using the GCRY_STRONG_RANDOM entropy level.

Why doing that way and not using the asymmetrical algorithm to cypher the whole archive?
  • Because it would no longer be possible to extract a single file from the archive or to read the archive's contents without reading the whole archive.
  • Because it would not be possible to quickly verify archive signatures (well, see below)
  • Because it would prevent recovering a corrupted archive after the point of corruption
  • Because it would cost much disk space to encrypt an archive for it to be readable by more than one recipient
Yes, you can provide several recipients from your GPG keyring giving their email addresses and also at the same time sign the archive with one of your private keys. The resulting archive will be decryptable only by those recipients. And anyone knowing your public key will be able to verify that the archive has been generated by you. Of course the verification only validates that the encryption key is from an archive you have personally generated; a man in the middle could modify the archive data located after the key in the archive, however it would not be possible to uncipher the tampered data using the signed key, or it would mean that the man in the middle could generate encrypted data using the same symmetric key that is encrypted in the archive. This might be possible if a recipient's private key has been compromised. Thus the signature of the key is not sufficient to prove the authenticity of the whole archive. To cope with those risks, a better solution is to activate the slice hashing (md5 or sha1) and to sign these small files to be provided beside the archive slices.



Overflow in arithmetic integer operations


Some code explanation about the detection of integer arithmetic operation overflows. We speak about *unsigned* integers, and we have only portable standard ways to detect overflows when using 32 bits or 64 bits integer in place of infinint.

Developed in binary, a number is a finite suite of digits (0 or 1). To obtain the original number from the binary representation, we must multiply each digit by a power of two. example the binary representation "101101" designs the number N where:

N = 2^5 + 2^3 + 2^2 + 2^0

in that context we will say that 5 is the maximum power of N (the power of the higher non null binary digit).

for the addition "+" operation, if an overflow occurs, the result is less than one or both operands, so overflow is not difficult to detect. To convince you, let's assume that the result is greater than both operands while it has overflowed. Then the real result (without overflow) minus the first operand should give the second operand, but here we get a value that is greater than the all-1-bits integer (because there was an overflow and the resulting overflowed value is greater than both operands), so this is absurd, and in case of overflow the resulting value is less than one of the operands.

for the subtraction "-" operation, if the second operand is greater than the first there will be an overflow (the result must be unsigned thus positive) else there will not be any overflow. Thus detection is even simpler.

for division "/" and modulo "%" operations, there is never an overflow (only division by zero is illicit).

for the multiplication "*" operation, a heuristic has been chosen to quickly detect overflow; the drawback is that it may trigger false overflows when numbers get near the maximum possible integer value. Here is the heuristic used:

given A and B two integers, which max powers are m and n respectively, we have

A < 2^(m+1)
and
B < 2^(n+1)

thus we also have:

A.B < 2^(m+1).2^(n+1)

which is:

A.B < 2^(m+n+2)

As a consequence we know that the maximum power of the product of A by B is at most m+n+1, and while m+n+1 is less than or equal to the maximum power of the integer field there will not be an overflow; else we consider there will be an overflow even if it may not always be the case (this is a heuristic algorithm).



Strong encryption


Several cyphers are available. Keep in mind that "scrambling" is not a strong encryption cypher; all others are.

to be able to use a strong encrypted archive you need to know the three parameters used at creation time:
  • the cypher used (blowfish, ...)
  • the key or password used
  • the encryption block size used
no information about these parameters is stored in the generated archive. If you make an error on just one of them, you will not be able to use your archive. If you forgot one of them, nobody can help you, you can just consider the data in this archive as lost. This is the drawback of strong encryption.

How is it implemented?

To not completely break the possibility to directly access file, the archive is  not encrypted as a whole (as would do an external program). The encryption is done block of data by block of data. Each block can be decrypted, and if you want to read some data somewhere you need to decrypt the whole block(s) it is in.

In consequence, the larger the block size is, the stronger the encryption is. But also the larger the block size is, the longer it will take to recover a given file, in particular when the file size to restore is much smaller than the encryption block size used.

An encryption block size can range from 10 bytes to 4 GB.

If encryption is used as well as compression, compression is done first, then encryption is done on compressed data.

An "elastic buffer" is introduced at the beginning and at the end of the archive, to protect against plain text attack.  The elastic buffer size randomly varies and is defined at execution time. It is composed of random (srand()) values. Two marks characters '>' and '<' delimit the size field, which indicate the byte size of the elastic buffer. The size field is randomly placed in the buffer. Last, the buffer is encrypted with the rest of the data. Typical elastic buffer size range from 1 byte to 10 kB, for both initial and terminal elastic buffers.

Elastic buffers are also used inside encryption blocks. The underlying cypher may not be able to encrypt at the requested block size boundary. If necessary a small elastic buffer is appended to the data before encryption, to be able, at restoration time, to know the amount of data and the amount of noise around it.

Let's take an example with blowfish. Blowfish encrypts by multiples of 8 bytes (blowfish chain block cypher). An elastic buffer is always added to the data of an encryption block, its minimal size is 1 byte.

Thus, if you request an encryption block of 3 bytes, these 3 bytes will be padded by an elastic buffer of 5 bytes for these 8 bytes to be encrypted. This makes for very poor space efficiency as only 3 bytes out of 8 are significant.

If you request an encryption block of 8 bytes, as there is no room for the minimal elastic buffer of 1 byte, a second 8 byte block is used to put the elastic buffer, so the real encryption block will be 16 bytes.

Ideally, an encryption block of 7 bytes will use 8 bytes with 1 byte for the elastic buffer.

This problem tends to disappear when the encryption block size grows, so this should not be a problem in normal conditions. Encryption block of 3 bytes is not a good idea to have a strong encryption scheme, for information, the default encryption block size is 10kB and is user settable to larger values.

Key Derivation Function - KDF

Strong encryption uses a secret key to cipher/uncipher data; we will say "password" to simplify in the following.

But directly using a human provided password would provide a weak encryption. To overcome this weakness, the state of the art is to use a key derivation function, which takes in input the human provided password plus a "salt" and outputs a stronger key (salt is described below). This function is based on a hash algorithm (sha1 by default, but this value can be modified since archive format 10/release 2.6.0) and an iteration count (2000 before format 10, 200000 since release 2.6.0 by default, and this can be changed); it will mix salt and passphrase then hash the result as many times as the defined iteration count, taking as hash input the previous hash output at each cycle.

The salt is a value randomly chosen by libdar that is stored in clear in the archive header, beside the iteration count and hash algorithm used for the KDF. Thus even if the user has the same password for different archives, the effective key used for strong encryption will differ from archive to archive, making it much more difficult for an attacker to crack an archive using statistical methods over a large number of archives sharing the same human provided password, as the effective encryption key used differs from archive to archive thanks to the salt. Even knowing the salt (which is available in clear for each archive) the attacker would have to use the KDF function for each archive, times each password to test, in order to leverage the fact several archives have been encrypted with the same password. On today's computers 200,000 iterations of sha1 take almost one second, which is quite unnoticeable by the real user, but for an attacker wanting to try 100,000 passwords over 8 archives sharing the same human password, this would cost 800,000 seconds (around 10 days) just in order to start cracking user archives...

As seen above, an archive is encrypted block by block. Independently of a salt per archive, a second level of salt per encryption block is used inside a given archive, salt which is derived from the block number; more details below.




libdar and thread-safe requirement


This is for those who plan to use libdar in their own programs.

If you plan to have only one thread using libdar there is no problem; of course, you will however have to call one of the get_version() calls first, as usual. Things change if you intend to have several concurrent threads using the libdar library.

libdar is thread-safe under certain conditions:

Several 'configure' options have an impact on thread-safe support:

--enable-test-memory is a debug option that prevents libdar from being thread-safe, so don't use it.
--enable-special-alloc (now disabled by default since version 2.5.5), makes a thread-safe library only if POSIX mutexes are available (pthread_mutex_t type).
--disable-thread-safe avoid looking for mutex, so unless --disable-special-alloc is also used, the generated library will not be thread safe.

You can check the thread safe capability of a library thanks to the get_compile_time_feature(...) call from the API. Or use 'dar -V' command, to  quickly have the corresponding values and check using 'ldd' to see which library has been dynamically linked to dar, if applicable.

IMPORTANT:
As before, it is mandatory to call get_version() before any other call; when the call returns, libdar is ready for thread-safe use. Note that even if the prototype does not change get_version() *may* now throw an exception, so use get_version_noexcept() if you don't want to manage exceptions.

For more information about libdar and its API, check the doc/api_tutorial.html document and the API reference manual under doc/html/index.html


Dar_manager and delete files


This is for further reference and explanations.

In dar archive when a file has been deleted since the backup of reference (in case of differential archive), an entry of a special type (called "detruit") is put in the catalogue of the archive which only contains the name of the missing file.

In a dar_manager database, to each file that has been found in one of the archives used to build this database corresponds a list of associations. These associations put in relation the mtime (date of modification of the file) with the archive number where the file has been found in that state.

There is thus no way to record "detruit" entries in a dar_manager database, as no date is associated with this type of object. Yes, in a dar archive, we can only notice a file has been destroyed because it is not present in the filesystem but is present in the catalogue of the archive of reference. Thus we know the file has been destroyed between the date the archive of reference has been done and the date the current archive is actually done. Unfortunately, no date is recorded in dar archives telling it has been done at which time.

Thus, from dar_manager, inspecting a catalogue, there is no way to give a significant date to a "detruit" entry. In consequences, for a given file which has been removed, then recreated, then removed again along a series of differential backups, it is not possible to order the times when this file has been removed in the series of date when it has existed.

The ultimate consequence is that if the user asks dar_manager to restore a directory in the state just before a given date (-w option), it will not be possible to know if that file existed at that time. We can effectively see that it was not present in a given archive but as we don't know the date of that archive we cannot determine if it is before of after the date requested by the user, and dar_manager is not able to restore the non existence of a file for a given time, we must use dar directly with the archive that has been done at the date we wish.

Note that having a date stored in each dar archive would not solve the problem without some more informations. First, we should assume that the date is consistent from host to host and from time to time (What if the user change of time due to daylight saving or move around the Earth, or if two users in two different places share a filesystem --- with rsync, nfs, or other mean --- and do backups alternatively...). Let's assume the system time is significant and thus let's imagine what would be the matter if in each archive this date of archive construction was stored.

Then when a detruit object is met in an archive it can be given the date the archive has been built and thus ordered in the series of dates when the corresponding file was found in other archives. So when the user asks for restoration of a directory a given file's state is possible to know, and thus the restoration of the corresponding archive will do what we expect : either remove the file (if the selected backup contains an  "detruit" object, or  restore the file in the state it had).

Suppose now a dar_manager database built with a series of full backups. There will thus not be any "detruit" objects, but a file may be present or may be missing in a given archive. The solution is thus that once an archive has been integrated in the database, the last step is to scan the whole database for files that have no date associated with this last archive; thus we can assume these files were not present and add the date of the archive creation with the information that this file was removed at that time. Moreover, if the last archive adds a file which was not known in archives already present in the database, we must consider this file was deleted in each of these previous archives, but then we must record the dates of creation for all these previous archives to be able to put this information properly in the database. But, in that case we would not be able to make dar remove a file, as no "detruit" object exists (all archives are full backups), and dar_manager should itself remove the entry from the filesystem. Beside the fact that it is not the role of dar_manager to directly interact with the filesystem, dar_manager should record an additional piece of information to know if a file is deleted because a "detruit" object has been found in an archive, or if it is deleted because no entry has been found in a given archive. This is necessary to know whether to rely on dar to remove the file or to make dar_manager do it itself, or maybe better is to never rely on dar to remove a file but always let dar_manager do it itself.

Assuming we accept to make dar_manager able to rm entries from filesystem without relying on dar, we must store the date of the archive creation in each archive, and store these dates for each archive in dar_manager databases. Then instead of using the mtime of each file, we could do something much more simple in database: for each file, record if it was present or not in each archive used to built the database, and beside this, store only the archive creation date of each archive. This way, dar_manager would only have for each file to take the last state of the file (deleted or present) before the given date (or the last known state if no date is given) and either restore the file from the corresponding archive or remove it.

But if a user has removed a file by accident and only notice this mistake after several backups, it would become painful to restore this file, as the user should find manually at which date it was present to be able to feed dar_manager with the proper -w option, this worse than looking for the last archive that has the file we look for.

Here we are back to the restoration of a file and the restoration of a state. By state, I mean the state a directory tree had at a given time, like a photo. In its original version dar_manager was aimed to restore files, whether or not they exist in the last archive added to a database. It only finds the last archive where the file is present. Making dar_manager restore a state, and thus considering files that have been removed at a given date, is no more no less than restoring from a given archive, directly with dar. So all this discussion about the fact that dar_manager is not able to handle files that have been removed, to arrive at the fact that adding this feature to dar_manager would make it become quite useless... sigh. But, that was necessary.



Native Language Support / gettext / libintl


Native Language Support (NLS) is the fact that a given program can display its messages in different languages. For dar, this is implemented using the gettext tools. This tool must be installed on the system for dar to be able to display messages in a language other than English.

Things are the following:
- On a system without gettext dar will not use gettext at all. All messages will be in English (OK maybe better saying Frenglish) ;-)
- On a system with gettext dar will use the system's gettext, unless you use --disable-nls option with the configure script.

If NLS is available you just have to set the LANG environment variable to your locale settings to change the language in which dar displays its messages (see ABOUT-NLS for more about the LANG variable).

Just for information, gettext() is the name of the call that makes translations of strings in the program. This call is implemented in the library called 'libintl' (intl for Internationalization). Last point, gettext, by translating strings, makes Native Language Support (NLS) possible; in other words, it lets you have the messages of your preferred programs displayed in your native language, for those not having English as mother tongue.

This was necessary to say, because you may miss the links between "gettext" , "libintl" and "NLS".

READ the ABOUT-NLS file at the root of the source package to learn more about the way to display dar's messages in your own language. Note that not all languages are yet supported, this is up to you to send me a translation in your language and/or contact a translating team as explained in ABOUT-NLS.

To know which languages are supported by dar, read the po/LINGUAS file and check out for the presence of the corresponding *.po files in this directory.



Dar Release Process

Development Phase:
Dar receives new features during the development phase; at this stage sources are modified and tested after each feature addition. The development sources are stored in a GIT repository at sourceforge, a repository you can access read-only.

Frozen API Phase:
No new feature that would change the API is added. The API shall be documented enough to let API users give their feedback about the design and its implementation. During this time, development continues, whatever is necessary while it does not change the API, like documentation of the whole project, problem fixes in libdar, new features in the command-line part of the source, and so on.

Pre-release Phase:
Once the documentation and API is stable, comes the pre-release phase, this phase starts and ends by a email to the dar-news mailing-list. At this period intensive test is done on the pre-release source, feedback and information about new pre-release packages are exchanged through the pre-release mailing-list, this mailing-list lives only during the pre-release phases and is not archived, nor visible through a mail to news gateway. Of  course, you are welcome to participate in the testing process and report to the pre-release mailing list any problem you could meet with a given pre-release package.

Release Phase:
Some little time after the pre-release has ended, a first package is released (the last number of the version is zero) and available at sourceforge for download. This phase also begins with an email to the dar-news mailing-list. During that phase, users may report bugs/problems about the released software; depending on the amount of bugs found and their importance, a new release will take place to only fix these found bugs (no feature is added), the last number of the version is incremented by one, and a new mail to dar-news is sent with the list of problems fixed by the new release. The release phase ends when a new release phase begins, thus during a release phase a concurrent development phase takes place, then a frozen API, then a pre-release phase, but for a new major version (the first or the second number of the version changes).

Dar's Versions

package release version

Dar packages are released during the release phase (see above). Each version is identified by three numbers separated by dots, like for example version 2.3.0. The last number is incremented between releases that take place in the same release phase (just bugs have been fixed), the middle number increments at each pre-release phase. Last, the first number is incremented when a major change in the software structure took place [version 2.0.0 has seen the split of dar's code in one part related to command-line and the rest put in a library called libdar, that can be accessed by a well defined API even by external software (like kdar for example). Version 2.0.0 has also seen the apparition of the configure script and the use of the gnu tools autoconf, automake, libtool and gettext, thus in particular the possibility to have internationalization].

Note that release versioning is completely different from what is done for the Linux kernel; here for dar all versioned packages are stable released software and thus stability increases with the last number of the version.

Libdar version

Unfortunately, the release version does not give much information about the compatibility of different libdar versions, from the point of view of an external application, that thus has not been released with libdar and may be faced with different libdar versions. So, libdar has its own version. It is also a three number version (for example, current libdar version is version 3.1.2), but each number has a different meaning. The last number increases with a new version that only fixes bugs; the middle number increases when new features have been added but stay compatible with older libdar versions in the way to use older features. Last, the first number changes when the API has been changed in a way that ascendant compatibility is no longer possible for some features.

Other versions


beside the libdar library, you can find five command-line applications: dar, dar_xform, dar_slave, dar_manager and dar_cp. These except dar have their own version which is here too made of three numbers. Their meaning is the same as the meaning for the package release version: The last number increases upon bug fix, the middle upon new feature, the first upon major architecture changes.

Archive format version

When new features come, it is sometimes necessary to change the structure of the archive. To be able to know the format used in the archive, a field is present in each archive that defines this format. Each dar binary can thus read all archive formats; well, of course, a particular version cannot guess the format of archives that have been defined *after* that dar binary version has been released. If you try to open a recent archive with an old dar binary, you will have a warning about the fact that dar is probably not able to read the archive, and dar will then ask you if you want to proceed anyway. Of course, you can try to read it, but this is at your own risk. In particular, depending on the feature used (see the Changelog to know which feature required upgrading the archive format), you may succeed reading a recent archive with an old dar binary and get neither error nor warning, but this does not mean that dar did all that was necessary to restore the files properly, so it is advised to avoid using an archive with a version of dar that is too old to handle the archive format properly (and rather reserve this possibility only in case of necessity).

Cross reference matrix

OK, you may now find that this is a bit complex, so a list of versions is given below. Just remember that there are two points of view: the command-line user and the external application developer.

Date
release (and
dar version)
Archive format
Database
Format
libdar version
dar_xform
dar_slave
dar_manager
dar_cp
dar_split
April 2nd, 2002
1.0.0
01
----- ----- ----- ----- ----- ----- -----
April 24th, 2002
1.0.1
01
----- ----- ----- ----- ----- ----- -----
May 8th, 2002
1.0.2
01
----- ----- ----- ----- ----- ----- -----
May 27th, 2002
1.0.3
01
----- ----- ----- ----- ----- ----- -----
June 26th, 2002
1.1.0
02
----- ----- 1.0.0
1.0.0
----- ----- -----
Nov. 4th, 2002
1.2.0
03
01 ----- 1.1.0
1.1.0
1.0.0
----- -----
Jan. 10th, 2003
1.2.1
03
01 ----- 1.1.0 1.1.0 1.0.0
----- -----
May 19th, 2003
1.3.0
03
01 ----- 1.1.0
1.1.0
1.1.0
----- -----
Nov. 2nd, 2003
2.0.0
03
01 1.0.0
1.1.0
1.1.0
1.2.0
1.0.0
-----
Nov. 21th, 2003
2.0.1
03
01 1.0.1
1.1.0
1.1.0
1.2.0
1.0.0
-----
Dec. 7th, 2003
2.0.2
03
01 1.0.2
1.1.0
1.1.0
1.2.0
1.0.0
-----
Dec. 14th, 2003
2.0.3
03
01 1.0.2
1.1.0
1.1.0
1.2.1
1.0.0
-----
Jan. 3rd, 2004
2.0.4
03
01 1.0.2
1.1.0
1.1.0
1.2.1
1.0.0
-----
Feb. 8th, 2004
2.1.0
03
01 2.0.0
1.2.0
1.2.0
1.2.1
1.0.0
-----
March 5th, 2004
2.1.1
03
01 2.0.1
1.2.1
1.2.1
1.2.2
1.0.0
-----
March 12th, 2004
2.1.2
03
01 2.0.2
1.2.1
1.2.1
1.2.2
1.0.0
-----
May 6th, 2004
2.1.3
03
01 2.0.3
1.2.1
1.2.1
1.2.2
1.0.1
-----
July 13th, 2004
2.1.4
03
01 2.0.4
1.2.1
1.2.1
1.2.2
1.0.1
-----
Sept. 12th, 2004
2.1.5
03
01 2.0.5
1.2.1
1.2.1
1.2.2
1.0.1
-----
Jan. 29th, 2005
2.1.6
03
01 2.0.5
1.2.1
1.2.1
1.2.2
1.0.1
-----
Jan. 30th, 2005
2.2.0
04
01 3.0.0
1.3.0
1.3.0
1.3.0
1.0.1
-----
Feb. 20th, 2005
2.2.1
04
01 3.0.1
1.3.1
1.3.1
1.3.1
1.0.1
-----
May 12th, 2005
2.2.2
04
01 3.0.2
1.3.1
1.3.1
1.3.1
1.0.2
-----
Sept. 13th, 2005
2.2.3
04
01 3.1.0
1.3.1
1.3.1
1.3.1
1.0.2
-----
Nov. 5th, 2005
2.2.4
04
01 3.1.1
1.3.1
1.3.1
1.3.1
1.0.2
-----
Dec. 6th, 2005
2.2.5
04
01 3.1.2
1.3.1
1.3.1
1.3.1
1.0.2
-----
Jan. 19th, 2006
2.2.6
04
01 3.1.3
1.3.1
1.3.1
1.3.1
1.0.3
-----
Feb. 24th, 2006
2.2.7
04
01 3.1.4
1.3.1
1.3.1
1.3.1
1.0.3
-----
Feb. 24th, 2006
2.3.0
05
01 4.0.0
1.4.0
1.3.2
1.4.0
1.1.0
-----
June 26th, 2006
2.3.1
05
01 4.0.1
1.4.0
1.3.2
1.4.0
1.1.0
-----
Oct. 30th, 2006
2.3.2
05
01 4.0.2
1.4.0 1.3.2 1.4.0 1.1.0 -----
Feb. 24th, 2007
2.3.3
05
01 4.1.0
1.4.0
1.3.2
1.4.1
1.2.0
-----
June 30th, 2007
2.3.4
06
01 4.3.0
1.4.0
1.3.2
1.4.1
1.2.0
-----
Aug. 28th, 2007
2.3.5
06
01 4.4.0
1.4.1
1.3.3
1.4.2
1.2.1
-----
Sept. 29th, 2007
2.3.6
06
01 4.4.1
1.4.1
1.3.3
1.4.2
1.2.1
-----
Feb. 10th, 2008
2.3.7
06
01 4.4.2
1.4.2
1.3.4
1.4.3
1.2.2
-----
June 20th, 2008
2.3.8
07
01 4.4.3
1.4.2
1.3.4
1.4.3
1.2.2
-----
May 22nd, 2009
2.3.9
07
01 4.4.4
1.4.2
1.3.4
1.4.3
1.2.2
-----
April 9th, 2010
2.3.10
07
01 4.4.5
1.4.2
1.3.4
1.4.3
1.2.2
-----
March 13th, 2011
2.3.11 07 01 4.5.0 1.4.3 1.3.4 1.4.3 1.2.2 -----
February 25th, 2012 2.3.12 07 01
4.5.1 1.4.3 1.3.4 1.4.3 1.2.2 -----
June 2nd, 2011
2.4.0 08 02 5.0.0 1.5.0
1.4.0
1.5.0
1.2.3
-----
July 21st, 2011
2.4.1
08
02 5.1.0
1.5.0
1.4.0
1.6.0
1.2.3
-----
Sept. 5th, 2011
2.4.2
08
02 5.1.1
1.5.0
1.4.0
1.6.0
1.2.3
-----
February 25th, 2012
2.4.3
08
03 5.2.0
1.5.0 1.4.0 1.7.0
1.2.3
-----
March 17th, 2012
2.4.4
08
03 5.2.1
1.5.0
1.4.0
1.7.1
1.2.3
-----
April 15th, 2012
2.4.5
08
03 5.2.2
1.5.1
1.4.1
1.7.2
1.2.4
-----
June 24th, 2012
2.4.6
08
03 5.2.3
1.5.2
1.4.2
1.7.3
1.2.5
-----
July 5th, 2012
2.4.7
08
03 5.2.4
1.5.2
1.4.3
1.7.3
1.2.5
-----
September 9th, 2012
2.4.8
08
03 5.3.0
1.5.3
1.4.4
1.7.4
1.2.6
-----
January 6th, 2013
2.4.9
08
03 5.3.1
1.5.3
1.4.4
1.7.4
1.2.7
-----
March 9th, 2013
2.4.10
08
03 5.3.2
1.5.3
1.4.4
1.7.4
1.2.7
-----
Aug. 26th, 2013
2.4.11
08
03 5.4.0
1.5.4
1.4.5
1.7.5
1.2.8
-----
January 19th, 2014
2.4.12
08
03 5.5.0
1.5.4
1.4.5
1.7.6
1.2.8
-----
April 21st, 2014
2.4.13
08
03 5.6.0
1.5.5
1.4.5
1.7.7
1.2.8
-----
June 15th, 2014
2.4.14
08
03 5.6.1
1.5.5
1.4.5
1.7.7
1.2.8
-----
September 6th, 2014
2.4.15
08
03 5.6.2
1.5.6
1.4.6
1.7.8
1.2.8
-----
January 18th, 2015
2.4.16
08
03 5.6.3
1.5.6
1.4.6
1.7.8
1.2.8
-----
January 31th, 2015
2.4.17
08
03 5.6.4
1.5.6
1.4.6
1.7.8
1.2.8

August 30th, 2015 2.4.18 08.1 03 5.6.5 1.5.6 1.4.6 1.7.8 1.2.8 -----
October 4th, 2015
2.4.19
08.1
03 5.6.6
1.5.6
1.4.6
1.7.8
1.2.8
-----
November 21th, 2015
2.4.20
08.1
03
5.6.7
1.5.8
1.4.8
1.7.10
1.2.10
-----
April 24th, 2016
2.4.21
08.1
03 5.6.8 1.5.9 1.4.9 1.7.11 1.2.10 -----
June 5th, 2016
2.4.22
08.1
03 5.6.9
1.5.9
1.4.9
1.7.11
1.2.10
-----
October 29th, 2016
2.4.23
08.1
03 5.6.9
1.5.9
1.4.9
1.7.11
1.2.10
-----
January 21st, 2017
2.4.24
08.1
03 5.6.10
1.5.9
1.4.9
1.7.11
1.2.10
-----
October 4th, 2015
2.5.0 09 04
5.7.0 1.5.7 1.4.7 1.7.9 1.2.9 1.0.0
October 17th, 2015
2.5.1 09 04
5.7.1 1.5.8 1.4.8 1.7.10 1.2.10 1.0.0
November 21st, 2015
2.5.2
09
04
5.7.2
1.5.8
1.4.8
1.7.10
1.2.10
1.0.0
January 4th, 2016
2.5.3
09
04
5.7.3
1.5.8
1.4.8
1.7.10
1.2.10
1.0.0
April 24th, 2016
2.5.4
09 04 5.8.0 1.5.9 1.4.9 1.7.11 1.2.10 1.0.0
June 5th, 2016
2.5.5
09
04
5.8.1
1.5.9 1.4.9
1.7.11
1.2.10
1.0.0
September 10th, 2016
2.5.6
09
04 5.8.2
1.5.9 1.4.9 1.7.11 1.2.10 1.0.0
October 29th, 2016 2.5.7
09
04
5.8.3
1.5.9
1.4.9
1.7.11
1.2.10
1.0.0
January 2nd, 2017 2.5.8
09
04
5.8.4
1.5.9
1.4.9
1.7.11
1.2.10
1.0.0
January 21st, 2017 2.5.9
09
04
5.9.0
1.5.9
1.4.9
1.7.11
1.2.10
1.0.0
April 4th, 2017
2.5.10
09
04
5.10.0
1.5.9
1.4.9
1.7.11
1.2.10
1.0.0
June 23rd, 2017
2.5.11
09
04
5.11.0
1.5.9
1.4.9
1.7.12
1.2.10
1.0.0
September 2nd, 2017
2.5.12
09
04
5.11.1
1.5.9
1.4.9
1.7.12
1.2.10
1.0.0
October 28th, 2017
2.5.13
09
04
5.12.0
1.5.10
1.4.10
1.7.13
1.2.10
1.0.0
December 20th, 2017
2.5.14
09
04
5.12.1
1.5.10
1.4.10
1.7.13
1.2.10
1.1.1
April 28th, 2018
2.5.15
09
04
5.12.2
1.5.10
1.4.10
1.7.13
1.2.10
1.1.1
July 19th, 2018
2.5.16
09
04
5.12.3
1.5.10
1.4.10
1.7.13
1.2.10
1.1.1
September 30th, 2018
2.5.17
09
04
5.13.0
1.5.10
1.4.10
1.7.13
1.2.10
1.1.1
December 8th, 2018
2.5.18
09
04
5.13.1
1.5.10
1.4.10
1.7.13
1.2.10
1.1.1
January 19th, 2019 2.5.19 09 04 5.13.2 1.5.11 1.4.11 1.7.14 1.2.10 1.1.1
February 9th, 2019 2.5.20 09 04 5.13.3 1.5.11 1.4.11 1.7.14 1.2.10 1.1.1
May 25th, 2019 2.5.21 09 04 5.13.4 1.5.11 1.4.11 1.7.14 1.2.10 1.1.1
July 6th, 2019 2.5.22 09 04 5.13.5 1.5.11 1.4.11 1.7.14 1.2.10 1.1.1
December 16th, 2018 2.6.0 10 05 6.0.0 1.6.0 1.5.0 1.8.0 1.2.11 1.1.2
January 19th, 2019 2.6.1 10 05 6.0.1 1.6.0 1.5.0 1.8.0 1.2.11 1.1.2
February 9th, 2019 2.6.2 10 05 6.0.2 1.6.1 1.5.1 1.8.1 1.2.12 1.1.2
March 30th, 2019 2.6.3 10.1 05 6.1.0 1.6.1 1.5.1 1.8.1 1.2.12 1.1.2
May 25th, 2019 2.6.4 10.1 05 6.1.1 1.6.1 1.5.1 1.8.1 1.2.12 1.1.2
July 6th, 2019 2.6.5 10.1 05 6.1.2 1.6.1 1.5.1 1.8.1 1.2.12 1.1.2
September 21st, 2019 2.6.6 10.1 05 6.2.0 1.6.2 1.5.2 1.8.2 1.2.12 1.1.2
January 12th, 2020 2.6.7 10.1 05 6.2.1 1.6.2 1.5.2 1.8.2 1.2.12 1.1.2
February 8th, 2020 2.6.8 10.1 05 6.2.2 1.6.2 1.5.2 1.8.2 1.2.12 1.1.2



How symmetric encryption is performed in dar/libdar

Symmetric encryption algorithms are those that use the same key/password/passphrase to cipher data and to uncipher it, like for example blowfish, AES, serpent, camellia, twofish and so on.

The user provides:
  • a key in the form of a passphrase or password
  • a cipher algorithm
  • a block size (which defaults to 10 kio)

Some additional information is stored in clear text in the archive header, information that will be used for the key derivation function (KDF):

  • a salt (random data)
  • a hash algorithm
  • a number of iterations

Why a KDF? The human provided password/passphrase is usually too simple and needs to be "randomized" (in a deterministic manner, to be able to uncipher the archive later on). This is the reason for the KDF, which, from a given key (a string of characters), creates a new key that may use any characters — even non-printable ones — and not only the alphanumerical characters and punctuation humans use. It passes the key melted with the "salt" through the provided hash algorithm for the given number of iterations, using at each new iteration the resulting key as input for the next round.

What is the salt for? Without a salt, if we use the same password for many archives, we give an attacker a lot of data encrypted with the same key. The more data he has, the easier it is to crack this unique key. With a salt, cracking an archive — which means finding the result of the KDF — does not help cracking other archives, even if they have been encrypted using the same human provided password: using a different salt per archive avoids having the same encryption key used for many archives, even if the user provided the same password. The salt is stored in clear text beside the hash algorithm and the number of iterations in the archive header; this is not a problem, as the human provided password still has to be known for the archive to be unciphered. For an attacker the problem is more complicated, as computation done on encrypted data to crack the key cannot leverage the fact that several archives were protected by the same password, thanks to the salt. And the extra work due to the salt is proportional to the iteration count times the number of archives: the more data there is to crack, the more important the extra work becomes.

Data to encrypt/decrypt is sliced into blocks of the given size and each block is provided to the encryption engine beside its number inside the archive. To be able to encrypt 10 kio or more at once, dar uses the CBC mode (Cipher Block Chaining). This mode requires an Initial Vector (IV) to be set for each block. The value of this IV may be known and predictable by an attacker; that's not a problem. Using a different IV for each block of data avoids, for a given key, getting the same encryption result for two different blocks (entirely or just starting with the same sequence) when the clear data is identical or starts with the same sequence.

To prevent a clear text attack (which is when the attacker has an idea of the clear data), each archive starts and ends with an elastic buffer, which is pseudo-randomly generated data of variable size; both elastic buffers are encrypted with the rest of the real data. This way the predictable archive structure is shifted to some random place inside the archive, which avoids the possibility of a clear text attack (or at least makes it much more difficult to achieve).

Thanks to the encryption per block, dar is able to uncipher data at a random place without having to uncipher the whole archive: only the block containing the needed data is unciphered. This is necessary to face both data corruption and fast restoration of a subset of the archive.

Here follows a diagram of the way key, block number, cipher algorithm and Initial Vector (IV) are defined:

           +------------------- [ algo in CBC mode ] -----------------------------> main key handle
algorithm -+                           ^                                                  |
           +---> max key len           |                                                  |
                     |                 |                                                  |
                     |                 |                                                  |
                     v                 |                                                  |
password ------> [ pkcs5 ] --> hashed_password ------------+                              |
                     ^                                    |                              |
                     |                                     |                              |
salt ----------------+                                    v                              |
                     ^                              [ SHA1/SHA256 ]                       |
                     |                                     |                              |
hash algo -----------+                                    |                              |
                     ^                                     v                              |
                     |                                essiv_password                      |
iteration count -----+                                    |                              |
                                                           |                              |
                                                           v                              |
                                           [ Blowfish/AES256 in ECB mode ]                |
                                                           |                              |
                                                           |                              |
                                                           v                              |
                                                    essiv key handle                      |
                                                           |                              |     Initialization
. . . . . . . . . . . . . . . . . . . . . . . . . . . . . .|. . . . . . . . . . . . . . . | . . . . . . . . . .
                                                           v                              |
block_number_in_archive ----------------------------> [ encrypt ] ------> IV -----+       |
                                                                                  |       |
                                                                                  |       |
                                                                                  v       v
data ------------------------------------------------------------------------> [ encrypt/decrypt ] -----> data
sliced by block
of given size






How asymmetric encryption is performed in dar/libdar

Dar does not encrypt the whole archive with a recipient's public key, but rather randomly chooses a (long) password for symmetric encryption (as seen above, except that it does not need KDF and salt, as it can select a much better random key than a human can do), encrypts that password with the recipient's public keys (eventually signing it with your own private key) and drops a copy of this ciphered/signed data into the archive. At reading time, dar reads the archive header to find the encrypted password, decrypts it using the user's private key, then uses that password, now in clear, to decrypt the rest of the archive with the adhoc symmetric algorithm.

Signing is done on the randomly chosen ciphering key as seen above, but also on the catalogue (the table of content located at the end of the archive). More precisely the catalogue is sha512-hashed and the resulting hash is signed. The catalogue hash and its signature are stored encrypted in the archive right after the catalogue.

Why not using only asymmetric encryption end to end?

First, for a given fixed amount of data, the resulting ciphered data size may vary. Thus sticking ciphered blocks together in the archive would not let one easily know where a block starts and where its end is located. Second, doing it that way allows an archive to have several different recipients: the password is ciphered for each of them and the archive is readable by any specified recipient, while they do not share any key. Doing it that way has very little impact on archive size.

For a multi recipient archive, any recipient has access to the signed and encrypted key and could reuse the same encryption key — which may also be signed by the original sender — to build a new archive with totally different content. Still, they should pay attention to the fact that the catalogue's hash is signed, so the modification should not touch the catalogue in a way that makes the hash differ. Last, the catalogue contains CRC of file's data/EA/FSA, and the attacker should pay attention here too to keep the same CRC for the whole archive content, not only the catalogue.

Well, this type of attack is pretty accessible to an expert guy, but it would take place between an expert guy and his friends!  See alternatives to avoid this pitfall


Why not using signing from end to end?

First, this can be done without dar — just use gpg on the slices of an archive — so there is no advantage in having such a feature inside dar. The other reason is time, as signing the whole archive would be long. It would also be very painful to validate a given archive's authenticity (time to read a whole archive); here we can leverage the fact that the archive is encrypted (thus tampering with the archive would be seen when unciphering it) and that dar uses several mechanisms (compression, clear text CRC) to detect data corruption.



dar-2.6.8/doc/authentification.html0000644000175000017520000000412413530031622014205 00000000000000 DAR - Disk ARchive - Authentification page

Dar Documentation


DAR's Authentication




PGP/ GnuPG key

From April 2nd, 2002 to September 12th, 2012 all released packages and communications have been signed with the following PGP public key, having a fingerprint of 3D7F 383C B41E 33D7 0250 A9AC A42E 4223 C818 1A52

Starting September 13th, 2012 this new PGP public key is in use, it has the fingerprint: 3B31 29AF 1DDD EFA5 A37D 818F 0831 B0BD 03D8 B182


The signature only proves that Denis Corbin has personally released the sources or binary package. This means there is no malicious code inside the signed packages (if you trust him, of course).



dar-2.6.8/doc/presentation.html0000644000175000017520000004137313606627706013413 00000000000000 Dar's Documentation - General Presentation
Dar Documentation


PRESENTATION



1- General Presentation

Dar is a command-line software aimed at backing up and archiving large live filesystems. It is a filesystem independent and cross platform tool. But Dar is not a boot loader, nor is it an operating system. It does not create nor format partitions, but it can restore a full filesystem into a larger or a shorter partition, from one partition to several ones (or the opposite, from several to one partition), from a filesystem type to another filesystem type (ext2/3/4 to reiserFS for example); it can save and restore hard-linked inodes (hard linked plain files, sockets, char/block devices or even hard linked symlinks (!)), Solaris's Door files, and it takes care of Posix Extended Attributes (Linux, MacOS, ...). It can also detect and restore sparse files, even when the underlying filesystem does not support them: it can restore a normal file with large portions of zeroed bytes as a sparse file on another filesystem that supports it, leading to a gain in space requirement for the restoration but also for the archive size.

From a filesystem, dar creates an archive, which may be split in a set of files (called slices) whose size is user defined. Dar archives are suitable to be stored on floppy, CD, DVD, usb key, hard disks, and since release 2.4.0 to tapes too. But no, dar itself cannot burn a DVD. Instead the user can give dar a command to execute each time a slice is completed. Dar can perform full backup1, incremental backup2, differential backup3 and decremental backup4. It also records files that have been removed since the last backup was made, leading the restoration of a system to get the exact same state it was at the time of the differential/incremental/decremental backup (removing files that ought to be removed, adding files that ought to be added and modifying files as expected).

Dar is able to be run on a live filesystem. It can detect when a file has changed while it was reading it, in that case it is possible to ask dar to proceed to several retries to save such a file. File that could not be saved properly (because they changed during the backup and there is no more retry left) are flagged as "dirty" and are signaled at restoration time.

Dar provides several hooks for the user to perform certain actions when some event are met. One first hook let the user launch a command when a slice is completed  (for burning the slice to DVD for example). A second hook is available for a given set of files or directories under backup, that let the user do some action before and after saving that file or directory (like suspending a database, and resuming it afterward).

Dar code has been split into two parts since its release 2.0.0, a command-line part and a library part called libdar. Dar is not a Graphical User Interface, it is only a command-line tool, but thanks to the libdar library, many external tools rely on libdar and can manipulate dar archives at a more abstracted level with other interfaces than the shell command-line.

libdar may be linked against libz, libz2, liblzo and libxz to provide gzip, bzip2, lzo and xz/lzma compression. It can also be linked against libgcrypt to provide symmetric strong encryption (blowfish, twofish, aes256, serpent256, camellia256). In addition it can be linked against libgpgme to provide asymmetric encryption (same as GPG) which, in other words, lets you cipher an archive with the public key of one or more recipients and optionally sign it with your own private key.

Dar can be used through ssh for remote backups. It may also make use of a so called "isolated catalogue", which is a small file containing the table of contents (or catalog) of a given archive. These can be used as base for a differential or incremental backup, and can also be used as backup of the table of contents of the original archive if that one gets corrupted.

While dar/libdar provide a lot of features we will not mention here, you can use dar without having the knowledge of all of them. In its most simple form, dar can be used only with few options, here follows some example of use, that should not need additional explanations:

archive creation (backing up all /usr directory) :
   dar -c my_backup -R / -g usr
  
archive restoration (restoring /usr in a alternate directory):
  
dar -x my_backup -R /some/where/else

archive testing:
  
dar -t my_backup

archive comparison with an existing filesystem:
   dar -d my_backup -R /

While a big effort has been made on documentation, it must not lead you to think that you have to read it all to be able to use dar: most needs are covered by the tutorial or mini-howto and for direct explanation of common questions by the FAQ. Then, if you like or if you need, you can also look at the detailed man pages for a particular feature (These documents are the reference for each command-line tool you will get very detailed explanations). You may also find some help on the dar-support mailing-list where a bit more than a hundred of subscribed users can help you.

Dar's documentation is big because it also includes all that may be useful to know how to use libdar, which is intended for developers of external application relying on this library. For those even more curious there is also the documentation about dar's internals: libdar's structure, archive format, which can ease the understanding of the magic that makes all this working and gives a better understanding of dar/libdar code, which is written in C++. But, no, you do not need to read all this to just use dar! ;-)

Follow an abstracted list of features if you want to know more about dar/libdar.

2 - Known Projects relying on dar or libdar


In alphabetical order:
  • AVFS is virtual file system layer for transparently accessing the content of archives and remote directories just like local files.
  • backup.pl script by Bob Rogers, creates and verifies a backup using dump/restore or using dar
  • Baras by Aaron D. Marasco is a rewriting in Perl of SaraB.
  • Dar-incdec-repo by Dan A. Muresan is a framework for doing periodic DAR backups with minimal fuss
  • Darbup by Carlo Teubner. One of darbup key features is its ability to automatically delete old archives when the total space taken up by existing archives exceeds some configured maximum
  • Darbrrd by Jared Jennings, to back up a few hundred gigabytes of data onto dozens of optical discs in a way that it can be restored ten years later.
  • DarGUI by Malcolm Poole is a front-end to dar providing simple and graphical access to the main features of dar.
  • Disk archive interface for Emacs by Stefan Reichör
  • gdar by Tobias Specht, a graphical user interface to browse and extract dar archives
  • HUbackup (Home User backup) by SivanGreen
  • kdar is a KDE-3 Graphical User Interface to dar made by Johnathan Burchill
  • Lazy Backup by Daniel Johnson. Lazy Backup is intended to be so easy even lazy people will do their backups
  • A Dar plugin has been made by Guus Jansman for Midnight commander (mc)
  • SaraB: Schedule And Rotate Automatic Backups - by Tristan Rhodes. SaraB works with DAR to schedule and rotate backups. Supports the Towers of Hanoi, Grandfather-Father-Son, or any custom backup rotation strategy.
If a project you like is missing, you are welcome to contact dar's author for it to be referred here (contact coordinates can be found in the AUTHOR file of the source package).

3 - Limitations

Dar has some limitations which are detailed here.

4 - Staying informed of dar/libdar's events

To stay informed about new releases and security issues please subscribe to the Dar-news mailing-list. This is a read-only mailing-list with very low number of mail (usually less than ten a year).



1 Full backup: A full backup is a backup of a full filesystem or of a subset of files where, for each file, the archive contains all the inode information (ownership, permission, dates, etc.) file's data and eventually file's Extended Attributes.
2 Differential backup: A differential backup is based on a full backup. It contains only the data and Extended Attributes of files that changed since the full backup was made. It also contains the list of files that have been removed since the full backup was made. For files that did not change, it contains only the inode information. The advantage is that the backup process is much faster, the space required is also much lower.  The drawback is that you need to restore the full backup first, then the differential backup to get the last saved state of your system. But if you want the last version of a file that changed recently you only need the last differential backup.
3 Incremental backup: An incremental backup is essentially the same thing as a differential backup. Some make a difference, I do not. The only point I see is that the incremental backup is not based on a full backup but on a differential backup or on another incremental one.
4 Decremental backup: A decremental backup is a backup method in which the most recent backup is a full backup, while the oldest backup are a difference compared to that full backup. The advantage of such type of backup is the you can restore easily your system in the last state it had using only the last backup. And, if you want to restore it in the state it had some time before, then you can restore the last backup (full backup), then the previous archive (a decremental backup) and so on. As you most usually want to restore the system in its last available state, this makes restoration much more easy compared to doing incremental backups. However, this suffer from a important drawback, which is that you need to transform the last backup into a decremental backup when comes the time to make another backup. Then you have to remove the former full backup and replace it by its decremental version.

dar-2.6.8/doc/Good_Backup_Practice.html0000644000175000017520000004747313552573442014673 00000000000000 Good Backup Practice Short Guide
Dar Documentation


Good Backup Practice Short Guide





Presentation

This short guide is here to gather important (and somehow obvious) techniques about computer backups. It also explains the risks you take not following these principles. I thought this was obvious and well known by anyone, up to recently when I started getting feedback of people complaining about their lost data because of bad media or other reasons. To the question "have you tested your archive?", I was surprised to get the negative answers.

This guide is not especially linked to Disk ARchive (aka dar) no more than to any other tool, thus, you can take advantage of reading this document if you are not sure of your backup procedure, whatever is the backup software you use.

Notions

In the following we will speak about backup and archive:
  • by backup, is meant a copy of some data that remains in place in an operational system
  • by archive, is meant a copy of data that is removed afterward from an operational system. It stays available but is no more used frequently.
With the previous meaning of an archive you can also make a backup of an archive (for example a clone copy of your archive).

Archives

1. The first thing to do just after making an archive is testing it on its definitive medium. There are several reasons that make this testing important:
  • any medium may have a surface error, which in some case cannot be detected at writing time.
  • the software you use may have bugs (also dar can, yes. ;-) ... ).
  • you may have done a wrong operation or missed an error message (no space left to write the whole archive and so on), especially when using poorly written scripts.
Of course the archive testing must be done when the backup has been put on its definitive place (CD-R, floppy, tape, etc.), if you have to move it (copy to another media), then you need to test it again on the new medium. The testing operation, must read/test all the data, not just list the archive contents (-t option instead of -l option for dar). And of course the archive must have a minimum mechanism to detect errors (dar has one without compression, and two when using compression).

2. As a replacement for testing, a better operation is to compare the files in the archive with the original files on the disk (-d option for dar). This is the same as testing archive readability and coherence, while also checking that the data is really identical, whatever the corruption detection mechanisms used are. This operation is not suited for a set of data that changes (like an active system backup), but is probably what you need when creating an archive.

3. Increasing the degree of security, the next thing to try is to restore the archive in a temporary place or better on another computer. This will let you check that from end to end, you have a good usable backup, on which you can rely. Once you have restored, you will need to compare the result, the diff command can help you here, moreover, this is a program that has no link with dar so it would be very improbable to have a common bug to both dar and diff that let you think both original and restored data are identical while they are not!

4. Unfortunately, many (all) media do alter with time, and an archive that was properly written on a correct media may become unreadable with time and/or bad environment conditions. Thus of course, take care not to store magnetic storages near magnetic sources (like HiFi speakers) or enclosed in metallic boxes, as well as avoid having sun directly lighting your CD-R(W) DVD-R(W), etc. Also mentioned for many media is humidity: respect the acceptable humidity range for each medium (don't store your data in your bathroom, kitchen, cave, ...). Same thing about the temperature. More generally have a look at the safe environmental conditions described in the documentation, even just once for each media type.

The problem with archives is that usually you need them for a long time, while the media has a limited lifetime. A solution is to make one (or several) copies (i.e.: backups of the archive) of the data when the original medium has reached half its expected lifetime.

Another solution is to use Parchive; it works on the principle of RAID disk systems, creating beside each file a par file which can be used later to recover missing or corrupted parts of the original file. Of course, Parchive can work on dar's slices. But it requires more storage, thus you will have to choose a smaller slice size to have room to put Parchive data on your CD-R or DVD-R for example. The amount of data generated by Parchive depends on the redundancy level (Parchive's -r option). Check the notes for more information about using Parchive with dar. When using a read-only medium, you will need to copy the corrupted file to a read-write medium so that Parchive can repair it. Unfortunately the usual 'cp' command will stop when the first I/O error is met, making you unable to get the sane data *after* the corruption. In most cases you will not have enough sane data for Parchive to repair your file. For that reason the "dar_cp" tool has been created (it is included in dar's package). It is a cp-like command that skips over the corruptions (replacing them by a field of zeroed bytes, which can be repaired afterward by Parchive) and can copy sane data after the corrupted part.

5. another problem arrives when an archive is often read. Depending on the medium, the fact to read, often degrades the media little by little, and makes the media's lifetime shorter. A possible solution is to have two copies, one for reading and one to keep as backup, copy which should be never read except for making a new copy. Chances are that the often read copy will "die" before the backup copy, you then could be able to make a new backup copy from the original backup copy, which in turn could become the new "often read" medium.

6. Of course, if you want to have an often read archive and also want to keep it forever, you could combine the two of the previous techniques, making two copies, one for storage and one for backup. Once you have spent a certain time (medium half lifetime for example), you could make a new copy, and keep them beside the original backup copy in case of.

7. Another problem, is safety of your data. In some case, the archive you have does not need to be kept a very long time nor it needs to be read often, but instead is very "precious". in that case a solution could be to make several copies that you could store in very different locations. This could prevent data lost in case of fire disaster, or other cataclysms.

8. Yet another aspect is the privacy of your data. An archive may not have to be accessible to anyone. Several directions could be possible to answer this problem:

  • Physical restriction to the access of the archive (stored in a bank or locked place, for example)
  • Hid the archive (in your garden ;-) ) or hide the data among other data (Edgar Poe's hidden letter technique)
  • Encrypting your archive
  • And probably some other ways I am  not aware about.
For encryption, dar provides strong encryption inside the archive (blowfish, aes, etc.); it preserves the direct access feature that avoids you having to decrypt the whole archive to restore just one file. But you can also use an external encryption mechanism, like GnuPG, to encrypt slice by slice for example; the drawback is that you will have to decrypt each slice as a whole to be able to recover a single file in it.

Backup

Backups act a bit like an archive, except that they are a copy of a changing set of data, which is moreover expected to stay on the original location (the system). But, as an archive, it is a good practice to at least test the resulting backups, and once a year if possible to test the overall backup process by doing a restoration of your system into a new virtual machine or a spare computer, checking that the recovered system is fully operational.

The fact that the data is changing introduces two problems:

  • A backup is quite never up to date, and you will probably lose data if you have to rely on it
  • A backup becomes soon obsolete.
The backup has also the role of keeping a recent history of changes. For example, you may have deleted a precious data from your system. And it is quite possible that you notice this mistake long ago after deletion. In that case, an old backup stays useful, in spite of many more recent backups.

As a consequence, backups need to be done often to keep a minimum delta in case of a disk crash. But having a new backup does not mean that older ones can be removed. A usual way of doing that is to have a set of media, over which you rotate the backups. The new backup is done over the oldest backup of the set. This way you keep a certain history of your system changes. It is your choice to decide how many backups you want to keep, and how often you will make a backup of your system.

Differential / incremental backup

A point that can increase the history while saving media space required by each backup is the differential backup. A differential backup is a backup done only of what have changed since a previous backup (the "backup of reference"). The drawback is that it is not autonomous and cannot be used alone to restore a full system. Thus there is no problem to keep the differential backup on the same medium as the one where is located the backup of reference.

Doing a lot of consecutive differential backups (taking the last backup as reference for the next differential backup, which some are used to calling "incremental" backups) will reduce your storage requirement, but will add an extra time cost at restoration in case of a computer accident. You will have to restore the full backup (of reference), then you will have to restore, in turn, all the many backups you have done up to the last one. This implies that you must keep all the differential backups you have done since the backup of reference, if you wish to restore the exact state of the filesystem at the time of the last differential backup.

It is thus up to you to decide how many differential backups you do, and how often you make a full backup. A common scheme is to make a full backup once a week and a differential backup each day of the week. The backups done in a week are kept together. You could then have ten sets of full+differential backups, and a new full backup would erase the oldest full backup as well as its associated differential backups; this way you keep a ten-week history of backups with a backup every day, but this is just an example.

An interesting protection suggested by George Foot on the dar-support mailing-list: once you make a new full backup, the idea is to make an additional differential backup based on the previous full backup (the one just older than the one we have just built), which would "act as a substitute for the actual full backup in case something does go wrong with it later on".

Decremental Backup

Based on a feature request for dar made by "Yuraukar" on the dar-support mailing-list, the decremental backup provides an interesting approach where the disk requirement is optimized as for the incremental backup, while the latest backup is always a full backup (whereas it is the oldest that is full in the incremental backup approach). The drawback here is that there is some extra work at each new backup creation to transform the formerly most recent backup from a full backup into a so-called "decremental" backup.

The decremental backup only contains the difference between the state of the current system and the state the system had at a more ancient date (the date of the full backup from which the corresponding decremental backup was made).

In other words, the building of decremental backups is the following :
  • Each time (each day for example), a new full backup is made
  • The full backup is tested, parity control is possibly built, and so on.
  • From the previous full backup and the new full backup, a decremental backup is made
  • The decremental backup is tested, parity control is possibly built, and so on.
  • The oldest full backup can then be removed
This way you always have a full backup as the latest backup, and decremental backups as the older ones.

You may still have several sets of backups (one for each week, for example, containing at the end of a week a full backup and 6 decremental backups), but you may also keep just one set (a full backup, and a lot of decremental backups). When you need more space, you just have to delete the oldest decremental backups — something you cannot do with the incremental approach, where deleting the oldest backup means deleting the full backup that all the following incremental backups are based upon.

Unlike the incremental backup approach, it is very easy to restore a whole system: just restore the latest backup (as opposed to restoring the most recent full backup, then all the incremental backups that follow it). If now you need to recover a file that has been erased by mistake, just use the adequate decremental backup. And it is still possible to restore a whole system globally to a state it had long before the latest backup was done: for that you will restore the full backup (the latest backup), then in turn each decremental backup up to the one that corresponds to the epoch you wish. The probability that you have to use all the decremental backups is small compared to the probability that you have to use all the incremental backups: there is effectively much more probability of restoring a system to a recent state than to a very old state.

There are, however, several drawbacks:
time
Doing a full backup each time is time consuming, and creating a decremental backup from two full backups is even more time consuming...
temporary disk space
Each time you create a new backup, you temporarily need more space than with the incremental backup approach: you need to keep two full backups during a short period, plus a decremental backup (usually much smaller than a full backup), even if at the end you remove the oldest full backup.

In conclusion, I would not tell that decremental backup is the panacea, however it exists and may be of interest to some of you. More information about dar's implementation of decremental backup can be found here.




Any other trick/idea/improvement/correction/evidences are welcome!

Denis.





dar-2.6.8/doc/samples/0000755000175000017520000000000013617552352011522 500000000000000dar-2.6.8/doc/samples/PN_backup-root.options0000644000175000017520000000117413476543220015702 00000000000000### Options that are appended to the dar command: # No warning when not run from a terminal -Q # Don't try to read darrc files -N # Be verbose (so everything can be logged) -v # No warn on overwrite (should not happen anyway) -w # Compression level -z1 # Keep empty directories as such, so they can be restored -D # Blowfish encryption -K bf:secretpassword # Directory to backup -R "/" # Excludes (must be specified as relative paths to the directory # that is to be backed up) -P "mnt/loop" -P "mnt/storage" -P "mnt/tmp" -P "mnt/backupftp" -P "dev/pts" -P "proc" -P "sys" -P "tmp" -P "var/tmp" -P "usr/tmp" -P "usr/portage/distfiles" dar-2.6.8/doc/samples/automatic_backup0000644000175000017520000007710613476543220014710 00000000000000#Written by Manuel Iglesias. glesialo@tiscali.es #Notes: SystemDirectory=/sbin # This file should be copied (by CopySystemFiles) to its corresponding Directory (see above). # Exit codes at the end of this file. CommandName=`basename $0` ######################################################### # BACKUP SETUP. BEGIN. Read Dar Doc before modification. ######################################################### # Permissions. ################## # Allow use only in run level 1. CheckRunLevel=false # # Allow use only by root (Super user). CheckUser=true # ######################################################### # Paths and files. ################## # Directories. ######### # Backup files Directory: Absolute path (Should start with '/'!!). Don't end it with '/' unless it is '/'. DestinationDir=/store/.Store/Backup # # Origin of Backup/Restore Directory: Absolute path (Should start with '/'!!). # Don't end it with '/' unless it is '/'. OriginDir=/ # # Directories to backup. Relative to Origin of Backup Dir! 
Empty means: all dirs # (Except those in Directories to ignore. See below.). Separate with spaces. SubDirsToBackup="root home" # # Directories to ignore. Relative to Origin of Backup Dir! Separate with spaces. SubDirsToIgnore="home/manolo2 home/manolo/documents/Secret */.Trash* .Trash*\ */.mozilla/*/[Cc]ache */.opera/[Cc]ache* */.pan/*/[Cc]ache */.thumbnails" # # DestinationDir will be automatically included in SubDirsToIgnore if DestinationDir is a subdirectory # of OriginDir. If you want to include the base (IE.: Temp if DestinationDir: OriginDir/Temp/Backup) of # DestinationDir instead, set constant IgnoreBaseOfDestinationDir to true. Value (true | false). IgnoreBaseOfDestinationDir=true # # File systems that should be mounted for a correct backup. If any of them has to be mounted, # it will be umounted before this shellscript exits. Please mind mounting order!! # Absolute path (Should start with '/'!!). Separate with spaces. DirsToMount="/home /home/common /store" # ################## # Files. ######### # Files to backup. Empty: all files (Except those in Files to ignore. See below.). # No Path. Separate with spaces. FilesToBackup="" # # Files that should not be included in backup. No Path. Separate with spaces. FilesToIgnore="*~ .*~ cryptfile0.crypt cryptfile1.crypt" # # Files that should not to be compressed. No Path. Separate with spaces. FilesNotToCompress="*.dar *.crypt *.arj *.bz2 *.bz *.Z *.tgz *.taz *.cpio *.deb\ *.gtar *.gz *.lzh *.lhz *.rar *.rpm *.shar *.sv4cpi *.sv4crc *.tar *.ustar *.zoo\ *.zip *.jar *.jpg *.gif *.mpg *.mpeg *.avi *.ram *.rm" # ######################################################### # Parameters used to choose Differential Backup level. ################## BlockSize=1024 # # When Diffbackup > (MaxDiffPercentOfFullBackup% of FullBackup): New FullBackup recommended. MaxDiffPercentOfFullBackup=30 # # When Diffbackup < (MinDiffPercentOfFullBackup% of FullBackup): Rewrite first DiffBackup recommended. 
MinDiffPercentOfFullBackup=3 # # Max 99. If (Nr of DiffBackups) > MaxNrOfDiffBackups: Rewrite first DiffBackup recommended. MaxNrOfDiffBackups=20 # ######################################################### # Dar settings and options. ################## #Used dar suite program names. DarManagerName=dar_manager DarName=dar # # Directory where dar usually resides. Absolute path (Should start with '/'!!). Don't end it with '/'. DarDir=/usr/local/bin # # Create empty sub-directories in backup instead of those not saved. Value (true | false). BackupIgnoredDirsEmpty=true # # CompressWithBZip2=false -> no compression. Value (true | false). CompressWithBZip2=true # # Compress Files > 100Mb. Only valid if CompressWithBZip2=true. Value (true | false). CompressBigFiles=true # # Value (true | false). VerboseMode=false # # Value (true | false). MakeSlices=true # # StopAfterSlices: Only valid if MakeSlices=true. Value (true | false). StopAfterSlices=false # # SizeOfDarStatic: dar_static + DocFiles + Restore shell + etc (To calculate first slize size). SizeOfDarStatic=4 # SliceSize=650 # ######################################################### # BACKUP SETUP. END. Read Dar Doc before modification. ######################################################### ######################################################### # SUBROUTINES. BEGIN. ######################################################### echoE() { # echo to standard error. Remove leading/trailing blanks and double spaces. echo $* 1>&2 return 0 } Usage() { echoE "$CommandName creates (Using '$DarName'), in directory" echoE "'$DestinationDir'," echoE "a backup of all files and directories in" echoE "'$OriginDir'." echoE "It analyzes current backup files and recommends the most suitable new" echoE "backup level to the user. It also creates/updates a database with backup" echoE "information for future Backup management (Using '$DarManagerName')." 
echoE echoE "The backup will be split in files of $SliceSize Mb to fit in removable media." echoE echoE "Usage: $CommandName. (User can choose backup level)." echoE "or" echoE "Usage: $CommandName -auto. ($CommandName selects backup level automatically)." echoE return 0 } UmountDirs () { if [ "$DirsToUMount" != "" ] then echo "############" echo "$CommandName: Unmounting file systems:" for i in $DirsToUMount do mount | grep -w $i &> /dev/null if [ $? -eq 0 ] then if (umount $i &> /dev/null) then echo "$CommandName: $i unmounted." else echoE "$CommandName: $i could not be unmounted." fi else echo "$CommandName: $i was already unmounted." fi done fi echo "############" return 0 } TwoDigits () { #Add leftmost 0 if [ $1 -lt 10 ] then echo 0$1 else echo $1 fi return 0 } Stream() { # Output String(s) without letting the Shell interpret metacharacters. # Remove leading/trailing blanks and double spaces. # Enclose arguments in "" when calling. I.E.: Stream "$Var1 $Var2" TempStr=$@ Length=${#TempStr} if [ $Length -eq 0 ] then return else CharNum=0 while [ $CharNum -lt $Length ] do echo -n "${TempStr:$CharNum:1}" let CharNum++ done echo fi return } ######################################################### # SUBROUTINES. END. ######################################################### NoUserChoice=false if [ $# -ne 0 ] then if [ "$1" == "-auto" ] then NoUserChoice=true else Usage exit 1 fi fi if $CheckRunLevel then RunLevel=`runlevel | sed 's/.* //'` if [ $RunLevel != S ] then echoE "$CommandName: RunLevel: $RunLevel. Please change to RunLevel 1 (init 1) and try again." exit 1 fi fi if $CheckUser then CurrentUser=`whoami` if [ "$CurrentUser" != "root" ] then echoE "$CommandName: User: '$CurrentUser'. Please login as 'root' and try again." exit 1 fi fi echo "############" DirsToUMount="" if [ "$DirsToMount" != "" ] then echo "$CommandName: Mounting file systems:" for i in $DirsToMount do mount | grep -w $i &> /dev/null if [ $? 
-ne 0 ] then if (mount $i &> /dev/null) then echo "$CommandName: $i mounted." DirsToUMount=" $i"$DirsToUMount else echoE "$CommandName: $i could not be mounted. Aborting." UmountDirs exit 2 fi else echo "$CommandName: $i was already mounted." fi done echo "############" fi if [ "$OriginDir" != "/" ] then # if first character is not '/'. if [ "${OriginDir:0:1}" != "/" ] then echoE "$CommandName: 'Origin' directory:" echoE "$CommandName: $OriginDir." echoE "$CommandName: Must be an absolute path (Should start with '/'!)." echoE "$CommandName: Please edit '$CommandName' and try again." UmountDirs exit 3 else # if last character is '/'. if [ "${OriginDir:${#OriginDir}-1:1}" == "/" ] then echoE "$CommandName: 'Origin' directory:" echoE "$CommandName: $OriginDir." echoE "$CommandName: Should not end with '/'!." echoE "$CommandName: Please edit '$CommandName' and try again." UmountDirs exit 3 else if test ! -d $OriginDir then echoE "$CommandName: 'Origin' directory:" echoE "$CommandName: $OriginDir." echoE "$CommandName: Does not exist. Please edit '$CommandName' and try again." UmountDirs exit 3 fi fi fi fi if [ "$DestinationDir" != "/" ] then # if first character is not '/'. if [ "${DestinationDir:0:1}" != "/" ] then echoE "$CommandName: 'DestinationDir' directory:" echoE "$CommandName: $DestinationDir." echoE "$CommandName: Must be an absolute path (Should start with '/'!)." echoE "$CommandName: Please edit '$CommandName' and try again." UmountDirs exit 3 else # if last character is '/'. if [ "${DestinationDir:${#DestinationDir}-1:1}" == "/" ] then echoE "$CommandName: 'DestinationDir' directory:" echoE "$CommandName: $DestinationDir." echoE "$CommandName: Should not end with '/'!." echoE "$CommandName: Please edit '$CommandName' and try again." UmountDirs exit 3 else if test ! -d $DestinationDir then echoE "$CommandName: 'DestinationDir' directory:" echoE "$CommandName: $DestinationDir." echoE "$CommandName: Does not exist. Please edit '$CommandName' and try again." 
UmountDirs exit 3 fi fi fi fi if [ $OriginDir == $DestinationDir ] then echoE "$CommandName: 'DestinationDir' and 'OriginDir' can not be the same directory!" echoE "$CommandName: Please edit '$CommandName' and try again." UmountDirs exit 3 fi # Find dar & dar_manager if type >/dev/null 2>&1 $DarName then DarFound=true else DarFound=false fi if type >/dev/null 2>&1 $DarManagerName then DarManagerFound=true else DarManagerFound=false fi if ! ($DarFound && $DarManagerFound) then if [ "$DarDir" != "/" ] then # if first character is not '/'. if [ "${DarDir:0:1}" != "/" ] then echoE "$CommandName: 'DarDir' directory:" echoE "$CommandName: $DarDir." echoE "$CommandName: Must be an absolute path (Should start with '/'!)." echoE "$CommandName: Please edit '$CommandName' and try again." UmountDirs exit 3 else # if last character is '/'. if [ "${DarDir:${#DarDir}-1:1}" == "/" ] then echoE "$CommandName: 'DarDir' directory:" echoE "$CommandName: $DarDir." echoE "$CommandName: Should not end with '/'!." echoE "$CommandName: Please edit '$CommandName' and try again." UmountDirs exit 3 else if test ! -d $DarDir then echoE "$CommandName: 'DarDir' directory:" echoE "$CommandName: $DarDir." echoE "$CommandName: Does not exist. Please edit '$CommandName' and try again." UmountDirs exit 3 fi fi fi fi # Include directory, where dar usually resides, in PATH." # DarDir not in PATH? echo $PATH | grep $DarDir &> /dev/null if [ $? -ne 0 ] then PATH=$DarDir":"$PATH fi fi if ! type >/dev/null 2>&1 $DarName then echoE "$CommandName: $DarName neither in PATH nor in $DarDir. Aborting." UmountDirs exit 3 fi if ! type >/dev/null 2>&1 $DarManagerName then echoE "$CommandName: $DarManagerName neither in PATH nor in $DarDir. Aborting." UmountDirs exit 3 fi ######################################################### # VARIABLES INITIALIZATION. BEGIN. ######################################################### # Backup Paths. ############### #Backup base names & DataBase name. 
FullBackupBaseName=$CommandName"Full" DiffBackupBaseName=$CommandName"Diff" DataBaseName=$CommandName"DataBase" # FullBackupPath=$DestinationDir/$FullBackupBaseName DiffBackupPath=$DestinationDir/$DiffBackupBaseName DataBasePath=$DestinationDir/$DataBaseName # ######################################################### # Set dar options. ############### # Backup base name (Will be set later): -c PathBackUpBaseName BackupNameOption="-c " # # Reference backup (Will be set later) for differential backups: -A PathBackUpBaseName ReferenceBackupOption="-A " # # Origin of Backup: -R /. DarOptions="-R "$OriginDir # # Compress data inside the backup using bzip2: -y[CompressLevel]. # CompressLevel: 0 minimum; 9 maximun. Compress Files > 100Mb: -m 0. if $CompressWithBZip2 then DarOptions=$DarOptions" -y9" if $CompressBigFiles then DarOptions=$DarOptions" -m 0" fi fi # # Verbose mode: -v if $VerboseMode then DarOptions=$DarOptions" -v" fi # # Create empty sub-directories in backup instead of those not saved: -D if $BackupIgnoredDirsEmpty then DarOptions=$DarOptions" -D" fi # # Do not read ~/.darrc nor /etc/darrc configuration file: -N DarOptions=$DarOptions" -N" # ######################################################### #Set Slice options. ############### if [ $SliceSize -gt $SizeOfDarStatic ] then let FirstSliceSize=$SliceSize-$SizeOfDarStatic else FirstSliceSize=$SliceSize fi # # All sizes in Mb; Stop after each slize. if $MakeSlices then FirstSliceSizeOption="-S "$FirstSliceSize"M" SliceSizeOption="-s "$SliceSize"M" # Pause between slices to change removable media. Ring bell: -p -b if $StopAfterSlices then DarOptions=$DarOptions" -p -b" fi else FirstSliceSizeOption="" SliceSizeOption="" fi # ######################################################### #Set Include/Exclude Files Options. 
############### # Files you don't want to backup: -X "*~" -X ".*~" if [ "$FilesToIgnore" != "" ] then InclExclFilesOption='-X "'`Stream "$FilesToIgnore" | sed 's/ /" -X "/g'`'"' else InclExclFilesOption="" fi # # Files you want to backup without compression: -Z "*.zip" if $CompressWithBZip2 then if [ "$FilesNotToCompress" != "" ] then InclExclFilesOption=$InclExclFilesOption' -Z "'`Stream "$FilesNotToCompress" | sed 's/ /" -Z "/g'`'"' fi fi # # Files to include in backup: -I "*.html". if [ "$FilesToBackup" != "" ] then InclExclFilesOption=' -I "'`Stream "$FilesToBackup" | sed 's/ /" -I "/g'`'" '$InclExclFilesOption fi # ######################################################### #Set Include/Exclude directories Options. ############### # $OriginDir in $DestinationDir? echo $DestinationDir | grep $OriginDir &> /dev/null if [ $? -eq 0 ] then # TempDir= $DestinationDir-$OriginDir TempDir=`echo $DestinationDir | sed s%$OriginDir%%` if $IgnoreBaseOfDestinationDir then # Include BaseDir of DestinationDir (Without first '/') in SubDirsToIgnore. # if first character, in TempDir, is not '/'. if [ "${DestinationDir:0:1}" != "/" ] then # Add '/' in front. TempDir="/"$TempDir fi TempPath=$TempDir while [ $TempPath != `dirname $TempPath` ] do BasePath=$TempPath TempPath=`dirname $TempPath` done BasePath=`basename $BasePath` if [ "$SubDirsToIgnore" != "" ] then SubDirsToIgnore=$SubDirsToIgnore" $BasePath" else SubDirsToIgnore=$BasePath fi else # Include DestinationDir (Without first '/') in SubDirsToIgnore. # if first character, in TempDir, is '/'. if [ "${TempDir:0:1}" == "/" ] then # Remove first '/'. TempDir=${TempDir:1:${#TempDir}-1} fi if [ "$SubDirsToIgnore" != "" ] then SubDirsToIgnore=$SubDirsToIgnore" $TempDir" else SubDirsToIgnore=$TempDir fi fi fi # # Sub-trees you must not save: -P dev/pts -P proc. Path must be relative to -R option # Enclose each directory in "" just in case there are metacharacters in the name. 
if [ "$SubDirsToIgnore" != "" ] then IncludeExclDirsOption='-P "'`Stream "$SubDirsToIgnore" | sed 's/ /" -P "/g'`'"' else IncludeExclDirsOption="" fi # # Sub-trees you must save: Add without any option in front. # Enclose each directory in "" just in case there are metacharacters in the name. if [ "$SubDirsToBackup" != "" ] then IncludeExclDirsOption='-g"'`Stream "$SubDirsToBackup" | sed 's/ /" -g "/g'`'" '$IncludeExclDirsOption fi # ######################################################### # Set dar_manager options. ############### # Create DataBase: -C PathBaseName CreateDataBaseOption="-C "$DataBasePath # # DataBase used as reference: -B PathBaseName DataBaseNameOption="-B "$DataBasePath # # Add Archive to DataBase (Will be set later): -A PathArchiveName AddToDataBaseOption="-A " # ######################################################### # VARIABLES INITIALIZATION. END. ######################################################### FullDiffBackupSize=`ls -1 -s --block-size=$BlockSize $FullBackupPath.* 2> /dev/null | awk '{s = s + $1} END {print s}'` if [ "$FullDiffBackupSize" == "" ] then FullDiffBackupSize=0 fi TotalDiffBackupSize=`ls -1 -s --block-size=$BlockSize $DiffBackupPath??.* 2> /dev/null | awk '{s = s + $1} END {print s}'` if [ "$TotalDiffBackupSize" == "" ] then TotalDiffBackupSize=0 fi echo "$CommandName: ### `date --rfc-822` ###" echo "$CommandName: Current backup information (Size in $BlockSize bytes blocks.):" if [ $FullDiffBackupSize -eq 0 ] then echo "$CommandName: No $FullBackupBaseName files found!" echo "############" echo "$CommandName: Preparing to Create $FullBackupBaseName." DiffBackupNr=0 LastDiffBackup=$DiffBackupNr else echo "$CommandName: ..$FullBackupBaseName: $FullDiffBackupSize." if [ $TotalDiffBackupSize -eq 0 ] then DiffBackupNr=1 LastDiffBackup=0 BaseName=$DiffBackupBaseName`TwoDigits $DiffBackupNr` echo "############" echo "$CommandName: Preparing to Create $BaseName." 
else echo "$CommandName: ..$DiffBackupBaseName: $TotalDiffBackupSize:" DiffBackupNr=0 LastDiffBackup=$DiffBackupNr BestChoiceDiffLevel="" RemainingDiffSize=$TotalDiffBackupSize CurrentSize=1 while [ $CurrentSize -ne 0 ] do let DiffBackupNr++ BaseName=$DiffBackupPath`TwoDigits $DiffBackupNr` CurrentSize=`ls -1 -s --block-size=$BlockSize $BaseName.* 2> /dev/null | awk '{s = s + $1} END {print s}'` if [ "$CurrentSize" == "" ] then CurrentSize=0 fi if [ $CurrentSize -ne 0 ] then LastDiffBackup=$DiffBackupNr let RemainingDiffSize=$RemainingDiffSize-$CurrentSize if [ "$BestChoiceDiffLevel" == "" ] && [ $CurrentSize -lt $RemainingDiffSize ] then BestChoiceDiffLevel=$DiffBackupNr fi BaseName=$DiffBackupBaseName`TwoDigits $DiffBackupNr` echo "$CommandName: ....$BaseName: $CurrentSize." fi done echo "############" let NextDiffBackup=$LastDiffBackup+1 if [ "$BestChoiceDiffLevel" == "" ] then BestChoiceDiffLevel=$NextDiffBackup fi Choice[4]="Exit $CommandName." let MinDiffBackupSize=$FullDiffBackupSize*$MinDiffPercentOfFullBackup/100 if [ $TotalDiffBackupSize -lt $MinDiffBackupSize ] then BestChoiceDiffLevel=1 Choice[1]=" ($DiffBackupBaseName<$MinDiffPercentOfFullBackup%$FullBackupBaseName)." fi if [ $LastDiffBackup -gt $MaxNrOfDiffBackups ] then BestChoiceDiffLevel=1 Choice[1]=${Choice[1]}" (NrOfDiffBackups>$MaxNrOfDiffBackups)." fi BaseName=$DiffBackupBaseName`TwoDigits $BestChoiceDiffLevel` Choice[1]=" $BaseName."${Choice[1]} BaseName=$DiffBackupBaseName`TwoDigits $NextDiffBackup` Choice[2]="Create $BaseName. Faster." Choice[3]="Rewrite $FullBackupBaseName ($DiffBackupBaseName>$MaxDiffPercentOfFullBackup%$FullBackupBaseName). Recommended!" let MaxDiffBackupSize=$FullDiffBackupSize*$MaxDiffPercentOfFullBackup/100 if [ $NextDiffBackup -eq $BestChoiceDiffLevel ] then if [ $TotalDiffBackupSize -gt $MaxDiffBackupSize ] then Choices="1 3" CreateRewriteMode="Create" Choice[1]=${Choice[1]}" Faster." 
else Choices="" fi else CreateRewriteMode="Rewrite" if [ $TotalDiffBackupSize -gt $MaxDiffBackupSize ] then Choices="1 2 3" else Choices="1 2" Choice[1]=${Choice[1]}" Recommended!" fi fi Choice[1]=$CreateRewriteMode${Choice[1]} if [ "$Choices" == "" ] then DiffBackupNr=$BestChoiceDiffLevel BaseName=$DiffBackupBaseName`TwoDigits $DiffBackupNr` echo "$CommandName: Preparing to Create $BaseName." else Choices=$Choices" 4" echo "$CommandName: Options:" ChoiceNr=1 for i in $Choices do echo "$CommandName: $ChoiceNr.${Choice[$i]}" let ChoiceNr++ done echo "############" if $NoUserChoice then echo $Choices | grep "3" &> /dev/null if [ $? -eq 0 ] then Choice=3 else Choice=1 fi else let ChoiceNr-- ValidNumber=false until $ValidNumber do read -p "$CommandName: Please choose a number: " UserChoice case $UserChoice in [a-zA-Z-_.,]* | *[a-zA-Z-_.,] | *[a-zA-Z-_.,]*) echoE "$CommandName: No alpha characters allowed. Please try again.";; "") ;; *) ValidNumber=true;; esac if $ValidNumber then if [ $UserChoice -lt 1 ] || [ $UserChoice -gt $ChoiceNr ] then echoE "$CommandName: Allowed number range: 1..$ChoiceNr. Please try again." ValidNumber=false fi fi done ChoiceNr=0 for i in $Choices do let ChoiceNr++ if [ $ChoiceNr -eq $UserChoice ] then Choice=$i fi done echo "############" fi case $Choice in 1) DiffBackupNr=$BestChoiceDiffLevel BaseName=$DiffBackupBaseName`TwoDigits $DiffBackupNr` echo "$CommandName: Preparing to $CreateRewriteMode $BaseName.";; 2) DiffBackupNr=$NextDiffBackup BaseName=$DiffBackupBaseName`TwoDigits $DiffBackupNr` echo "$CommandName: Preparing to Create $BaseName.";; 3) echo "$CommandName: Preparing to Rewrite $FullBackupBaseName." DiffBackupNr=0;; 4) echoE "$CommandName: Program exits at user request." UmountDirs exit 4;; *) echoE "$CommandName:Warning: Incorrect choice: $Choice. Aborting." 
UmountDirs exit 5;; esac fi fi fi if [ $DiffBackupNr -eq 0 ] then FullBackup=true else FullBackup=false fi if $FullBackup then BackupNameOption=$BackupNameOption$FullBackupPath if [ $FullDiffBackupSize -ne 0 ] then echo "############" echo "$CommandName: Removing previous $FullBackupBaseName files." for i in $FullBackupPath.* do if test -f $i then if (rm $i) then echo "$CommandName: $i removed." else echoE "$CommandName:Warning: Failure to remove $i." fi fi done fi if [ $TotalDiffBackupSize -ne 0 ] then echo "############" echo "$CommandName: Removing previous $DiffBackupBaseName files." for i in $DiffBackupPath??.* do if test -f $i then if (rm $i) then echo "$CommandName: $i removed." else echoE "$CommandName:Warning: Failure to remove $i." fi fi done fi if test -f $DataBasePath then echo "############" echo "$CommandName: Removing previous $DataBaseName file." if (rm $DataBasePath) then echo "$CommandName: $DataBasePath removed." else echoE "$CommandName:Warning: Failure to remove $DataBasePath." fi fi echo "############" echo "$CommandName: creating $FullBackupBaseName. Please wait." echo "###" sh <&1 | awk 'END {print $1}'` case $LastArchiveInDB in [a-zA-Z-_.,]* | *[a-zA-Z-_.,] | *[a-zA-Z-_.,]*) # If DataBase empty last line produced by 'dar_manager -l' is full of '--' echoE "$CommandName: Warning: $DataBaseName is empty. Aborting." UmountDirs exit 6;; *) if [ $LastArchiveInDB -gt $DataBaseLastValidArchive ] then echo "############" echo "$CommandName: Erasing previous Differential backups from $DataBaseName." while [ $LastArchiveInDB -gt $DataBaseLastValidArchive ] do let ArchiveBaseName=$LastArchiveInDB-1 BaseName=`TwoDigits $ArchiveBaseName` ArchiveBaseName=$DiffBackupBaseName$BaseName if ($DarManagerName $DataBaseNameOption -D $LastArchiveInDB) then echo "$CommandName: Archive $LastArchiveInDB ($ArchiveBaseName) erased from $DataBaseName." else echoE "$CommandName: Warning: Erasing of Archive $LastArchiveInDB ($ArchiveBaseName)\ from $DataBaseName failed." 
fi let LastArchiveInDB-- done fi;; esac else echoE "$CommandName: Warning! $DataBaseName does not exist. Aborting." UmountDirs exit 7 fi echo "############" echo "$CommandName: creating $BackupName. Please wait." echo "###" sh < $(DESTDIR)$(pkgdatadir)/samples/dar_par.dcf chmod 0644 $(DESTDIR)$(pkgdatadir)/samples/dar_par.dcf $(INSTALL) -m 0644 $(NO_EXE_SAMPLES) $(DESTDIR)$(pkgdatadir)/samples $(INSTALL) -m 0755 $(EXE_SAMPLES) $(DESTDIR)$(pkgdatadir)/samples $(INSTALL) -d $(DESTDIR)$(sysconfdir) sed -e "s%SOMEPATH%$(pkgdatadir)/samples%g" etc_darrc > $(DESTDIR)$(sysconfdir)/darrc uninstall-local: rm -rf $(DESTDIR)$(pkgdatadir)/samples # $(sysconfdir)/darrc not removed as it may contain system admin specific configuration dar-2.6.8/doc/samples/JH-dar-make_user_backup.sh0000644000175000017520000001177513476543220016351 00000000000000#!/bin/sh ################################# # # # DAR Archiver script # # # ################################# # Jakub Holy 25.4.2005 # This file: $HOME/bin/dar-make_user_backup.sh # IMPORTANT: This script depends upon /etc/darrc (options what not to compress/ archive) # But the file is ignored if $HOME/.darrc exists. 
# Additional options are read from dar_archiver.options (see # $DAR_OPTIONS_FILE below) USAGE="echo -e USAGE: \n$0 -full | -inc" # ----------------------------- OPTIONS TO MODIFY DIR_TO_ARCHIVE=$HOME DEST_DIR=/mnt/mandrake/debian-bkp/ DAR_OPTIONS_FILE="$HOME/bin/dar_archiver.options" ARCHIVE_NAME="`/bin/date -I`_$USER" # Ex: 2005-04-25_jholy DAR_INFO_DIR="$HOME/backup" DAR_MANAGER_DB=${DAR_INFO_DIR}/dar_manager_database.dmd LAST_FULL_BACKUP_ID="2005-04-25" # The last full backup - the unique part of its name LAST_FULL_BACKUP=${DAR_INFO_DIR}/${LAST_FULL_BACKUP_ID}_aja-full-katalog MSG="" LOG_FILE="${DAR_INFO_DIR}/zaloha-aja-dar.log" # PARSE COMMAND LINE --------------------------------------------- INC_BKP_OPT="" # dar options needed to create an incremental backup: empty => full bkp if [ $# -ne 1 ]; then echo "ERROR: Wrong number of parameters" $USAGE exit 1 elif [ "X$1" != "X-full" -a "X$1" != "X-inc" ]; then echo "Unknown parameter" $USAGE exit 1 else if [ "X$1" = "X-full" ]; then echo "DAR: Doing FULL backup."; ARCHIVE_NAME="${ARCHIVE_NAME}-full" fi if [ "X$1" = "X-inc" ]; then echo "DAR: Doing INCREMENTAL backup with respect to $LAST_FULL_BACKUP."; INC_BKP_OPT=" -A $LAST_FULL_BACKUP " ARCHIVE_NAME="${ARCHIVE_NAME}-inc-wrt${LAST_FULL_BACKUP_ID}" fi echo "See the log in $LOG_FILE" fi # ----------------------------- OPTIONS CONT'D ARCHIVE=${DEST_DIR}/${ARCHIVE_NAME} CATALOGUE=${DAR_INFO_DIR}/${ARCHIVE_NAME}-katalog echo "-----------------------" >> "$LOG_FILE" # -m N - soubory pod N [B] nejsou komprimovany # -Z pattern - soub. odpovidajici vzoru nejsou komprimovany # -P subdir - adresare kt. se nezalohuji; relativni w.r.t. 
-R # -X pattern - exclude files matching pattern; nesmi tam byt file path # -R /home/aja - adresar, ktery zalohujeme # -s 700M - na jak velke kusy se archiv rozseka # -y [level] - proved bzip2 kompresi # -c `date -I`_bkp - vystupni archiv (pribude pripona .dar) # -G - generuj zvlast katalog archivu # -D,--empty-dir - vtvor prazdne adresare pro ty excludovane (s -P) # -M - skip other filesystems (tj. namountovane FS). # -v - verbose output # --beep - pipni kdyz je pozadovana uzivatelova akce # -A basename - vytvor incremental backupwrt archive se zakladem jmena 'basename' # Misto archivu lze pouzit i catalog. # Soubory kt. nelze komprimovat (upper i lower case): # bz2 deb ear gif GIF gpg gz chm jar jpeg jpg obj pdf png rar rnd scm svgz swf # tar tbz2 tgz tif tiff vlt war wings xpi Z zargo zip trezor COMMAND="dar -c $ARCHIVE -R $DIR_TO_ARCHIVE -B $DAR_OPTIONS_FILE $INC_BKP_OPT" echo "Backup started at: `date`" >> "$LOG_FILE" echo "Making backup into $ARCHIVE; command: $COMMAND" >> "$LOG_FILE" echo "Making backup into $ARCHIVE; command: $COMMAND" ### ARCHIVACE ----------------------------------------------------------------- $COMMAND # Perform the archive command itself RESULT=$? # Get its return value ( 0 == ok) ### TEST THE OUTCOME if [ $RESULT -eq 0 ]; then ## Check the archive ........................................................ echo "Backup done at: `date`. Going to test the archive." >> "$LOG_FILE" echo "Backup done at: `date`. Going to test the archive." if dar -t $ARCHIVE # > /dev/null # to ignore stdout in cron uncomment this then MSG="Archive created & successfully tessted."; else MSG="Archive created but the test FAILED"; fi echo "Test of the archive done at: `date`." >> "$LOG_FILE" echo "Test of the archive done at: `date`." 
else MSG="The backup FAILED (error code $RESULT)" echo "$MSG" >> "$LOG_FILE" echo >> "$LOG_FILE" echo -n "Ended at: " >> "$LOG_FILE" date >> "$LOG_FILE" echo >> "$LOG_FILE" echo "$MSG" exit 1 fi ### KATALOG - import into the manager ............................................ echo "Going to create a catalogue of the archive..." >> "$LOG_FILE" echo "Going to create a catalogue of the archive..." dar -C "$CATALOGUE" -A "$ARCHIVE" dar_manager -v -B "$DAR_MANAGER_DB" -A "$ARCHIVE" echo "The catalogue created in $CATALOGUE and imported into the base $DAR_MANAGER_DB" >> "$LOG_FILE" echo "The catalogue created in $CATALOGUE and imported into the base $DAR_MANAGER_DB" echo "$MSG" >> "$LOG_FILE" echo >> "$LOG_FILE" echo -n "Ended at: " >> "$LOG_FILE" date >> "$LOG_FILE" echo >> "$LOG_FILE" echo "$MSG" ### Incremental backup # -A dar_archive - specifies a former backup as a base for this incremental backup # Ex: dar ... -A a_full_backup # there's no '.dar', only the archive's basename # Note: instead of the origina dar_archive we can use its calatogue ### Extract the catalogue from a backup # Ex: dar -A existing_dar_archive -C create_catalog_file_basename dar-2.6.8/doc/samples/dar_par_test.duc0000755000175000017520000000236013476543220014607 00000000000000#!/bin/sh ### # # this script is to be launched on dar command line when testing an archive with -s option (slicing) # you need to run this script from dar, adding the following argument on command-line # # -E "dar_par_test.duc %p %b %n %e %c" # ### # # if you prefer you can also add the line above in your the $HOME/.darrc file # under the test: conditional statement (see dar man page) # ### # # usage par_script slice.basename slice.number extension # generates a Parchive redundancy file from the slice file # ### if [ "$1" = "" -a "$2" = "" -a "$3" = "" -a "$4" = "" -a "$5" = "" ]; then echo "usage: $0 " echo "$0 tests and if necessary repairs the given slice using Parchive redundancy files" exit 1 fi if [ "$3" = "0" ]; 
then exit 0 fi PAR=par2 SLICE="$1/$2.$3.$4" if [ ! -r $SLICE ]; then echo "`basename $0`: Cannot find or read the slice $SLICE, skiping, Dar will ask user for it" exit 0; fi echo "$PAR verification slice $SLICE..." if ! $PAR v "$SLICE" ; then echo "trying to repair the slice..." if ! $PAR r "$SLICE" ; then echo "PAR repairation failed. (read-only filesystem ?)" exit 1 fi echo "verifying after reparation..." exec $PAR v "$SLICE" fi dar-2.6.8/doc/samples/darrc_sample0000644000175000017520000000146513476543220014024 00000000000000###### # this is an example of what could be a batch file # (given to -B option), a /etc/darrc and a $HOME/.darrc file # reminds that it is a simple example... # all: # make terminal bell when user action is requested -b create: # a list of file to not try to compress -X "*_all_*.*.dar" -X "*_diff_*.*.dar" -X "*_inc_*.*.dar" -Z "*.mpg" -Z "*.MPG" -Z "*.jpg" -Z "*.JPG" -Z "*.gz" -Z "*.tgz" -Z "*.bz2" -Z "*.tbz" -Z "*.mp3" -Z "*.mpeg" -Z "*.zip" -Z "*.dar" # create empty dir for excluded directories -D -R / # we don't save these directories -P tmp -P var/tmp -P mnt -P proc -P dev/pts # here we say we don't want to save dar files -X "*.*.dar" # we pause before starting a new slices -p # and we use gzip compression -z default: # if no action is given then show the version # in place of the usage help -V dar-2.6.8/doc/samples/dar_backups.sh0000644000175000017520000001054413476543220014255 00000000000000#!/bin/bash # Script Name: dar_backups.sh # Author: Roi Rodriguez Mendez & Mauro Silvosa Rivera (Cluster Digital S.L.) # Fixes by: Jason Lewis - jason at NO dickson SPAM dot st # Description: dar_backups.sh is a script to be runned from cron which # backups data and stores it locally and optionally remote using scp. # It decides between doing a master or an incremental backup based # on the existance or not of a master one for the actual month. 
# Revision History: # 23.06.2008 - modified to work with latest version of dar which requires -g before each path to backup - Jason Lewis # 24.10.2006 - changed script to do differential backups based on the last diff # 18.10.2006 - added BACKUP_PATHS variable to simplify adding new paths # Jason Lewis jason@NOdicksonSPAM.st # 22.08.2005 - Creation # Base directory where backups are to be stored BASE_BAK_DIR=/backup # base directory for files to backup. all paths for backing up are listed relative to this path ROOT_DIR=/ # Paths to backup # add paths here, in a space seperated list between round brackets. # you can escape out spaces with \ or '' # Paths should be relative to ROOT_DIR #BACKUP_PATH=(my/first/path another\ path/with\ spaces 'yet another/path/with/spaces') BACKUP_PATHS=( home user/lib/cgi-bin var/www/cgi-bin var/lib/cvs var/lib/svn var/lib/accounting mysql_backup usr/local/bin etc ) # Directory where backups for the actual month are stored (path relative to # $BASE_BAK_DIR) MONTHLY_BAK_DIR=`date -I | awk -F "-" '{ print $1"-"$2 }'` # Variable de comprobacion de fecha CURRENT_MONTH=$MONTHLY_BAK_DIR # Name and path for the backup file. 
SLICE_NAME=${BASE_BAK_DIR}/${MONTHLY_BAK_DIR}/backup_`date -I` # Max backup file size SLICE_SIZE=200M # Remote backup settings REMOTE_BAK="false" REMOTE_HOST="example.com" REMOTE_USR="bakusr" REMOTE_BASE_DIR="/var/BACKUP/example.com/data" REMOTE_MONTHLY_DIR=$MONTHLY_BAK_DIR REMOTE_DIR=${REMOTE_BASE_DIR}/${REMOTE_MONTHLY_DIR} ######################################################## # you shouldn't need to edit anything below this line # # STR='a,b,c'; paths=(${STR//,/ }); TEST=`echo ${paths[@]//#/-g }`;echo $TEST # args=(); for x in "${paths[@]}"; do args+=(-g "$x"); done; program "${args[@]}" #BACKUP_PATHS_STRING=`echo ${BACKUP_PATHS[@]//#/-g }` args=() for x in "${BACKUP_PATHS[@]}"; do args+=(-g "$x"); done; BACKUP_PATHS_STRING="${args[@]}" echo backup path string is "$BACKUP_PATHS_STRING" ## FUNCTIONS' DEFINITION # Function which creates a master backup. It gets "true" as a parameter # if the monthly directory has to be created. function master_bak () { if [ "$1" == "true" ] then mkdir -p ${BASE_BAK_DIR}/${MONTHLY_BAK_DIR} fi /usr/bin/dar -m 256 -s $SLICE_SIZE -y -R $ROOT_DIR \ $BACKUP_PATHS_STRING -c ${SLICE_NAME}_master #> /dev/null if [ "$REMOTE_BAK" == "true" ] then /usr/bin/ssh ${REMOTE_USR}@${REMOTE_HOST} "if [ ! -d ${REMOTE_DIR} ]; then mkdir -p $REMOTE_DIR; fi" for i in `ls ${SLICE_NAME}_master*.dar` do /usr/bin/scp -C -p $i ${REMOTE_USR}@${REMOTE_HOST}:${REMOTE_DIR}/`basename $i` > /dev/null done fi } # Makes the incremental backups function diff_bak () { MASTER=$1 /usr/bin/dar -m 256 -s $SLICE_SIZE -y -R $ROOT_DIR \ $BACKUP_PATHS_STRING -c ${SLICE_NAME}_diff \ -A $MASTER #> /dev/null if [ "$REMOTE_BAK" == "true" ] then for i in `ls ${SLICE_NAME}_diff*.dar` do /usr/bin/scp -C -p $i ${REMOTE_USR}@${REMOTE_HOST}:${REMOTE_DIR}/`basename $i` > /dev/null done fi } ## MAIN FLUX # Set appropriate umask value umask 027 # Check for existing monthly backups directory if [ ! 
-d ${BASE_BAK_DIR}/${MONTHLY_BAK_DIR} ] then # If not, tell master_bak() to mkdir it master_bak "true" else # Else: # MASTER not void if a master backup exists # original line to get the master backup does not take into account the diffs # MASTER=`ls ${BASE_BAK_DIR}/${MONTHLY_BAK_DIR}/*_master*.dar | tail -n 1 | awk -F "." '{ print $1 }'` # new master line gets the latest dar backup and uses that to make the diff MASTER=`ls -t ${BASE_BAK_DIR}/${MONTHLY_BAK_DIR}/*.dar | head -n 1 | awk -F "." '{ print $1 }'` # Check if a master backup already exists. if [ "${MASTER}" != "" ] then # If it exists, it's needed to make a differential one diff_bak $MASTER else # Else, do the master backup master_bak "false" fi fi dar-2.6.8/doc/samples/JH_darrc0000644000175000017520000000271713476543220013045 00000000000000 ######################### # # # Shared options pro # # DAR archiver # # # ######################### # Jakub Holy, 25.4.2005 # This file: /etc/darrc # Note: If the user has her $HOME/.darrc, this file is ignored => it must be included with the option -B /etc/darrc to dar ### ### Files that shall not be compressed (because they're already) ### # archives (Note: .tar is archive, but not compressed => do compress it). 
-Z "*.bz2" -Z "*.deb" -Z "*.gz" -Z "*.Z" -Z "*.zip" -Z "*.rar" -Z "*.tbz2" -Z "*.tgz" -Z "*.jar" -Z "*.ear" -Z "*.war" -Z "*.BZ2" -Z "*.DEB" -Z "*.GZ" -Z "*.Z" -Z "*.ZIP" -Z "*.RAR" -Z "*.TBZ2" -Z "*.TGZ" -Z "*.JAR" -Z "*.EAR" -Z "*.WAR" # media - images -Z "*.gif" -Z "*.jpeg" -Z "*.jpg" -Z "*.png" -Z "*.GIF" -Z "*.PNG" -Z "*.JPEG" -Z "*.JPG" # media - audio -Z "*.ogg" -Z "*.mp3" -Z "*.OGG" -Z "*.MP3" # media - video -Z "*.avi" -Z "*.mov" -Z "*.mp4" -Z "*.mpg" -Z "*.AVI" -Z "*.MOV" -Z "*.MP4" -Z "*.MPG" # documents - compressed formats -Z "*.pdf" -Z "*.swf" -Z "*.sxw" -Z "*.PDF" -Z "*.SWF" -Z "*.SXW" # strange formats, binaries and other hard to compress (empirical) -Z "*.gpg" -Z "*.rnd" -Z "*.scm" -Z "*.svgz" -Z "*.vlt" -Z "*.zargo" -Z "*.wings" -Z "*.xpi" -Z "*.chm" -Z "*.GPG" -Z "*.RND" -Z "*.SCM" -Z "*.SVGZ" -Z "*.VLT" -Z "*.ZARGO" -Z "*.WINGS" -Z "*.XPI" -Z "*.CHM" -Z "*.obj" -Z "*.tif" -Z "*.tiff" -Z "*.OBJ" -Z "*.TIF" -Z "*.TIFF" ### ### Ignored files ### -X "*~" -X "*.o" # *~ are backups, *.o are compiled unlinked files dar-2.6.8/doc/samples/JH-readme.txt0000644000175000017520000000056113476543220013736 00000000000000The script makes a backup of user's $HOME, either full of incremental, not compressing compressed files & compressed media and skipping some unimportant directories such as ~/Trash. The darrc is to be in /etc/darrc, dar_archiver.options is used by the script. The script has a part 'OPTIONS TO MODIFY' that shall be altered for customization. Regards, Jakub Holy dar-2.6.8/doc/samples/cluster_digital_backups.sh0000644000175000017520000000577413476543220016676 00000000000000#!/bin/bash # Script Name: dar_backups.sh # Author: Roi Rodriguez Mendez & Mauro Silvosa Rivera (Cluster Digital S.L.) # Description: dar_backups.sh is a script to be runned from cron which # backups data and stores it locally and optionally remote using scp. 
# It decides between doing a master or an incremental backup based # on the existance or not of a master one for the actual month. The # remote copy feature needs a ssh authentication method which # doesn't prompt for a password, in order to make it non-interactive # (useful for cron, if you plan to run it by hand, this is not # necessary). # Version: 1.0 # Revision History: # 22.08.2005 - Creation # Base directory where backups are stored BASE_BAK_DIR=/var/BACKUP/data # Directory where backups for the actual month are stored (path relative to # $BASE_BAK_DIR) MONTHLY_BAK_DIR=`date -I | awk -F "-" '{ print $1"-"$2 }'` # Variable de comprobacion de fecha CURRENT_MONTH=$MONTHLY_BAK_DIR # Name and path for the backup file. SLICE_NAME=${BASE_BAK_DIR}/${MONTHLY_BAK_DIR}/backup_`date -I` # Max backup file size SLICE_SIZE=100M # Remote backup settings REMOTE_BAK="true" REMOTE_HOST="example.com" REMOTE_USR="bakusr" REMOTE_BASE_DIR="/var/BACKUP/example.com/data" REMOTE_MONTHLY_DIR=$MONTHLY_BAK_DIR REMOTE_DIR=${REMOTE_BASE_DIR}/${REMOTE_MONTHLY_DIR} ## FUNCTIONS' DEFINITION # Function which creates a master backup. It gets "true" as a parameter # if the monthly directory has to be created. function master_bak () { if [ "$1" == "true" ] then mkdir -p ${BASE_BAK_DIR}/${MONTHLY_BAK_DIR} fi /usr/bin/dar -m 256 -s $SLICE_SIZE -y -R / \ -g ./DATA -g ./home -g ./root -c ${SLICE_NAME}_master > /dev/null if [ "$REMOTE_BAK" == "true" ] then /usr/bin/ssh ${REMOTE_USR}@${REMOTE_HOST} "if [ ! 
-d ${REMOTE_DIR} ]; then mkdir -p $REMOTE_DIR; fi" for i in `ls ${SLICE_NAME}_master*.dar` do /usr/bin/scp -C -p $i ${REMOTE_USR}@${REMOTE_HOST}:${REMOTE_DIR}/`basename $i` > /dev/null done fi } # Makes the incremental backups function diff_bak () { MASTER=$1 /usr/bin/dar -m 256 -s $SLICE_SIZE -y -R / \ -g ./DATA -g ./home -g ./root -c ${SLICE_NAME}_diff \ -A $MASTER > /dev/null if [ "$REMOTE_BAK" == "true" ] then for i in `ls ${SLICE_NAME}_diff*.dar` do /usr/bin/scp -C -p $i ${REMOTE_USR}@${REMOTE_HOST}:${REMOTE_DIR}/`basename $i` > /dev/null done fi } ## MAIN FLUX # Set appropriate umask value umask 027 # Check for existing monthly backups directory if [ ! -d ${BASE_BAK_DIR}/${MONTHLY_BAK_DIR} ] then # If not, tell master_bak() to mkdir it master_bak "true" else # Else: # MASTER not void if a master backup exists MASTER=`ls ${BASE_BAK_DIR}/${MONTHLY_BAK_DIR}/*_master*.dar | tail -n 1 | awk -F "." '{ print $1 }'` # Check if a master backup already exists. if [ "${MASTER}" != "" ] then # If it exists, it's needed to make a differential one diff_bak $MASTER else # Else, do the master backup master_bak "false" fi fi dar-2.6.8/doc/samples/dar_backup0000644000175000017520000001064613476543220013464 00000000000000#!/usr/bin/perl -w use strict; use diagnostics; # Device that is the DVD drive my $DVD=("/dev/hdc"); # Size of each slice - DVD max is 4482M # MC - for testing # my $SLICE_SIZE=("10M"); # my $SLICE_SIZE=("4400M"); # doesn't work # BUG - Linux isofs limited to single files of 2^32=4096MB # my $SLICE_SIZE=("4000M"); # value used by Daromizer is bigger than mine, use it # my $SLICE_SIZE=("4189500K"); # need more space for parity data my $SLICE_SIZE=("4000M"); # directory that all paths must be relative to # NOTE - all backup paths are relative to this my $ROOT_DIR=("/mnt/backup"); # where all created files will be stored my $STORAGEDIR=("/mnt/backup/backups/"); # list of dirs to be backed up # NOTE 1 - these are paths relative for $ROOT_DIR, above # NOTE 2 - this 
is used for naming; everything after the last / is used # for the base name. DO NOT have two things be the same (like /usr/bin and # /usr/local bin). Otherwise, one will be overwritten # MC for testing # my @BACKUPDIRS=("test"); my @BACKUPDIRS=("local","home","pub"); # this the path to the slice as expressed in things that dar will # substitute the right values for (it's just used in 2 places) my $SLICE_PATH=("%p/%b.%N.%e"); my $SLICE_NAME=("%b.%N"); my $PARITY_PATH=("%p/%b.%N.par2"); # par2 creates a bunch of "vol" files, we need those too my $PARITY_FILES=("%p/%b.%N.*.par2"); # list of stuff to be compressed. This must be in the form of # -Z \"*.mask\" # with -Z repeated for each one my $NO_COMPRESS_LIST=("-Z \"*.gz\" -Z \"*.GZ\" -Z \"*.bz2\" -Z \"*.BZ2\" -Z \"*.zip\" -Z \"*.ZIP\" -Z \"*.ogg\" -Z \"*.OGG\" -Z \"*.mp3\" -Z \"*.MP3\" -Z \"*.mpg\" -Z \"*.MPG\" -Z \"*.mpeg\" -Z \"*.MPEG\" -Z \"*.wmv\" -Z \"*.WMV\" -Z \"*.avi\" -Z \"*.AVI\" -Z \"*.jpg\" -Z \"*.JPG\" -Z \"*.jpeg\" -Z \"*.JPEG\" -Z \"*.png\" -Z \"*.PNG\" -Z \"*.gif\" -Z \"*.GIF\""); my $PRE_PARITY_MESSAGE=("echo ; echo Caclulating parity information; echo"); my $PARITY_COMMAND=("par2create -r10 $PARITY_PATH $SLICE_PATH"); my $PRE_BLANK_MESSAGE=("echo ; echo Done archive, erasing DVD; echo"); my $BLANK_COMMAND=("dvd+rw-format -force /dev/hdc"); my $PRE_REC_MESSAGE=("echo ; echo Done erasing, burning to DVD; echo"); # Command to record the DVD, with options # -dvd-compat = make the most compatible DVD by closing the session # -Z = create a new session # -r = generate sane rock ridge extensions # -J = generate Joliet extensions # -V = volume ID # %b = dar will substitute the base name # %N = dar will substitute the number of the slice # %p = dar will substitute slice path # FOR TESTING = -dry-run my $RECORD_COMMAND=("growisofs -dvd-compat -Z $DVD -r -J -V $SLICE_NAME $SLICE_PATH $PARITY_PATH $PARITY_FILES"); my $EJECT_COMMAND=("eject $DVD"); my $POST_REC_MESSAGE=("echo ; echo Done burning $SLICE_NAME ; 
echo"); # deletes files once done with them # note - use AFTER record command # MC - for testing # my $DELETE_COMMAND=("echo deleting $SLICE_PATH $PARITY_PATH $PARITY_FILES"); my $DELETE_COMMAND=("rm -f $SLICE_PATH $PARITY_PATH $PARITY_FILES"); # dar with basic options # -y = compress with bzip2 using default compression of 6 # -s = slice it up # -R = root dir that all things to be backed up live in # -D = store empty directories too # -p = pause and wait for user to change DVD before continuing # -c (used below) = create an archive called whatever # FOR TESTING = -e my $DAR=("dar -y -s $SLICE_SIZE -R $ROOT_DIR -D $NO_COMPRESS_LIST -p -E \"$PRE_PARITY_MESSAGE ; $PARITY_COMMAND ; $PRE_BLANK_MESSAGE ; $BLANK_COMMAND ; $PRE_REC_MESSAGE ; $RECORD_COMMAND ; $EJECT_COMMAND ; $DELETE_COMMAND ; $POST_REC_MESSAGE\""); &main; sub main{ my $backup_base; my $backupdir; my ($day, $month, $year) = (localtime)[3,4,5]; $year+=1900; # compensate for 1900 based year $month+=1; # compensate for base 0 months my $targetbase; my $pause; # garbage input... foreach $backupdir (@BACKUPDIRS){ # this gets rid of paths and such from $backupdir, just in case $backup_base=$backupdir; $backup_base =~ s/^\///; # remove leading / $backup_base =~ s/\w+\///g; # remove everything matching "someword/" $targetbase=$STORAGEDIR.$backup_base."_".$month."_".$day."_".$year; print("Working on $backup_base\n"); # MC for debugging # print("Command is: $DAR $backupdir -c $targetbase"); system("$DAR $backup_base -c $targetbase"); print "Work on $backup_base complete. Change the DVD and\n"; print "press any key to continue..."; $pause = ; #Like a PAUSE statement in DOS .bat files } } dar-2.6.8/doc/samples/Patrick_Nagel_Note.txt0000644000175000017520000000237613476543220015700 00000000000000Follows a copy from Patrick Nagel site at http://www.patrick-nagel.net/scripts/ftpbackup ----------------------------------------------- I wrote ftpbackup.sh to conveniently backup my root server. 
My root server provider offers a 40 GB FTP storage, where I can store backup archives. To put them on there by hand was a bit of a hassle, so I wrote this little script. It calls my backup scripts (namely backup-root.sh and backup-storage.sh) which both create a .dar file that contains the whole backup. This .dar file is then being sent to the provider's backup FTP server. After everything is done, a mail is sent to root which informs about successful completion or failure, and the used/free space on the FTP as well as on the local backup partition. Configuration is done in the script, everything is explained there. The two scripts backup-root.sh and backup-storage.sh are two examples how to create the backups. I'm using these scripts for quite some time, and also did two full recoveries without any problems. backup-root.sh includes backup-root.options and backup-storage.sh includes backup-storage.options through dar's "-B" option. All options in those .options files are documented, so it should be easy for anybody to understand what the script does, and how. dar-2.6.8/doc/samples/MyBackup.sh.tar.gz0000644000175000017520000047340513476543220014727 00000000000000‹¦°/Jìî®¬Ö ì³[oÜQãÕ‡“³ƒóO|»O÷?Þ×k˜ÛÃjs¼[·°\°À€€•€õX£zIrìTÂ!x÷uðÚa˜Ö H½¯ÒRé¾Áúu/J†vat%ïê`YиCÒïWq8T®DdyBk¢ñùsÆ•,‚ëà‡ƒ«QâgèîF$¾7þøô­iJ¬R»ÿÎ ‚ WÜl¤Ž»&ïÜwퟟ¥®q'aÜÿ ’ÖÊîªU¤¶#…ä{ðé“‚ »»`­Á?>‡l B¦í1ÊfÑeTé1<¿v_S¾J€õªÊ •ûZí?mVÿ5׫èýoŽã3þ¿Óy²Éþs§ó¤Õy‚þ¿ÕÞÞþÓÿÿ×¼òƒ"Ð1¢·B·O §¬Z 'K1>€Íf«¶T[‚‹÷!¿Šc³Ÿò8|C.öÂ1¶…}°=„ 6,öðûa”·©{]†?åQ&\Èì^ vèBÛŽÀÛDèÎ ng`ã %ØCÄvbgQ’`‡q6†À'GïìÌV¯ö€ad>:‰ÍRÕ‹k1&$/»/\óbÛOR æÈÕìD¡ùaªÈ]?C i="t‚(E`~)rÑð$Kr ,Ř(ÄÅýp®Ó|¸ÇöP¬ÃÅ8Æ¿Çb$OF! 
aàÇ€²9ÉÐOI$%àר^—£ÅS˜^aÔ·_}ë°…žßÏ‘ üx9ùP„?2¶$ê'öp¦‚°è^Dô ÙÙO¢<~¹Îj%nmÂ\ë’mñŸB}P%x)Ò!* }¬ÕAq¥ ð7ÝohÁŽc挼·Ã1RY«QÏ®YUçVs.SÔÌ7¹ïŠfìz&a&žu'gO«–’K÷)…¨¤nQë ò©¹"˜.:=‰»73Ê.0NŠce{ûIëéÓÎö‚(ŸHâ{ºÕEqº¡@¯¤;wsksÝð“í' "ÞÖsMYºMÇë&ÔkM§½ó´óìY«õýO»ÌÿS®ÿMGIã·Âñ™õŸ2Àbýï´:¸þ·w¶Ú®ÿ¿ÇÕÞêlmn>ÛÙzò§]ý¼ŽBÔÿ øMq†_> ÏN;ÄÄ(òÄûóý·8fïÕÑ»£‹Ö¿>º8>{,+d¥+2ÛR=÷(Þé \™z%Âþ Rgc?/A†b¦’R+Ì|Nõ¦0ÊÖAVܲè¡ly|)ßu8 æ:l?ƒ ÁYÈi€)XpžˆÍÍÖ:¼ŠÒŒº¾ßƒV}‘ÕÞlí¬Ãå9©ük ÜCŠsA¹ÎZœô©Ð·kbRaR—Ó<‰1ìBmÉPR¦¬|cæxm÷)ù{î;לýº~‚ò鉓±¡éŠª%ÒÿBGå{>¥µˆâ2űŒ€”ÓÓÊ*xyèpˆÝA4" 'y(ùf ”¢âÃÛX8YJ(<Ñb:›a¾Ãö”‡D ¨Bk'ÖD/Ùìÿ›ÖÉ“»µs².Â4f§8ôCLõSDšô¬Ù‡·Õ,eòqF¢ö8²2û¨ȧÃðÆO¢Ò9¢ò­$@[s™Ý˜”“ "¿ò^f9R'BÅ‘uƧ¢p£Å8$™ŠòNÙø(­efÞñv¥A·V€æõÙRë¿Ök5Cíb¶aÌC³²ƒI›Ùiz%È3]eÑ-)~˜ ÀeØ `#Í¥·LxFäEKS ÒJ½†Ær7å.»Ö=|‹ˆÑ²'Pš]ЮÑo‡ÖŒbw´v°DŠÖð3^P˜Jw+¢ä€ zS{D)¬`ÇU½å£¢ÑѤϵ󔼒 gÖqÝœ 8Y‘ÀÌh’–ä3žÚ¢ŒìqŠ! À‰Z!ÖÁ„T9ùæ‚ÂçÉ‘è ózãîõåñþñÞûÃOЂïë`R)×k â y&†‘¥ha»¼®$òÅô«ƒZžVJ½ÿgá«Fa1éÞn½qúá`Ÿê5ÑâóŠ6ùzCuªw½:/¿k¯b'Tž:9ü:« é |÷]©â¤¹õé½=ä(‘h5á“ø‹ }!Õa_NQ…a1Ä3Þ(^=ä"¶”ŽÒÁá‡ïNNß_t!½öã˜–Ô P€à—æÉ[ê3±SyÎ@Σš{C‚*e fÚø X¸æµfΗԒaAÉH35'^à@hCi¸â[:1StY^†µû‡ýŠ' QŠ@íl–ŒX@@tKŸSoÌÀXGo÷àEE½ê&E1Áô“¿!"¤b¢Û´ì)‡'ñjáäï¿/WfQ¸ZþCšÒðhñ4LÜ|§:!bûÐEÔãs~VzÎE½¬ò›…›åÒÆ)±™Q–VÖ†´KåæÂ#ëb†j©BJŠŽ–«ñö/ÅʘœùaJ3U~ãiD±ZуRèãÉfåãµ+øÄ¤^q%äݾ—/ÊA/›Í&üˆ1Æö A¦EK¦§ƒgzGé¹+=ô½×r&ê4Â*†°Âw´®¸°¢V‹ñ*­PÊ!3or«zòWŒÏù¶ ¼s­7 “Û‰¥(ÙÇpYŒÆ¨ç‰/R•Õ9•?¹‡ ³ßQ4T0!Á£`Ú1ÖqZÿI„ùJ(¤”0…õƒtáÕ‰ù!#(–ÿ!If7Š#Ä.ÙsIN©Pd»j¸ØP»²+wÉÜŠÃ¥{°úû3~åFò4{—Æ]{¹»v/[hQÀ†¥µî}M •¨!…oº­n›|udªu{»lO¾—Õ佊‚˜ô]hȉV õáónõFœôQ[ äxC‰^ÊžReYÒ!{b&C6ëðò»(pÊ”Ûz~Š’%8’FÀ «‘…9r<¨l@Ñý_ä >½ØÐ¼’§^q²O9æoÁê)^T#?èΈ³ê°bÛ…odIÿ}ùŒzýÀ ˜4®E++Z+äÏ÷¸X¬Ê·:Âe:‘}xâÅG" oƼ¯?:‚‚y· ûW$ÞA¼ûñE€úKDP þJįñ’úyÖá‡Ðw w¡È¦[³²Ns3¦sãW—op})»5ÚøØá®òs†2¹Qð¿Â)•i ¶Ü—(¾®\`f¦W¤F‹é[Ql­Ô°0@Çè09”ë *Nå|l™2“¥­‹ow_V=âr-þKa8¾Rñ»6/cªò·Ë5?Y 9ºº%+ý³ªJ²jÑ>Ÿ!ðû:»Á“¤­]/-K­¼ÏV¶É(™ARŒ‚¸]þ†üjèZ-œ[¥&JIB£œLEÕK 6«»Òç²3f´Ø¯Ýy?C{³ÅÛôwkSëÊôŽyh„ ‹h7ÏÞ¼½<5e‹ ˆ ‚7ÕçèøbºO{ºÏß/¦;ibÊN‡gï':I:­Û“\Vµ-9DOt|€¶Kƒ¨¯Å3§`Xp\îç]l6V¹ 4£ò‚­7ê,j‚¸.ùG€ióXï…®«ÄYøö׌çy µ?ªúÆVR>ϯjw¹s­î{•î5%‡ŠÎU”G-ØÅvB;ÍN4¤Ãû,’Å|m‰æËŠõv‚n–|®à £Üm+vTg2Ç{» ó¡iÈk öÎ÷Ž 
-Z»yÝ´!#ž…!·Iwë¦49kWxvdª0Gèk‹‘$´ÜCo<ÁfJêËÿpÇëÓ>õn}CdÙhÄ×ä½7PUÝÈQ/ó4ÙHh½ØVíBœÚ5Mâ „¼š„o6ø3NÝa öŸ3R2ç”Ô`R æ8<’öZ×¢(B&z¸\ó×§N÷ ë‚nÜýá¦K;KÐyÉ>†yÐ\‹"KÄç Ø;K7®7ÚNN/öÎÞp>È^eÜÎó‰uvŸ'¼u–SæÑÒ˜ Æ„÷ò ZÕ>>;{¦«LSv^^^[ÙX½ß¨O¤aè´è,Y†¡§r²Å¦;®Ï>G½i@ç2 àÄæJ½¡$_×ÛòñJÜp'â â„Ö“bQR ÙÁ¯gÑÁ•EJÿ5‹Ôã<Ö2Í"WEáWÈ·õ–àuÃÒû¶*Ý‚_ÏçàÊgå;4ŸÕã<>žfšÏñ¯gQüGe‘òžšEêq‹ _;Í¢”²M Šs~ uµ>Go~=ƒoþ¨ V«f°zœÇàbÍšfðšfp9õ#5_y˜'Œ7f]×C Y9Áí‚r/ùy#’[„xl‰ÇH‚¶B¸LŒ<:>°dX im:üÕÌšAà$ÞŒÞÍÆš©÷8i£‹ÎurÐ)OÚÍ †jÂð0øVšG/e‘IÄþ¯x’ʧ²@ÂóÎó[UX{,sìl×äðΤ-ïaäêºç¶hmxoª-{l£H‡Ûé™ÚcÝ1Æ'ׯ¨M>Ó-Å=©zR|^“VȾǶX÷ˆ©Æîê1¥ÐÓH³l¬ðÖ¬U‹)ÕºìTašyq¨¢Øò£mû®w‹¿4AyÇÇ#ø.æ"—o‚”˜:¾‹e’#‘ º™Úò¯žV˜¦®;Ò7>ØÌùë´ÈôŽ=jVÓ.i|²ÎX71à˜Ž‚¿¤Ç×é :oÓÛO°xðËö…i)•õB×NÜ:í l 3T¹×Ú8=oó-! _óp"’õÈ5\- Ñ+äXÉ.Cò9JbÑÚÅÅš¥¼múÒÇ/þPåøÄ,«]Øs^?íxªe±)æ_†ÌcœŽ+$ƒ¸pS2¯57âT×0G¶>Ö£÷– ye¥’[¸T/ù³ @fèðéŒÏG¿Hï.&õn%×t[Ò¨Š|Fë*i˜¬†«O*'òûãÊÖÔ/Ì¿jF>´Ók ÿ»‚Q•)k ªs@ËT«™ëÄm/¢Óíü ”vΨCÇÑ,•|i ³}´Šé:‡<Â9yVScA³Õ·Uëìä¼<#-+"Øúk&‚®(š‚]Œä1ÜŸËô¼GP=ª?]²™* È@d"ï6”eš8÷wï7?M]·á|þ3õXaöëâª+{Œ­9òŽëz}y@\냻jÅ õ¹’>½>iÒ ©baVÓ>fæ‘щҗy<³YX<ƒ÷èaÏ… †Öcyj‘³U³KS•bÓBÅ©©3È}ù5—²*æíÙNåÐ*ñçFÙý/{_E±­j‚ Š‚xšIÂ$!3ÙI$aß·8ÉL’ÉL˜%!@pA6Q‘UD@A@Ùd_TDÜпΩªîêžžd@î}÷ýÏÜ+Iw×^§ªNå;TuNÉ1Î3ÎKÐÚRåy ‡E­ÐijÂK)$Oȉäs>ÇC…\ÄÀDÈ;ðïkbtžrà&ÌÈ‘äGO&ñÇkEж•…&SJ{ ¬ÅªáÈ7`\–\TÇú ÆÉ‰‘@Y ø²Ðý7ÇIA]ÐUn•9c"è%-Œž¾èŠ9ÚËÍÏdklfVEŸ•Œ17ãEû¨HØ~<°ÓãÐE'T¬¶%´©Á¨zÖÚN§+Ö7.ò7XxÙœò£.Ä8€¾ ÑQvñ©æ9ðhÙv«ª‘—ïàiüÜkøàË6ø—¸s^%øhà.ÆÀà‘SXöË«z EX<¾;Ñ[5È)ÂŽ)]¶ÀLìgyýwªtcå~ÃØHÆ=’Ñ¥³¶Ì6›j7LñÆ’¼ Ç€ŒX[¹åJs¶ž¼ãµëÍ…O¾×”‹ȬUX¹Fÿ›‹¬ ŽC{í1È9*~˜‚ÆïŠˆ:T_ñ`ð[}ºa„ÍÈ—€f›ËÖA²ýÏM¦£%÷¸êóf©¤€Y½{÷è­“/AÌ'Ó8rzð¤±g÷c¿‚ö à%òb¥n;xzBR “â½%5›Í™Ÿ†=ªÜ ÜBÐ8îÄdê]Á1 \´@•HG¬Šf€¿Å€# ÔÓ †Éë­ÐºÉ‘»»r åãy ÂhêŠÝF0lö”ÖƒÙ:ÙîŽ^‘X<ôË(X½è'²;y= ›Ëê—Ú° ÛJm ¯¿-I©~ìwïVø9dY†..¸9á•_‘:×m+%·"ÎßxÝÀ*!ÃY¼$EÒ©ÌÒN%BY ò؆†à ³ÂgcÇG*be„“ªç?4—α i&D$(@lV ua¶x:Áo¬äâof7$ â¼ä|Ò–‘ª3?„S©±#í ðß„2bþŽÇ §Ã ˆÖǘyíJ¦Ê®+r½¤Y‚³ž¢죄Ë/ùCLR•³ÚÝm+É£•\ÔLÅðCjg |€ÇR© É#?ö‡ʤ6e7}ÇvG®¾E’͈U"ªH²þ9¶¢b›£4Šee'(M&É6·- ·UÒHêrŽ^ô*Ø;ÅÁÑ_Wb–2™àJVä H±f.X§–Žj;ó3…\àÎÉcU¡Ùµá€Ø¤«l‘UBoÅÞ“‚¿1&(æ4†Rt*#‚;#KâzÅæ=˜¾pó±`Üp•=)ZÁÁ}‚lɵšÞþÊX)màÂ0í¨• í•áaâoŒßU´œ-¡`ïôbº1'²]]ØÂ¨;Ü¿IÊÁ ÒYµ‹ÉÌñ+màf'hÁÖ)´j~3Ôcó‚’.f£ùƒÓå#§¹7ÔS 
·q“twpõ!=¥($ä™tì¾-^r¶9©Ã#ÊbJmýVR7 ,:=þm ¥bq0ÏRËw.²\Ì —PîÊO<`MWi©ÀÁ ,°' ¹™©ƒ1°(p, Æ€3*”O€À™gµ¸óF¸ò=éq¡ÔT(]Äf5†Ò?=|l\XXtl%ÊN8y£Yxy¬ F¦8T÷¾• ÂŒQq™Ohe”Œh3ªý’G¾ ¥¨ß—xò…Ú‡j?%%’OÔ*4T”þ«& T0d“ÅB0i¸äeg·éÌdG»x;‚Ùrs‰µHÜêBâ¶ׇ“Ä.‚TOˆª.øÍµ¹…¥ÊW[GlLîå†\‹Fc„¢Œ²ï6YÈ(¬¨ÞV.¼ˆA¨à¹ˆÂ;æïê/~fÞ²Wr­#dO}<¡dÒ@Zõ¬E«Õ¥UAÏ’g=ÈbÐJ‰m$îÝ-)?]z{…KÑÍŒVÑ6eÝeȧÄPÊý"ÏjòI‰Ø¹üBqü„y ÏaÑ`ËÛ‚²°8A!OL~E†¥›\1¾`ØÜNŠ×-æ'ë4`~n“ë—1` Ãõ#AÆÈ+Ci“1¾©XÀV†2`·¥"=’SrOܧÔ7Hr6ºü0¾©J©}l2$zX%á,¼Rr@Š?˜‚ ¡¬v("{!Ø[C™´R0&É“ Ás—Ág!OÐFR‘Z¨ÝÚI3P’›#ýÃVž'?¸8Ñâ“ ýÄgôÝ…?€˜ì¸ÙŒm& »rq©8L7ý&…Û¥aàlP®Q<™|“âKJ¾Jù5[] øBÄ«,Rž{TmŠM&çá#Q]>yióœÊ°U—UØ%xfq”«ËÞ=2i>îI]U†h¿{ipÔ Nþà¯Zà*r¢^ŒOx”±?hRÔ‡"6’cAУ/–[Þ!A}Š;¶TòŒ¦µQ€ö€ü%tŠfˆ!~Da‚4Ñí¥Ó%7•J,tŒ•;Çîï×ׂ€UY—LQ×¢B%@5µ ¤x-ꈽºš˜¯E•@„ë¢^5›>é`Øà”‹¸5*v(07”Cx>£ö @½cÍC^ hc ø”­Ãòª¾ãª±D_]HìÉUãÑ„SÓ«‚¢áC6V'C¥” .ë€2«ÁÖ^3°Øœ´ù` *k"àÝ,î"›`0UÅÁÊAJ÷Ô‰SÚmó´Ðñ©GÜÕ†r¥VƒÊÒ HWk /(•ù9µ ,N›|b©P(Q–È5ƒ5¶šÑùû-à"(e,ÁÔ¯ÊV”¸¬ZÆçÚµCD› ÐÆ(2œl†&‡º `щ¼2“47Iaß_€2&0Ì¢ QK>(Á®…³\Q 5iõ3ª )ªö“Pˆ1”î)zŽ,š=E= 0ÑhË-"U3©ªÝ#ÉEVŠøã’”ãÅÓâà˜ËJ³áï5FîÉŽ ªF~@ fVºž‡]0]uÁœU¥Â2O Д( õ€N‰Ùn/g‘ }Ê@©DTƒ)lä’†ë€mÁR¡¤—Èd“ÍƒŽ‡j3£Áè0So* óH£yÌÿùH8 ©#|ÎT½P8Íà«ÅW¤ý˜a…ñ±äSPÀö%­—D9F{Àk¹]™`äèÄb•V…¶|mè\lf|•’ªâ–Ð*sÛJÈgª†”—|B^˜MâÙ¡ù?<òbœ`$íB‚Ø24:Ô–l¬¾~úJ@ 8_*$Xq2W*WêqÁÁÂj’£ˆæ+¥Šg7Ù-°$¨,¡e‰Å­í«P;,zBìÞÆ=ì`D]„ˆ¨ “¡ ¸ÕüÙÛY XܾŠ\ÈÒ£í5 ‚Ïãca ,¥èNh§~õdi9*)³&î¸1,p!Ct ÇCZR®{Ôõ¶ ‡€2’1ê»"–KOtE3N/L½m ¡Ò¹SÒóšÛ±MI¾’3µ?„)üE›¤Ø:äÐ(_¾ µÉî;T"ãd)¥twÁ ä•¢J ™ØØ¹YàðYmB9ä3±6(!±Z¸fH‘…ž<*Fhô=­ ¦Ñ¥í¤A¤x“ øa}X0úY#TRB:ì°ç“ÝÆÎcƒÈ€ÀÌøì+kºƒdAAÏ쌇B~¿Ùið®ÌvÀŸÅQÚ 3€dK7¹(W- €Á³Á¼ëàpYÈá±cäJ‹ ¦áp‘Û;ùg$O ðÞ.ªü¢”¤2´¤%¹#˜™Ef-uôº-:ÒÓ‘ ]àØ3Žƒvönt–¦%róÃp[!¦KgWy»f4d¤ ‹¡Ì ÷:%=ÒÂ4¬÷3òÙ….d? 
í¹°SdÍ’dyÐQ›ã†î ¹*’ /œïÌ¢7Z:g·0z 9°­¶‡…פˆ?²¼€æÀÈKs5`úœÂ@Ÿ=¸K*'éÙÛª(IÅèPHC~’¦Šr):œ2Ï\l+Ωh©3²á†^s;ÉDøFBíH^rL"Žþ†õb8P+h—á0à´F®<”wÙ0äá2XVàSFÈ°Èæi‡éá®.S—¢h‚?Ò»ºÚU*»ÈÃ;ؼ–(q"è"Þ€®#±IâbôñSÉÛX!”ÁÐ £IŠJÃ8F–Š|Pùy-ôÀ«ÇÝ_l"™ `Wlü3*`Ïóþ˜÷?˜×anaP¬F]ÒØLwÀ(EŒ'eq{IèõZÄãŽmò‘ÍÃWÚÖÔF]Z[3'XØ¡d›'Zº„Xm B¼`í}8‡ƒê$OE‰|£c‡_³9é‚u?¡måë+Dá¶2×´BùFH¸EUã WäCÅäÿ„ᆃ]¬“*ûv\âÿU‘ÿ †üwÿúß ™Ù%ù½—ï§BAÀ­æùJÌP ¯“ íÞle@D:_É}òsÖD'…Õ…¾‡Tæø;œÛú_Aä&dg-§B ÏÓɬâ…¼>¼ Ã7á%e5/Qï§y§—¹Ìæ¶VhK¼¶A¨­n—2ЈÍåFtY>qà¿&Fµ?e)ÚY9ˆ¼À)UQ‹j³IÁ!øe õ–¡6Æn­öcr@ÒBÚÅv@ bðÇ«²2 !5B%¼y!™¨Ã„ì±k@Œt #§†‡ÐHó)dŽÈV!Âuu Qà„Æ²L¬Úa•þ^©!DÄàŸÚÀAŒ°Rf8.“'©…’¢Ï ¢q¼…žƒÕ f¯ÃD[Î3¨ðƒüB!©®cŒQ…y[iŒDmÖ˜À#wƒººœZVÊÈ+pË~¨a/jhª¬Ù!y8 ŸWB6Ä•kM7°1Æ‘£Â0ŠŠç° †Uk¨Påÿ•|¦…I<2&r'ÍñfBs†h£ 7*°²Üìv©—a´±»‹ÞÛq P¯i&Ô î['xÏI4R›ÊÑ™Ûù¢Ö càG—3.4K=„+,ô¡œp„FhŒ Žc€ÆƒôVË7—0ͪ!ÕÅgH8˜è+ºÉ.Œ“ Ntà Ê•gšO"Ìà“OÃDñó Poü¬uœ,ôR( çZ‹+Xx¼­xæÕ6Õù¬nµIÞ´høêÌKñJ(üN’à>3:S%Ã땬¢Ž¡vº²6„Üñb„À]w—мz—Ø1¬»—GÍ“U2€Q¦‰”d b¾((ÚfÆ:rÈ¡Èpµ7(aïâa&²T󀋺f3 j9ê4\h ¨æÇ¦1eh@ss м àd¹êcwóé 6z·ÈÈ)3J}ØdØsjjå¶â­ 52Î6ŠˆP®ÊÊPbk+EKþ [qw0¨Rc¤xÉ )G®/¨™WU5\ èXB”uk™5Ï]žWb³Ú-(5„C ä bÒC&F —nFR.äI‹Ž”(¨ð!6Û±$,˜Y  †¿#"ÈÅN@àùÉæNZÂC„y/*’ƒ'Ñ a6‚`ר[?6"¶ …½è))؉rà„f´Ú$•‘9-U»ÑHqèñÇI5„ûÚ±#¥úüñ äW¼šÐÕû?¢þJÑûÇ ÿ'}ð!è9¶²ùŸØóÿÄžÿ'öü?±çÿ‰=ìza‹†ï5ÿÄžÿ'ö¼üç?±çÿWÅž×HûVÖ î8²N‡ ~þR-)‚ÈA‚‚E¾€È£Ñ†œ:ûR@24UƒRY ³UÀøƒcÉÔm¨â.fPźƅÇt¤ÉÔ²¨"QæêИÙRÚúÀn9ìwÉ—Wb-¡  Œ”Jœ^j?Dó1æªà)üe‰’ JÎl,€E¡IÏÌ ¿Åq¤ÚÐ`%Û4,.++Ý@Œj ×B^%hvåsÉ_²^U™r8CvÓd6«ÚØ…±¡þâ7A+ˆ¶ *¤œ0‰I›Dv¸£}È•‘gx0ìI9 T /B¹´Jê"øUö®»K08•Í—Ðd.¹ ×Ô£+Ú<(ÂóHØsH•«¾$Ã’b`s€ûˆ§‚ì+%ÌFGn8êaᤰXå~¯6n‚˜oÔ;†‹ ä¢üJ®jJ ~#¬“¼RÁÃ$­ißL…,9E$æ.¡Ò)ôÛ½ÐÊ T ?e¯æ[ÃìQbcá!¶]¨¶MÂ#I;t(à0)‹•ñŠÔöU—2”üüŸ/–Ž.'¹…RÃÖ U~A].¼—¸bu¨·œeLIL&‡ð’Q•†šo:颮JÍŽ!+F¡‹(¸å)d˜&8a¡Ž‘z?Q³P lK¾Br@«€<5ˆäÊ\»¢ÈÉ^ËH&•[ G«¼OšzRVƒºxÒ;ha”VX9SÁØÃ1MOa$±êXR‘‚êÆ΃k8¸PœA( .ž:ªqUµÑ®:»L%,èZ®pqÑå@ÎYÜK‘¹®òõ†¡â«ROóe =&<”ÄEDÊCÝd²8Ȩ¦[bÙdB\T‚üšJ¨Ñ»[JHnI.8Àðœ½ä/²zóÁ<—ÅV£ËãmQ>ÒìƒH³ìj¬yö¢1öÒôd¹M¤¼É›KÆh³ÅShžÊìÂSþ€ÚT=&ÏV{Ùhá±È.–T¤Î;¢ÔV¤zŸJJUO6õGñ©Ô)>¹-ª·øT"·R¶ø‡«ò‘²M(8xÒÌCïÂ7ŽaA‘¨'£`=£uÛ´r¨,_¶@´x@~êð¶·Q rÞ€á=x‹!WDm!å™±SÔÕ©°Ð^`‡¬èsEÝèu_ÇR²ºíäxvTÈΜè¡Xj¡¾Â—z!WŒÜ±§¸ƒ˜F–*Â@Y²ßÚb±kR$“åI!cCé ƒÎC—Ôb‘=ó±­1ßbEb$rugåa!¬E ñ 
¹€ƒŠ×É^ÄH QühëÍ=ÎÑ&OJ…ˆE$C$µfŒ‘˜U£•þÒ{Šá ‰Rr†º 1äb¢ù†ñ'${7Ä Þ%aYù¼íôQÓ4- ©dò½YrºKcÅâ3ªò™eX4˜ˆN–Ú£‘w°3xè&Çã?¬Û¤ Cã’(ÄŸp7&ï»÷0h§ƒ¼6Å«Ëñˆå¤Ð|ZR ô4„°ÏC!'¾¤™Ë$¬ÔD”AÊäÈà.ÅéÒ«A•ñÐ5iEÚxlÔõ£ I‘*@ÇœŽ\”L­à¼êðqü3™Lɾ‘GÍY Š}N× AWdd÷ÞR|"ÛÔ*ÕÏ™ÑþЙ#I³vøqh´«§ŸÑ@H¬¨õ¦–6â6G¶`I5vvXøC%0´Hå`#)( ¶}~@1¥¾á½¹ÅÞ?iáãÆI±Ã3¬Vù5ˆy¨ËZš˜dsüàîl ƒ m¨þ'$êžz£2’kî?#Ô ùÿ(ÙÌÑ„h\VÛÐHÏÐ(r=/³YUQ\D~ËÕÓ„Ôº@“V(ƒíAaÒP (ú%–yÂõthT ¬dÓw¹1#ã%q‡'YáŽ8o:eÙÀ¡“Ù©re¦#Œ®Ô×.*N¹ëR×F(ÏMÍñ­à½ šD)+ƒõ 6“!q¦VÃÄ6pr Qý€(.ôšÝŒÿeJ¸€ð8 ½¡‰b¬Ð=†¶'1%x‰ F‹$vÌ¿ú\ô ¢R…æ%¤œiX±îº€õK—Cü0ˆ"‡Ç‡¸z«îM%Ý'™Ñ`5y F.\6%;Dê¥"6?÷ì4I® D<ÓU§¥Øì”_µðÆh€N QÒXI½¹Êêc*%è ù¦Ý°éW¡Çê]CM¥à«ÇüÄAØ3-˜$ + LE×nét&Ò«šWÝa‘KÏÖt-FÚl¥R‘KvêPH™‘Ùÿ±èO¡ê ©ª§¤«ýü‚êë5ìjp=Uõ1«{¦ÈKÒ”~Gr@ÂPfºØà3pzÉpE§cˆtN}5ÿÈõ é*>RL¨é$àk $Q–8IÂ3aÙ èU±ÅlÄýšÏÞ+uŠÓ#Tʲ¨"VÉyT¨dªDI¿FJLd+DLÆP>­æÉït….ð¬äÆpìr¤D”£‚;ÎzÌäT%L)ØYS'{ÄD!7%r;Vsè¾-Û‹1Õ˜\Ô®\ÅMN#½ë°öi”níQ /g3î6p6 fC ~ MǵÆ?Ú¤k[­©E05 Ñ”Oþñò˜Ÿ¼p¨`Sh€n¦ÐQ+~V©j8iÿmDfqy?ÒÂâaà®YmÂk)V#j»ö«¨P¾ìçÕ#Š‹®Â~Z­’½-.õÉì§©¡"¤aÆÿ¬ì)YŠ, ÁPÜ)¨ºÙOËËÊ AóiªÑUøFY7\ECµ:[4Ò¿ëYa”‚Êå›rUT b€m °du1¨J@-ʧÇê)*ÿ.V)\¤z"éVGµÿ]òGá„ÅX‹œ{À(bþ¡CäyüS3¨B<˜œ ãâ…hã‘1Q—« ŽÇoTlEÈ0BlÿâÝ"Æ5œô(šä´NzÉXHh‘‡Q"KÈ‘¡‘!'E¯„Ã2F+ùðV‚±IhÈU£Î'~×»ŠG tïÑâ- ä%ñ¢!Ó‘ÂU’n²¿T±4˜1¡Ò(«K×Ca8R,Þ0Œà/¨ SÕ5‹qi8£%NŠ "NE‰¤RåuSÇs1$Dµfà¼Ö»UàýAãÊ%^ÈB4‹KU}¸’¼‚"ÀpúÈŒ9µ6!ªûŽFt DôÏ~L›As}¦Tœ—*¹Rê0ü›±,Bý@4ûùßÃE*|ž(¼ ÄŠÅ—ÉH¿I G§JÐaØPH@±ç4¡5„ïUFÎÒ Š!$ ìBHCœ<ö1¶4ámÄyg`£`©ó‘: ù}í-S²ú-d8û®_eR˜ø â4úP›×Ãú¸®UϸAJ§zAµ«L¹S¬ÑÕ2ÇJtÑ4¨é¤È«¬ñQz5Äà^\nq2'MR+Vúwq{-ÈÏÿ!ޝÛk­ªaïôsÈÞ±]œòwzEG’c´Ð>šß`à£4üÝU1T"O%‚Aˆ,•¯„ÆA f•Ôœ=.”´œ}ÒQ÷ Ò€«ÊVE`Ñ䣖É~ñ(0¹‚•§  DÉ©P/®ÏG*© (jV« çŠ|OÓ§ZŵÞN£ ÕßGeG{Fêí$“múÚÊl|µp ì Ö0#¬éb ŒRe‹ÌÊz #’N âsÈh\Q’ÄFCˆÛ)ì¦U€Hø]Õ51‡iÚ…Ý™QX‚!Ý`dŸAzøƒ«6m&R¨è﯂«Í¸‚ lAŽ7b2¦¥øer÷—É6 rÜ CÃ|2 lyrôP5U9á¬ÍWZ¤$ô÷¿}Òå†V=íW¼c«¶Î 6l :eÓ¦È,ž«.ñ+÷?^âß„¿`øô n×"+ï¶a ;?Säa‡8à’zO J 0´JÿÇG1«¤Âð’J} {”aqÀe¦B1›‚ Ú r*â‚l¿¸ ‰€Ù@Â*ïP6tžc$#£–ÅÜèú „© lŠj!*UØ&KpU‰„³Úò}hYÅ–8ã)Ñ6gÕ€-AÌZ ЃÁ°è¹³¦&ß ÷Ï@M˜B.QÓN¿@øà…à nÀ`ŽUð™°Z–6TÁ™Qöž»b±UÝlý¯ŒrA'D у® W¨›P( $ï!žTEM€j*tSi=x§Ê EdTÑ“%@{äjiÜ„¼ê¬Þ´£Ú}iÊᦗô˜%Æ1³˜¶¨†¯Q@þ‹äÁCàÔBÛK†š`p<Àu ”½Ð^`ažhš2Ø+û‡?=*{ˆp ”M  
:–¬cáw¡ðiÖú3ƒš@ìý´É´P~ŠDžAWº*ÔšÒõ¥ƒæìÆBý¤Éø T¨%ÆÐ‘£av( le1 W‹ &Ù©®s9,Y¾C±6¥åx7Î’ÇÀ ˆì*LDòÖÃ/_Ê…XL'§ëÅßE^sBÎC1òë!½XXÐ\—l e=ù扑ã…BÌD1¥†7Pâ1TdsÒ =‘ 1¢›‘šäh>tÅ ö º—’K+õï„Ø>6zM-e@˲ƒ$„†]Ø.èùb I³ ŽÖøF³hI|ìƒÅÖ·}¤@¥¬H.?c»ðÿm¥2éô¿Hüÿ—¶8ZêKf`ãòÞbM½3rù¬øh$’PÑR’9¥S˜+j—wµzG’[Y‚F•¶E ÏqÙÅPÎ)ï©Ä5U4Žÿ.…ãÿˆ²ñjÕ…l+f±2¯ÞöKˆ®K òÓúé5&yÈuˆþnvXȱS\j±b¼uÓ=¶ÃK×7:¶I›Ú2!)MÊ 0.´f«3– ±¨yõ¸ãbAÛOã½ÛÈ6<·×B¢: £ÈÍ=ð!§ˆÅ4'·r¦ud«‚Å(g6 Z´8žéA¬äJ@áҀ‰˜n¸-w²yÅXV ºë=›bÎRXýCXPd±kuTÑ­^V˜‡ì&+Å`™©u^âYj–"¿ŸU3:8Üš11ŠõClh”ÿmNâÈaäk¹ÛRê‘ùQ"r˜]äQHÃgB*ƒâi såð•8CE³ôhvi`§ ^¨¼,1©.z:YA;ŽE±ÝmHrvób0âT™WëÁ¡œä^g f70KCå°*vîD³MˆË>¨}B `¥lØ&‹`s–ÙÝ. W‚(+¹Ê£ ‰ÜÒUª R3Ťµw‘‡®S§dcb[Œ«+Ç §b%ÓIŽ—¦»CCtüÒÕ#.{U˜nêTLÎ3útÌÉ‘ï›1’Å<Òl‘œ¶rJ Ô1Ý`¤ _\;,ž:‚¾CHÂýƒ‘–qàüU¡æa¬Qˆôwöºg F:u•zcë î½a°-D”ÆXé%ˆ‚@ 3®éFôŸõX¶^²ÓIfGe~×v€/($WbƒÐìz®ãàÚ áÜ+œê´Åd:U-&_b WÚh¦‘£–•p•rç‘2µm…vЯÜlx:Ù;[34N›#SÄÇ„Iê kCS·¾hˆöˆè4S¥,5æK Ò¬™iÅŽ4wZÿò4h Ù´q¸>‡CÔÆ»0²&J¼eŸx”TõèÙ7£w'„_F)J¬Ö!h‚íSŒ<*Ãi&†#;]()ÅïþhÞð”mßJ¿fÊJUäëY)\‡b¤4&ƒô…?9ÉÊÊ€£Ó•W}Fj´#æãÞJlä¤{Ðé6e*ÀÝt+0²Ö2]®\ ×3°çbV(Fû“Leø@Å0qB2ÇΑN [h–r)CDZP’ª™’—”jy[Ø×´ð±aìÏÊRÊaÅê„»š),˜Ý€¬ÝçsÉ׬º½AN¤[&94GsÙŸ•«Â…– “áÚj0Åå,·ß.#“0ß@ÕýR›=ÊaÝ`;’–¼jKѪô#Þ¾G{ݹ×UŠÉÁ PM­Ééži¢øŒ,¢µXŽ÷¤Ó@u½àÎf 6d±·Ü@Ø-Èò°Í(P¶!ó æ4Ë6&‹&Væj*•LÊîn”p£WŒ(1ü 숟šQ|¤Xà¼=4(¢ÄÃ4²PO7¸y2üóª8r/†FöÌèÛ9=ÖçqÃM“¹ †ÃKŽŒË|uØ]8w…Èã„mAÏyZo’ª|$9ãYé±ä‰PoAq‰ËJÞâÍþ†w#=¾!%>)›èvöá£ò’¦PÂw„a2²dw o ùs!<‰bJ1w™µ…»ÜÄ9wÉ?©ü de ¸’à3yaŒXíð'¾aC'¤–ßßRFUCä³¾ñgüDà”Oô?y”·PLÉHØaiÍø7¾Aüà-èÆøKø›¼3f¡}?¼õ°d¥4&²€?ÅHöŽ?—þ¦‘’P}_@«É’âßðÎë­à¯ÈŸä ¹+;„ÜðHÞúÄfûX»Qm"ªš è:Æò`1 ÑŠÂ$l7®c½`õS…ös;OWGÅj”‡þˆÃ´ÂÑR86ý HÿRè–?²ß^ý£þ©S|#¾Dò¿€ªØŒ†ð‰S { tÂh $ öPý=<à¯RšŠÏ*} ]¡óGÿ„Y£‘ÉÂ?`~ðŸ\˜Ž‘,HÚÇP»“Zðººf_ãúrFÈRt3AÜCQz  0, \¾ dR¼¡Yx=ŽàïGøäCä@p[ hñÈ`[Rƒì¡}âqŸÄ}“FôMŶ@ÖÅ—z$S©.™\Ee¸(ƒŽr& ªÖ·o´‰ï¹  ÍÉÙ£æS©yI(ç"D;.Íà+«Z)ƒ'”êõ—"i)·x˜¼˜ÇÉ‘çC¶“n±Tòƒšd‰s© \vu×WfFå¡B_³øSU» \ÇÅ¥¤`‹r¹‚ÃUT•žˆjßA؃ò&Úä™$MSß#GöA%°¦@úïq9Ê@ÏL…,¬+[sR8cOüeh¹¹Îׄ…EÇV2uµßnÅùukùU­Ð¾ê C Ì![žU­ÍYöÂB&€Óe‹ˆ SD‰)ÂÚ>¢sZDnZDhS‹§äKF%[:+óÂår˜/F7¤Èž2Fœ›IÐ0ˆì‰lapP·]£HN”ßa %¤ÝÝUŽÚY˜Qp¼u¡xœ¸„“FU‡åµ@ËòÉÛbîã¨Ðª:¥APv¹|È“ §—µHS¶ìŽƒy0=*JÐÖV©Øì§¾tùTÒm•ÄÈ’²vcˆ°d<Ü6%QÕÞ¤H~n¤hµ9,¨Y‹p3«Èò¬QFÙàöTƒ1uªj§ 
jiX»õéF;UI©ÝÕl”“ŽÔÚß>ÈÎðY çàfÝCW=‹!™!–|à2+óÀ/$wZêqs»½@Ò÷‚@ͯÀµÊÚ¶ =[%~€(­fÁ^åaþå, ·zªä <±‚‚9BOº¹«×6HRˆ°¿òú“2J[Ôºõ+F5mZ@Mš’<š^BPþ ©¯Y¯@à(„fQuK¨/˜Î0-¿Ø/lª!Š-‚æyk`Vä j«6[ ¡|°dL‰‹Ó˜4\0[9I}n 3:úûVdrܸt8·¢ª÷ÇÂõÏ]€#— (”ú2:¶«Á+Q^{%-Äû¡2r„1Õš<(=]¯I]š)ÂYó]“~£®¡Ì„à ¥XÎ"ðàò†4«›1 U,•ˆ‘6ÐqH6ô]ÁÜÕjb“wWÛ‘] OÄ— áˆåêåš«^-A,—|mÓŒ¬ašu׈b)*sî §¦1¤…x( BþYX©¢Vœ3h ^Šª)’F{¡l!Va‰ãÅϦÅ` Ü»œ_áë±ÜõC‘Š6ë!~_Dz?#b£+cYZ*ÔïjZ¾»\Cð²62AKÂÅpÖ~wг°Tr ©y(lÁ[G\¸þ’5?W¥J­Gn~¶§êýb>‰/„ØQa,0•‡²þ4RË, mò§ LSGØ$TúY¹µ³–¶Áí= Ø_M®j„Q°ÜñßÎrVÛs®ÍoäÏ3uÊ«õ–×Ñê§lÆóNkFÊ|åÑ}Ï«:A­¯cÍM |Áz É•^™æ™Œ?•Ìæê:,CRkǯ¨éèfzJ7D¦¢ñI»Sa#¢e˜\’Û%™@ØàjÕ²eœR¨È ({„å²¹·Ñ³¤.—Éj³ym8 2­ˆñ¬Y!Öq"¬… Ç£”¯#mn§™ü‘¦r‡799Rl$ ‘$äöËeðco}òN/»…(]óVyúEºÇD^)2W'8{0–÷ßjÆ=þ͆aâbTüõ—¥²Ì‚[•ž"µ‰ pàõŽ]åKÙùlŠÍŒçÉ„Cú’cœgœ—<8,¤Êó(wFÍÂÔ¤ L ò9`b‰–öœ§ô°œ€¤…PZi`ZæÀ“…ÝuÁì’¼óˆï<^+:ÂÅÙJŠÂX÷J.‚ÀEš3nÀ¸,¹ ´…¢} ÊU×¾Pʉ‘@Y`Ãf/ ’”'ý’ Ÿ-Ìÿ$‚^ÒÂøPp9þÍQlÏñ—Ç-Žrˆ©Ší#A¶á‘ãÛ ßÓ¤Šm65T6‰¾ª(;àCÏÚAÛét!b9ÞÁÉßÔrW~”ÇŽ¾á5´*Î_ÃD3U{¯Ã½rƒˆöÔ ‚j#ÓãBdÆ4Óå4ÂyPl#ó&Y¸û¥Ð>&FU·Ry©n+{¯6¼p¤9ƒµ·`º|6fºÚ\§ —µý镊Üa”uتnè™;xTiUÔKîU%×v]'ã)uc"òQôédõqP;¦ëTë\pLË‘E†þjv¡¾ªj£UÅp½·X¼P8< ~öÔ¸9€n[Eš¸ìdÕ0ÙüìTI$„+–"é›[QÈøŠ.lòÞddOM/ñ¦€_ 虋c™†YÔ>_þâo`)©Ñ„ŽŸŸjžÿÀ¡Ç7U¼lAÆ…Šbpt>ø8¨ÀÝMØ9Ò.„6õ_¡»*…Á`èTU$~f\ ß(ç"5o.…‹{Sºl†Ž™WÃòúïTéþˆB`*ÍfBl2ºt6$]9'.8¿„‹c@F¬­Ür½0pæ¬v½¹Px£rqY´qV®Ñ_[#»Ó)¾C:Ç ×òôZ°0ÿ][_ñ`ð[}ºa„Ím~Øh°¹Ý]Žß>@“E†] "o–ʼ'«wï½uò%ˆùdÇ«\?+~ƒÍMvqãT¨0‹‚{›¨É­ÖÁ9 .lv§_)f³9زJ¹Þr¢Q±Ÿ‡\D˜N¤i'5·‰â1qt¤¥›Þë&—tÝèÏšÐÑ =Z\þ@c.¸(ûlÊ`øff*MÏR¶òåN¤Q€zá5ûSÝ¿ {;•ì ’æ×54„ 4)’oA-¢„|ð[R;¸qa +UvmÔ Ü9ãέ…-72ˆl0jã®’¿_1\G„t•Ñ®P‡p9`ZÖiíØ¨ÊX0öÐe® `ð 68ˆäÁ'G}±Àò“®RÐjM•IFLÑ‹: :KÂUÇG¸°ñ¸ª˜þ_‘e‡©>¸ýIjLm¡MÃàÆaÅ!ô"³VG @'J–-Ј–ü¨Ú¬˜£«¯¼¸'ËS"ôSɨݤ›0q´ÉŒÃÑ&1²rùÖë_„vsVã²é‰²µìÏ \ !ëKË«¢fÿú„D$CäÒDH†fœ¬c*ÆÏ|«GÕ›0r0â‘M;þ*;€3Ô™î {‹ÖKY3Õ( ª¢ÒŽ¿Èv]ÑÈk ®rÔÅZ„ñæÔöð:€l!¢BK¡[Á%€BIh²Q‚5 Úǘʙ¡µØ#».Ç%³,>ŒŽk£yy"#™gc”fØÕ¬o°c^ýf”&j@Õ‹â¿l=¿4.®ší‰-‹8Í¢Èb;¬fRÔ—Žk9)¼Â*'A³ï+“ÀÞWµ³Ž1¨oM¿]ˆt—™ÆBµ8/ÄÄêefæj·JL²¸ÇzmNôæ†í¾©· )d<0¹,Wäëè‰Ï,H‰toº jVÙ<@ý`L#rXáæ­¡ê¤=vþÌ+R Òߘ'åZ¤?SUœÝæªG‘CåTdW{y÷ “äšÊã§÷ÒR€pp;=d{i“ZDb¤h„¼)²ʬpz-£™jÇi³¸ET ¯k¤Í)ÝiŒ‚‹W)Eµ©ºÉDÐeÊÓÐKt 66&¶R·*6«Ž »JÃöé¬iÕ„‹»ÅIZ Wèäéµqw/6ˆ²Åd 
fÍöD®ÍoÉʬÜ5݈heUnC*.RÃ[Vß“šÐ¼~NÝ-r¢ÞÝsÌ’‘_yüqýaHªŒr¥ØRùê\%´Vþííî1Ù‚$—rŸÓª ’\ÃëÇ5¸{¨qÒÔhäL9Ur:iÔOíïš(‚Ìàäž oÎT_2¨®<.PIAi^CO„ÚÁ¤htÆ’šÍfƒl¤ _¹£ QñyhR«Kò¸‚‡ê·…£TR\E3¨7U  ÐÓ †É/H™*l~D]°[¾@ÑÒûÇ:㥠-Ù• D”ÌXú×v¥@ä,[h¨€lÊjDZ ,++ÿâåÇjà»þ›+ZèA<² J‘ÜS3,é”lG0©Øâ¶b¼ ëF*p !ƒ[ ˆtxn¡ÌD ¥ÿRÉ» ‘ôj1`-h|šIh/«©5¢OxI®=dq)§lAÀÇÌ ÓŠd‰ê25&j´c\­o‚éQÌ:8&ѧ·$“ªYQüf Á+&÷ÒÒdµ›.m‘*ez2ûBv$ÿ·d-æ?aRŸb±I^¥™$u„ÎsD?¤s¸ô¡ý¸îa˜Ÿ`–lµ°©K¦,è’);ðÐ0ð¿RJY<®AÝoY}÷[VÕýøøªfÌŠ¶^pæ{Aé¡7*þ‰… ¤òhê &ÏjPM¯¾íññz—yÐWÐ(ô¹Ñ§OÜg<@än€‚àtÀŽSÅŠPAðûQ;˜3÷êåì+µZdƒ ŸÝ+.ÎŽ\éñç¿?µ­{lÞ<Ÿ~à jSͬ ¨U¤¿T„ã ‘ ")¨TàÜCr3À+ìcq ¯Ô›‰H¢”CCÓȤ—¶¢¿ ëUPbEh/ˆŸÂäVªP*hqÇ¿ ÷_擨ƒÏî`[hèÊl*Ô+˜¹¹Â‡«õtÕ‹ctS •¼;VAôcö–É/Lô‡ ‹F#*eÒ X\xYùÏÉ>i‚Ë@^%G%~֦ǎ•Éš>¶¬¨ÒDþuêçÔ°!•é|_`ÑìF9ˆAÖ83aÞtmœ(î+TÉFÅÏJe)]ãã„_ ¢×º6„Šª§ ÎÌ›ò%½1 Hµ1ð°Az%ð(´Af8‘ …-ƒ2°ì®CáïpM Ë­ºpLPuOB ¯j¤õîÈÔÄŸ6XŸ¦0LsCçðî*ü%} ðUá!X-^Ù°¶wvëO;g)´y+Ì´®~N¸<Œ¤¢¹.P<ÁnÛîá;…'ü)ˆ»í¡rgô$f~ûœ· Qced›5ùX=&IŒc#Ñ…Àè_ñ^ŒÏ§w ªú®";소&ìŠY*Eûj4Ò@Rž^3Lb R wëCš9¤Ê~¨†O;nFfª°å*™$Èo*Ë¡r¦>\èVÛqà<ìž‹ô–íðyŠUç©£ÐÈ,UQ#ùNØú‚ÖÇ߇¿T/¢´æú¬Jæ%<õã'ƒ´ÄB˜8½„l]n{b¾S^:>..B ¢0ïá|;¹›8]¾¢b H…hA™LTL…ƒX9žyëL&<“ô÷Ó@Ô˜¸ª¢­Ý¡ľv¾ºÚÍ‹ù¯*=~Û-3÷¯Úip Ë*ÔT"hÁ'˜ ”#‰ð"ž‘󵹪 }âÂ<âtÎyJ¤*&…2ÛWà&ЫԿÕ9—ª·€H•³J@š«ª£WßOM`D…úˆšD€G.½J:å¡4ÀO_Qà’OÝp•dWŒú•ó>ØFø…€Êú;µZ„ªïôj…Ù€å¤äoܾ´×¨ î^ÌN%iôy¼Ò<Ë…:èZ¬·îIn›§Ôåô°ØfÔt‰B90±ž‡~òƒ^ôÓ(r×P’¹#^Va[© ¤àõ·%)Õ/TXG~A™oH\pW-^ùÙ|’K®£"”[²yÝ>n–Æìuã%)’JG³´ÒQ6 .„(ˆ ÀÇ64…¦¬p!¬ y”M£F¡º°—¢UTcŒMH3õQ/²j¼Šnñ€`Ão¬äš'Lë3ò’óI[Fr­*%B¸àרæ•bþާ§Ã ˆÖ§ë5és*~“ ‰ÈoªI¾ #XH”÷Ê8™ì}N+Û‹×{øÎ|Q¨ràŠˆàHͧö¹"d(ºST"òÀ”9’ÏrƤ%‡0±‘—Œ e£ÀÏ”]¢á— ˜ŠÏë’ñ«èñVät•ØLtK3Μô˜kìˆ_€6BĘÂÜ7\8Õˆ€Í£Ö!ò "ƒ¥‚Ë å¦ÐÈJ1`hñ_b°·ù ޹ÏzTë…t¤l•ÿ-Ί`e Ø.‰³Ìc@Eý]ÏÅÐj}Wè•ècîpZ…Y•Ýå‚"GÌ\@º¯ùèrâBDSÔÌåá&ÒVb²#ÿdò0ã$ŽÞÃ?© ÃUj nI=%ÉÈSøÖþ)—ÚPÛ¤¶Ã˜ ÇÆ —²’¶2ÈÏDWA±ÍQŲ²ËM$I(´KCA_‚¬*ÀåYÛ)¯RQ–ivŸSÂÎP„¹¤º oA¥Ý:›ܪ»©™6¬qXÈ‘GY8R9A&Ò¤Ûå ã̳0MRÔò\@Ëlܵ\Á¢V’²t<Ž& °¥L nû©*6¡˜S"B%Á¸S 'ðÍdIÂ(«àµtpzͼ6Ù#Grp /Z¨››eΣä˜CPm»“#}ꎶś¦ôX¶“"e‚Œ’D ™ur$Ttr%%F*¤eæéäý™é_%Š…a³£¿´êÒ+œþÊøÙd% G¹ÒŽd¹0’Š_ ÷ æro?cÀ€ò!³”5šR&Ç07‡²uó÷´àtà jo£#¨=ÒéH“΂8Á¨€! :Š–Rhõ,í†á.óÀCŒŒ+µK±+1QÃ¤Ž®’RŒAÓ˜Ù´~i©¥ *LV»g$ŸÑ¿#ω&Ïvˆ§Š³‰X BN%æ&‹Ä XWØ!zz¼™¤… 1mØuh6ô‚ö­:qO&ƒ©£Þœ°ËŽÝ%. 
sŽÞ Šp«! ÂEœ@U<ÏêÂy†I,¸ÈÈÕÌjsнúü5,;€ÉàFy %¹sœ„¡‰e| UbʆxWr•˜:‘[Õà|–4ÃFΫ z¥xóçòG™®õIl4YJø†ñèÿ–+%ýeqDiÅ•á„#Í%”έsBüˆ ×BµT5^‡B¨ÆÕGõƒ)×Pý8ªPW7‚´6eð”(žtÐØCqXI®@B^¥ëÔËH™…ÝÏ¡JcÒÈÉ#bp2Ôè);.jEÉŒ‰"l5~¶ 8±«+IØÃ˜œ‰všÅÈe# Í]‰±¥`jÌCÕ3‹ Œ£Òž”¡`Mc-’"=6ˆÿDŽÐ¨+0ýªŽ¿δ Î@<²µG`«ÔŽ× Þ’T×\9¯ÜѨ¤¿Rk$M|IÛ<ý‚ÜPŒ6Iõ–ø‰9å²Ë]io~÷rh~ɱ§¹³Y*Ÿ€„ ÇÀáBü%X@p£™ŠâΉñ_e%”å§ü ˜%¶Gé·þ¹é¼DƒÊ,Qíò·—P^ê„ñ6@}°ØÓ5_¬Zëh O@¦¦*®@¿|yR3 S‹² ’ <+^ÓDº†¼*‹ä˜tX¥!ÚhFÛú(ù„a›†u´zÛ&éO¦Æà#…Ž,T€+—Lƒ°&á½mÖôxÉD¾‘Ñ/³9¤$¸´˜_è5Q}º¤ ‚N\Ѫãˆ^IØN¿nëGïüï Mú¿1œhŸS8Ñl=é¿N4ÍIÝ¡ð÷8É!ãŒÑFÂ8,n” [œÂw“°¥M{ Y]V“¾9 ʬԩ|p¡ V,²'ÓDب”ÙŸB)H¬<ª}ãÕhBêÜ „°Ï¤.A‡¦i(\±ÄHzJ ʺ%(ùU×u)›»ÐöPAkZãÿÓ:^Š3'JhRÐÉg·ÚÌ¥ÖÂkSGùIJJÂßäGý;)19..¥F||B|bJBrbJJ¸ø¤–ñq5¤¸kS}Õ?> Iª.U¥«îûÿÒŸˆž™Ù¦xsRhĦ—6­Ý´zÓs¡ ä tåmÓ&¶›ÍYDNþDò¦w,á<Ⱦ› ß™60ŒhÛ6ÔãÐÓÐÑs}½>Q3þ浿·ðš‡}¼tXܤsuö¹÷…‡wôù-õ刷ºÔâ눨—m–þ»·,˜6ñ™C;çÖ¸."´ã²[Çô(»ÿB6ûß{ôXëõÏŽJl¶·Iïg-c'èÒþ¦}g ²£ç}¾åÔö™Óã§f{ü¸ýÝ”O ^ïm³ü§_úDÎx"áç‰qG{f¹ºÎ¸ý®—7LH4¯*¼óƒn'$.ÿcØáý©?öîžoˆjšüù¶ú‡ÿõáCnØ“74þ{_›]‘;V—ýpw퉋¦.°²û¶OFÅ Û’ºëƒÅ“7›îî>¦²ßöÞj÷zò›‹t‰x6«îð_û:}nûS?è;}O‡b›U{駯åÌ[t[»OêvZùnë1³Úµù½?ç¾|±ŽÍ)±ÙbØ­dRZI±mö¢b¯ß2QŠí@næ=mn&”Ü7R%>aÉ0a¡:3ÛÑåp¹ûÀ­bˆ›Þ*6kl&¶õîÔAJHNm“?áÉøÃVl|œê§cV\üÎÊNh•Ø*!5+Aýœšš•×RIŸ™š’‚ä߬ÔVãR•ï-“húTø—|OÍjÏIÉ­â²;¤¤dÇÑüÙì'}ÏŠËHÎ’•Ÿù²ãÔåª~H:¤Æg¥$’¿ ¹$aý”ÍÚ›Ùªe©'%5;ë‹ÏÊHNIáù’ãR3 žŒÔ¬–ø=)1£Ù¸R³“à99;µUf2´7).3IhoGÚÞ–ÉZAù©q™ð;3þÎÇ‹”ÏÓ'±ñO‰OHÌÆvŽ"EIÇ¿gÆ%Äãû$š/Žõ¬CǬ q;ÄÑrx»:²ï¤Gñ0Žrù4|!6Nl|3Ɉ`»ÙsbG:>qñ™ð;;.Ç­ïSb\¢Š~T“Ôú‘˜AƯ# ì7™¢¤löÌÞ%§ÐwɤUIXºLò_¼òÊJNRÒËÿ±²1mKÛ †Œ–ÅïÙ´LÚdVVB«¤x·ø@´ÅLJ¤kUuZñÉ1!ð÷dR^ |‡ñL…ñMè€ô—Åæ-1 ž“³èo’F52}ûÃg‹µ‰Êq)¤-ɘµú•@Æ®e }ײ%’d\J²ò=)™ÑWRVü-ì ˜XErÿø>Àû+÷ŸíœîãZ²rµ?zï ¨ 2D¸eÅegòe¿[ÆeeŠë$5ŽŽ#_')컼Ryè{¿ù蘇ó‘DgvÌØ&øÉî igëWGe}ü;Ú† Í…(ƒ-ñù]sê,Ù}ó¤ik'}{_¯ƒ=çÜÿܬÇÔ1±gZ±6ÿæñù/®^õ׿±û íß(¹géÙ½ÇíØ5ª´äì™7œµªé[¾ùnçÀ¢­ßm}-mÓ»oÖiÑ&ìü‹Ï˜°níþ]7F^¾ö©·­‹òKÂ¥âç¥Nµ=ún“5g¾Xv䑯FÙ –VÜùØž Ëj>·þü!sý'O~xƒó—mwÿ9æÂ£åSš×þÆ>êØä–‡Gm=stü÷×]jñöuã>X×taJÚûñ7ÆZs¢gÚ#¥ùÅ¿LëõZÍû½×ÑcZ²së›sË7—¯lóm×§>¯ìPš4iÓŠ‘gßÙçl2fþÞ²ØÃW/º¿÷üý¾^ðÕƒ“'~÷òSaÓ7O{;iý÷”Ü™Ö嶬?zSWÏÃ\‹'waĉ䰥o½zÚÚöpöÔšs'Ôšºö|Ï= 
lÖóí¸Ç·»î&çljO<ö^ë÷%z56ôÁå3Òÿkïà#‡;tøz×”®ëæüyÈ´>{Èk£Ö¶{L½uWHA«—65Ÿ2µxXÈÜßæÔÊøär×¾kz½²8½¦uòÀöC.×ÌîÑyi·vi3[¶üƨ˜;ržÍö»Xîüñã~ýþúžM'?ŸµúðÉG_¹û‘s{þòý¾Ë]Úf¶9à“ñ_L.1ÊP·ííSŽm›vªý’¬ͧ>ÝáËqµ^x|ùäÊm£¿0a nœ>£mb¿…ºß6¬fEó? rÊFçß^w•uÍí¯u©3qèÎg:7+¾Ü>Ûýà´Å®'+ë<Õªt㢨µö5ÏW¼²µæW#g}‘›uâÑä&Ÿ—5ÿìÁì3ïux³F G·»ÞÝi¨ÕýEûâ7¶5|ócoZF£Ý¹…ko–ju›}ü¥y—B¶ý5lXïÖ6øÒf÷Ÿ÷c÷‡Îm=cCÁ¦ˆ~ß=y_άyzf³§b×ô¹Ø¯[³»{=³«4'Ût}Ïž*éÒ`Ï‹#L™Ï-›õË[ šv9×lîæëºHë<;æ<6¹î—“ûñã´ZË ìK[‘ÑíõÁÿ9Øô˜iÖꯇ¨ucÉ7oÕÿôÅK7ÙmI¯oÖïìÄq¾lm:øøüÛ/?²zîâgÇÝÛpçóŸ5èµîÙ{»ÚvÿGËóf\ßuCîÍ[©CÌ)Œn“”«b+¿ky|\÷òïæåºŽ¶¬7éüŸw¦5\Öú¥Ì^×ß|Ýäú O•ì«‘~GúÅ:‘ïÔ~gæåß÷ž¾1sîÌ'ΚŽo¾ë۽˚ô~i~£[ÛuhݯòàÃzeµöìÁo»íJ±§ßsãßÿËk¿\»ùÇÓ—¶]Þ¹ùÍqÏ~â¯û~{ù·ßµØÕ¿ÇúËk~µ{숾g‡ÏhüÓðÛO*Y9`Þ_‹N>ùKÍ?ÿøì¯ý·Œ;}pýÅçzÿTóÀ-íÒ+ß­·wÜMµ{Ɔn¯7sgÃöýÝ|ýÔ3¿äµû~Àþ‰KÎ=üЊ—^õ¹.¿bÍN ýzÙƒS,#gOÚ?§ó€Úßu>òÉí]µëdéñw]-û–Ø÷ húkóó³¸üÜ®KuöuYµ¿^‡Ú.w³F«ÏøÂæï½iÿ‚ΗÍõF>Øv{ä§>;Uë§Ò§»N>_ë;ºß{föÒ˜ïZ»Û^žv¬ÁÈ—úâ¶Ó[×>:¾éu]Í úÍØõDQ×;:ùÃC—jl;4î¡Wθ9ÿ¯WWüѵã6Ïç¼þÑÉ™–ÝäÀîIÃÂ÷ìŸôRËÇîØðFÏGßï³oÚ»óNÿ>êéÎÍÿx±áéWk¥<ÛºÓÎɦ{F>Ø|Ine÷ú3'}½×ú`‹1ƒlõoüÖ¼¸hùœíÒ„C£¿êw`I÷‚K‹R6Þ›Þ¸“÷ ýÃö¿·dU%õœ—óú‰ûþäKËâæ¹?6ü×C¯ž¹Ø>·õÉû}ù^¯•߬šûàýÖv®›V<èÜ3» {c`ýôSó&˜¾oóÌ3}N­{t挟ÊlðóÊîá/\7¯ó’¹÷¿^Ð|bLÝ~ úÌ/Í»ToÜÈ´Cçüøî¤7V7þlÖètïÊä™}wÚ³òR½3£ž^i›g®™Ô²|Ï;K_žß êÅf;¿míìÙg|¿M]1Ì4zêÄ_ߘS\ïGém÷Kç}oµžñ}Ó‚Ÿ;¥,ô/ïÒȵ­Ùðñ¬™®CkÂJ ¾šÝ È“ÞsUÿ¾Z¶»÷q§aKÚ°IûŸÚû{ÿÇsç_ÜÞ§[è‡ÍŸúnÇÃV.Þù¯éßf-ˆ8øó_œ~ÃuySME›z'G}ztÖÔGO\÷æái*æ?7¯ðºü'Û_ús]ÿ£+¿{¥n“åOw¾«éêa¯,ìÔ¡­òVº¦7=·úð§^?dP§÷N'­—Z6½´mõ´¢K…éIÖ’±©Z|eÞv°|ëå”V=8̘úй’§7~úÍÃOÔ˜wô†Å-îÝq[q“ÛßßhûÙ_÷ßvyü¤ß×2¶zà%{YMÓüîº85¬gé¤w´èRnaÔõoÛ÷¯Üûè‚_Èl¼ÑÿËZÇú$瞈,iøê‘µ¦Æ~[Ôkþv©Ö…¡­S]ßü꟭º¥Qßöyy{Õh{dÈ ­gŽ]zçâŒÇnü-âõuM†ç…\H ïwº¹±ó´ZO|iuž:ðÀ )ÇÇví˜ÜýÐK%3O_ÕyZ·Ü·ÏÝ>ë½eómc:ýý“³ó¿:sðÑŽï ¬˪œÏ‘!Ëþ«æ]K[ø†¿Ó¢VÓeËž»x]È-§jºoý­ÆWVßß½í¡-£ŒïîÛ,ÍôÒ¨Š†;꯾núÓyíCnŸ÷ý≎z¨áÐÖé/¦|¼åøíÑÏ7Ú¦¸bG›Ý^_ðÃo¯Oê¼4Ã<32jÿ-×?Úðç•‘Ý‹ëuµdî ;^7¯zÙÓaùœ]k/E69šg÷„çn)˜ýóêìŒ <ä»+ј{ë¾'îÝòíªíÝ[ÔôDìNùÎꔊˆ™›â×|·ÐÍ£o Ë]ób£ßÌ»Û7rú§‡ß*]yãæo‹~€Eÿ ™õ·G¬xuÊs·¾÷ð²/v½³9鑇_?úÖ½o_}zsjô¹’Ó§5Y´àÃîe GÕÿäôìƒÇ&.NÜôÀà©c‡ïÙòXˆ!÷®®KçM&}ˆÝc<Ûíñéç264Žù¶è@F‡‡¦¶Þ2#±á”9ïå:ùnmþÑåÝýÎlÞd]Ôÿá ~;ñõm¥™ç>ïxèók/5>ðpæ¯ãntÓoŽåýräÂÒIƵ̚¿¤~Ã…%Y%¾þÂo÷Ûr¾xý_ý×¼0 ùÙÎ÷?÷Ì­7¾u 
dæn÷­I;jm[òÆÄÓ¥„L’òoýfúoæý߆žùó¥.7¾’TïðÊã[w¹·ÍɆíü¾`Ä[cÆíoŸ[§iÕ÷Ü»{Ê‹{Ìy$rìý{Ï-ûîîQ‘ N¿ðf׺}3eÔwO-ù)·û3Ž”ýzðÝE]/¹¿¼¸8dùÓ¥EoVÚ— Ÿ9·á¨™%¿¿tâå˜'ëß=aôÜÆîïß5gË gëÙÛývü››¿>ºæ±Ä•†ãùŸv·;¢_ž?¦½8è–ç~¸éd\ëòö=\¸¶Î NÖ˱ Éù¿¿½²ö† =W6i3¨^ÓÔæ_™s?ï×sLnl‹o[oòÕžW/=¾tàmºcÍ„¯×¬Û5y]_çœzOì sMMþc«Ç^>vGË6sv´þ«ô“½¶•;6» ]+RjÜýÙYÏÌ\¸yö©QçŸéûõx{ç¥C>ùýþ¿<0ñòbcg²YÖ~Û]¸î^“Ó:ê•»óŬmçó^Ú2ï¡Ô„_óšÔ›:ñ¾wlÊ›µªsÝÜ}«GǾÚàlÅÓ}Žwõ w†Ýç~zܬÑ/MþÜ™öÊÃÓOœJþxæ c>nr{)Ù>¬ Àgž—·Yû«Mëo(šëÜå}~Ó›µ~lr!í×aç¶×~¬â»‹‡žë{øl~ú˜·êE•üÞ|æ3ž{o¹{Kã·£]¹ÜÑëü¢Ãçj}1Æþçå¦Çïé=ó»&|YçÂØ’øWgÞýç¯õræµ(zðËÏÜ9þ̪)·Œ(šzôÔÇû>Ó´ù‘wéð;ÈÉ l<¾UKå­¿&">U͹Ÿïkyó{'ü2Tz¶ó 7ßuÝ=Æ&ˆ¼U£õ­K¾»ßÒxÙ¥ëÛï¼âÈÀœáއWÝŸ÷ÑS‘?6ûuêS­Ú_(¹T^pwG×’ò÷Mhpkå…Ÿßþõ׎f>ñÅ_o}SþÊÙÏ~íXœvã¶‹ox·Ñ¿nÚvñÓW6tû(ýæÒ…ë×½ï«aKÞ}òõ‰/´þeÊÄÒiÝkÔ;öÆñWN;}êÝK›}ö†ã…w^hí}'$éþ“'=øÃÎ[OüQºê|nÿeÛži6¨ùk?}8v׎÷"~:W{øðãSfmo|ùÞÁ«~]óÖ_?öEÉÞ?¾wÇW1{¯ÛŸÛöãZƒrŸw»Õ–¼ôõ£_®r¼z¾îVŸÎÙ´áý~ûhaÆèÖ5ÙóðÔ½no[ûÎÃwöLÉÜ0úæ‡nK2¼hLhÒëP|¯H×½Æ?ðÑãu†gœÈ±Þ»ëæÆvLM=ðnîÏõóÇêM¾µ°öîi{¯7U¾Óf’µÏ¬7toÞ«ádÃ3x¢í÷ RÛ6‹:ÿñ°^‘ë‡üèݿܰ°ó³‡8WüÔ‚­üÞ7{ßíϵÜùxÞŠ©mŸ½¤íŠEœ ›8Ľiöί{?ºõýO\xsIæ“Og$>»rÌæ­÷~ÙïÛO½Ñ¿ãÌ­%c6oû8Åûfß˳_,¨»xîû¿„o^´õ®~áÑß¿UãÏ¿z0´ï¦Ä[îŽiûúƒÆÛc;¼òá …-YÒp×ÖE·>þµùÞŽ‘ß7Ÿù@°Ï¶¿‘²bäÜÑ“»if¼gÆî²M¾}ï»ÇûŸ¯ýÜÀ±?ÌÏϞسçÎÆÐ?o*y!õò¬ãÆ)Ÿ%|Qëãä^e]R½u¤±í"k9â/÷KÉúëTÝyÝz7Xr|»uûÊÇ­oÛ_øÉŠˆ'¼O'/µôTÛž'mKo›vfMÆ¿Kÿ SrÝÁ7Nø­Áô.Î#?Äž0í‘/k|=oóëã'ŽïøcâÒ²Ÿ—4•Þô8§¥¬ª—î\3x_¿wzê±ágn]ùòÂOëžšèyiÊ£Ãn¿'%méÖÜcÌ?E4u9Üjh“½µßë} ¶ál——zßñŠc`Ö½ÍÃZǬßy쮓3™|käõ‹Ovx¬qúɈ!·×Ÿ{du¯ZO Ù’™sÛ ½{ò²äýö¾×u×é——¬˜•—9maŸæ¥íW?øËÍ» o8ÛïºøOü¨õôÁ'Š^¾½Oêw½üØŽ›2ÖüRÛáĽ=NL(ónûÚ5{¡Îº¨/Ó^/YÒgéŽÝw׸ãú’ +WÏœõìë6ýúìÂ/gí»TüÎÖ¼5ï}@aÎÜF#v´ž|¶Ù¯¿PÒ°¸0³iÈî•mj.X—úŠqØÃY+x:{'M:êí5‹n~U4À;/mî›ú­û"ÊviýÀúÂVÍ}öKObÂáuwî]±¦Á¸ væ6k7eNÛîosàÍä¸uõž®?ùIÓS½¼ó‹ß¦{ó'G+íÕð™FYsVOØi*þrú€šY?xO ë÷â¬õ‡Z}¸ö“{w|q{ëÂ¥¯Ì²­õF|}bø„Å_]lûÖš‹ó{_\a{=í@øõfŒ<÷Éç=WñÉæw¾}rÊ›))•‹fohujÒ}¹iÛß=Òfh‰´©ÎK+#½ue3÷Íg–Œ=øN·ôv¿:þNÃàÞû¿¹³ÍÛÍêÕ{Íc¡OŽ/7nX4oè–6Z4yöJç²S~›~ðƒÆFKÿ0,lù§Ï&4òéÚ?l8Y?nSÂô ÓZTþÙ¨bhóW4úîýIwmnñWq«’9Éë?¨³u£÷‘Í ÿúÍ÷Ñ€ŽßÙòä7ç»­<óëÞö¥¿ýVtaçeS÷¢èCûkÞ¼"¥ùÎY³Xîø0o­íÞ&{>[rêÏ{6Ô)Û]?ï»ù¯ÎütàÊ×Þ3dr½ 
Þ’¿˜3ÿó´ù¹GZüþÖçÛfÖm–ßrÉɰï[Ùmö¨÷kæî¿gãoS¿íÛô‡-{òG6~¤ù­wšB+Îök:ç¾±9Ù¥zÇWÕÆ'§&)o“ý£–AGëw]zvÍ-ŸË›v±FÎÅ7ÿãî+ Ðli¿Û¶mÛ¶mÛ¶mÛ¶mÛ¶mÛÞ{¾‹ÿü3É9“Ìm¥®VWºŸ•Õ…‚¾Ï>QØK%à¶„z^Èo¶Ô§YL,™Š×Fšte4…82&曼"’÷ûõ’—ÄIÑþ¨æ×P¶ÿ|âùùtW¶çËñ“•;àç÷ûtõðs&ÅSÔ]vÇ'ëç‘•äZvç÷í«ôôcm+º7÷YZëÎsðâý·õ¦×§»§×Ǫ¤ƒS½˜%Ùw;$Ãwkæ¦Ä–{gqï­´D·r#³õ.~ןEE•x?«£­»`|†¼+|Ð"xð€‰ê5Ȩ…µ éfÆò{k§YпFN-;«àd¨wTL:i[¸ Ú†…· Ÿö´ÇÍü]ÍÓw÷HŒé E9wüøol~ÉBK´Û}3vVó›Røê’àúᬠ:sE©çþC‹JSÕ=Úøƒù/B.zi7ÊÚanôcÿe/NÓß0† O®ÒóÉË`à \ét€K,¥Àôq2ijø«Ç”ÕÄ´ûÎÁîîö¶0º¡\º;ûy¼èù\™™¤ºÎÀÇž(;n×”Uäjô¦GÂJ‚IãÛ{®Fs˜~|‘z!{ïO³­Èmi=Æq4¾·ZqˆØY¤H[ÑÂy!!ÏQ+zFÆûÜf"õE¤‡¹gÍ•“3Þ‹ßž¿ãæó­ây’·®÷®±¯·(ZèÑíéð_0#<øBHžµì‹$,ϰ?P¾›gè¯JÇð¦ÈØÖ¤8/ÏHÿŃ[h½þœ×É}¦D­¹>ú ØïÛ8êÁF!Éû(!3;(è£Ut\ưaq"\cÏ€Pî–*¦à‘*åUF;iÀ @Ó/\&,‚³ RþƒyaJx\õX>K@/ønaÁxbý”ÿæËpÓðš‰wοÊ‘?coáLŠ ¸Ñ°™ðk&üâ>&!¹…·á þ"…]d¤`+\묾ÅH­~(ˆ—D<ÁöE&³Uš0tîõ† žŠW\*G@R9¢CøÖiöp¢â´aKtúñ[áÄÐ%Ò›²f þ¡‹½\™À6Ü=EÒÊÜel<ÀSðdQ¡;ÌÊ+pí€/«Ð…C&ÚêÆd°£ºšH›»Û»Aó€”-œ[k›„q‚ý1bŽëOì)Q ˆk9Õµ æÑéVLG°îž>2'ß ¿ÿ^¹_?z4  Á#2¼L¯#}] Ëö5[PRFhøÿí£Aï«KR#zð]) s—ÔO›.€“Ùc˜V;ü¨Ó)ø•׿¼^B€\f Üv€”c¡¥^hÉŠELóHŽàFʺ4¯4Pì•J Ñ -V‹«>%ø•c’l­ëÂòº£ô0dyïf¤"!혣´,ÿ‹P´^-[[)TÓ…±æ * àV…ˆ… ,«jÜIZαŽD]ITy½ÕF÷ŸÀÞW%¸¸Nç~@o¶gÜMÝ­ZZü(â=¨,“@:ÏžÀíîX"¾m‡æ6EŒŸ"@U©>µå4$eh’Æl$›c`ÆCP‚’<7þtÉ DcåA”¹BJÌt•`ÖA("Í^Èç”И±Ižf4‘?5̯ՀsSìònžvÿI¸_·9Ÿ ᶯ4µj¿2AŒlàé¸Ü‡È­Èw¥“ÚJkµjê()ñ‹ÌØqø©)ÍS6ý\ICè.ª刷fQõRj «@k8$\¤A‹ªø€Ubº'!ïº<¬5F14FÑS‘_ wb Üí˜?&³~{nø½õ*~­®ÿ«œüÛ—ƒBÜ]`b¸Ðý> ºÝaÈk-­·Y„Á@*Á¸¢ëöÆöÅï¼µÂ@1iá(³i¾#·j¡€ºáW3Bg< ´ŠŸˆ‘ìŠh°±UϪºPUâ&ŸB³šëŠªž‡. 
“°c?ÃÂXf‰Ôõ¯×F±>%Bâ¦!6”Ôô­„Âõ~þ²ž\jqAþHÙ‰z²¢º5Ü1–x(e‡|Ü£æ—Nò‚jþ›®²3 Ì@ãbËײv (&¹M'ï’ÝtRòËKаê?A%íAotÎÈ ˜ñj)v½dÏ É9ºÏõDfÿêfÓ% N©&M€1hEê¡3¢µÀ=ÛÒaé?ÓZ•Õ©{l,X $³Ñ½†T`ÌÞ”n#I^{áè—¦†¢=ž XºØ¶u9É+çÝK kIOqÜË&'I$|=纖”\çSÃLÎ `”´É˜„ç&z4[±*‚AÜ̈chQÁ7NÚ‚/ÇÿžØ#26Í8íj7¢ûúJ0¶ºíš`¶ ;1çà4©®Ùã@…Úšíî¬Zdq( SyÝÕ™V+3”à*¸k'ë*ÝYá~±¹'/d+¶x#´B «[7$‚t[£Ë–e fB€Ï,4g»€w Ð`^¯-õä-'Èrmeªóà)ÜæV Ýñ‚³?ε£í0Oœ&Éýw†‹p?Í µÜqîØWJQl»Û¿ÁÙP¥ËŸa@îœ ñioa;X Ðp2èÝ,`Ó°~ÉÛ_æÀ¦X±´+ýà¤Õ µÚè'™6Y{BzÜm¾\‡Få{U,™¤Fà -\_7+œNeÙ²ê[².„-®à2 à.`¥ÈO±—,‚¾S%¼§AgoöUŒ>Ì«CnߢK~Ÿfº7qåÔ¾#åizñ|ÏHüx~à´SÔóþ'‚ý¯ä“‘•…ãWÙÿ;Árü¿dHšRQ×»suç.úœ”<ÔX™ mfž ;!Ô‚®ü¨Û2¿¾—e$LoÙÀ°dÐG­/Ô+j*•”:wi~:Ë"YZÛ+Ú¦Pk'Wzt~ß;'yŠa~}¿?'‡¥WÞ»º7'Gti{|/ Ï'Gv~7¿œ;ÌÛz/24nòl?¨»/0÷(þ›ßGptwxt}ס ; -#±l%wìÍ}™XÒÌÛÒprpAp'‡çâ´¡`+2ÒÓs1’l×?—'Až£å,ÔÛJV•Gþ¤*©Y6ŒÖæ?´Ñä9Ɔ(¥XÕJëgß.‰!‚Úå Ì'HÔ  ’=OÝW8¶÷²GšžJ“ö´xp%L©X3TJ â3%+Ô)ê”=0T?Ž›ÇadÅܵªNfø’÷ZFbb/U Ø¶(Šé¡¾Ö×5»Æ²ßZÌ2/åi#5yâur¤Ú_A`™lÌ©%Vè,ð–»CÙ´˜Îz¬€Ãð¨4„ÁÇc|£ðšQ‘ŠBèãÆ²8j,ÂÌžïÉù‡lƒ‘ñÞ˜Ú·¢ƒ¬;ˆg uÜÇØíð«!Åðÿüæêÿãd »ã[Aë…Q)©ÏF@/(µÇc§ÿi§÷Ÿ&>šW«"ß±w}ÃÚ„-ÞˆFcƒºþCäK¼gb xê–;½f¹/œ|AU}ƒž.Ìõ<)ÁîLÓØC%anæŒ÷\P™¤ÒÑY^ ÆØ¬ \ÖtsÃèeñ¿mœòŽ„šÖbe¶¬Jþ˜¨_]2Aù‡É_q‰VT9ˆH„VHÂê½=dJÃVËZq/£.؈q}Mèão2O–é+µÒù¾B6eóô¦xGbUk`ÈÏÌ{Ýá¹U÷/2©­3CœÚVÞ$–W§ ’™›0Ÿ:—pë óÒ/X£ÎlTà}éygl¼ifœÖ¬`¡};¾]©Œ¶¤Ó Áz@w¼¸­$ò;iæI¥"—6þ¾:5nO«X”‘¨Â‰ ÀAsZ¦@2i2n'\?XaÆÙ,ÇÞþH½üµ>ûy¦:Â} W©û‡7:wÌׯIõl´¥ Æœ†Iôð€ïX^®›¸îK B–…;ßà4lû»k |S[ɤT~÷/áUmoŽ¥˜\ˆpäík¸ c8EäúBQ6Ñw!‘,³PQE¤>¤`·°jÌ»Aõxƒ~¸\B—±{äà¿uÊ2Ò“¿Q:¢‡1LD…ÅbÊfô0éÙHt‡©¨51@E ¢*Ñö$†›1Nˆ 2q6æ]´¢÷t¾Möï¼.r¦$Óùï¯àd Í¥Ýr 3ÇyŠ‹¨²³]~GªP]?{&3™” ÌØüÖx°e´æ9'í$á9ÇM|Té1qj¯?£‹Iíb|üŠÇZ £fæpZgþû:œã¿ùSÛ=ÅmbšàÍ}­ÑxwÉç4ôP1U²Ô_#Ó1UïÔ[•Õu¾£Ížb/a¨ªÇð§æM|da?º°fÙÙÒ!e ½Ó'õóõ,R„”g2òIÐ¥âÁtð°6¼« ;Ù.â1¨JXÀ¤¹õê¥} Æduã‹Q¸‘ð‡•‹›òÕafذlE©sf†eÖ]^9ðéÌ$•0œ­O4ïåe,"Õõ'#ÀÙ/“¡ÊÒ ãë‡êË÷× ¦›n\6÷\_ºv[5q²BêfÓ׆¼GJœJÍ{Ói-¤ô\ÂE:—Bë¿T§¿¢)ý—u—ÂÇøŠi1Ÿf$÷Sa‘ÍÅ'שèuuŸ~5$>Wt«fQL‘öȬ|ÙW>ÇMk&ψqrn !¹r…Q¶kMçC"Qz.Áâ–¬Šõ¸&Åh?©ú掜 4òAEç>HÝC Uµžål;ð÷Üí¶ Ð²ƒíôþVí‰Åä-bþg£îУݱµS þQu‹¨>€Oø‹-4¶Ð÷ÑsÓ¥£B±S¹gd =ÎߊzÙ–O,`HÖò#¿Ñ’Ä4R $¹±%êgümô•¡™r³†æ>—»ì^‹œdžbaÆÆ„0 r¨ß$¬iæFâéˆØ ’V˜°$¿ÅZÈ'œƒ4i8“†‘-F5n3G‰çɹ@u¢`&\á9©V>3ÉÔ—•®ˆ¹“gëOZª§_°Âäß4? 
À3æ™.#žnÅœZR¥t‰Ϩ ”ŸÊ…ûn`ðBD`sÓ  ü%96è° ”¶™ÞñºT&/§x)øÕ¦m‰Ÿ…‹f*.éB$C+‚.¤IGS6=Vj>\ñ?šÀú½.Òášcd–Óo*ÕÝx~/«Š”>.pÕ¾—[–DÞ.d_ŽÎ¶¤ƒuGÐ|æ8ýÝXl S¾~7‘vPy Ø{VHд㠽ª[µDÎü0fsSSþV½9ã’ÙmꤕwôiŒE7v~¨ÔO.=QÍ“‚œÃ¨˜v¨ë¹7Ðd¯8ÛËnykHo7á YEÇìcÈÐÃÎÜïûòJ°)»§‡> Kžæ-Äÿ1A»®3_è{O¹ fuiب 5Z9g³¬ÍWR*máT}%Ï/‰Áø8µ¤DxF”TðÛ¼(ƒñöu*kiÓw±Ë—?î›ÃÐT멲YúŠPZ©¨œØä±/ÈE/ï¾å÷Bc66¢\JÚ÷7%tÙ#$|,éSÒWF}°<9=7_t&íîåh’?Ò•¶Î‡CtiÿÖ?òçÿ'âû¯ÿAŒÌ, ÿ•‘á¿#ãÿË6OBƒå0=tèwÈ© ¶Dëp4­PPØbÍš`˜ ÝÅ•ZCâUË38ykñß’]pè~Õ0pr^wÃ<#P¶Ü½;žfÒÝ÷ªtÉCÎ3P2Z»zyØã'2CY*áÜ£-ÜLóß oú-A—JxÁè/Y¢ha¿Ëa5D$1¾*]²fì9¢˜£„ÿ&lEËUL<—„BiéÅbhf¦â–DN§†eÈ7Š=1E3î-Ê®¢€äçhÇz7˜¸¦È¥I'¶Ã®"†F© Ù3îH‘ÙâBÓº ZW²½•ŽcºïÔWfÁC'µÜ±.ª+uÜø•5Êl›kP‰³ùwŽ0lEwÕ>øéQpôÿ <ãÅ”ŒìÿÇbëž™íÿ"üÿ×À€ÏJÏÄÊþÃr·‡7öH ®ÊN“×ÜnÑK3S¢(ª*”F#†ú¾L#B*Z¿uqÿ:uÃz:5¢ub=p{ƒµu<pˆ•8øû!qð?ˆ `@Élí°x}‹ÝŠÌ›ÇÝ;ŸÝ>¾–ÎÖÚÛÚZN—Ù·lNµhmÞÅ"…a«ÐÝíŸ:xÁ:¢`š+Vž×ªjE1xÅÇù4/ÁÌ1ÅU÷‹T(^镸ø‹6pö¦ƒo–«;ž?;ÒàÜ`nËõ n3Ö ÏÉ00i: HWjƒñvf§´;í¶3eT°ÚÑó~ÖþWpÚ­¦Oó´¢ ‚|ût>&3&gn=Åï ǹÓ)BXt}†(74u|»¿šEbË!rbT cÌ‘:!}ÀD_HœáPü˜åŸ`~MFV"NºòíŠÎ¥Í¿ Ë{€s}¸¢u™Æ´îSL©Ô˜šî9Þ»ö¢Èî¹Þq ‰¥‡cœêúN°#™¸'¾ïwá5XûÁ½Q<¿hßwǦÎU¬Ç²Cé<¡nÿ¢§ žößi=‚¾c»2¶òä·ŒO“2è&· Ë­yŸ‡RÎ!ÎCúôÁš èFÓŽþBSã|²ÚáÕŽ7ÛK´ùÜ°ÜØÞ”nûÞ~%~uFG0G’*Y˜ô2><:º¼‘ µ!n" )óEóó ü3i^akEÖŠnÎeóÓºæiŸ‰½ß¢ÎUž©žq?‘Eÿà7º™ð»°q´ãñ…øEëÖîÍ Xî‡'.JnJuÄoCíÓ{Dy{t¸l«Ú"džµ›µ»¹?yk(ÀŽÜDŽ%×UV‚T¶\®ÉIˬɾÉýقδ›©Îls=e𥦥µñ‘Ç0Î/j¤¼ÁðŽ!ñGÌå«ù»ýe¶¼Ø¥ëƒÈw‡ ñÃP ·¨rãÑåÁŒ1²«î§ØWX=€²·*xõ"ì÷…ÞŒ±láìÍõ?PûÂÆê¼=„½Ë½Û‡§ÛóÏ i(ÒÙ=M:Âb£%ÆØC' 4Bˆž3<2sðïVkK9YµÜúÞ‚mKmgmóy –½Sï ÒÑÛïíûÍöÍø€“s"c!ÃQè*z&ƒœ0žŒLÚQºQñ)æÊS¨ôT¶TZ©ØUN^‚ÛWåõ»Ýý!NMQîõ§QY›>òNC%`)nG¦ ÄQL(Þ*ÞÊÞ¢EBÝ3ÈÞÞl‹l“œÜ9æ;ÙéÊê³hó/Lëøßò›|ðpÈþŽo=(>2Ä>éÍ%Æhs)0ö~JíÎî•¿ƒB•k<Àûê=Àp$¾ Gî†iøPuÀÙ~Ò9"tà¼<€Œ ž¿ÝQ± ùÍ‘´hÞÊü< þÅxXaêx§g‹I¾Kì·/­Ç•ÞÓc¾ácðÅí !uŒÝ†-`’nƒÞÁ£ Û9j.Ëm… ÇzM04F<Ò« £·Óͯ¤qäçlôa<£¤¸%¡ß7Ãî“ö ¿ˆ ¿ôññ> M6 ØQÙ‚qz¥K8ÉÃÂįÉ3MÇÀíŸ µå{>cÏïÛ‹õFÔÒľô¿yùû!2ЉjTÙ^·Ú€xà³ôC© iñýþ1íî¼ÏSRLg ;äêvÝý›eÿ~se_-÷Ú)Ù®{'ÑŒÎXœâ?é9 ˜þ°sïÃjáÿR- û@nÖœå†vó¶2Hh™À6,æH Â¨Ûi)ô'èißÁËyóUþëcÓòfá¿"h¹‰¹ŒëÌ06ö”¸f¾ÜÁÍ\w¬Š9®U?;ʹZƒo×ËUoŒ[Ž~=O6æèÕ¥ åˆÜù´b¢2ßõ5ÓÁä«=²G´3ܦSg3)sÛw,Ø–ôåJèj-{‹m¶æ¢ìŽžì çi4G EÎäà õµWK®ïò Õ-M‚¹Ëç«fñ½¶éK¶³ØÀÛ([˜¬Ím0êΟöZ—p k|æÇçèÝÔ½@¸/ÈWéäjÎe>q ªÄAæ|z>ô 
n°¿½-®È©ø­sç1àáø6¾•î dL‹§à—å©ÛËÚü‹C+×u"ñjJm±‚'°c±Æ+ºÔRÐŽÇáwŠz¤rIùƒì e 5#ZêŠØ\B¼·"ôŒ­àæ*3óûŨÝÊ›'zLä(óuàF)êù×AD\bë/Vÿ[H)Ï·ÃX¦Eá7Ö~—ƒêËà›Š0§ 6 ­_9ˆ5Vkë‹8<Ì(ð“çí8É¢TN.oD,9úwM®|™¿¿X*çhp?Pr½ôÏ.“¯È­á|ÙfÅÓNÖ—ƒïLꃎE®/'ôIa±Çùäõf“Nç Ø´ÅÇËLür›·ÝgFØÃÙFQp ÎÞG{ïµ v`㠜ϭˬû)Ö÷=¦C<+Ÿé-øÞÆì.áÁFJõžÿ¦ÛÆbO7Pâk?ãÇkоæÎø;#QSÊB´#ÞŽžê‘¥êŽŸˆy²ö±ÚÿdMFxr¹É½& ?…Ñ 5šC"Iã§(»ÐÇbŸTÕ/SÒ5°Ç_¹;_ÞÉ¿Àëþ›ûA_ÅÃNñéX싽XeaØ7ZÛ²äP¤]> I<ˆnàŒ€Â{•qÇ1Õ¾»Ä¬¤EP“@@‡Ö6“^Ÿ.NV|ûB# ÷ß0ÄÛš´¬b RÉý)¹Rnrß‚ö´i-rݽöA+orÇXÕ&!/–/í/Öœ]÷]hJmz|„˜T‚¢µ,wšF9k2ÆÌ¹®ÀúŒ{WOŸëÏh‘;w¬ªï؇í{ÎúփܠdöãÈʨ¿Nõ@%:#÷«>øG±r Ý.¦&6aAü¤.–ûµp¢V¼–ýÙv£×x=Ý´4×i+ªèTÆôFFÆ4ö¬}²ªf1Æj–433:~5ÆK *†FÁ‚ T \ Ò{eYÕjÄıÆXÒ±3"THÀA±Ï˜|²”ßµÁ‚2Ÿ³¾U½Ikr#Rªâ„bà¥KÇ+M#[¶½çˆ8w¦½èûuþiåÌQ8Y²Øq¨çÄUTšmˆŸ =|æÐÚ{ x0ï `Ô|¢ðÄÕÃý?LÜ"ópÄ §ÎÇO¯ú zpþÚá"B†õ¸Ÿƒ¥Ð+ÂF°b½¸þÂ̘• wEîÍw½¶Tñ4x'¥] Áǯð’D¤ZÔÆ˜Ub—Åì3’H'H±¯%Çe'ñ|”ZpR¶ª 6‹ž©g_æ,âßVx=ÿ"޾,D¤Þ9náÆ'~X#Jd#%õ&Ë•* Ô߬ò5£ïHàZd[åÚ •ò©oF„$PezÓ¸ÕÓ¤<_É5˜ÔÊ_ò=Åg*Z8%È0‡ȸ[ÅqT¡1V3¥%Eáá%msO·¹{WÛDjb%-5©),ˆE Zü¶Äªø›|že¸¥ê4©ÈI€Í£2BáÁÃð‡%€glQ`!¦›î`°+§<Ž ca0A1‹×õ­ÉYq :Gù›šñëùÛWVG|P!i_Å \ Û*¾/=i•äöa7*E§FzÇÊW,\:ñ÷ë¡LˆÀ•¬…ÜÉ|žÜ»ñŨiÝÖ /Zb5&ÈTa5@ª€#$® ª9Ñ},ªÒSÿ ÃÃbxꣻÂ.Ãñ§¤ýòºþõHŽY1zÉØÒ!N㜉HàkK.'k~}èRˆ°r¢&"š#›³bª Ù· 86¶Í";Žfð•±él-0e?²ÒöÛüÔLLuJQut•ùµ8õ®acúZüؼàbgøÚ7· (䬈òèý‹Á•ߎÀ&öS(Xäñ‰O¡–ïT\_VTDzA:{SÐýý©¸X”»Ê6ÿéÙs.̽ Næ<ê´bízaî<à[Xıò©‹Ø9¤Q)¿ï5!qbk‘òYx‹ ð¶àuÞãBˆ)@XÄš X8óá7#t87·HØÃ‹Y1?ïÅÏÄìRþÙ+ôì’lÚ ®ðÍsÂ4ùSÉâ.Òœ³(÷,i‹äåFõGþV¬É¢’µÒš^0sÙ6ƒ”ã9ì…-~˜DSE>Šl,ÏP¹^³œwŽaУKÿŒ mê‹è(ÀxŒÏaÁn-¡Ë»¥È -ª{izœÈ-A—l(Zª.%¦93 ™BÑE,[)™­7h[s.B«PöVI€¶„èzÁ­{¬äÉ¿OJ¡dÇ0jO€_Œ_êÍSÏŽräp é%òhúDÝÔ8×ñP÷sÈÕÆ·ñ5î˜Dìi³ž9«§ªùrcWG‡ztÜä÷óñà%š•Õñ³§¹*(´ôp±·zæ÷yø…‹ußÁÄßžézý~…ÉéŸ}2gŒW‚¥¡¢â:Æ„Q¸|UA\Ð/)ã·)…ÿ¦Ñèi…GTñx÷Êô©Øýo[¦ R# Þy5)¹DP#¿ì“ÂÀ²RPNb’,DMÖÈœ¡*D1|FTHR¼¢²;Jé”j:ÍH>9#¨$ZàâEV,E`­_+ úÒ?’{.–ƒ,g,;iôýbu0æät1Ðõ–¥ÚîÜÅpcã Û–c,»+¿P2ᆰÓÒ~°.:ÄT&³XÑëyel^Ýí yô¹]-ìEè“á‡ö‡¡G°o­á­ 9Z~ ÓÚ@¾¸å„ØmÈ]‰`ÜŸ1Þëf$ Ø¿Îåt®¹Õt{Ôÿ<6«AÃá Íf*Ëû ¾ËƒŒ=– šÊ2€ĨJˆÅøàú<´ÀÄÚ™”µ14_|ŒŠ|qRSDÈ)1†–8¶^`e^ $àAon~3HÜ+cÑ,L·xÅ‚F¼{8‹\AöP{}ó8+`4EbIS’³ê(™E’ zBk⸿à 
'€’»2xp@?Q"LN1XÀÁ˜Àg¶÷öÖ²°¦&1^'&fÒ¤‘è„–¤€¤«ZAãâ̇Ôë"S†ÝXŸØ?Y:ºüÁ¾XQOOßAɉDöŒÒ|ÐãÈm8xoSÅHüF§@z…ô#èé™¶Çš·$X>FפÅ|ŒY]ép?µçFs¢t•Ð-B×Îc°0›:"+ʈðå!Ÿ86¢kبPp‹3ºŒœ he°!Ã;Ó>’|c{h¥UYoDgD0èe 3‹dAø&›£¥…Æ©÷²Ò*ËÂnΙ²o ‹Í0ëPÖ³ ¤ÕuVÚ^SS÷‘±rÄêVB‚VÉœ¼©X­ äaï­ÒC[DÑ Ã^…q¿ÒØ\õ"<¹r`_>˜B½ðùY&yoÛ¬%Á$þ}ƒ Ùëã’=9p6ÛœG N¬bº1ØLG7‹6>…LväA3íQ Y*YJÅ'—·îRp„t„æö©›'569Sô?-ÍC- tDàŽÔa¸kDZÌeš›”7²OÌ5,w”o”Oágø¥á³üMnÑšR"Í$ääµÿfÑü\]Ü$õnJ,Ø,ßۘVi6 :R€b«xuN,8fÊû.«þ‚,6\$ïíUW+BÔ1©¯ªwþhädzp’<̘9ç@€ÚCŸ”Lj†¸ ˜b’¤*É+ÑYt`‰¾4=Š\„9¿=ÉÅ2 ³£v|‹Í÷Š(úmç3¨s»O rFÇotÎNËKÕ ¯ë¯áñ§­ÁALŽ«Åò½k•÷»‡n“ɹ€™ód¿hê@í$Nš•°eéœqXB‰&OIc¶ßÌøk$Æ=È]Ã5ÊÏÿ+ó½öáW˜]¸Ú…Ò)‡£VÀz|CŒ–TÔ)[Qù8[–61>+(3‡¤áœòTó mÍŽ_­Hæu ¥ãÆ<ËÝéÖ×Yïöë|a»öóás-§.2é¥Ï¥—·6ÏñWâ;éJüÇì\oÖï˜ê^ÐÌÌ•äÖvN÷zûÚSÄ›ã-õ]õè!(AYH¦^Ò}{ Sº n¾’=4Ø (Wþy3ØFÜ™àqâ±’›-­åDØŒ–ÊÔuóÖ¹K[¶P¡ aòÄÐÁÅEJU‰H¢l!›ñ=^; Žœt˜tJn¥æÃBÁ3yxÔ¼$þÚ>ÒÚ¶¢ß¬?òt±æRZÑ!ÇÞ0DxT1½ =”™û3¡Wc Ì^$U€<¢ˆ“25I1ª~Ü[j6‡VÄ#‘2Çüúý_>¨€Ð0»¹™ó“]ÇÏ6ÒÅḬ̀ÓÔlVƒîHÞa‘;sáõ½¨gÓRR ÁØóÞ•œu'­=ü¹˜À—§Ýþ½d¥i,ÔŽÔ—.îãÙ¸ oèö‰‘W3 ôZ©oÜw±hý¹fÌ„â°æ8û*Ûvy»†3fˆÈb‹DW%„Öà­y#›zdîG*L&Ìf”6Zûšk1kU›y[{­£ÞÁFöféÎé'égé)ï[ÏÄÙÂÁ Ød¼h‘,©RK‹f5©¯µËÆ·jråQÔêu›ðJâÔÍ슥2Ê·ÚÑK·PYtqBìÈ9®©²“ö£È£ÖÿêQ{:iü(›0I AO÷ãªÃ÷”š¼f¢ þwEÎïåÃ8¸~ð%à¿ZY… ! 
“>Æç;V>2¦O‘tPm•€™²j?I”¶7«+ѽ?}Oæ>êÃçÏÇâ+¯«[ŸóÍ +Â*)7Ÿ£ê´\­=¯‘û_<?‹±m^¸3¾§í¬9¾zêExÌÚKz„¡9]—!ҔʗÐÜ*\7§™¨;ÍIªrÑ–zî k͸Ù}ÔYÂ%Õ XÁ*i±‰[BËœXê¨ú„H|\ú¸ê"væ»|ü?®RÂ³Š“Y1–¹ËS)JÔªÅé|*‹}Ôsa¾<ñHO ¸•)^<˜ÔRƒ>GÿÒçoU„¨jeca‚Cµ–bwÿž%0¦Jrš|e%†{ÓGøüýÈДìlï8Ê»@Qi•´λWîj‹~T yLí§èÔ&bšµ‚«Ç¶ü-—6´Þ¨" s›–]‘H‘IØŠ"S…¦íΙ²ULÄJð®(¨¨˜›¨9õ‚.y·¦SÒÎ%űXDsIÐeó âų°‹„^îèÒâ¸+I”•Žòs¯áP}ù9 êÍâ³Â.‹†vÍmrG ‡*ñá:P`é>ö5Â6ø÷ë‚góÌâ¨y;dõ,SƘ¥ „d¶ó—ÁÜÍÞòÐ4?\½ðArÁ=sêq|FLŒ¨{ÇcõàhQ{áü¹$×bA×ô݀ĜMÌŸ\ë Y,©ÄJ5–J@Ôภû:ȪͳV?“ÕBöÄë NÞäÒŠpæcÔ€YÉ¿‡‹" ÁeúrMçÈL)”-„;/ü _N*E,ÒýïCl;qkKsBÉÍ*RIþaåbãWžqCh]m¤Ì"r…¤NüC³H]ëê€|Ã’¥èÍÌò‰ˆ_2+« áa"tzt\\ /-÷ü¼ÍQrûº :Hn@óÑ9t©˜ÄÀ·¸×K gáÕ ÉÂb*ŽƒgEK·+IMôÉQö˜x+‚UþêŠ?K¿Ÿ\D’*"‡,¬Ì̪7š®e7ÌàÛù–Ê6æÍj×ör"øO}¨¶ìF/,¿ÃVyµê‹˜‰Ö̉8{/Nݨ5KÄç74BhË,G_ŠQg4všÎK&¦k0hmk3[ÝÜÝ7™™ÊN¥½ñº¼©àÙݧ+XgëÊ¡©¥^g>@pÍíŠA% U­LAÎËT·Â1ÛQ+yÎL>Ë#‚3ÚN&1!!ò€'`0àÞÃÖÄÌd0 úBÔHý¦AŒWšñkD'ˆ`'è–ù,Œ(E¨RßpâÓã€Tˆõýb-²nijºÀ&g|ù䔎~ñ40Ó»«óá…´=½†Å@Õ Xé"`õ¥­‘,MËØl À\“ãSÒjÁA÷r%&à9GþÈäQŠý÷03<› ;é¿K@§`4pÀ®©¹«–Rµ‰^-õ›Ýœöõ Ý ÷ø Þ‡yñ 3/£jíjÒxIq2J§}êË5§ª ,óÄ-©œ2D›T2—Ô(ÅáÈ3©üiÈÊ®¼bê” ºŸòï]ùêæºðîO‰¯}-e£ÀÞü`KÅË2ªAZwL1'ˆ¶çO?ƒŠOŠU‹&ß$Áu‡çÖAâgíÁ ²XT‚x:UcŸVÌêE*†ž<ÝœãÔÂ<,À L˱²O–Ä…"ñïhM0&Œ%¥•6æ«Á1ùBp0ؘìãó&SÀH¤¦Q+”퀶}TI÷c†,š…VA~̨–á&[‘¿¡äQΪàRYÁÈô‚°HX£ÀRâê xD¤5øá›¢ÁÙHïJ å~:eK”RÚ…ªiKq— <¼Õ|…dBŸ–(öœnÅö`’íÈÐfòìY"L³¨Ð(=Ü™SñÓúJ€v¾œïçjèéè¸/²éˆ¥³ˆ¢å¥`×0ù ZDåJÈ<íçdK%üøØýhÐhXf.¼™üÈj5’Ð(jƒZ-A”*|Ïsããô,˜Ñþªâð÷Äûü`ñÿ–FAÍùàÛ’é³y1Àél) ;Ipb¦I”H• )Å_Ó3Ó³I6|'צû…ÉõÃUþµM^æC‡V†!+§¯¬HØhVïRt]Æu=_ê#ïyÌšØäy7¶’¢¹ÎäXÄy2ù9›xš2ætÍ'o¿j2¡"»å¢®pGéÚ=!HfpÜMfꯖÄ5ì\NA™Á¾±E°c³^e4u0¹Í¾¥ …'ÿq,ð€¡…J’½Œc‡Ò^¶´0/R^4¯Eª¢%¶§4ÀW ÷‡0wu’ÏÔÞ‰“­¹Óæ6ß‹–x¹v(uK¯Ï×)”—¨wn‡×C*ˆÎs¦äLš†TÐѽKCÖa#à ÞÎS`ý(ëñ6äÍzÒœüÊX¶8e¸L—[š$Sª¬g$²^Î5†ÍpCXޤŒ¢Ò1‹Ši aõ«ú¿­7ðv0/«ûÓJt>«Ü`[¶PA@…¨ ²2ùIAÎI´Ì^ÈÖHêX÷²»txFyH]’¾UÆ­ü-£;:}>þ„HAusƒ™­hs4HBô]¥bZÖ²ªÛ;LÇ"H1Î>`,¥Â£Ø$I1¯®4)×`¯Ü‚¶<"E¨S ß‹+Ò±ðÅ‚2°l©L“Fîë3éP®®¤Nv²ä¶Ì—ÌW 4=Û¤0)ïw'ÓTB_ŠTÆЍô7‚?f‚B¬]0Z—”Cæ èe 9·$DßÀH‹«¦FòËe³LžâÄ”¡Â"/ƒ“¯°V„“¬ãäô=¼:…ÙayVèÞQ»Ì‰×ަ¯Ð£1N¥Vçy½v8êÑbJÙÙh8]ÈØÉò´¯ØJÎy{ëîâEè‡ÕÚò´µò‘a¶Ír70,o»­‚€ù2x§Š³iåץůœ“ IlD—Ä&±8:)µ¡ªYÝËÎØÂ,lW÷½[nªMÃÖӢqÌ 
F+¨ÚNonêR-ÕÒá‘ôr(Ž¡U2§š1Ó)]ËÐ3ë·–Èà¶q––4äGÐ*Î[‘rÒ\^¸(åHâ0xH)´‘ß—@V‡3Œ¶P®©øvÏÆý LÄ€íÙ¶žË_ TPÃ+tISå`ý+…ëñwob¤“¥UY“a`Ñ»º×b„TÔ*«ä5¹PÝ—B^#'T¬Ê<»úQ{à3\0Øì›°å”.õîѲ)–p™,R§ÖÓ+ù JyÇw^ïÿÇÛ †aX]›NÊL¡©N¡¡& €Šdóó™²Ò5Yé¯aHÏ—Œ7C%óA=Ñ6¨ì9sŸù ÿ®¼VÐ6xWÌ{ÌíƒþË^ÒÔ\ ª¨'¡—¶à³]î³Ìs‰ñb+u7Ž.½.Üœ-Z÷l©ÄWãÁRüê›5ruߘ»ü“ùÕ'¹îCTÝD ó ¬féàëâëdèt‰ztT°ó××óÜ9§‹ï‹Ý¯o‹ÄÞÖŸÑo»¡Mõ©Cçþ}ëe á«ëܳŸfEj?ž$ÀJñýNFžTS#îZùU…DñÙÆÑØl4q…Mc¼÷K2BÆžUÅx °o1=œAjtÙ¡“áY¥Äe”þI=ÊN‘•°î„ïpÕÐ\æ…ºù&ƒm6 ¸„L“„¯"©¯ì5…ž˜‡[†É¢¥ÆA©ŒN³ùelÃŽ ‰åÌ$F˜1ê‹‘‡›Ø@æ?*/ÑdþjBSàŸøƒò—éNˆñÏè4 É’.¸­â’®¶><ÒænÑ»àSú}ú[$¬ p1µÓõ0ǘ¬MéÍß`š÷÷)Úã®®ƒ®™©ïO<“ÿæÅ§‘k2ÔNŽÝ—¾ý¼lTÃIÛ1…ÙA bj1`I¤¦õØôÊfS4RñXFö^K3†ÿú%Yý˜2¤bUŸ&„!ü<.·!U±°å‘ZêDƒþ±ñ?'E 1TÏYº5£‹Rô=.O?Æ¡ÿz>lvaFÎ_­®÷œÚ3Œÿ?SÚz )gN35 ²Ðé‘ûkg$l (Ñ:Î6§duŸ5+:íŽçÅü pиšõÓS·‹ç 5æçã™@‘¹›Ü 6Ú[I¶¤Œ«^§ß¬¹¾£Ù&)œ³–4Uh¿% ÆÈþ!hp‡? L  …ƒ^ÒSbóMÒ¥ÃôS7—§“"F%NáRŠð|åo XåäHA†ÔQ\ D”BšE³b Úü’ Bž*þC¥ a;¹¦³’†á¬Ífj{Ñýè! !~ðRÂ%’HrÓ×÷K$EÌÒRÞŸdÉK%Ê!brè—|†éÚk Kv•¾º~U»(ê`¶QÈ{ùOq'é©~@xÆ÷üšÏ˜úÂJ²ˆõeö¨ ‡x°¥¦ª6‹Ëb~:þC`¡ó g¥#Š`Ý€s2ÀF½$ÅŠÎà,›D«¨MÒËRãÈôÂ"#™o÷%ã¥RÚŠá™ ¡fqÛÁDMé÷¤l Öµb€Y·s$ß*á‡o`³d¦Cê%Õ{´3ŠUÿu5@Q¶,)h«Ñfwí(™M‚Ù,MÝ)í´Eð}ü,\i¸’…gõÌè2I¨‹‰çæ<\ëâSËèÈÊØi¢”³ÀЉ@ØlyÏ•š÷Ö¶u‚5Ñî…º!SQ*–m“g#ÓNäå_‹ö‰ô ü 9*ä ~PT*dj>X}ÇéóM¡­M¡ÜªE¬dsÓ-S/š´ôtjni=ËKޢ㤙€¶^«á4‘\ekº; ßÚxµñÂvè¦Yö¨¹nwƒ‡wüìÖ+¬4g³ï¯v¡µbÛM‘0âüÏÛ»,–€To”È®˜ÐøÒ"G¢ ’ÇçúÎjÏâÉq™ ,›á­…8æhŠøjÇ×cíÝó@œ3ý÷f<(x¸«rQöìȵHýbؤ’*ÃW߸²1™„åçŽáŒãYk…þ‹=¨[ÑñŽç(£‹š× Bú×V RPÕ LS³››W5 RDFT¦XÖe åõé –ÇÕ„Mµ\µêï5VxÝ7iVz¿A8%¡TßÚíÉæ¹jÚòøp>ÁhyK­O';y³ÜNs`|¿cÓb^·LÈ´>Ïû'ûR¦½ ئÏëŽç žÝçÿSã®W˜vÄmÒ6™q6euÜ`þBAÉe :èÕÝ‹ì‹ïáj Qì³L­Î/§‘€ÎëLwázð>dNåXðÄž¡V" ¼Ç–ÚýIÇÊàÎìm©®Ï‚äw;öÈÈš@ö€mý4é‰çŽ`ÆC™ûLtê˜),Ò„‹!hÿžÙË7Ü¡'+*l©{¨då{ýZ,©ôo!ªS ÜÕ ù!Íç|T[{ÍITÕrawÅš‚Ü’cqú뎘Ù)±í B£ú…”–gF›’^ÄZHu5 A†ÄãÕ( GÍ•¡€RN–¬ö€+òi[Q­X0Lli`ѽÓS®‹K0MÇîÿÏ/z"Úr¡ŸÂæ2+qd£Œ%lV%F²X =åfõvNŸÕbátZ&Ù\5 ¤muo`Tôì÷ÆsÅw¿Q[<Ì‚¬Áñ¶ÜmºÎD©íþÈï0­Eëý½w§Ó•¨6SºÀét¶wM Ù=YÍuY<„há• ««ñ®©3¾‘«åzw²j/¨€QLzT i…Se¥D%kI{#G¬„]áÄ®1¨ªF&ìn£tZIà ëFû઴¢¯ÀA¬âªÕ—²î ijì#¤‘#JÛÎ/“’Nú™¼sÐ({×£Ct S›3[jè)õ§-s°((¥Û´nŒ3¶äB¢µ$îJKå°UíT—jŸa­åxIÏ<^æZ¾«âÐq`ÃSÃS Z‘C¿áÞ½ˆ”šHÍV`}Åä0FŽNLà2ÝD„y.iØÙÓ})ÁÁŸÈÀ57ýG¸$ y„ 
©8Àp®Žu“¯®à&aé…[€oó…¥à`{·M.‚ž`èÇ"$ãK4}EÄn)DÒ“C‡±_âçâmTªRR¸X€NA…ðłÆ¤¬æ˜™š ‹…¤kŸ²ƒØiŸˆ¬¯ØúùíM+Eâ¶Ú“^£L‰x@!¢æäiU† {¾Í7h9CNó€÷ PÀæ°ªËþd+n¥æ’ÔtÂ|pEîr–1a–í¾Š#®NÐr®š‘ì{N[°!CÍeÜŒ¦NÍ)D:šXÅRñ¿÷¢< X"ΖªuUCRù(«lp`bnÊ `d+ íÌdX)H ú‡sµÞn5ýãó{q¬éñeÿ ‹÷DÎGãtЬ×P 9VÐ[e2—>¬t4ÞlMƒ¶õ%Žªƒé¦‰YÌ4UƒÉðK†\m_}¿U<(Vçõ$³Yíä§aC°Æ·#W®9%·IütP“}jÜíÀúÛþÝÊSn¦áT§±HÈÚ–ñX X¬UaÃk§› Ó,´±]Îפ;5œ²Öãí^X¡˜·ûA|‹ýWZÎI0Û€-xúýËꀸfôÊ÷ÿ°•¸e¸ÑùÂò@QZµlë¼ðEpf“Í¢×Ìí÷ÍwçŽÛô$(æôŒXß4G©‡5¬ Æ Õ[5'Qw›pèS#.KÑ;ª—ZŸE(0ÃÉWž6(Õ”õ¡Ì¡¢®ÚÔD~þÑÇÎñ÷T£kTØóMÊsÇKCµÐ#Iª|PˆK¤äÇE¨ÛD+]„S˜ç•øë«’´Xƒ à‹rˆ#)NÇ¡/¡Ì}k¬}k–3ìyŸ.³G×@®‘\¦™gì¬!ðÜ@ˆ3´–o¨/”š£üùKµ}››~®xý™5Ŷòoq ïsN™u}·fTéöù}€xÝyÔXM’V­ QÐfht^]SÁýŒééfft“a´éÌcF‹B%L™®NÛð> òåþîúé6 J¯ yèw>MŠy„Ê"ó[,꜠,TOïg*Ã6]F>Q;‹$Î ´†±;['ôO¿25$$ _bž9‚1xU—Pñv*Ãqèõ ¢ÁWU3‚6ˆ I–°Ÿ:ñÛj°WaÞ* ’f´ú`z±ºHŒ9<«ÞÅÃMõ à=€8ߣç5@€!Í žþÒ1èùE(³’˜0A¨†ùd;f(fÌ(ש´2@tòD¼ñ·)ö¡ž¾Ýqø7âŸÍÑS˜ñ1eP@›ÖÔ©XŸ‰5u5%i¤dãÌ &»,o&M!O-K%½U*C)«O²ZžY¥YõMð©vQUeNãJ!J*ÆOZû9ŠE5«sÊuò{U8·4#Ý$çÊãÈ÷(VEUÖ0ødØå˜½`ÛèöÙéþ³]z àºjá‡,SEãïÌϵ1k\Ã)ŠÝ¥ÝGw‚#‹Ð%ÂÍ|ÊÊÓSn5ÛlS%œ[~ÿ·ÓìןP~nÍÓ1.o"nÚ^$¡‹%¼Aâ~Xl…R‘¥^ׯî±QÜ=I:Ë€¿’á*‘‚˜~Ä\å¦ý‡^cên¿.âlÃË gk¯¦šÝ;½ÀlfdˆÊj°Ìg —ʆɪ߲¾×Í/Œ‰Ê’µ‰ư‹åÈEwu£ä&ˤ‰ õ ¦¼šG/Eµ)·ÒŒt™¥)§ ºK=!ØÖÛ…=2Ôø˜å§z@œÎsœ ·Ûê,dD”1âÈà_•ËEBV”£‚NñëŠvC»:6e, Šî5dÇC¡c·{»øh”;<$iT¶Üø» Š’áœÛJ9’¯‘ÚxÚ}eéj§ºÖ¬Û\”à‹ÇëÈü¸1£eNl½¥tÌØÊ:ðå¤ì –¥®ŒLM®ð.Z- ±'RPD0_&ºT4{2<"ñÀ2JVQ<:µòtÍÄŸ›S•ˆ³ƒ$ïáa¶nµ|$ä[uCËá³8 ˜{°†ÖŸMÍrÇCó ’8C\^oÕ—â„ÁòJÓåY‹·ÖœJ–óhçmGë,§RÿMLF=D©î¥î®ãÍt* ZSc&êQU-V¯Á(Ûþ=€wv.q@WãÓ¨ê/•Å_4·{'ç!ï ó­‘¶D™Yq]›VÃàd¢*+Tu’*gZº@hakd†h™l6«/ ×ë"^£³Ï»¼ë¥~Jé^ÖŸg¤¯Ž¬h,ꦜ”W¹ÒV1w–½‚e(T¨³ºÙ£þ—k÷|èÆ•1ê`— nµ~q•¾qµ>qM½*imbK²ΪnÕÊOLtFO¾Ëç}±½8;;}ä1É>8s¾ŒžfŒÒâeמwß•ÈWT,ÄšíŠ,ÞîéhÔKLÕKŒZ赡ô1¶ëÂÉ)=Œu®NzÓYe͉cÿtÊ'¡MóŒüÒŸáÙõÙwÎË­„D~£qc¯Ã?qp£\µ`›UnËg%'©'„Ί¯ãVµ ‹È¡ƒ~‡ùõRþl€Y¼š÷«þ’"t~ÀÀ%À¤DËÅå2ýà¿§Wy—:G_W0§òõÆá]»žTxp³,c‚EOŒ>-zvÝÜ7Ò÷|¹&gÏpŸg¤‡Vk'juDX±Q¨Âse®_°Rímá½’jºYºŽÓ­áªÝ‹ÀÂunOÉ‘ç"™ñ5Íñf+îfÂ5H„åH1U‡+ž“ýÅk#òÂmuPÉ&a‰+ qÛ-GcØs캣u–°Ÿ» ä4~ˇ ™°c_3ï°3¬K\¸*üµtÓüÍLþz }—¤Žÿè”çZG±»’Pu® õí¹y¥‡t}ãñÇÞt'ü*Ó «÷*È>“Z.$kp&nÞov›Ôn¾Ðn!î¡æ46Þ_2D¸³‰wúvß„ŽO/⬵ÆÇr—é÷ÊTÙ1ëçD%†±VD“š&H ð\¥Ö ’Ë“WðÅׇô×8HŽùíiŠǼE‚è†n|?$K-oVF«h²pÔ"ÕçŽÜ 5¶ËÖÿ©SÆB{ 
M–Ÿ­bZwp|:žŸ0·Ñ«g†˜Ò— dŠ33â½Ä½czÃtpÛw4W˜:sõª|š~J|oœÃ-ÔëPìßï!{áƒÿâz0wsý~Þlë-ž4²/ÉP;¸Íê¬e[ÌêÀ÷’WÕ¨ Õ¡má*ÕƒŠâÓÏ¢CsâÚÑ')c׬ƒ6ñ~#'„W²>€3pZ8©Vª±”–SÀÉœQØúé°®ŒùðÒ1`Ä µ„Üzno©'l7´‡v;ثſAÁ¨ƒêüÚ­ÉË39uf¾eʰºÄ Í¢7ç lb6ò)k;ý뚯¦è‰|H8r6ˆ¿%WuE0®ê`¼ú{6¡€‰6NJ>jß\žÐaHMÔ’VÝù±EA^PG¨@êœÅƒ^,°Æ]û _.~$Gž•xñˆ6MÓ-×c$'$²vZ@6RO÷D\—t¸Œ´¦BI{…*ÉÖ\¬üüRx;1¦§šüKÃW0à¿kÿ]Nõ Íð2‹ô˜ØGë^yU"t¥û¡Hàëó1sË>Ü!›­÷d¡ëé~=u9¶ |˜.¥ûË¿z«ë¶%ätš¡ññtdóý­áªQ¡34u?€¼%N¼1oÍ#ð5[ o³V°EM=-׈†E¦IqÆv’P¥ÒX[çЪrjE¥IjV5ÛúfU­‹1Xg ÕÝBehÚ+ÜøB\i‚Sv¨ŠZÕ).‰ÓH6n#Û¸™e± bc^äçôÔ#úJI=ä¯bEtͱ Ñk”T{¸¤+=®T M£œ‹·¬ê+¬­.R!a¼š ¿JW<˜\u£Î—×Ììsξ2N;™b¥ÝêŒÖjÕæè“.v®LM.$$j\æ0ÐÇ÷OÏ-ökÏ \üãÑøy€l”m|Gi}úFf·Zr¡ ±¼h<¡l°íÍ€jžFVL<¸ ¦’bmmÐÆšj¡I,×i2l?·*Ž@eS­G’úŽÑ±±¾Ö^€ Ñf†j$xàèê4WðÛ»èkü¢º–ã¿Î‚kiU$€Ó7Ùžïi¾YÖø~#‡2«ý^ðm;Gi¨x\ƒ»/ÊçÜþCcƒìË3tœßûl(NEôuÌ^W[J¿_\]¡–|?scø¢ä¿õ)0„ÛŒ1Ð+§9¸ æfk¦^]j]î³Îå\X'JCt`Q9¥Ó#M\´r¡$©“Ÿ¹VÝ_~åʱÀ†=%æ&ŽY:1ÐN–‰‰fÜN¶Š9Š)NHåðTmA£|~~_ÈmÀ ô Æ]ª¾Ãð?ç òùA2>YrÆÚÖõcÚvYTõÊ,(J)/é,‰ËÒÜðƒsÒ¯õ´ÈY¾—ó]~!"F†¹ Òµf^A'*ó’Æ¿d¡éùžþÇ×.§,ÓÃpÊWà˜»Éæ×¨î’©7’æºtæpç)å)äûÑò uw­ËŽíŒÞ c6ݬ~œ£¹‹¤¥ÛmŒ@ðz:›Ñ)B’C˜®¹-¯èª ’ß²;㺫æ ·–UØr¥fw{«»ê}3]z’HË•ñãþ%/JK;ObÖ™q'ÂnÍ 6yi¬Wé0ÚÛÀ 2—mòÞý¥8pIûƒ|~,GÉŠÏ·‹¿—èBRvÎS„? 
ßn®ÿÈ… ¡eYpùEYB]s¨V©Æ¡™V^G¯gð‘z»5ú5#w‘ËØ[é¾ò¡ô€sÆuˆp‘÷Í—ãÊõú@2vëØžÍ«Pâp)œS·Ÿy·yºMP!ŒF Û&u§ˆÉŽ >“zPÓm˜Y°0ÎêCÅr'gï!χ|‹CÛ½Ô²%¢=çà1|PDZËÐGŠÿUSUƒU£UÓíXeLõSÃÈ–hl¡|š‘xåñ-7L”j²eÙ˜`MÂ5u óÈû¥€¤¶Ïá}FÁ—XDƒ"—€ ÚZû›.õlâÂÝ%¤·Ü]@àæýH¾¥oVA_pyæ–seEÛ¾kgm¸(î÷hø0j6 Î@P¢‚iꇺ/^šàåøA䓊û9ù™³~†çA·örÎwvØ’V{îl42†àMú{úVç÷U.- HÖnúfüfÚ#á“V™Ú Ãb(†vDê¢m!G.þ}ƒw`±ÏHЕ„Mh|§${ìBŒ+ûv0–]/Rnnì£=a5zî°Œ½E“Ü5B¢f²&Qí&W­º°ÍÓåsò(Ö‹ËlKl]ƒŒ£~‚áØ}h€£<‰M+Ìå¼bIddð ÿ•üꧦ½…vìű8m(‰]ÐoóÄ"­®ßÐ’ík‹‹ùúGô¶0¥*w(KÞêÍÿUïÅ73Ã4¢âx\€ãcŠ˜jË7,¤È!&”êv oñÄvy–‹þB \ Øæ×«Âšã|6/È8§„áX¾q0%€Š¯kbÙÌ­fw§—ˆ“Xûù ‡Hu¸èåÕ£K§ìªöŒ°eÑ‘TÏ:רÈÎ.DN„‰sháJ”箳¬cEðYDŽn7uë`TZÝ=‹W~§¬¦òÉÁm­aô&ɾ®Ó*/•×ëŸá ÐymÈBåm®ÏUÆÂµÖ\´é’8SÑÑ”êTW7þÁ(ÁT¢5ÄTbõˆ¨ zÓ0,<`ðúä ìÀøhª iÀ5ð™>ñÛéá1'0-»C8Aëè²K5VbÖWÏÛÑQ%§ŽŽŠÎU…S¥lÎc¡IóÖŠ•D6¦Ò‡š…TܤäR²ðc†}Õ&òB ü„Ö¸ÅÓœ&±+Š˜ …)Xwt˜É„'ü‚€_R}±OŸ%&”Tâ(£QTŠRÞ~‡ãǦš_ß-—Q•r}éq«·™¯~úÃ볨Ó] \Ž×Eª§SIe aÆHkÈe&j]žVî—§ÒÒïM\ª÷»–·Èa.K[5e2f˜V•<í9‡2ü j:Ê%sho‰Öæ­®Šê‰³¨©Å&`IÀ=ËôlÍÚKÕß§•YΟöÇ¥x©ýi,Š+WY0'I÷³Z@ZnT›D÷äÁG²„€? oa¥+!J0s|} c}£î³IG4…V.ÃÄú:6©Öì",_’’„‘GJ31wQÈBEû:QêiYím7 ÇÓÚ­úsÙY™pï•ë‡rB¸×‹ÓÎVŸºÒ”ÏSîYÞ-nî©=X ‰[¬nÓÑdÆIT«Ýº÷] \o‰Â2€ÃwSW_ûíi‹ VäÏN·ÓÇ_3ÈâǢA†kz„†CÌlÑÓΤ_„_ŽšõIêõñ]¯þWÁYX¯Þ½eÙ†âþS%ÖîEïÌÜðʤ?ˆP¥ÿ<4W`-ÿ¶93}mûNìòî¦()#S ‰ $TSÜxDÖcJˆkcçè°èœûEEš£Ð_¯Ñ W;õÛ“áÊ@«W˜<«º+¬ŸÑþçª ³YuÓawWŠÞž®‡ ¼¾(¢À}šlñ#;M¯A)±X¤hJ17œiCÏc¹È½VîROí/ö9›¡¡ÊœwV6-›p¢UT5ËìÖÆ`u´uhªFÈÖ±—ôeHÞ…±÷é–nÑyzRþ!sÌnF$©ÐuD_©G"‹ê“7³Éú°lf±…¢]í@îu@(å(ÿ4… \ô5–[÷­IÒngþ‰ÚŒbè ª©%4Ø•ŽIûïD‰d,©Çðåuñ¾éÞælŸýôá^n¼„Ì€é~»«ÁéwïnÓ9òͺÒíkr‘ͱ^{S¿yßîlÙ#ÔŽ©ñy`U` ¯EÊ‹€>K1زé#eZÁËT›ŠxZ+ˆFhÄFpôÌú³÷ab5¨5—/ƒP¦hÎ8úÇ-“ë÷‡½ìß8»ç™Ëë—ˆÖ(W¢dëÎá ÁôãU=€*”-¤«wÇù„Ò ½×"­-œ`::-;/g&BViRX1jY^xJß$6 ×¾ÛäÆk»l4/fä×±HQÆ6r)IȪî›0fH2Ñc𬣲†Í HjÚÆa{ºrMËÏ uz¿Bu€=„"¼–>¹1êeå @£ÉRÀ6¾|U‡ÀŠù6%PV­ˆGp®hs#Êg& 7iáÀ7ÆRó9þÆÊ-\¹‹Ì€mw+KmÊrÎ/'ÞD[²#—EÀ¡2# ìÀßWqR¼&êÓ«0VÂþf^eVÀ²W‚ÄfÔî ?U‡Pb>®ªlÒ¥1¬$^.>—ìÆtcýiÛ9ð@ЛÖøîC(+U{¥Ì@ºzG.‰ú,š³v>”)œ)´Ð*ê¨ãX G= ªyút¯'¾'ÛT"ø{«·¾ÛD$,}÷Û£û;ÿ^‚%à û¡|¨| Sú#ÏSYg !)Œ0;˜.2dåÙØ*R«Ô8ÓÊ\/ÞœÚ9Ý:Þ@î¡vÐp&:^/o_®c˜Àd²¸cõÍL[0_š3£F˜7D>í¼> ²Ÿ£É®ÇIkí!Ûü<@µáÊ ¾‡ÜžG#CZ¦hQJC!F°ËÃ$¨‹ÖPÈW±Èf-™P-‹B6o‚ßb”Uõ_8“¤- Ô-raÝ Ùß½Á×OHçºßEi&EœðC? 
—£ê;ã­‚?_Š +iO£¢Žº z(…¡Æ™ÁáŠáDÐk 4Dä:'AÅuÞ·éÔuì:6ÔvA-Í:\sP§¦‡‰¦©ŽÑÇŽƒŠkÑY.Ñ(P‹Š…Sûþå›zQ×ðFòÜsÄ•ƒ$G‰šBklºÃŽŠ©(G(ÆÓ'x úº ¦¨«1RHØgV—tm—‘É`´x¬úlЬèw érÉuÈ=™EpÐ4±ØJkhWñ$î/!t(1ˆxåfþ‹äŽ“ ý+àì‘S«+¶$.ÀÔÏ Ÿ\8+N æà•F-®Iˆ;OÈv† w2NˆÍËm1!C¸À¬,•£°¸(¢|Ÿ„_)^åýÀÓÀ‚v/f4÷ýH4›Ö‚£½G+—ëV†Ÿ:ܯ¾-‹Ø;ã>ö½ÉRÚÅ:øÞåÎ{Xâ²ÂeE4Àñnïg}àU#QHÎ/¸úö’6Hµ gƒ‡núä¢.  Z§›ý% £±úÖùʼnęÄéåêèò®”,elÔ!¨†´}´Uörclë²{ó ò‘2D‚4ò´*ž¹•GCGO%_§†NE ´KSáày¿å¸RøfplTE¨gýÅ1ôEƒ&„Åa2`ÌüÛßIÆËÔÌç¹ï¤Gj;Šƒ{˜6‡“ ,…ÿPÈ=Ð)(z—z׉L’cAbaœ)F›ò*‹¯æ>eæ© ›Ùè6 )¦M˜©ò¾û‘î³Gêè/4-¤6¢ÖÍß~ÑÜüñ‰äÍ ,¯¬ÌL†RNâ,~{'2t}Ô>ìþ˜¤©²íuÔÿÜU^Ç‘¢Ód|¾Þåbå‹ã9$ƒyÝeí½˜¯öÔÈ`›£ÊÝqºb!¡Þ,D8í»Bq+$.š•’L‘Ǩ@¬Üê ¦MvOLg£dˆ×a|±`H¼Q€"á£Aüˆ9Q°˜Z®„"ò, Ž—ÖH¶L®åòÉæÓ}»,N|>¾øú~¿Cœè#¢ÇJdGú@œ—ȉg)YÊ×ÐãœÍJ¼ŽJøzª»ˆ€*j(f§ã”1y?ˆêÆŸžæ ³†Þà¸Æþ\£¹¦ñ –M»¿U¸U¸Qº[º>yË|K3w'©3Ôôw5é¶<èû¼…ç=W‹i²d€½{ê;7û> "ËrŸ6‹DDmB´§¸Èuè~î¿EÔfœõfJçSãk+µv™ ÙWäm€/±û¶1‚i`šÞ”«Ñ•ïº%ä$YúA”U³ºc7­Éÿ©O»ª•S6@¥,¶YÅœ-ÚÅ7³›Çù­û¨v=—tçxg9çpÖŒ‰Ž7‰S‰s‰0îØfÙ†Y©­4\èùà Ú"vBDÆ-8‰?ŸŒ[˜?CùMo~ôÎÁÌ‘ÂÕçLËpü¦pº,Æã“Ìñt#ãÈQef,™ÉN’ÓóaÄïºø˜QLïpœ|j‘NöXK¼·ÚM1£\C‹ü÷°˜‘Ò=¶÷~)*Dhätö½’ÉT‚$vbÂxŽvcð¶k…2=ìåé”EÔ‘¢¤:b¬qºEculª óþ¥‘Ÿ‘C;%"”Y³i3šÅÜÙ¥@©ÑÓ“ÛµëT¥2–ÉzÙ•\=_8zéÀýò1Œ<¨–-.Û‹GAoœ°ï²PèTDØŽµŠƒšC}©Xª¶˜C†Qe¾c„ÓRY];<t–•&(ן—2;º(”Pöõ%ð¸Z,{¡Àˆ|3hOþ*ÊŒš·vg’|N€¹¼vôïN#.PÔ²eáeÝ aµ(—šy"y"¬¼af¼’v×!*“Š:DåD]Sdnê€|ãËÖ÷+ òÍû!6ϺûÌ­¹×žõÇfiXŽúé,…­zÓeëõœRÄlxþýåîÎ’«ã/‚+¯ýóß+Î ”cÙéµvëm[Óâ^÷ßD¸Þ;‘o“è>‰·üШ0 -ÈîøŒÞ¿ç™»º}>Á auBuŒãé3ß¡*ø»7´_­àèzt_ mç.-W£éð˜”. 
ëöÑdõ2º¦Ý¹îm}åUùñÁ¤–cùøj’0õB)Vmþgç0T&Óôxeý†`Á£÷«BŠWäUKæô‡öT©«+š©iÓ€¹{¹Gc™@k˜‡RÀy<_:éÓ+Ê`TDwfÆâjb-útˆƒ#ìÄÕ‹…ŠøæÕ}‡Õ(²’6ùƒðoO8÷߬&IÅ&Ken“L ÐUÔ*5‡Va¿U#í£‡ÄþSš˜ ‚RPšV"kšµmÞ |±qgÔ|£~c©| éÙéšAú×ÛéPq8÷Z—á—­@®B9!çþ³¤žMÀØI8p.°ê7ÄûYÂTÑ#¶ŒÝ1>Ÿ^8 ÿÜ_µRè*ý'”Â)} ‚Øû IòÄÁõjY² ÍánÌàaöße ±ºï¿erÍHÑÈ"ÊæÒo³jÕ½uG0Ÿ*ûænO³ÿ^‹ ïÍNQµX„Ÿq T™`Ï\ ûÉE½oT¸éOÑ›óL†ŽH»aïŠÞUókÝacì§Ö ½ 3¶y“I3ƒÎYÿÍç ˜Db½3|2\Ý••‰üR¯VXe´ô¶üʸ#5M´MÔÉ‚]Æ |¡è½r—ºÖÏÙÁ¹¿ë‹ ÃQš¡ƒ ïÃÆ2‰T’%œÓ¯jHÇk#y‚’w;Òv®`Fæ³ £Y‘®c¿ò/ÊÉ &¢Ï}i}ùÎú%G}…¼‘@Ð~ƒûò›Û•">M4ÍÛÏË!¿¨|Vôò*0K<"ëQ97¾F ë¤WµQ¸’i¦ÿlL”óð8 ¥À¿%‡}aWÖ§|eRT6²Òú!lRó< ZôÛ5S¡Vv:ÜrMAYUÎå̰¡–±ivÕRCÔ,ff•QªzÌN=Ø [á Ä-÷éÍ¢=>ÒfµM´ v­Ô²RýÂ~Þ}&QñeNÕØz¿†ÿ KÊš#Ö"§¶²wà#Ö÷ƒÇh“]ê³~ºÚ¢ïÄή4ÍU^™ÍPæ…`¦¥ÎVšÁ—m‹Ä¼œ@³mnh†æE˜¹v^ñ\ÝS/u“g–<â™ ¼ñ\Ÿ¡úÕNòÍ1ÌêR¨Vœ–PV6K±9.IJ©U/ÏPU:ñѸû³>Õã¾M^3+ ‰-ݧ·„^ËF”£9Ö¨sÄú¹à¿Ï³ôs«*u”Ÿ’”‘eÊmZ‘bZB °=x&¼^[øÚü¾¹¸”}Àe‘eúɹSkxØ¥ýÈî’WTfñš)¼@‹˜þz@Òê§,YYZ¡›8(!'Â3ˆA9¤£žN›£ GUby‹gœz‹ëb ¿ú»`ÃáðËæam­_9t|€? ùŒ£ÁCYšÂ†,¡~Å=Ø(²„CÂbN¤hK¡œÕó`'…ÕÓá®dwŽH¥lÙl—ÉU-VW³¶N*j ‡ a#<îO‚Ó˜ÉÓ++?¢*ëŽjX+óAmŒÏàù%Í:¨V…—§f‡K(20*ßÉ@ÂÔíþØßž…ýÆc}Å9ìÑÕCòñª7±¢Íü\ÇuÌîýÀô¹ß²tkí±÷Y= Nõ9¾yT™Öíi4K-=Œ°¹A.a®£9jÉꪑ#ïÅ EóDé}Ы=´µ9µõï¬'ÝVìÖýÂT=À1ø2º½líiËóÝT­Ý¶¿Fµ;M5v8yUÇÕ]k謵Þ¯|mšé°·Ñ°ÁÝ©^‹râÏê¦z êlKf ZùoG:½âH30ag~»ðbz×,Öð-ÆhÚ€Gë/ÿëÎãÓùÅûx:µl#›N`p‘º[û~à U$>¬ÒR:~‡è™”]þÅr~¦|o´ÈCQ‰iY<á8ü–dMqfßöìñFƒç1””„t5À¯ÓFó^Ç@¦<åð=Ú¿'?SWx4pL‚]é Â"­xD 4i?¶~tæóëÌÐ $KPüJ©ó„]Ôò–}«#°"~¤,&¡€Â•T›öøM¢;dé^·{†·Í>cÓ— =j´r/%—jMÓsÅ’ãÖÐÊ¢®¬Áù8W©’ XckÍ¡øáòp]Z™ØÌ¶÷ò*3 2]©¯¤ctóå øf8Ÿ„*[ó÷uùÇSŒ6eþ#ù05_g¨MÆV½^Û«Û$ã—c°Þ)Ó.ôãQ¥Z´›£_Z…Â`Fª\ÿ ¦YÔÃÑàN¿*¬°QHŠsCŸþˆD"+ "â†ëÌ? 
Q®v~V"C·ÊË埒jH<á!ød!}¾‡ŒÔBg‰QCåáš¿”è™a‘R KjýÑ”¨ióÑÍ[sC(W°Y÷ˆ`ÇŒ ƒx•0'ŒÌY€èüû²nJU!Mböôh8;YI)å¢Ô2|ÏÀ¿qحꩱ]ôUšÄ'åí2v›gÝ~àgÛõÅ×÷Ö‚–î8U}@be«£´—øÉfŒ12çü}z¿Ž·÷{ܱwÊçFãt\J¥éçµó>µÓÒ ÞÑ+rG:ýÛ˜öuø=‚-Å:À1ïhgaÒöðY‰Ú,}1 ¦ÙÞNî詌Ôp¦'š¦u€{òwï·Ÿwoþ>¿^ƒé£fRwNga”UpÛÔhEã ‹á£Rº‚'PFþôµ#DÈ^§b¾Þ«zGùÌ,^èpÆÐžHŽj.íˆÛ·£‡‹ KÚüiÒù Ý×£·«¿+x»Çø.xþ ZÒµ­+'ŸR¦ƒË÷‚yKñ©$¢»^*ï[ås‹…Ù`ŒfZO_º}dO‘u!¼Œ‰c(>!Õ-@µh ižXDjÁe¨b.ç–,ÌÀG—\Ò²·¼2s·DÁ>SH// ü×aæ)cÆ)JnÌŒŒê žÑÍ­¤?¡Œo]^®6=“ÿ Y;êAZ¦ˆñM‡‰a¤å™rÁˆý‹¢6­².[·<7¯°æêýjW“Ù^Pº­™AUÚHUV:^;-Y4H~AA µ‰æLÛÊ7–=‘a«^-l(kÖ Ø—ªå Q†ë(ùHeèU»²óQuè+–NÞV“¥fœ½õoÛ-³ bZêñ6V4hÕ€cÜ ”Èßàqí5Û÷ï§³ù$ñËS2G3F¬f!8^N?îU=Å´“˺•ãrH¿„  øÉòñ°¯¹6áw¦Š{Eß’zª¹*G­CÍ$ˆRȸԺEµrÛ@F‹*U°lq›%³ºuT°¶¸ÒN™ÍüÔüŒó¬s[ç2lμ÷Òw¾‰î™ÝSú§þôª¶® ï©;ßWÖIÜkÜcÝeÙgÚ‰û¹ó‰èFlÖé6Ñ»°m­¦*ß¶QÙñb"‡OÏшa&Œdͺ´–¼³¥t>&¼úø•™·¬}¤›«¸BAÍœ*[cî-y,È®S¥ÍÂísÝr ÌÈ%Q*;Ð[bþ=DkÄ0f‹„]§6ZT+ÁœÇ–­‡R¼H•Âà|)+¤*p›g¾‹wa±p¤»U¾KÎKü ªF°Kçˆ`‰Žr©è’è2¯RãÅ5Æj\¦¶>ôѪQÙÜ8@ªR¢»:‘=ì%EÜI{È|ô!*ìB¢  Ükܸ<ñ-Oƒnñei¶M‡iâ‹dægvIÜ´Æ@°ŒÎm² RCä³ÌÆc¾ÆñWÒÔl‚¤IBZ%o ’žx….¨Šb9êO6V£I³PÙj¦ÄjH”)ä‰ 3Ïñ¾Ã-üÖ@ØÈô;š-µ¾ç7T¨JeßXD`=‹+u§a è”ñuW\ÜÌQr”~Æl9Ý9ÜìuŽëOLU!å”ÅrßîûÖ‹K’YääÝóá3šå$à±PÝîã¥a'<÷®—‚±§çÛálèVŒìl'¬h̰Î\oágQ_€F½Ô”}Ah¸|/‚Ï®}äQVŒÅA3ëæ½r}åd­ÐÊ­‰ÁÓ¦ºéÔÛÈÚê!æD˜ÓS˜¥°wÂ;ÉÄmÀöëgÛçÒkæM£×ÐøÓùúâÑá6þSgÇïj¥Ë«º€ÑÜ…<ª,…â´ä³(~ðò?‹u*¸ gsœéŸ=P9Xæ†!÷̪8 —#kigªÕ³XéP„ ÞSà"xôií=Ÿ˜æ6ÏTs[ŽhêŠ_e¦Ž°'æŸ<öi•hf±éÉüŽmìf™h"³a<æƒ&Žw:©‡<§ÒŽN.ñæ¼Ðΰ=ÎÀh²¸õž¦”à˜â€ûŠF ZÅ!ÂCœCåbïaÓYï2§Åg¢êa ÌõÐûsÞq禀©&ì«ùÿvŸªÅ>¸¤QÃîÂD]ÔÏ©bïðS?‚ÊŰçЦ‹²½r=°.NZLr­ïj}S³C‰4Ô¤\}Ü0Ûú²ùªyÊÑ•w«Û5&&°<¿9ž ûåý£ƒgb’?Lj[áü(ÜÆR§ó žïèìëܯœ‹—!‚Û‡ä-nÐx¸¡Å Ô¡k Y·Ò·BÀ‚ÇBˆqéY~Õm­ë4´o̘à° 7ªxN1h4p  ºyùÜg¸`˜ª1ÇlM³=4¤·ødÒaIñ$-*êRí7{VO£°P‰ÜSo#:•f„Ãc)A¥M¯õé¸â,£&{ù‘ÀÆvа¹ß\ï)š ¼Rú¯¦x÷^çû\m*·sÇ™ âw„…Ûűyÿ–ýkðY€[ ;îµFÚÆ1 Œ8ÊY(€¢…›°›øìþ™~×3ô¯«ðËÊèˆ Ë +Ø­×®šàµy¶®KÌ¡â‘×UçÛ:dvž…ØèQŸš^+Œ!œ1¼ýîQŽÕ–mÏQÁRÄ•#Ë‘ãªðÖz¾–ë< ¬à.áN½òˆ¤Õì0ŒÃX‰F¶¼ªÄ–ê‡5J•´K¶ˆú¤¼baf B³œ\SŽ0–‚«Ú/Y?É) (*;¸l£¶QÉ˃›^“@9’=ÉÏoî¼rY©aˆ#ðˆ:†sê é0ÑÃÕu ŽNòü/Îý1\Ö¦YE‡msضmÛ¶mkÛ¶mÛ¶mÛÚï÷õZ§Wï^}ö>§ªòªˆ;#2#ŸÊˆÊüq»ùïòbãøÊé ÈÎ[«Q€6K¢y¶n¢H’ì£R^Ãì‘é3bFCü•Ÿæwý"4B¿öº2»ÊúÎŽ’ibYÙœYC ´É®že5<´>œò Þóé—iåG2o¼´f‚q4óýÈMò{3câË(š,¥>—ûD 
Å¿džR%Ñ$OßT¹¬ZÐr7-Ä2DR3›A е·}?Þž³U©IɦO}­FÙÓL ˜>6ü‡ƒv¤Ë”QdUZjŸE]zuJå 5’(Tßb´Ýï°§­…AUøO†²Ázó©ßl¸”w2ç”ðX›1]–¡)e°ey™@>8«* šÄ\~€4F¨ ¢a½$?5Á§Ë5^×$žqÊu£7‹“QÿpÏ\ŸHýøE³L’‚ð¥5׊K0¡“ôt-d_¹ûÞZenž3À£Oð¼¹Òzü±J­² ÄcÉ86ù¬P]¿º¡ ¦Å±e±²!»”¡º‹÷r“½zŠ›¦©åƒíÐí‘í“ìùlT»³û/ΗKÇh ÔeQçß븛kÆäVRM­áâ ïlÍ&Úœê±%æ3¬wÑGn?!go…õÆ…fÆe¦¢üÍþ[úÛº[{o‰æØ]3ß%ê 6Jé®qkJ¸;@§<ö|öÍÓS%™Ÿ˜Á±×;fîhc+¬R¥y¨Ò! E0¿‰ÓÉE–õ®65N51É?ÛïëEÒz±œÙ´ü4õ’Õ²µtnHª«¨ÕeUäµQê®›‹¦Ö5 G¦«Ÿ-_k¹1O¡nÁéw7ôpÊ•[ºŽ‚„ÿ¨î€4ww#©Q¢Tü•*¦¬ŠBQV}zQ§Vª¡#§[s´|ŠyP*ÎL‰ál®êT±x0´Ðü“ýg€¸{]úì E-PJòË7W•/æ”wÕjôEÙ‚öêO™è¬ZDÍwMb mŒ;ÂÇ–KT¹à±x[ŒJ‰á¡ºÀÙԃєy¤e x`í‚ütðD†8+YëX÷ß3ÁñÅ]a¬ú7Íô2J”t=R%“4H9®]ð'™‹7”jó#¶Äó Þ¶ø8ÉéU,+7¬÷kº^é¬ÔeºÊ>… Ú!-¥ã€wªhgô™(‹ñi?®ªqÎ1H€^!×kaqC–UL}ÙJ'$=w‚sŒK”#ˆì HUÔ€1–̰~™!âf¦0éõ¿ú¨û2H¡e…4\»èåñRÑPÿ2å¦yYÛ`4ŠÇhþ᥆GÕ; |,©Â”®7'¶6 å@cÔZû†5®¤Ñ*n19L´ŠÒ°!ó%dðb%Ìè_k0êKÅ=ºÔ§V¢µú8Ú=nåܓҕO!õKjÒ'pb¦¼<’‘åß9+àñ %k¬€<=ìŸ%4Èîö”GQKFK©Yóµ|´®)2Aµë6“•¸y”s¾¾·£ér«ãÕ kŒ‚wqmi·év ýÀŸŽ†Om  +|?N­eä2’6È/èk½x¿±Ä âi'»ÿ>ç¥E6²Jc‰I½®©Ÿq¶¤WtÈ’°£.Ð]N³LãQßdŸ:ëð/JÓž ë¡`‚Ež~²DŒlb¢Û&µJ¦­.8%3ÉöǹYoi¥5¤ ¯ÖPf±:¼ÙJÎß–Q *áWRÒå´Ë39ldvÞiÚn™æh÷Ùâä=<Üb/Ý8íËoª€¬feYW‡?¥×”Yµ—’±s “¦Ääf¸¬k$)›£SÜÃFi“¨—í'gô<ÆAÆcÞGS¢K"4WزÔIZö‡Ôâ=x÷ÌÌ%ç¤D™D‰ÒìaÊ¡o"½À˜ ~ ð±òä2š£zGCf5¡/dÁ52ýCt–nÈ©ô™§^N < ±i¦{ÓT83ÁÆ[\læ°Ž«—(?„jÖJÍ!Bð„ujî‹¢¡CôØïoòl¡wbÛsö¦ž«$# þîFd'îê4™0l”!+0šÊ§@qB© W{áÜz£v—¿¿嘣Ð HbÛ¢hŸÜ:³‚ýÄO{¦ \ °‡´§{–ù1ÁüùiÝj®_ÒFÚ×ÈÍʰH* æ{‚ž«ø Uµøëx*íztŽßHûúºø+ÏÐôSÔÌ8Àöy¡°Q>ûo•Ù @"\1Ê2­„r)oðJª®;øy´eë¶û>*ñ*Ÿàx Ÿw ·ÔMü×½ßQ/Y¹Tû ZòúóŽ£þº¶n¬Øžô\®Eé—´v³‰ßøQóÜÇ;ßR7çÅ. 
𥟴2Ovã7ÖçT]%‹V50-*ŽW±djéÙÔRSÔ8¨Õh³_9.//%Õ˜O~sÅèéQ¯HÓEû¾R-œ¦ž¢1²¹а¾ñ>šž¨~Ó2v뉾QÙgiNÈ#ßùN’ð^3gSƒ%]ø7¶)?”a‘“ƒÅÅæØ~9Žæ„Ù¯‹±]uÿ9ÓmãYÅÖèÚ|{ÑÛ£ôÎ/5ŸùlÿÔõ8LßÄ÷‰s–”Ì«ROÛ‹X‚F¡öÞ‚I¯*/š1¥ Ô8þkóÌÆ—¹e º°d[/.âJ]”¶ôÛ‰?Ñý²ŠýBÙ¶]fN§Ù´³Õ²êÄ7±Ÿ3¶õzW¢¦»ë÷Ií71}HÝ ŠL\ŽÌX_&ÙjÄ[•L«jD-LOÊfÇqÎÁ-ñ=çÔÕgÄnâ9ì;r+¤Û‹þ´vkrvQ•ò}&ϤËg¥èuÍ‹r6H”Ö¯nc¸”/ÁÖëy°ìœDW*÷{ï“•òóvÍ¥ç›.4†‹5°úÁP’·`Úð`Û¸yxÏé#9Q+LìKÒ¨y –@qCBlGHéÑùÐä»N *©wR –‡4›–qrû3²‡µHé+ÒoùÛ\í]êGÅ7pͼc»1~Ûtõ°‹öqNõ|Rû›s·ëŠÿ»öDôÞƒÿþ»ýDú›sŽûƒû;ZÒ{ÿÉû‚n%É- ànTF|ôøÙU¸RXï…‹×™¡à±Ô÷æa½ú.ÃÝ>5ŽÍøz‹AÆM ÉÀäKù ¸;!*&|þd¹3´qåêˆÜì†ó«³”F÷X§ÀË”Í?Ít)pfÀgd Œsä8h|@¬¸á1ðwNÄ,RBXœàÏsccúÄõo0Çú¸_»ª ôo’T;»Ðkâaeªt[›¶îo)rJŒ~îÛärþù¥•jÛ„ÙÁnð}æú‰ ò­nzôk×7ïÏvt=¿&jxZVºW[åì¶ßfrŸ}¹¾4+}ò¯€èC2ýÂöSÔLr˜ν]ÕâSÙ™…ö+ðvf‡›sVŽ~É $š©ykhIAšRc:.uéáó®EoŠö“®æ“ñ'êôUµ£rR’ti‰¯Réƒd¦º5·ìí¾ûwt÷üA}‡$ƒSô#–šR(•ëÖF ÜèýU…Tj‚fæ:£ fˆ  9·­QÙ›x>%U”˜bú7µiÍP¥ŠÕwå¡L:౿%¯üR*f?ÑD¦ Û¨X„(ãgDO@o”©îK÷ W:¢­ÂòuT£ú°{ñò_¨€ˆRnq¤ß{.‚šƒc @fðJë¥Áå ‘ù [y¤#g.–^†kìN¿ÌçÁÆÜç9â¤VÛ3‘Æ uÜǦnŽŒ…SãQŒ-Æ™êJ¯ue4ÐT+ÂV† u8Pê@T2Ù%JÍ@çòE;óáFLE‡%Ñ1+‰ÉDO´}X{§¥0L;tá¨MÅœëµ7ª;ÆÄ¯ ¿¨Z;},lnþ²u4¹°Ç NlsPu×LLÝ—b›éï¬|TTø§ÊV[g§ñ$£K Ú`JĶ Ķû¶*¢;«ßò¥‹ËÀÁÔÁ*Z®úMIº³yz)éŒlˆëË•ð5…·$¤Ëc^U9—„j­mØXG„ø«èO°:³ë¸¨„ ‹]£j‚dìš1§ë—Ø &% Leòò°“iH I£­®¢n½¹»Bøî.=CP=ª)7b/M±š4íß¶î|é?ûlNžËØÞ$$?{x5ú7YψoŠã«'Á“X…$âѳ֯׆?¡ Æ®/+Æ]fR&®)©ÃŒR×Þ—-gÐ0´±ã6Ð%<$"\âœs:[¨[nñßòÔ̲҅N!y¨Î ôå Éy¤¯3Q¢Ë’D–`øFFõj1õ–¨ˆ Kä.Uª… ”¨ÆÔg²³5‹ÑÛ °÷u5J1yKÔÍpXGŒ,ƒ&ø¿xŒ?õq™&è§¢âՕ‡*$ 2ŽG¿ØMs“?˜H=*Òo|]*0å@Ö?ʲÿŒÛ§|IÉ!åëè`f5#5qÜ'f¼ vÖ é¥¾ Ô´'Të¾U[rMh¬äî–."Í1! 
2i4HGÝ[2âÉš8[6hŠÚ‚–ez")9[$%ëV/—5mD1V6`d¯Téö>øk*Þ@8tïEÜÈ—KÓ-Uí«Zá-“Nu¨£¯ŽV;¸ (¡Ä&«R4ÞDLyã,ÿÙšu<¶\¶|ƺ¶ü¶|½˜zX 5é¬ÐZ=¿y²¦Ü°Q±U±YÖ-íð úäœu½uÿ@»‘}¾ÞÙ\h5qMÞ©öäw‹}Æ;¾ùäyéMêa{Lû€œFî ÛŠõÀ{åÝ÷böáwá÷QçºáM{÷=1zíxì‰ É>‘6;§…e›^¼¾…}J²Ç­€s) Õ)ÒÀQGÐK:YqW-®ª“© }ÖÄ,÷×_=ïzqŽgC$W1qñÈÇÅo®½7fÖ•5K±\1R¨tV/¡6îïn‰™RÓͼ‰áŽÕ«º÷\ÆŠ^¹|x®“¾Pu Å’#ãvˆÄS /ÌQoqÎ ãòÍ¿õ+E¡{ŠÅl–œ»t&„~iëÖí­,.¸Á£sw_ämæL’@7fåh½!'"`›[ãTØ-âŒz(dYM·‚—ú2îœÎu|@6+oX:ÚLtnÞj~ìÔsžþý“·Ãk‚¾§Çóh^ERЧâ]-|4¶Z̶(ôÄßWèŽs†ÃW £Ø`ü×8'oƆЃˆ:MpBÝè©î°tŽmH¬¹Û1¥— ½“%çÓáfy`ëɧëüÈì¸É°êÓF»®V5u2CÿmÀ•„þa˜×ËlŽk>ânú4¦Þæ´wÂæMŒ)ÜXpß/Sžë³–2R­© 阋hÊ‹ÍÇ¡™!¡ßZX ‹ó#úDKKº¿BÇò ²çG¯ç–à=V¼m`l6/44õ°<°½Ý\…wé(ók×ÐÍ«%p=u.ÄIm@ir'ä¤Ü{I¯V=\ðn  sÀ{ŠàId%lc€>½=P£äwA mx y àè.ø†;5Ñ™`ÈþTÐÓÀ$¥($ÔÁŒÿþÈ(¸A.†Îß À§hâ6 6”ûKáþº¼×dË/?½ÂJž*±{ì¶&øŽ"÷LômÇ÷æ‘èÎ"Õß íøÌƒ`—'n„ |èœ<1ÐùÄÀ½+¤Ä¨Ø¡øMïh#`¢+òª1ïÜˉëå5ï@H%ß³üDcÛ:Èëàw»ªÁuÝ_¶‘Ç|÷¸m²yû›dv{Ç©gºG[_úMf Ë6ò‹ ƒ¯…áè,úÀ„äC™õùÛYéütâ$õ¶Â¼úImE¹º¢£“džùSqzºÙ…i'Í£V·|dËç¹7kxÖ"çqi‹ìDäÔeÆË{çý ÚCÃ\óí¬À º õư)å •¸Z©h^roÖ׸!ߢâýJ³œ®_/fƒ:ú´TŽ:ÚdHÓ4 …2Áîý^ˆ±Š_Ö[94™<ø7¦r.ó­ ¦R!é=Æj®z œ±J¥Ê¸6Ñ÷BíW9ôÙžiŽÙu.*”hÖW ¨}îULÝ CŸÏx¡ÏÐѯåÀ*ˆfQU·æÛ„zåú&о!ý â Ú¶iž¦{gò$q}·më~ñ+ iù¶6`roPO/Ò„ã(™¤EWî&àñ±? 
{—<ô÷Ã'ymHM13Œ!#Ÿ(µ÷Í'÷ªÕ´,žÙ4Ø9Í|›Ù{tñÛÃzGÒ<ª‡[ÄAöL×ÇÉD^ÚäHÎÏ+á (rþšâ¿ ±»Ã¿™|7té¯ n¾Ãu¼¤˜›@Vÿ<à&‘óApudz,ðI Ù}byxäÖy¯ øy@0-Äö3ù<''%¢H–›ÌüEðŒ®¯Jô,fÙ–þ4ïxVX„×iý$W~¬Ú”SB-}rsÐFü~VÚL6××ZºlÄûüåÔÓ\ÒÒ¶iá»yÌz‘ÏÂ`Ѳàüi!Á:f²&ú¸Æ*á÷Ѵдa‘aü<ç‡öÇ(1øoñ X¨‹™…'i}ÃKDq@~W(܋à /ñ4ÉÜo:ŠØ¤ØÓNª2G0òH®S¼tÈlFUß;¿{ˆŠ±óhøœÇßÛIˆhsŰA6¨Š_Žm Ûj¼B:M¸;ð¤Ù¦·ë´#¬Š“ZæÙ²™›uß$Øel3§Ä*.Uj²} Ôk Ëߌy¢;%f*•…ŠÀXÛsB yL(k|t2€ÁŽÙºS÷æ\7zCi-†½'ªQJ_e%ª³Wôðm÷¥[±.J]2íVQ²OÁ ½açà-Ê€¤ªšÅJŒ«KöÙ„.,êÀ)H ÞÿÈ—…”ÐDsBò½ÈÍÄ—§ýîú:`Û„sËv½%Sé'Q‡)ðÁOYçÙ@lAô?M®½Í‚æšè#_˜ñN9í·ЊU<Êp™ì¼Á$¾ÞfBiâå MýnP{ÕWw©¦|¨â}Â.þdü’RܯP(÷+SgÎÆ2ç°Ê¥jž›ÇyªÆfyRM‹€g+ã‰~¼c½[;Û&ßÿzFtw¥K}ÌðU¤sË´Èä8Œ«_d—zXÛÄyM–.«ÕjÓ*á¦ê/|¥}½n˜H/ȉý̨ӵN-µ?.2ܳL“ä^¡æ~9¦r‹‚–µ«J†é)jžsÏ Ü/”úe.ý@yÄ$$íË5…,ɦ.#‘Zs\²l’ÅûOƒ¶ÓÜ¿Pì’´f¯41ãK5úý¢ðæ»3:ÿ&ùzÅÓ¾Ÿ²,ËÔ‹®š°z¾¿;ú¿ôF* àêÀ¬C@yþ]a—èujàVì)y¹™fLIºÐÃRñqümû+Cw-ô[%Œþ)LóI¦Eˇ.™´Ý—´ Î$ƒm³œ3¹‘÷ÛðÑ“ÆH¡GW|¼qÜ<‡‹–0 ü+yyº˜8bÆ«ËIw²øiÌ%ÙL’¬/Ÿ3ÉgƒsªUõÜбs×;q=¬ùs„¾aüaÓýÅÌ"†û Íá$Ç :À·L®^hQ¤ŒOá¯%GÒ‡«*_•QÅy‡é®eGi܆åþ׫JHLFâÛmõ{w6êÝí|ÒÒΑW›wkµ>¿¹ZÈ][}gúüú„ÎÀÕÎOÏÎïR_Í.vnu;é¤ ®å>`˜Á@ÃîáË“@ׂ5“°’šdÉÍ Zª“ÕBŠ Œó£Õµ‰îvc‹@{¾^F}£³½ÙÌbt$ÿfåç Ü’Ö‹fìÖJÕõ >.6¥”“°§¨²º×-ÏmPÿ áN)h^í.´Ç\³ÅøÝÄ9åµµÅÍÚöz—øÉö­ô僋ÕÆõÉ­ Z‡g€D§^ÚÜ¥`àDð0–"¿¹Tçì ïlm²_1‘BöÙöÊîú'çb™pêÜn{e¹1~è:p„1¶åÅÙ Fï£ÌÓ+"$YÇ&¨û{à`åç%&G lå¡á8[8=áxåyw@¿‡†ÍùªÚÇ·ÚOÂþ ÁP4Œ‘öæÚ" éÙÙûÌêúòŒµ½ÅŵÙJ'h e0¿Û(ÏÊHNÌK ,DöC“o®ÞâaއºêÖür{‹ðéÅ9îI\ÄúòúÖõU ,tcƒ ŒÆe± Òæ· œ€%‘|c¤&gÅ6_‡òÛ¿–Rj fšæ¹ÉíIOšaL^ÍO Ûæïó`ê*$sŸñ{ŒPŒ'Á<Ðó+œÍU°+"g öÕæÙÆ`ÈA5”¼žÝ8¥òüÊÎMu +̳Е¯bžLÑÉÃ5¨Ç[ZÐì´%®Ç É¹Ž žg^î$fX†úF^GØáÜ@øúÒüî*û’䧤’›Õ¬>z“SªòôQXÈΫ¾=FFKucs/ê eŽ ðÀa¹æ¦Ö§xDP÷€¨bLÌågG§D@g~lÍíÅ N(<ü³‹PžNPöÀÈŽîYz\êÎô`Ü¥ŠÎ5î\œ°"ŸÆîf§…¿©¹ÓåÈÊe§''X‘îü. 
‘>žg1ÞÆÎ&+1j<ˆí5ÞYqÿF'ØG‡VÉìK5îwNQUùLO†³çf‹Mù´£HIÞe8›[)üK“í¸ž9&± øw}nÔŠÇ;èÖʼŒçŸhG‘ÇPØÆËT6f]Bæd4¶©¯á†vÁï¯b§SÚt¯aÇ •¼b#3Ç>„¶Ä sÎ2†³©\Fc‹ò޼1y­¢OØW¸ø4›n- ʸj ð &\ƒjA6eUx»íµÝyJj’ªóìê9õîKp­üÐügÚq˜~ãMý=(\&pI<, `åÉaoøÛ«8N ‹õq÷9ˆ o9‹±nÎRg}%zIÒJCFƾꕕÂEMp0N°ÒYH*}áqÌ™T  b#Á³àê~=1-tçx¹mMbo¢èÍ‘˜__N2ä[_ÍV”Tí”ò›šiG¹Òý±6Œ${Æs¥F7l‚³üçmàì#®¸åíÌ|)ÁŸüòPÐòÌ çó0sfë6uvF(Àuعپ<‹Ë³Ûx©9ºÓL0¼³+ÜÀþÝúfhR”p^ o$8Íà‘rn4‰¶qFÁ–=à ú º õʬKØÜêéã`Ö>»K®È~7Mp+qÔÉ Åp¹Ê2YˆåâÅ1W?ÑBzn¦âbTT}µT5èe•äõ‰*OxÑm>ܨ# ³Ø!ý/ŠL‰5 b8#à8¼U ÈZí‡Û‡H¿ë…ÜO‚OêO›z±µÂ+¥\›*ï–ú‡õ‘ 3lª³ÆèäYVUÞMõwk£ãÿ³ò›ŠM•Z嫬eñ&çõº9yà¬Ý@ûGa'7Ç”íz_)‡ƒOm‚æàĉ©oF÷40èWRmŽAÈ>VùC¶ ß/ZÄY{I¨Ì—ó4žûû…7'‰M =0î«ô Wd’ÇÌÏKÿf‹ÑgË8¢Ãè^xñÁpêšæ“õSë„"hï~%m[h÷ët­JƒQžE%9©[¥Öäùfð‡ìí‰ë’Ÿ[ŵd/í›ÍxtŽü» &HøeªÐäSרèÖoèáA²àÛ~JŸ6¤wO E¾~vÜ.‡KŸ95ÿð…6xÞ¡NΟòî¹!¶Wß5È &LZ¼¯¨Ÿ ÿEâEbìtP8 ÷Š9M—¾³ßÞœãäÍîë'† ”ÀWx’ý­þmþ [¿KЧËÝkÐ'‹;é ïû;Õ^Â+üÄ|_#îÖ§ÉëäÞ-à+ï?]©G§ÕWÑlä…W}i|}«ŸûݤÀ¡ gºþ7!©Ì kißÜ›2½OÔm&{¶)Úe5×Fýô6…×à85 e ×òÏ›ù'-BÓ6b8] ø'¢Ò^DÁíˆ[¶ÁšãuÀwÝ€Ži€ÖxQ¡!žøþžøyjÒ0üo€r_v`^ði C¶AžÒü5®õ«1ÃÀsã¬xc&.£k°òË:÷ý'ÕZ>E¥8ÿ´‰®³Ä×á>ÙŒFç¼¹âØ£ ÚT°5ØœÑÃPt"Hƒ¦qè³·ØTÑã²Oè°VÆ›Ã\é»ÖqµáBÑ2kéOÊ'b ¤6Õ‘”¡Ñ4·yf—È1Œ?§cF—Y,{®ã^Ù¦¨Ù®ÔG[CW„µ+³ѯz›È¼ãm—ÙöÞ´ÝËÖëyh{z(zˆ–Ç5Y0S¬R|&MðL(‡ni W->Ÿ‹:S×";cf—Y-PQrkúAwæà˜d‚/‚›ÂcÀ]´~¾`ÑÓÌaÑÈ£®ÊoÇ®ú[ÌÑÈ?®ò{5€r§dA¤÷­‚ôÞ‰è'²É诜'îR’íÆÝ™aÓ3¸[è9zﯙA€˜rcîÈA¸rÀsõ£õ1”ïJÐÏYõÍôNŽÝ®(,¼š™§}qÅ™ÌDV×Bvån1½שk©é‘†ö5¨SÓ질KuÔ°maŸG¿òN±„bEß„µwØëÑê¾Ò§Â•»Ð"À Ý÷ nÝ ÏRû:Ü[E¶§ wGºQ›VŽOÔ—DuäC8äb;µOVÈ¡aˆc^¤ñÒÏúï¯5ßi ׿„î*M)Y×›“7ƒÆmÃ!‡ W\zˆÂûôùã¾|/íˆ\sZ¢GŒ„„Æ¢a骛·¹ä‚ ‡QLûß»Ê/-cí`c]†{ð ë[/©"a÷Š]S•éP£ÀâƒÃúAbýà0{³˜¯!1OA$¸ëbt‚•øop~–uÇm(( Ó{Óé«£Ò+ƒÃ<ƒu¡®ÎS[~–_²þìåYÄ@ZÄìÃÏáˆCÆ.“ÂÀõЫjÁËÚV¹ò²#5{õ¿ª_óœÞ¦ÉZ¼Ö(xbóFb³?|¡¯3ÙòËzmEïñZ:€lÙÅ,ûÆÿ‚ ù!äôÆZ"ýX•…l†a—ýª»ÃG£Ö†ÕúšT¹cô|P–è}?¨ßSÜu›U¹£³»Å¢º5zQ 2º_«Û(äíD±†Û!†× ƒÆ9²µ=9FHTqº¿¨·¡daŒ?²GeœhŠãC-w‘K3] ^`Õ‡›îø .©Ä†ð—“ËʂþhzE¡àøü•1òþ}æN²#áþc‹âÁ|[GTÔ1 Xu8’ß+Ý7 PƒúÍ©!k²¡Šê}½˜¶\…Ù+q,*‹7Í’èø;äWUÀ˜À­ˆ–†;jå…!AVÆx¸†¡Ô¿FVO&šcˆ•E/~nÛùT·9fÓªãŠqU'õê“ ±±?ù2wë³ðúùþ™Sôj­óÔ.•úr4Ú^1¤P3yFÄá9%0yÖÓ•ÙIñ›zÕJYg_ÙÕõÕÜAíWÜŸó¥p#†æ?ãiÎÆF³Kïz¤Q-@‹¥ÏNØ[ÓC§üjœª{{5Víâ@®F02ÓóeÜU*äau§CôÉ‘OR%5ÐEOíAHbÀ@S2Ôx—† â 
)M|`¯¾›…'e„ÿªã…Ö-íiÓ f#ân×ö–¶–£QËÅ#«ŠtØ‹h^gàóK°"NÂ5æ×‹áŽâ™ÖÆíš5º£'bµâm2uì úÕ–í°–ømÖ“†UW®)º¤ržS?ÉÉc®ké Uaš¦K4zâ6Òa‘F–ʹQÈo뜅Þâ4©âÇRïHŠuœñœÐ7Ʊ 7rѦpçn<” 7N–ég ¶™d™§4Á·—s­ÞOÍúó1ª6¼\½WöAØht+ÒlU5§¯šÃ%1Q¥`Zl#BF“k,`Þ» nâunI.ÅÕÜÊ-I:&á¨f½æÖ~ä ø¦È6Sô¥ðh¬ÜÖïx¬@ÜüI²¯ÏJŠâÀ9®Üé§ãþPàÊ€ÀGdg~€_`oÈdøB¿ {Øüà<õÞÔv­2SgoÅDÃPÙTjЉ:uÏnDZ:1UϦÆÑŠ@lL9ä$ûýØVÛ(c³þz€½À?`0CPõ‚[±¦Õ—ˆr¥ôÞÔ±K3‹Z(VHŠ][4r«áè*ÇEöàuK}½”žHï ´ [ôPÝx¾Sb^”_à<¢\ÜP&¶ÆNÎŽ&6ÿ’ì -¡ ˜Ùè þ%1±330üO˜ý?`nn:e{:Q;[ga'#G {g;Ç«²6&t‚ÿzQ)[ؘ8Éš¸)ÚÙØÊ+É(CщZ˜90CýÛVPÐÎ]‹†…•€†‰ž•€ƒƒƒ€žžU‡NÂÙÀÚÂHÀÖÌÚ„€ŠNÀÉÈÄÖ™€ƒŠî_óýK¡ad`…¢2°7±03wþ·#’³‰*;ýÿ˜@ÔÂÚ„‘€™õŸ°yyÿç:8þç:¤MlÍœÍ X˜Ùèþ±v6qüWˆÎ&Â&FvÆ&ÿøüÇ“qÏÒYš¶-¤GÝã‘ëoh,ë[Ó¤œ#2aàËpÆó÷OüÃâ¡qt=zŠ7Å© ÞÖósåßxmà†ñAòÉ{=OwÞŽß±¾;ò¹õñtD÷‡!ÑÙ£sücW;dlpuï õõLŸ5ºL9îjÖjâ[$wiËýN'ñâsóµŸút¾ú‚Qûjññ¤ÆGm‘³ä²ñîtöØœÞâ–{^v*„Ž÷:yÈó®v÷“æMéi#n1:³[RòìĤíÝ0ñÈ©sýêrvÓk¦ÒOXêô9?•¿¼ÀM÷Ài<¯:¦êD1ã©Mßû'u ­}áÊuŒîª¥¶«Ög¥Šm ûö!åÙrÍ"…íˆmgõÇk¥7ØÚq%ogÎŽeÑNÁ ù;ã¼vá©*ýgpÑnŸy>¶9í,lzNÏ‚çû/ýGðsQÏPæí5ļtNo€à‰«J¡@ÏêÌ-MªŸ5ÜwŠêI«:ü³–ç¼U½SØ/\ºwQºw¦÷Hô´x× [°WŸ&åÈIã÷ewA¯&þ¼u ûû­ÚæÉˆVªwØÓæéRýCä²ߪþ‰«+,åŸ)çèÖÀ'¿k›È¾ žŸÔÎn>ƒvæéÊÜž §hoø<;ÎFs¸>@ñÄüòŸçcÛµ½ýêìb|ç=ÎwÍsÏjŸˆ¢õˆxÕ²ÈübšºgØžææíðõ|øÞzO˜e1ŠýzaL±aÿ›b˜Ý,í–ørA—;îÜÆîž>^ÙU\(/—W(fqºTôlcâᄲl¨£\ŸÙ`Ÿi-×!> ®ÍÕ›¤É­ßíIøÏwšöìxAO=CPp"&|¹ͶÀVîzþÚAXL"Jr­¡Z]âtZö åÐåÇÐ’¯1½£ä0•:?Ã鹊ª|‰õ”ö^Kï6Åý’%ÜÚtZTAÕÅùá=xëiÙM=¬Ø%þ‚›÷èçù™·!Ë.%^€ƒ¹ñÖ„yà N|ˇîâ³ø+&˜©ËþIG•LÖL,Z>‡M\7l^Ÿÿ%¿A†pË3ß¶ær$Bᣠ…/­Aº(%=t8áá·E º³>¥Åb; Œàœ§XŠ´{å×ÀûAõĉG›#DSß ž˜Ï6§ò¯Žø•ì>3xà')£â y2çÊà€7c˜«ÀèŽ6sqíæ&ÊÁý¥âéF³û œLwk/0"«‹RZq~Âç—8Ã¹Ìøju—íB׬¬6ÑD¹âç²;05Ås¢öEž[a¶›Í_uHÒ¦bÌCõÿ–;ÓuÆøÊîÁ7ˆjf‡½í³;TBbö0¼öVèâÙ‰µœWl‚¹ö×D‡Û£4…í õ‰ð±{è•ËïÌ¿mÕ ­á1Ç÷„ïŽõÕgf¬\+•gÌ[ÉsÝ;Éæ_<æ† 0Ž˜!˜e›/¥ïuÎ  º·K£Oì7”åá`IF×CØÅó£øÁ{($Ø›b¡?%1g‘ï†Õ‰Œ)HͿβgGzçëJ¸Û{¢{¨kÄ~v.²gʪKn>6¨O¤Ú?ñÔPžÇ2,>`ìÿÓêÚqžÞ»O•Ž·}kòŒoR!Q/$Ûãua§š ŽÃ´R¤©ùG,Á1_o÷¹r9Á?xÀ·q3b¹•ˆª,¢TT'É©É)(è¨G§Ç'Æb ŒLçùMú–÷x­“éWu›Uhãæs—MXÍ’åq“EH1&ôeϫͅåÕ…Ú~ûÒ ¯­›U+ x'QÕ(ѹ‘˜ S°§¢ Sg̤+÷ÇeÜLAµÏ…v+oï]¦È}­Ko™ïóPtùw6(+WÏ‚À=++*ƒ±ª=®M¶R«3+Xg¶½’×µ1LLs¾ÅPü¨ü…ª¿}¾ÖÕ}(úUÍÅŸ˜Ä« …p~2×eÛzBä&ÀÍ6Ù…“)lË+Ä^ÝÓ阅mƒ»¼_bÍçô…  ˜¸¡·¤oߘ£n;Ü5B×Õ.¥½6:‹ä›+Ò#sa[ 
—Çf6Lg[òÛ΃WÆ®fÕÕ™dëÙ\v{b¯´v:—$NLO€6éü:—3œ¦ú†ß’x¢F5V¶XëýqÜééÉô½i½)IM¶b¨?ï?5Ù™šÌ1Ù…æÃ¬·iƒˆ<8|A8Í_3#¤½nMŒë»[ª‡[v‚\jÖ\ú±/í,队.žõŸ‰MòV¦À ]P Î@*ô¦5¤Éè€B0޹ÜóŠÆá(Iª0®¯ÈÁ˜ß˜ÌY6 ‘ÎñT”[o–’|•‘æÞß³ý„òÂgOSšÂIüoìþú Ë•¨À^ ß™³sgäÈ* «Â2‚bQ™÷0M²/^ãÏÝÇ|Î1îÙh9Y€¬Ý>#ÞbŸŽEݨïy,åp‡ÄÛ?’/JZÆYNUŠOÇO;?ªò™?xæÑÇN(•þ6qFÖ·T:£hõ”r’.­Ö>­Òé-­ÞX£zhí²“‘-¡ÿsOŸ+«ï—1þ+ajAC~Üaaœ`¡ëø–¨X|ƒ“:.@eyr)êR Ò¦qþüÓ'zxO'A•1ÅŒ<ŽC;;:=>K(ר®\Òã0=“=Ááâ¢àzSõøèÂ¥åÖ=êÖôé‘íwj`Ï­è·ÔBJ̉Ç×@åÑtsbš&ŒN jc2÷õíë}øð­à}8Á"æbÓU_o£q±ò>ý©”WsNÏššÂ¸XL<šˆS1šwn´ÇPq4¨ì:§ƒ6ç=Ê.Ú$C>Xf”…8áÜÛ¾W „·¦gRf̳۫ ž¯<€è/"Yâo¦‘â@n\t¥uõJe½_Üát,E:™zÑC©WI-–|œxL|dìõ¬ëð&æ—Ûë+³ Ä'Úý†=Ëô>ó~?ï½H`¦ )v\eËNå¥hgp-Ú…ŽH[]ÜÂxNºv©ÜÊfòÔ ¥R­Ä{m‰¿N%«”¯­Lƒ• ÿ¥Xß_?ÇJÐN¤©hu¸Þ´•;²] ;Ñ ”Dçf’M³jGªdŲ‚Ò’ö­µ=KÛ4Ô5û£¸r1íRù²f¶”Áë\W#G6ÞQÃ$fF*Šaxp?mû4ÿƒ€SþŽ5뙫\Åvœe+ƒt…vf…üØ1Dç3,gUã#[n Ž¡!Èý9©3rò1Â9é)¦äÖªÏÍvä³0:õ[Tã“bÆI`XÀü´vÜGÛ°Ažu!Ã)¹lBŠG%$'Ô]õ*Ñ iSzÍu¹jÑÂdGìàœkKWpÊ6áÈø"3Yq¹±ô°c£ŽN¡ç1Îãìñ«œ#±â)Ôà7‚ANžLn˜œÁ¶c°MöGgåR¯©¨ö[ÖÍ+^!rA(ÌË&Æ?ë=qº"ädˉ Ê’¹¼Ób›ùþ´} +À€¹«©ªYRP, .  í=[ †¼³xnÂÑ´tdÐÅŘ0žÅ‚ã\²›õ>YìT"º¼§-WÝëÒ-ÝÆ›0«îðr&Æ“‘wgöSndzõšõ&”ÙXqEÙ}¿ø%yñbs1¹Ø·²uºi,%]%Û•gij‘ÂÀÉ&uð¢س|Ÿ—Ë|™=L¾Í¡ÒFb!3~þ9HµµµaÚìtµƒ \³!W·jR›~g‰d¤–ÎÊ=5׳)P¯.Ë´²¶²¢Ôêð ÿŒîß-é,v>Ð-¾²úÿ ÍŠÊ5-å²¼Ó+`Ë ÌV‰ÚÿÃîíºÊ=Χñ4;l‡¡—ù+vØ™âFåË÷Ët¸þõ›ö}¯ýÃw‚­,Z.¢˜ŸvËAsqôÞéívŒ¿\¡Š½Šâ†nGÙÙõVÑaGéµÄlª¸£é:6H&ô³”uÖõà&“IÌT)%ïkÛ’;I¹DZ®AbøüÒàñÈ[™|ضG0i¶_åàí¶”®yö\À¸^däÕݹ;ÝëÂýÓ·Ž‹«Ùô2aîävռ̨šQñTA»n€=й¢@âM?ŸFrjvªAßN#EJF† QOcÜ}!õÙS(¼_³ ÌY_"xzÅ“0Í€0KDØy˜e¸îRBI‰wñãûÙÖ/Îò~%=jô(ß^cW$r®éV¦m@ö'=eŒžŠ9u² éß@ËÅtBp…h «@ñê ñâjç_&sñøpáýs9&Ëþ÷%d›Çb°ýûOcÅÆqKBxA½9ââ•~þN)][_z’ä Z/ò¤iÓ%-L‹ûl[ MÕTŸo&›K¿ç ªÍcÓè~0I³ÖÛK?ãÛT¥Ú-÷ùr®\ÓAÚ¹ÝÍ-¸Ènnum$Zr9'ËhÖßdhºïÕ´Ù¤#{¾c×µiœ«‰I«Ñ?Þ¾j³®µnCrN&uÈÆy«zdF2 DéÂâïRÇ›’ã)E—‰7’žÔç&Ë”å*1&bi¥ˆaà…¦£ãG¼ULSžŽÈÖ kËù_öÊ·´RñÉQE–µ:&ˆ®£…%Q0írÏ=QYœd·8öxN’Y‡SbàŒ‹¹:ו)úÞ}`Ù!¸©i¿ŽÛG óøÀËyW\æ DåŠ(xEÒª^ ˜yÐCÔM{Áç«™n‘ ª“ 3d+~9ä@ôiµ ¨zÜ Ë“"¤Ñ3%û¿=ï3¥¯q <ÂÖ\Û‰½ºK9£VWžÕ#ƧÈ„!¿â g¬{‡Ãu7‹ò„¶±´~‚/OéYUÐî’Gªchƹ¤W9nè·×È6­<‡îâ®Þ‹¾K‚boVgŽÓ’×b¼zJáN"ª²5¼rÄ7RŸg¸8ðÇ@/o!„…íì1«óé(a XåªÇÈqFÍ ¬î¢-ÄÓOMT 
è&æL`Œë0ó›9§Ÿüg.â¡`<ìP”u.‡íŠ[û;w>ã^1on-j·.É[nf^$%|zŸÏÎ-(ȚыñQWx5žÙ»ŒŒŽØëê”iÒÿ6I#ôg#§ò¢¡uýx÷[6GV>c$ ÿ<êA¼¼µƒy¤hkB¬˜—AÉËø{¢´cðÉFûAÛÞ£‰‹pœhV¾;CêÆ‘ß¯›) «ˆ³ØxÞT{å°?êžGÃr¹6Ýž¢rV2–˜ÀGÃÆÁî7øi>mk{źZwþöüLæ._ØVmË4õ“V¸ñ­1xD¤yÑyê­GDv¨Ö’Y‘»ƒ’Þ  Ñ̹>ƽk2Uª”e:sæÆKeпô÷ØõHÕ¾®Ç Ö§­^:ÃvqªšµÒíòŽÞ—gÈfS‚;q’}»æŠ²3EB~'²^LQÿTr­ïïó¾w°Õó9 †ŸCFM¹ÞÕá*°p΄×;á~BÀš¹àŽ!¨ftƒfÝgú™´è¹b6i€>^†¼£6ÌvÍÞ³ÌÚ¯Eq-lÞÉ¥w¿!š¢àë'ÅJâ'â&á$tSV_5 Ä]VÖ=¤a,rfŽ7¿?~rrï]š×™f *Pzs lªœ¶süþÝ;A1â=zrÜÛ<{î0w–¬£~Ydµ&‚‹‰àdm bŠáãâ÷Æ©èÍk>fÒ•5)Žáäì€ô‘‡¿§ú½ýÂÉ&Òø“–@â ï:–Kt¥ æÀƒ#ºðÖ.Ôµaä¨5äqÖ‘ÓñÌ„u}½;Ö3Ú¯Q5Ó|ÍVÝ9dÔêUÒCðá0ÞLtYñ‡§‘'kziÿÛøárÚÒMúÇgdPÃ˃´×åáñQ¢ÐGÿü¢¡¸Þ#¹Ž)côPO„ À}ø+¬ï³Ÿ)‡ig(Ì„â$¯8TII{¸d„Í‚m½˜m}9åþM¯PHT©ñÞð±8¶¿©|°†‰K°#Žpo­|³°sÓ5õPá±ÐÿRŽå¬ÙU‰Q´Š{ü¦É’eo½l.ÚÓN¼„³O"û×ðùCEÂäá¤Ï;ó´-ìžQzg©LU¡ ÕJ‰§¼µŸc ×á2 ø²Ù80£žät 16­&¼Ñ³_® ¸EJŽ-NÀa]eÀ€úÓ´¼n|‘Û-öþS^€7Ðæ‹¤ÁIŸj{4÷ý|^¯çSøó’jг›å宲VÎ'Z¥¶wÌ2K˜²÷޽jÚÕÛsv°–Iãyÿ=Õð%y•Áã<â¥_âš'¤)]ùªÝXE} kÔ žb¹Äƒ’a 1ø,ëêhp é׈XÙ>ÿå¡è2hµTß“úº~Ná¹ß9d$¯ ¥,r±=Žo—tÛ·YB¤ oº4rÒ“”F;C{ÇO é+ìôŒâÝqúZ‘u¥/ÞãZ D°Ü5êð—[|”7bõ*Îö*^’_^.zÙý79²Fþìe ºÉÞf=Êz0‰»­à+*Žž ЪA"ŸLV]ÂÜÓÉÍS†u @®åËFÁØÙªXý²Q„¡”°AÿkPóâOgs¦[ÚÒ¡CqŠ }¨``å-Ì¡ÅzeJ¡‰΂éݬ³ÚLl9½ù×` ù çøVž7ƒ‹ ¥^ž…3+££‰Øß­Ý帕Ç5Öô…ŠIߣ„Ýkjl¬œA>dÈì¿~cu1˜g± ¹”ûO‚/Šÿ,œÌ}ÊèœíÎX>omr×\ñÅAcˆÆ»ôlî)Ðb½´uŸ€ì©ùß"»Þ¬•÷3F$“°ècñTÔO)p|tÂ¥g û¶0Ä1G²fr0÷’2fc÷pûQ. 
Á}Æ9 ¯HIµú˜ÀIS/ÇIy £Ð™µ×²(mXK…ÃjÉ[($S: Öh[EûK‘l”ˆëLŸ Ññ¡7äÚmP¿VÏaÙëEö¯#í²^Ýz¸Ûší,˜—K±o1;ߌ¸jºSV1¨§ -]oÒ"<¸ý¸Ÿ.ø €Dt#ûmÛŽ›ýºŸË5³§Œ­ –ë^™ƒü.„’"Ù„õÜI}ôÞLŽ¥*^“¶Ï¹á¼€àCráÄkÅ×ñ0 boToX¯%×qãj†7“;¿ý úȵ¦èîi%Ëáõr‰ÇÿIÉâ~O6ÕN~25á«®ÅLqY¹˜·æsƒwÖíGŠZá5N_}h?ž^¦OÚ…ºÙ8.šzAZè«Kƒ(kÞðçR’ã5òe$éäN›¬-íȬú±oÀê˜2ªeØ‹ç ßfiž˜ÚèPF`ñÚE&Ío›ÃÖ'³'½‹â:Å- B\Ãâ†SšÖIíp7-ýÒ^ UÕ@úr÷rv‘D—üI€—Rc iÉdq6Óÿ…Üp&˜|é¤Éé!sOÈ&¥V$/Ô’¨6ŒB¶Ö²¸œ™”v ¢ŽDúp=ϧÉTž V¦ñÞ`¡Kì“å/çË%À¯·‡IÊ ¿« ¶èâ‰üÉ[=|½„kçAŠ'.6Tí×h“ý’„3Ô=¥]8.Ž+fJz2èÂa0h³`äO,_™g¸G¯ñÞÉ ºd—Ët äÃŽãí ™ <Ÿ©¶~J6/j5À˜:ékûp¶ R ¬å4hQÄ•|ë%(IÖ™-„f̾GPf ûþw ¢oEÊø˜Cxøµ½¨„;B½º VäD LùVUÛC±~ \¡’·ö N¥„qOâBö|¼¹¶ç›sdò?£(M˜1y ÉÃER5Ìñâr¬Š…+:åŒÄj‚ÄDñ9êæ©*Lå0òñÃE­ªiâÆÄ£Ê}ìˆ,yÊ*²B—='ÜW UØK ‘%¯ÄSøƒŽ•{¹žÍ¬Ù塞µŸûwºñãüs9xŸIVð¹bž•ôzÞùr|ï˜JXÓ•Ôæ3ë&úÓç'pŠ[ˆ!ò œ8t‡›Ì9Æ„=ËM1ôâyÓŠ?ØÎŽ?^Q{¡{rëŸˆŠ·fLëÔÆlMor‹ÑÈž_¯S·oœß©Mi¦±ue1š$xú„Tzþnž>¶ùVAŠ4™˜ša’›*U*†v½Éʆr›ZT¨Q:Šé‚IlkªYuWî·_µ§NmüŒ;\†ïܦ¨åd#Ìd9³|˜ÉsO”!¯*Y\\.­J¬ž†^¦l9“V‘R ©cÛZY6‡U¸†¬ϋ̖RØ/ïKoI>æ¾ •é ir8®Ð8´jdÊÆŽÅQšh„1Ã2c©Å ]*àt_tßtMdÖ”úÕ Ç~@“î îÞÌê#!üfµ³žbïÞ(óZ/ĦA›t±çÏH#GznbÍá-n*ÈÞpFjgb÷6Ž}¤ÑRwDÖl°é¦7gûÀvŠÎT¸G ¢Å÷â—ZrbÕæÑò®q¹Íé™Ìø¼+Þ•ç?Ô;€Úv¤6ô>]Kt¿ty4Ô1ê¿ÊT:Ûë•;¥/ïìÊqßyíL· §Ûÿ8/‰Ý€iÝS7bEÓú&³ôÑ|ÏçCR¿Ó¶¶Òù”ŠI÷¸mÞ5ï¦þxÿ÷Öd׫‡ø!{J;ˆwµl ñó`Åüg´hé+Ås¨q®Wvçíc{ÿ ñç×>?GZ²Ì­|uÞ¸†G{}BÿÜa¥°#¾KŠT©bQ«©V=‹a“¦Œhî`ZfŽ ¤WPÿ_> éûÏ…o^#[ÚÛ×ô/ßÜý=DuÓ Ä}í2ú3†„N0Pʼn„D¸°éñ$ Ô…¤†§¬{vÈöÔötÀWÌ×Ô#06¿t¢pÙ)0b3b=É"$/ÝÄ+Ô/[öÍV럮Ä*)té¾Í‚v뤔"¯€Êá– °y qkqÞšè?H€ÓiÉ6t¿:ËáU7ÞÖéÚj´t¥·µ]ìûÙdM‡Á¦?ø³*Tt‹L.íÛ×si4øJLÕQ—P|Τ5sõ¤ž§%脌«FLçÒPŸ jÖå3Œâ}¾‘l8ÁXïeï ÓîÉArE”0ÿàIÈ,š_õbGÇb¦#¿³o'áAŸ–¢&)ƒ„P>²¥³¾«`G­ÙQ33( }÷}¾ç‚óÎþrÞ¹ôt1ËgôR˜(õó.òö2÷ÝŠEeÙ:$üüO啨í#Œwu'`ðKj¡ ^j9ËÙ;zøø™å›”.–LÃÕIËÇÅE·›š®GÐE©H"2ô*<”À + ÂRØòY†ûL’…Y^ä-@ÞD‹U æÄ/ ¥gGI>öšFûe_ñDd$!q0ÚÃXº¨B-C39F5955¦¯lÆ“j jñg5:—ê\Q§ßK@’øë“—«—%wšÌ¡æ‘ Ø8 J”l?UàZ%†Š8êË@ᛀ0Mvc—¶á±ØÓÐ"Mi%V·dAm`ƒ£†g€]T.çm-sŒÐ5’¢WMœ&“§©$2ÝEæ=,R]ºDÁô/j¬¶ýöãv[]¹ž?­ÔBr*mHÔ=8Å{‰ƒxó‡7Q;N`Â2L¤þH,{Ô×ýì¨ôC_ü›ž‚ïW¥Å( ¬ž? 
ú¤p•ÄÚãûÐ&ÌARè}“H`ߌ–¯MÀ½ïO1¦Þ ô¢J!¡j»%ß’èõcjQ“õ^—‰åé”ÍÛoSp+êžþ—G‡AÞ=BUYó^7[œ¡“rõœF3´ëX3TEÝÅ_íjÕ1M6 I]KÙ Ÿ›¼Ô£}÷@¥‹·$º[hÃÈ•!¯Iyy‹æäþœšdª gÍòñÉ w<ÂʲŒ?E‹‰ö¢¤©¾§z†2slÐ-”Þ\ú5­¯<èÞáŸÝeHbã×x|§–³úGE þñ‹^q–H;"j^¢ 5tN_NìèÞaîá‡/mžK:%¨ªX©hÊf`A€-å±`Iƒ‰^ÀQ)5ÐÛ½˜íû³f…ø€É¥¸G#ÊÊÉ¡×Fé Lóî'´:g[›ªÌ9ÑxP¡™i¡›°òÖŸ§W…ÆYÝuÄX•Lœof5žÛGò˜øNLO–ÊFŒn”}{l^†jD û<пÅ>ôâa޶é>ãù’äUGiIøII¾BZçp¤´Hú9Tý5šÀ>…ӄм‹!¤ú®v†äþÖÁð„ê¡pHšx «dIé·õ[C×A=ÛÛý­Ã²‰C‰¬õ”`B”¼\¢*ñèÔùn‚0*Z0Ž 2Î0D4U()Q æè;G4€T´j‰¨C!±¨A5ÀÔ,o× >+ã €ê{ï„ ªg•ø3@©\Æ"ó[Á(שÌr…u-'Ìi‚¹,I/ÖVÃ*INMˆÄpAA]|¬ 5iîArŒ’#fï%ýü†‘±9œ~ll‰n€€hÑê_z6šA\üùí¿Dl cc^꺜€ð4ƒDY;ÿñ´^ý—ÓKú?"7¼‚ºÄÉ1âFض;fÈrQV«"{ø€/Ôå˜öDH4¶»Á iÄ"¿ÓµMMÓ—"Ô¦Åf5Šl ; m„zÊT©üW)y–ì~!Ê 1UP©w¢Ò‡ˆ:Æ%.PeÈ<9õ” K9ÀðØR('Áñü!–zHjžï¸úÐË ©v’› ž¤gž?KS×õoCé³ ·&$ ¹1–ÄšeTjäÞ861ÿ{S”æІ5V¨E%êÜ.B°@è¬ä ÄQ‡flŠ`G²¡Mð^žÕЄTo¡ ŵ‡Ü1:9aÒ=9¬ç9˜'x¾)ÄØÍî·ön÷Ô0š}* ÝX­ûé’‡OÒmpevÜmqcÊÄXÛï)Ò~~Ò]1¶9v 3ÜC;nSðO<ʳ@8ÿ2ÂÔÁ"(æ«P­ñ`ž5¶&Ä{u’é‚Ïìà¢Ðgm>bøKxa«B;’ÔSGr ;õƒ³h-Ûœât:|’ñßÇìéßjC» ”ÞÄ…¦ä™Ð/Æ”3e²aÛBhÎvϪu"„îh….aZ<¨Ô=9/™9Yú²[€7F‡P`gVÜ>ìœ &?Æ ‹˜¥â¢zÔ½àǤYNÖù³ eë4’^ߎCú:ñuʈU‰ó˰´³„:ÊÐsõÝ™{߉d@¬Ù;㸗-9ã¿ ¼ÒUŒ‚N½e¥ža·lƒé²}þªÝò«bŸâghÙ4˜í(Ub Dr+ÔxK¦óí/@`ý·!ºDðÀ²Ø×ÉG¡sž AÈób§é,Û'¿›õ-Š2ˆöF¢XúÑÔ@:Z š"û`ä$lX.z“ãt+j‚Ò΀굚^¶Š™Å%€5ž·•;ÃèêÞÂûiaä©XjNm]êw0Ï]"YÝ‚- am µe¢$L»LRh0óm@y$çˆÎTè†ÿùÙ+»Ã.„£nÀ4[â_2W` §Ìuä{Cžq!©’ç®oO¤T¡_ÈEWù[Å× –Gl5wh–X+nƒ”#vGj}t¿— §žT®7G.Q®iæ];J@*6EÆŸÂyü2ƒU8HÖ²¡›â;Ki.Ñ:^DÏ<¯Ûy„½°fêË¾Ï ß„šJmÐÇã·²…ë˜m’ 'Öm-a¹|ã¦<Ð?ËT΂¶¦ÚŒÙ®Š²Ð®BôåÀÌÞÁ1ׂŸTÓvï~”‚;tçáfÖ¦Ò^lÌc–3êã˜Ò¾j„¬Í¹®ÝþÆÐN‰åYµ¸&âæ˜õù¨ÀþýˆŸ[xt9û4e.Ä<Û¢KÂÙ§?÷cJ5ß _Ÿº" Z²MéßóøÊ9OŸªÍ½Í>ЭÙØ®n¼êƒ÷-à3IhîI™Â’zrËǵ…dÂ(aÕÓZu4´9½÷üEkfÒ›5Gwj¤|ã¬{%öT}É}CLÿû–1¼Œà7.”ÌñÐùá ±þ7+N5@ŽDßTÈîûH;ø N5¤ À[Iüö×Ëkû÷}&­,2k´ûü»Îa“ßZHK,Ê/Ëî*â^êãó#,ÿoí >ξ5öÍÍí!¸'gÃ[FË~Ë/¨b°óHíÕ-&!‰ÐW¯yª’bB6¤‚/šÞòâ`íÙÎ ÷Pí_Øa°Ö!ÎÊO1°N£ ¯…µÔúh"› ð)3•dÛpX¸xÇFɆ÷&’mÜ o¡oìP¡F{Úœ?hBç ÉRXÃ[ÈÞ‡úZfU­6wŠ=~ƒ¯²ÁøQ6WxX1°#œï7¼=D¬ì[¢ ?ñ=g–äV„Sm~‡ú›ü«-!ßZ›`\ïkwörÖu¤¬KÝõð­°÷6ÜÛœk°¸I«Øàï"©Æw­@úQ·8Ö›£­v‡•œà›o{G¢ ’æ}µQÏCœþÁpxkbûï” ^nbZð½ùŸŒöy^m…‘·E‡fwÝÁs?=é#+û<»+«ãðÏMÍ‹´¬sïϯ.C`¡ù*Bsƒö|ø—  0£¦È;|ŒÜôŒÒ˜Q€¡aŒoû-®}­øî’ž¸p.Ï. 
‡œ_Ü¢žF>B)ä;BÃÀ%‰â«µZMŸ#ª&/În£a__„=ÃÆùê"žàªoÖæÑ73D,}õ+«ëñÈ£çI¥°±Ð—i*j,µ—ê’ìÊé4•u:MhôšõEl®6Š£±ŠE××–y„KECôçBúãY °wÿèrvÛÎmÅçcÄϵü .4úA/'Ä÷Èî!žƒ»/s sZzupúøï´8УMdæk¨m¦`ìîÊ*GØ{Á1W ÄzWasbqu‚ ìñl¾Á}w39 „~²O¨öeY¬Bzr­mó2Âè§ ?5’¢àH"€v0MGœl k´6=G55¿¬ Õï¯W7¾©øðÔçrzpB§%¶‰ó[l¼å¨‹½‚ÍÍ@›Î£†v~².´Ødgo ÓñË•óŒ %µÒïܽ ?;¿ºÂCgHÜÿí…¬AM@{\Àݸ=Ç»øsȯÔéµÙrÏÏ÷×RÛ` &ìNø½Þ|pêhã£å>SÌPÈ£Cìs&ÂYé(¿æ'>GVU«ÃH«N›Ý'Ý’o÷ôPß.éÏ{ÆëÑÉ&|ºÅÿMù=6ÙÈRš…ٽﭾ zÜ:É ÁÜü¼ÊÇ⥚moÇ©ÉÑê›j3žóS£“:Ï lKóAÇþúÓAÛÓ9Ý:xð6—äwóÛ"ã“7òæðò¶VÑuÆN\ÌÌ—”ÐTD +3Šâ‘¬‚øÂŽåœˆÀÞ„wpmÉ &HPÜiñ­ûBŽb3 … >ŽHÎD"–w‘¤ÜpÑ8[?Sì®~EÑ®§Ì±L``gæFH÷äžVƒ¾¸W"ä™™¶3pƒEõh^‰7zý÷#mØË^)¯, È ‰Ä Ž?¼tb†@™ç??>€ ¡º¯üïxä»ÀÇ^æïx2ògØHHà#Mhõ þ”$›Sº0£ôð›Ä%•ë‘£Ž<£šë*sÏLÌ;“¬‚]™#—[ü‡×M®\!Ÿ[rû’PjôáD†ðÑÍĦjuT ^kÇW†´FV `c‰ÎK¤Ö@™»!&lâV€s†­q§é½a‹  å@­·Õ£É:-.ócîÅ=`çä™—ÅÇm!¼³JH. wàö‰AR [Àr,Ý+Ö(ÍCx ä‘E¨±CJ‘r Z€,Oo¸ÿZsdqè'Š÷U:žµã )ÁPˆ1¶àæÐð¯# ¤˜‡ø2æé€Í%×¥“ƒ~<0²!u¥y¦éŒÁ–g®³œ¼W¢L 4ÁRBp¥!µL€¸ƒ°åxBlrƒìcB8*S, «H“(³ÒÃRƒoàrÊëæH-v ºt+_-«Â6ê+Að€€&ñ;B”dŒyÐçq 8à H@”"„‚` è‹"b¿D»ëñÿÒœûÕÆ´¹@H_†¦ J`+Èdî“ ZXD0Ú\¢ ªÃ`¨ŸK^K^÷y÷߀JFY¼2ð1‚yï í±/o©€xÛ¤L×òÏlص%ĈÁ ÈD4b 6#_¼ ÞƒH‹…Ç£ãææ&X]è„ÍÙ¾Û‚š¥¹Û‚™¥ =O¿«§oìÉëÕ÷ê¿ ƒÊ¡`W%àšhê_œhM3L4Ÿ›ÄUá ºŒûEÎ-¸'ˆ.dnŽd6[µÐÕ)m·Ë>©qÉ(qÉ(¾dv¹dŽwÉpØÂkeé…·²…¶¢…³‚̬#¥«‚ aQñÜŠRÄ{‡Æš-u öavÝ_6Â*rÏùr‚â0ç†Ã{Ò½~ãšÙg˜mÞqà ^…hoT…Ê}6öbód NP&”¸ïé€7GõE·gÏ ô’iÑEKNk0No`Ö?Iõ0&m¨I$_v,Ö¦Màà s|ø,7Oóv[$^ŽžˆO+¬‹!d òŽNîw£-mÉ)×ãÀ_Å»å¾þ:S£ú>To÷ž.âÃźPð9ôÞ4WhóÕFÓcÐÄ©ÁW¡ÖÚlÂ?ÔÊ$d›ØbQ0ZdI,¸Lïe࢟i»I1f°±%µÉxqzKÑʈ7‘*®^ϧ+¯|O)S‰2ûÆÛeý}FZ=Û~¦Ê͵g‰=A"¸±¬Ÿ‰ÿõôN£0…²²‚c!¶°å¬ÚêGY­!8žÀe™frÿö18&…æööx·%Ø'Üç¤ÄýŠÓõýØÕÌòÙ½2ÃÁdG;3ÒÎŽM1#ІjßÕhol—zànbFÛçð Õ×z0u`jíûæÅûF¯µ»­çÃñž:_ý³¼2¯£Õa]ȵáñC_1rñ+ÃÙÜÜ]YdSïðUçʤy]»ÿJ×ó5ð1?)ñlè7íëìac´ø3YXŸQ?eÕIùv7O¹Å]ñ2‹¿ÌO ’þÏ»J³\òÏ/¦vUƒŽÊ‰ o ­êyžúmn¬uó6Ë31ß¹¹ù[B¾T¨¸äoh¨C¼Ò+} ¦rùùÏ  •´õ#-›ö4,  «e>¶ð_/3? ™‹Ë„ÿŽÊøLBf&F¦ÿ 3ý¿¤X‹þª¨‹£…‰£¬‰ÛåW³üW~5# +++=#ýÿ‘]ÍÄø_ØÕLôôÿ+»š‘þ¿gWÿ›ù_ÙÕ,ÌÿG’‰‰õÿ‰] cD úš50éŒ ,( .â§¼Œà%QÕ¸÷òìnNåè9² #‡f¢ÄbY·Ÿ™. ò”vrnR‡½Žz—6.°ær¼kxåŒÐ±ášN}QÊ ?àÖʦÕX7-!Ëæ5 FÔs1Êor¹r-tI‡! 
*ÒOüT3ëTª×(¨×lS´‰wÕàBâpY0ì;-!"dº IŸÝ!'¦ÐÿÐA $ÿ°«ÛUš EÇËuX°%RHÞƒct€åqÊÁñ‡,G)wÝkÆuŒÂ!0K!”ÿ¯ÔÑsTœÜêC:#øQÂÚk÷­áô=^;)þ× ÑD!Ù/̧Ӄ\ß?Â÷û…Ÿ¨îwý? È?rîŸD¿“_‘‹cx'nÕ¬ÏÙ¶4wŒCÑcº°\DY3ù;/¿§¢ú ¾Ý·âáÿn¿±üÿFEþ?í¯ÿ–‡Ìø?yȬÿâèþÿß þßÚþïüݧÎÿÂßý÷Nü_ù»¬ÿ;—íÿ‘¿ËÂÂÌü¿ðw½=¼µ†—ÏÒ½š5¬×¨×ª©ÆÐ=Z»FÂk¸ÙgLÁ¯'[¬#Ý#‰GÓbã‚2€l(>7P%p€$óâ€hBRÄzk¡›ðcXa¥2µÄ=@¥n§¶¬FáPÌ?¿}é:¦²™Ìf¦²8.£„qàd?_à Ëì ˜‰tüñã–~² öËíªœ ¾8ý?úÃóç{¹ë¯UýƒiŽ#…ˆƒè*d¯ÁÅgÀŽLÖn±ß΄uøie—ÿb„_+u¢m¶îGº ~BœÚ»޹÷Þ.wá¯×û`·„(ì#'op€¶6ñœ`?×¾µbnÖ¸#“ެö4að¯:lð!>ɧšá€ž0l˜!ˆÝåç ƒ¨4MßCKVKÌ] =KÜýîŠy§4ë& ß@ó±©ï‘¸Ú~»Ú„ö«ÜO;ÆóÒºØp®¦ Q)÷±íƒ®Á!×!ÂñwIé†.çñWP žd¿é£–÷¯ÀêÇôúó íÈ€·ê¾AKÐåáûP¶)„/$ÓÄÂ(¼C)ªØ;}õ Tß ´î%¿UÞcdë…©‰Ÿï½’«õ½ÛCjCé›*Š[Žï‰âцÞ0ÌÄÙcòBߊèìÄÇ 1ºãI§"δ&hMࣘˆhï®ÿêÆ“‹ýÅLˆ_¦‡1ªÃ®ÙÞ–¬Ýc÷æGR[ÔÆ|¥â•pˆ]À {[.c”̇,vaІ]ÝsóÎ… ÎUN M"q"Ú2Ù⽉ýjþøq‹`;˜¾?˜Í›Úba¿w$]º ]Úˆ.ëü¾£Ç %ÿBØ uÛçI:9âtŒÃ!õ;\h kË¥âUì)ðfóuû"šËoÒ‘1yS.QÄ:2¹á—᧨¼s{ãªOàtÄlϘŠÃ›>B{°rŒèŒð ÷Šî ù…Ãß÷+#ˆ€™¸Lª­d­ð©×2tÚ´wÍÚŒtm29Ó‚É—Ù qÌ Ý£8º¶»#œþÛ9M29FêÈОtE–ð9F×îÔÙæ`HæÍz{ôøf…Â:Ì;ìGé[Ü%ì¡3…{“ ÐŒM£9ÈxÔ kCÿ·R;pÖ…< øÄò’£i£u.c7 ÞˆÔÌ£5wºì™Ö‘ð>Þ !Ò;芨÷ßLVˆ“=›£ÞqOÿ7ú+XoPoxo"ßÐoR,ÅdvP.« Û[î]Åt…Õ2Õ²ÏÅ' _UÖçþ–UýPE úå_D/îÔ]ÌÜ?_¿¯ü·€7†“rî [ÖT9¼¤¼Ó¦~Í6„÷]8®9R³=©;ï›Qœ@±=¦¶ßàßÀÕ½Õ·,6…F?}/O|Þ¿‰I…ÝKqÕE±0 Ž^´d­ñ›ãÙÄ‚9FÇ JL ƒöÇæåeºÞÛQõžôüEº>ùèIÝ*ø@´>æ6·™Ü×ã\:µ¢—©ié»þb)³à2æÏ7gÈú É o×j³j;˜uðjk©Ïð?p‰ú Ñ'òó‹oEoËu\M„B}ÇÒˆå6¾ÇjŠ=%HíÄþú"V°÷jë¥Ô ÐstÙBëùû%¢0óXøüzFêG€µ&g»ÏKàÚ–~Õ¿ý›óðɼ_K<3}Ïæ8xvÄ‘0ã1&Ùc¸e'Ž/ 觘Z)À§˜¯“½‡w>ž}§t¼øUáú£ G&ûŠHgOS”] ’z¬‹ÿéÑ…©ÙæþEµ½ÅEž{7ùÖè]?PÊþ:8ˆß3”>®ö¦`¢öEW¯3AgO¥/ÿžâÉ!í4A¨; í 5Î¥M:±šæ™”¤q_ ÏwÄ*ÈúJëOKÊVàvt ËÉàˆãOø°@ÉÀÆbˆöÆÕ _hÊÅîp¢àŽVý­u-Zƒÿ8E¿‚c : î–Çò¥èoÁ-`ãkȺ&ó e¨TºÖfè1Øðѱ‘Aig"â¡a!á_ZGÀ7{=áÂyHCÆ5™‘nÀPš¹UNzDìS ›™§P™•C$¿t¡½L%¼Õ²é1Ã1ÜtýD8Ð 1ˆÐlI!#|fb< (‰H>›‡B"é€ fNN‹¤™-x4ŒVŠ@¢ÅêJèíÙ¨õ7ËØž+¨wHŸgÀ• A“fF0ø ª:^…+ÌLÓK˾#ˆÓ¦±q½–†60H73øé逡®F_ºÖ%¼cÞ<‘R¼´™”ñຠÇß»Õ]~ãÁ‘š´ÊímÂéï™v5Ñž(ŒuFD'áéD g6ûlûº³µ)ôB°³Ëgb8vpo­Î?]µ™5ñý.ZN¤b´dÙ–Nj*Ñ_5ßÓU¥øó¦ÓÓ+CÆ|°;Ñ^7Î;;±Oû곬Tn§(µ?"•ÿ¦(7ȵþ±™¸µ.NÛ‰ t*vr?)Ð];‘a•Ù4cb&º_%lTI;¿rÜ ŠV5k­%¹ÔÇš²ÎàM"„”òŠˆ„ÃR$¶À“–L þ6ìë%•Ío‘> N":Åj ü;GG@¦#»%NãónePÒ‹6~·(c¯l °Œð—˜ùuã‚"'&–\¬”þ‘7o#›ì· å0‘2uß'€%ôsàǃýà^ 
º\§,S!~ìj¥†’vóøç(‹B=ÿM_N$}óZ¹rUDälPÔxv„¶œ†?×mHQÎ-ø²˜gôÄK:Ób6ç='d¿[–yWlmºe¾ ýGе[+ì’ˆø[©ã[ÑO+¬Ñæ0…6ߥD-ÅFë4!eI¯sŽLÑÈ ÝîØøoãC:Ã%ƒ3‰( í—Ü0µAÅJ# ¢ªL÷©ø­mõö¸Èáí‚B'´)“â­Æ†nKQwp÷&¦j[{Ë Ó‰O%ýÕÔT•#T¥©§™÷ü[o©¾ý N?“®±iA³›náüÛüeU¼ŠÞÕˆ L¢g“*œ*žs‚+Ê8 ®VG Ʊ—ŒÙ*µXÛÉNò6¸S}íå_FêVÿbš€–¼è¼âYG¬ž?@WÝP¡ú×?䀡‹S‡Obfúì_(‹Y³ ßT÷ý‹)XXøûcà”.IØ(ÑyÊÞ}AªÕoþü¡¢Å_þ‡…G[Bz~Ë Y–iVêfîµù>5€Xˆ¬ÞÁµŒ^<Ã!tÒKwñ}øŸâÊŠò8B`¶{k¾L,ÝEµ­P/Ÿ],\Ž­&Z‹à6 ¾÷egQÃ[×ðB!Ådz~þð [ÎN´˜–˜›¥¨Øw,%%á„ñLÇ'\Õi11IéŒgÁŠ s0(èh¼'_(òZô%ëG'$7n±«.¯öºË–ƒâöjIÓö©@-ÛìùEûUqݹoì2fq¾ïŒι7ÔïÇ:‘y**^Jb׬$¹'¸bæ¬U4úï3à$î›ð{r»©ªyl˨«ª*¯bÔgŸºrZ]Y&CÍÚtI;9ªì,]d\L@9q’(ò^»‘q;k]½ßî~?A£nk}Ȥ‡ ›ž³×¶ø¨ˆFxím€{ú"r5Vþ~>íþ>[x†2Iµó´´Ó´Þ®¬UÚ ù~_Å–³.8Å­¦e§Šá€®Líj¤¼²$Ü…@hüìFŸ¦bN‹<}±Q6Åô¢ÙUÿŠYP‡»¸Rc›þ C7gBá½’ñ™¿«š$§³™ð|5Gl¢jÇL‹æ-ÿ¸ú `6!ÓÊðß#y•çœéÎÇŸ(–ÿåħ Óo„|aè3®n…ma0ʵOBî¹o…žc¨Ò&û»s¬+@]4¤M GÍ ˆ¨±®‹ñ %ɇ¸ƒxÅôÀÆØnX=|ÈеÚ8Hä3‹{{Å «F%ŵ>º &¬t\´Ø{›ûä-¼Ó›Qƒº$äéÉW)W\0ôËefpô\¾òHì:IåD>þ†·ÍÁ¨xûº%óJ¶z¾˜ØLÕå Ê JeàZ,øˆ¹³\‹6@ÍâÝ ¿eË,[&[Æo¡ £o!Û¡ø>2%3ÚM«'¹¢†uŬеfœ]ÏIײT¡ZFZz|¹Ü£x!«r®Xä½Û°ÂSN+L­»´ýBô¹ÛR6;>]xÞƒ #K`ˆ!³’¿ª/AIÄnÊÄ]©^ç³òÓòªÔ3sW°ó6Ÿ%Ã;(? 
{bRÞ˜‡¼Y6 @°rZ¤¿|~7‹Ö†-™ÏÙîêMe•÷ ×°û Âg·g/+óg¥óä£×Û6üÛò7àÒöÏû'¾îò"~çÙfè[¬"w¤¢9Ak„šùÓEûRÅÃòq6,z±\&­Ä®Ýï×vR]{‘Óº¤wf9rKÚ›¥9½ÃËW §­P¹ÛIíìnÊoϹòǧËE“ ,ìH5WmK ïvU3 ƶÇÊ1Ir2keœi3ž 0ž‹ÇŽ©µ86µg¥ƒG¾ =Vd·wÒ¡ˆÕ hR¢B“±{tAù„©¶Ý–i×¼{­‚E”£.Cý‚ ª”x¸TÇr„*·–žH¥øÌs¢2º‡)ûc·‰ê¼gnr«)µ\]F´©¶2×3LóA(XR+±Í—þ§5jvy´u 2n%|ɪ›…ß­qì>q!wÌ>ÄhÁ4v îà§‹b¸Xx(lÊH¨è†:„´ºâ´Kè)ì¬^¶±yµŸü÷žfÁ°ñürúÜm¼z+Œf u‚‘K"Þ@ešc¥ÕåÉ•Œ ,ÙÐ(êå‹® ¶„s5{–¯FøÓs P\ŽK.KÊLj§Ðp¿]µ?hÙtEæ2!§œÃ–j?!˜¤Ÿ"àB2ç_1íßjœJJÖD':“["²b×íøC;éê;®dåX­¤%GO%aVÜÅ5—†»Ý̵_ÛN½Ú‘?(³áÙ“¿4ã¨EBÖ›Ò5r]\÷ùCdOœJØU’š&œ¥ö¹ÌéÌÙí|¿O‰BfÍHY¸Xû<$Ü9…÷Ây[»þP@.0M›œ—51&6¢: *öN­Ãµ-Éôib×¢ãB–=BfI´H[iš–,gW.ï—I'h¤ÛM3£à¿b]IÊLñW<¨“dS6›WjÖ’þÑÝ![z--O¾(üb„™Ì«’5cFÇ«,›@qRĦ .15̸ö!KTg¢dšNOÞ ×ó»ã6Ã[¥õXT´dç mUŸ­Ç5'Š&+7ÈŠ‰!+÷™cÇÇB/¥·¤~[¾ \­¬$†M”?O¶ô’qyîB,«ŒR¾í²F<ú¬.üì褋ܾ/xi`97µó¬âß\Lâ2k\ß©G¼›+)a9&â£9­å©7»ûÀ­¹Xh4¬o>qÖŠ˜,.ÛžŽu.ÒpZòpqº bëë5xß-q(dùAcè#œ˜ALZM5Sáäö †'3&Žé™mpý”0Ï¢¢«Ì§–ÑüÎ3Ó2Øo0 K™gCã\¤ÅñíqĨº L+Ÿ!>uÚ’¥òS«]òÑ'=1!å ”´„·X^."jv‡šº3ÙK Àg¬Ë–8¹£7'å6€'ƒ(BäFÂ%¹ ­EG®R•·\ çÇ;}ìHϺ‰£I'{çV0#ÐøKdšPÌ3qœ¸P$¯‚}ŸuÁ±sÇËÃÏŠ±cUô!%:‚<:º<ÒÊ‘ÿÁ ô\7ú—NB̃öçk-Q#Ýòô$Õ-ã*†Mç¶Ålål\ôø.ìõ½ýa‰=§ØQ²Ä}K'c>=7Í‹ð¤Ê2jeWiÀԤؼ­O’Y³@v â” V±Íblbâ~A…‰±›BR0« :M+qgFŽŽ©Uëi(G"à8T=R&7©d•lÝÉéâʪùÇÝ´F¶Ôí˜ì–ƒÙ¤òÕYŸÒKbz}IxÛÌ#ð°_ÚQ 5Åãy)ó¡RýØU'g¥.•¯ä7üG‡'–,Ñ$Ldzq§8Ú±%hvlæYÇáÏ£nàøñ¡Äì6„°¢•i|<˜¹<·|%H:/P™pãŸ:ùáÅ¿é”^@­?ŸÝl5ØŸ†Ì//Ë­;RY`NõãJôu-—«:uf+Õ]5>{[µWšW^\('J%¼*×?Ÿà,^Ã:Zav=H×O_o™[y†tGˈò#zÈnÄ«$‘ã.»Ê=ƒ>iCv-3}§¦«ƒ Þ q4ö÷sCÇüŠŒ äË–c3õ SUW8‘ŒḂŒŒ(P?¤m±D”…pK–Zž`œ.¢4|DÛ}Õ'6Üè§|ú]¤š,XNF‡–œë;f¼¨¿Éε9%Õ–6æMš•¬L6‹ŠÏI8t¦ä¬¤ÑïB¢Š“Ú•È`GÏϯۈŸEÚ+ë÷ÙuQî—²·/ÖÂeÖçøÍ‘ ÁdSÁcW“-K³¬-ÑÌÈ ÁHfºnn’:_Ì01ASà>Wdš]Dš¾[-G{DêÙ^l·®Zª’”Í6äGTXn°äžÙW³R™\£!iÛt0£h§ ´ž:Ä>dÁ3ƒSÞ º`2RU:€Hºº°íL0Ã_s KK¿yºpÂ^OìýÈð!’RÛ¯T^’&P:ÇsŠGÆsÙâ{GžV{mÿøÅØEœ;L¿I,Ýb§õNì10´"4Zš/¡Œ£eS´óÊ¥×:‘îÎ<ÅW‘mþº¥…RÐz&W¢r‚^ÂTpTÊðÕg¸lpRrÞ‚ùòÿëî»ÆáÁ[þC6p9Ð Ô«Õ†8»8 bŸ[²ããt"=O½¤µC©:|s·cÆaºOš‚OBc ¿ïsAq»pØ“Ã1Å0*×à5wŸ¸Ð6Œo ±s^ò"K$×â’ÎÄ|¾„;Ç&yW3HË©l¶…=©3±ÿ‚jÕ0I§÷9‡zªm.GÃÚ™Ò©ÜÿÓ¼Z%”¼ .!D‘cý³±©¦³ hjWÖ³/up·ŠNRtÜ2ÉZ;æÆüuU@¹«"É[‹2 àHîyÿs€þÞЖÊ&Ð10°ÉW­”\)뜠hVèÔí”é´å‘þvŠ;3šmQ 
’½PãLé4y•Ó°µr¥:j:jþâoDïf¶Qt8:+Í×Ú{õ%R3M>ˆ,àÑRQU¬Q¼ÁÍP¸s‰ý¬[8™ž‘rélÈí±¡¬Óºië¨z¯ÆCÆÅ`Þ€Ü ½rE]:~Ôĵp寴l!9¯(RC5}ÌŒ…ŒÓgffbÑ ‰µ°‡³×”¹àˆZ~ÈÌ0PZùšQð51š•Ê1S¹Jxˆ£à;l‡Çà殮 ',ÔÍ–žT0}GwêI¿-OfmÝgEËó> .\Eƒ $eÛù¶òå_<‚P®j˜¦Ì6‡ *áéÙ—Ô”M<½PÐníÛ 1kVqu4_Sk‘-sÝÑÞêrRh$kõW‡Séÿ¢ìƒ†iº5Mø¾lÛ¶mß—mÛ¶mÛ¶mÛ¶mÛžç}¿îow÷Þ3{&"«*r­Ì¬¨ŠQY?Îó°¸g=38ìpt•º Þ “ŽŸž¸ÆÛiá%ÚMÊ>v0ƈ´ÊÿŽe[Q Bœ9>ð´Q¬ŠÄÉð„rTÑù’&;©RPÉA½•?cq(”™*[0˜_†Ñ¡ñ>§Þ¡¬•©*2N^­˜^iØ®m®=WŠ.y.±.™NŽXÄ~Þ¹¨`á°L±¦mï¸@u0ÝÀ)±,$®àÉú{•SV ƒg#ð­& G%^Vð`5k?«Ð åtD«dâpܶ9 K1s.fpMúRv:MMJnáZ¤¼œØZù°0Å´-zOUèjj»_8¨™BYrž«5q¼¸(ð /kÜcY‹»eÅÄŸx Ý‹â¼XÞ°8j<À­µ‘”MXsäѱNbŽÔøq;ŸÖcFç0dî1m)œpKêĨ`8%Ug8Õˆ¯LËž¼¡K3Xg!-ºô ›-\_Ë«nó5'õ¾ä™9t4'+WÏΚ-s›y¢æpoзØþZW”¨ 9õT³Y®;¢Ôæ5K« $&ß}4#ˆ$ìÁ¾³<êuJf“,Åá@XD†¶'ç4„3*&>SÄÁÂ>Ý"æŒpxèán.·¾Ž!»ýˆÍ¿p¾áÍædX7bSUŸ­î¤~™™HÐW¨íÚôßÁ¶®ØY°Älkª*«y]XÕtq~3i8/˜N6ª¤âá[LJ*p™r®ß!Û\I,=ÿî:tgÄqGyœ2|V>纬Tà1°Hå´p%·Kpr¨ô–ôñQð$”*+øÒ60)àäcà¨Ç_O1”¯×tæ~ÁtM´~$‡@ò2HªÊ9öL,zW볈1«žÈ÷Ñíš¼¤«njÄþp†´Äˆ>ßiÈÂrlËŠÀv”Éw¹ †Z•Miræ`Ö= óðš_x®â53I¸y9t•E—Á”¡)s»otüì9§(íXÈjáNë~)«Ÿ¹¢_‘¿³sÈÆ˜¥{àAz|/Hì¸Gzüøb ì’Së­ÊÓS÷ž†[ˆú0RÔîn4¶œŒ´9˜ÎØ©ª¥£Vk©Ê«ãEͰÔÉ6cBvM UUMîÔ:74T?WïàÖÓµt$ ÉùüÈVßLÀÚY52[“ÍÚ­îåú¤èV¦ôš ~ÿ­Õ?\zQ£Ê꽪±Žä¯àL(ö‚µÙkÌ—˜ÃOvƒ›‹¸£¥^ÖÂq¢ÛçåòQ¸ï|Ó›ÒB‹yÀ Ôbè¯a(Q"ÏUÑ®ÊPvµ •jâx 9CútA½E*®Ã`ÚEkšÕKÊ[³Ž\³=|BJô%+õ:­C!I Æ8Ñ8š¬Œ³f¬†rW Q–ÑIÖgé ™¨ÝÚ²Çñ«t«†¶L>½×MÐÇf!3bG=ÿÖ뾈"3f‡hÁ €Bܱ~Óe”Âç(Öá+¶JÖǵPü0R8§p«CDT$d¬”œ@b¬ ‰Oʯ*/€&ãw´…¶l÷.o«ƒ½’†úU.>GÄXyÔ”‚·CL2*‹G*ÈS(KÐ9ÈĨ2zÃa”ºïĨ*x˜u ‘?ÁE¯2yÛ¤þMïÇŒ˜JÑ2E¥g÷¿`sl„.ô©ÆaŒzZùçôÑ|“ mÉC1X¶[!òm’Z%ꃸñ€’)@½nÑ B’?rìYrÛŽÕ>ðÜÒ¯ñŸÒ1NÒÏèÔÅïs¥ó”È«£l“’çpaX<ÍŽÓÿr.Rô¤¥êýó¿À8£¸E>È.kXå#Bñ =còj”¶À7¬‡5ø+Bñc =Sü˜ÄnŒ~J¤2v·ùÏÚ¢ZRDãÙŒUi¡,R?Pÿ, ¸Þ#¡“¢aú!môT,‚!âUžÝ/EW|ùÏ*tÀpêc’öÁîúT¼Á¦È ›ØqE ¢§ÔI‘ð3µ€/¶ÂÝˆà›¦â/ Üõø:–,€ÌŸõå ¤iåAØÀ¬,xË…Êb OÊ ²î‡!T±í§L‰X¼ ªÔ!íµ‰üù§ˆœÕd2ì‡Þí~X¬ðˆÄ~.ù «êŒœx²FU`h®ÄÛQï²ÖfÌÖÃ!Ê×ç·JOãLØy|€&šÞ¢¾}F2à˜¬´aôð†Aô¹ÈG;r@†>ä…´X¦^ž õ2‚ŽÕÙˆz¤G¡öL.›=yjxÆž¤9Š¿Ø-Ãîy©gU ¸Å[Xb.rHã\>ðÞÓ¥;(F–ç:Ö #Ò£C0àˆÂÒSŽá: Q N¸EðLÜVßåÅcêyä7«ÿNËs€¦D]ò²6Öõm›û{§?· m\r Å<6ζž67«&à®2ú€áD3ˆ‡ØÉàe²¹0’ßêGÜoÄÚrÚ¦é{YÕ°’=w1ŒˆB|ö~¯Ðgè§“án:5ˆÃI¹x¯]˜ºœšzE?ïF¦pGím9ÕÈŸp黯ø¦Ãó®aª³ø¿§ûÆã„휚[‡ŽÀRÏ{‡UÓ¨§ô=1õ剓²ð%N–n 
æPçÿÉ,ô}b¸mEâ­éFL1b&uälñ+D0Iˆ!ôiKsßF_\‘a5åA´ ˆÎN­0æRWÚißCRZp¿Þ i¡[ÛJÿ!ß|OkêLžr îÄÕÝÓm‡5tÈÒ¥´?M{!¹Ì1BL1É0òL]Ùê;)` A¸ÕDVeÝÓñ¶úñ»šõ'Y¡3YÇB0à‰¯¶I;Ÿ'«ÅJ:ÄÙÖ3÷qÿ•³)~«ÐS¡8¼éÌ³o‚›?´Ñع'Žl^R¬2M›f‰Efú4ÞŽ:@_9%Úþ£©¯‘2à¶0:b“êžLü—„>èQDˆÆØ¾*¯…öدn‹©‡8–þ3C«`ø=Ý›º$–.ÃÚoÜ6àÜÈâøO¢\ܸ‹N- õoh|î,J19ÐGR]Yý¡äQ™âÄ™¨ý‹‚W¨:qž×¼ xK};Imâ¡h uÔW®¶}„¹/iŽŽ1އ8™Ç7%õ´ÛIþÇ­úpØp+a Ðî“R&“ï5ô&„Ç\ƒç­÷àÔa(Úp„úÄ"Tüû£ñúbä˜FŠ•%ï‡4“g¶nâÛibÿ9×lì‘la¤CZ¬{ ‘ºáÃïHpî±Òø@Y¶ëd¥üíÅè//j7ÃIÂÊ»hB:_­ô8Fꇂž<´ö1‰,fRõqâ ím¤ÄT/qÂÌçÕè*V‘xœûÏ‘àÐY"K#»«°%Eª×%oÿ‹<É©áéãb)2Þ$’j*¯»5å÷´CL’ÃÄÓ¨Á¹ùyº\DïÈõéƒìá{ôûLÜóßðµ9…¯W°Ïû× Y+ªãQ*“>ºãìôÓÏð‚4 žE?Ô“nÄí¨ ÃJ÷•®À¼m«P¦”zH÷ñÚaò>f—ÐRü²\0¤ÆÙÕ¯VÍç*Ó¡—î9Æ‚æXhö"Ò€n¯î[ÌÇÚÆèèÈ`bÆh Ò¡€.;=/©TÔS*8MOÝ-S˜Ä¹‘àèÛñpP‹!{MWMMº. ‰àâXX=36,Dæ¯]©oýÄå—â@ɽüîZF-ñŸï7FwÀ’¶oz¤JÒR2ÄûµÿÏž›™½ÖÝøQd9bÝ ÎðƒG¶i”ÿôèWaÏž‹¾×â+5ð·¿ÜÞ¸?ATQ&WLOÝ*çfðs` I€­ÿ]lЗ⠘J=’„¥ùY&zŽNÔÀ®’žUX LžADpL­Õ_470Pþ}=á~¿¯7yÕ3Ø—¡>²½±ð©~P—\Š·7‘wíj??=2ª’OîI)ËvJ“ýÖáÆ}=ÀÖ|š/%ó~æXd€–¾û5ðÀ@1ô›>Ã/¸_JRÆÔ»«!Ñ¢`n¯¯×]Tìû¹—¼âa¼=g téççwþÜÞ¸¡ôÜG:´;–1p++2úc48ÐÂÐ2¤$º0 x¢!ÐÏ^ºÂ˜P €/ŸâßݧtFê+YtÌAñQrdì\lL‘nðŸÈÁŸz9½R<¶óR a3Bvua„kl¼uqvŒ¡yŽük%‡ÇA\^ö- ]pc™SQuB8C³GÆCòv€|¼ažu<‘‚qxœö~¼¬»P2ï)Ö¨·‘€%G~D&£&z^Fü Å²}ݹPCû»<>â'i‰{˜7Ò³¨}{ý°›A_û^_£ 7`ë&:zDpÙÃD$"1fp8(¡ÃC³ô±É·3fo½¾Øˆ,hq—6Þž!tÌg˜wZˆ¨w¡eàï3}«(ITNú> ëxÄOÁÃ(+|ø ô{iÉ1ÿì=vþW FlXÙ¥Ôié䨉jE ØØ˜¤˜ó^H:k(Åå|ÐrÑ”¹™Y» >AšxõÖÇé –iélË•Z¤•ZÄ•síÕyJæê¥ŽÅÚeËô[jvèàÐt”¼ 9³©Ü½+U€-F‰ÈŒÝ§7bœu¾žÄ`Àq£sî²Rù}^!ÌLÝ;oÊ:Ä|¬P˜O9õCo}< ôæÖ ÌÕ’9{_?õ<£‡²"äIó¹ý{‚^£"¥F¨C9ˆ}½?ÒÀ¥"¬Æ·ZF}ŒŠg$PjèQu²kçXéFÃh¢©à™óDóŠ%§®þž D˜ù²àÒoŸ@f„x»í~ E¬¼C#  €K±óßÔ|ÈòNqã)à Li˜Q F’¦NKtBú¤Š“†c÷Ðs‰ÞDþsôÚ`§ C<¤á_…¤% ãRÆ4Úaã†Ó{çÓòtCzh“y‹OŸ†°Œ»QìÌܼá3˜O·Ðz‚ù¬w÷׊œKExßJ/­u¢FPhà…©¹+ Ûi²'w‰¥ÆÔtÐg£o+§¹¹à«÷Á—ý'1¤R”ïÍ;€ù%]-mSš*)quí0ñÆÇÇ:t=–”–AœÝ/xnÛ¥œDÖÉW |FºJÀǵS^¹ïÊß ×UÎY_Z†© ¶ºÌ‹§Xó-ßH‡¨ó¯÷±ŸïB_?{¦Ñ\/W±¦¾JœŸ… ëxÉ3?eN/¢Å|”Ùßòçÿ©ß|¼dúÕo¦Ýêb¯éIöÆq߬´`†RÛå„gߟ̮¿»‚¬T_é‡'çEŸÏÜp¹Ý§ÃK¥$ü§©hÅ?h½åo-–î+§ß6R% ÑUß³;—ˆÂ%ê”Èͳ˜íA-×¹¨vϽäD -zH›|–[ˆ0ô2^Ôœ1Þ°ïÔŸ†öll™ÎGh‹\¿nvçÎ{»ozKz«Ã{÷^\ Ûsï8 ³ï ¿C54©†ÈWrUfOД°õqÒo"x¤E~ìÛÐ&¨éôå÷ºba Fùfpú3`Âc§J4×Ëή&’å´©B!#©|¶‹Ž 
ï`QcbDé'åC$)w¬öšbú,ÿ”ú&íriÚJÚ0>†öõÍø¥@P×Ê0…î­ªíô#Žø)ýø¯ïêXz&ô¦Z‘¹î·öKc’n¸P¹PµooÙäl¾°£ ò‰L5b¢<áµ½3ÅOóÿEØ=®Å7éðùBwÕkéRˆ2P¨"GUé|,Ú¸I¸ ö_n” ç,|è!“£F ®uƒ}$´¥ë þq(Ûéî:€øqøàï…Ýü»º"ñ‘ kÒ ‹ßŸ Û.AyŽ÷ïÎ=~+?UçŠ8œ¬‚n¶;E‰hÞmc¸R¹Iu¦ûž®-ü‹>¦ÃÐßÝA¸¼¼5fBEyž¶>0¡$¡ÑØEølŸeÇ¡4²‹$]sœŒªº ‚šº©™ï€¢Bzð1ï&5匢Jº’ª¢²uã1ç¼ôÜ%™ï¤lh+l-,â-àmÍckŸn? ú ÝL;Œô¥óá' TПbU8µIÛbÏ©©Nrs1jeµéyÄJ̸nÑRWÖn]D9SwŽô’z0P±ñÍ*SUùh®£©‡0yFIa<Ã,ÉÊǵ8€u5벘2AùEJÅÀÀ¥xP[E~ßNuåà/Lƒ6g¯eƒÊ$g¨¼7“wo$:yͪBA¹0¢7•«g¸{ˆâæîNöžî>¾¨#þÔÚ‹bw¬ö´õ~⨄ò†˜Ò«çvøÁ2æg²JÁ‹ÍôÈLxåËÖN.³¥1ÇÚôIîÿ‹šo2{ˆ,n†ÐdžœˆpÜxÀtÊ$êO9f½øè7ØéPRµé-æKKDºÎw´Î7ŠÎwêîõ²«Çó©Û³°Žwê®Ñ^ýÑ•wbìÚ'ÆËNߎ°AIž¯êÅY/åY)È·pÀÙY+ĪÀn• îtJ£+†§é"[º'q;b¡£[ÅÒ#˜|i}‰zë*êñ‚Év´š¹‰#n+‹±åØ<ü•²Áz¨0Á*÷2²l^–Àû¸»÷%m“ð¸Bsì̺QïŸäÂÝìƒÌºØæ^ØãªŸ ­]žKì~}×ù ÿ-™pZ¹qÔuŽKÚþ›øõ‡¼jŠ`«Q t uª ðj¬cý™õ,O+Yx®#»&ˆØ¶RkGöËOOc%!ë_2¶‰ëÌæîóD«oÜÏ›´fºÑ”²Æïš¯¶ëÖãÕŸIœã±Â–Ü6ݦî³ʸ»z‘·ºLé>°x1_Ù÷àÍí×5›„“œq㿺üú7HÑ-“ö’‰3‡-Y&}MÚäÒˇFØV¼ý8cp*—TÙ™2VMÕ´x Õ·,©½fšû]s*¶B_…-[‡-˜dš•Cã!h'{'q‰€§¼š:ï¼›'dcpüÒ&unCˆÏ6ϰÎüBvÍÒ“<ùBïvšå½€:Z‹½÷ ôŠ`Š{Åî.êý»³(tƒt˜Hÿ˜[Ö–• Å0?*Ç›Åz÷éêÒtVð ³Bò_”ëÿG†á_>ÿ¥„ˆùCþ ø?°…ÿ+®ðE²²1ÿ{Ì¿€ÿ3ö_È‚þÖå?aýXÿ3Öõ¿Åú11rüX¿n'ìáfz­ežÏ ¦ åÂôõæ(æBã&•å–(ë:è„âxÍÆ+`ÖÊ~+1µÄº º“á‡0t A9À ;Üðlć:ñ,`pÀôxbïÖ Íè®>Ï[¾Óß™÷¹¶ åíŠçÕ ›hîLiB P5b—› ô­&ómÈ 9(p}ËÓß\4_ؽDpsæíÀM×í•VÁUx…ž¦nÛ*Ï(8+‘1î`¹Œ#ÑÕ]^_†P¹d€òKÿêFC‹26~ž¥FË£èëþj¹[Õ]~ÓQ¶Üì‹:\‰«‹6³º9‹?_$r‚¾£5F{›•rhð”í¦vznGæÈBe¿ |i°3ÙŽ» e¹‚cŒ°Öè¼G`+£” ¦jÁ Å+fÎóF'Ô+ЙÛûNù¿ª>Il„:í!%Ô/’csØwްo,¯WípI\òv.£[â¬@i( _H"ªð0Û9‚Ög|è¨,èQÈkÁ[aýÉf,'qHõ°ÇyøÑq8Õ‚½£"Nà(ùlvHСóœO(Èü–ñ@Ëùty÷í«óÆg¦mh<€…xC„"–wíHf<éžÐ+øvT@™ðp<œØYž &è"Ÿó‡g©f°Ö­·!êse¾7 N;³¹“‚Tò|ÈÛ!7²Šo˜ÜÅr3Í… M0Åu˜¦{Ø/í`òÁ$×­§9é\1ì}½`&Þ‰á”';‚„Òþ§}˜Œr^óÝgà0.û -¸ér€+B¹B¡kØyö ¨//ì=ÀxÒŽ¯ý6òd‘2‘wȇª=E^¦7FÁß$~ @g¼OÉצ¿˜øüJæ=ÏnÙÁ^$Ó5NŒï§ŒwEÖòÁ«þžæó>Ic÷òâø¯jM\ÇÖõÊ‘ïäT¦yb–ËaÛщ öøDù„軳ì›|Iž(¸º?o‘ðÛb?÷ÑÖüÝåïT o^ó³Vë#iÃvqȨeÔ!³2i­MZžÞöûÊ£²&‡ rÍ£šS>cp‚þFý†>”BÇonÙõ³˜¥]{\ƒZÓD.8@&Џ¥â•ÓHÚÍý¿ßãL/…g,…q.Œô_M¼sÇüÆl\ ô¦_Tè‚4Ùˆè¼?½ø2þröâã Ƚ¿|³þ‚Öšõ wpOáÁ6í¶ÂÝìÛîÉ»£ò©~+í0áú\ºúg—Ô~¦5¾Õß^;ÕÍÍ&]‚©y!ÐéÀ×…ˆóWWD$‡/Â'ÈÀÃ5ÀG&ß•ôwˆrŽ”M¹O +úQßõtÚ Ž×ÏväuoOõžâ ÉOWq¤Ê€VÖtLjWÏÙ*®ÚÎÀÖØw¢§]uðá¸5ù¶L¦|0·Íñ$Yy“Ësò*Û±8/’r® 
º®X”³²h+8d/Q°swo~¸ï¾Öï[Ðü øƒÀíÖðÎã;dA’·6X¦ƒ9#®Ñrh^<¡ÌÚ|T<®ú©X7¦v¸µÿTæ‹Å îî•ì­ôÍô øÇí%( °$F–ËBÆtc¯1ù܈ְÆ7x66]—‘Ž©×xÇ”ceàÁŠusÌÆ´V®îðªðjv]—œ0‚srXfØ1"·oF•\•²•’+y•âVÞGûÝû¸—J–âT‰­òFYºBGñrÉøõYä¹ù"tI¶Ì·0gÑ(—è¹=rkkúÍK8‹#ì4M|íÕ5á…/n_tæ!Ì/€¶)9Ý(רç£=ðÊvݖަɓ­mÎuj"—âjbuÎQŠÔêk?u ö¤sê: *µ^Y•BçO-¨“O6GYºß=Y܈JDç¼.òiA«ÿïh˜_©ôÕ ØåÜQêœ÷Ò™‹›ÄC^ ”7Vwl_l / G+ލmA¢i™òi C3%Z£¦2"gÄê,ÏèÑÌ7DÕ²ŠGº7žÓ¯Vêèü÷™+þ³îð J#$Ü„/^RÓK¬ ÕÝ. A]žë¼<« ¹LÀÔ!Óì,+¹èÕ§NØ¢8Ï+çAt?Ø.hOÄ›<'pÆ §Ü¹X»?moL˵ó8g¤Oì–¢ú/XS¼&1ßù‚üºÉÖYê.NûùŒ÷§š žxƒÚù¦vÀ˜®øQÊm5倞ªø{Úµùß¿÷"‡Ú¡NlT[( ÏFÝ`Z Kû<Ûý"[É›ýÞÉ×ö'Í@z`¥§\ò)¨h±G†ó¤'¶''B“c){+¿â+ÀµU<ÉÄî[Ϙÿ¸Òíðûië‚®$½(‚)Uù¡¯Œ65Èxd…ƒöƒøíê€r¯&`‡¨ÓŠ®òxŒÅÎs¥ ¢åݤö9déf×õ€ž¨G³ú/…ÎïáÔö!Í-{‘ý=ñ²îCš¯ù¼ Þ5*ò/ÚÎáõ9NÉw<Ü#nýY¸üëKÔM•”ê…nð€ ßàï ðŒßA „Õ 7Òh“‡¦L銑Ä×éRpnœ;B:W ¿žz@7ý™&àÒ›å9 ‹ÛÇ×Rà‡½æÚK´ßøð„\XàÙ)Ý^ù»-ƒ–è)a;ï2hð]¬&÷h®å¥,ŸjmÙ¾ÿľ¥0¹J?| æp&R¶}FIßÁòûä;Ò“çÊ_Dµ–¿ ö mäV§×ݾ6÷¨ºš,Êx@ú,CïüÒ?¼’ ¼ÓQùòÄÞ …š".;N[üSK\òV_ K õŠ£®ço¢»v6Ó‰¬©I„è©LÁ”È~ßÈ´À¯[5ÙÚÆ|œóZlë>*Èë:È{(GäÐÚ i#4zT¤ÿ‘ ^09§¿JncŸº9d p0:'Hž‘#ü‹’@ƒçœ:€ÙÝb„þ’šX‘«–æ®_%áµÑWãH0Ð-áœßÞS¦i«ÓMý+-!þ|"ˆrà»7&Îì$eA X:`4K“ŽF\¯gB;û„ÜŸO9¨»–ÂfJØ«×Ïp¿ÚLäyÖ²¨[Í^õ (³L²è!·&ç,s2ÅGžé"Ó]¯Ù»|³‡Â'C¼Õ¿¨?é¢"†²Ëì._‘7EK½BvÏ4™Á#@׌¶ª¶Ü•¤ÏªÔOãsn"ß&+ƒcð‡úætäÇ PcE ëô5àƒ|(Ê5âÍ M -zùæY–ºÇÕœ½¿Š°Yž¡QÃéúŒé‡Êó—’3¹Â9û €©§.¢5a±ç²—WqÏúüKòæœò¾ùG¶z¬ü¡pÙ!G“¯€£¯³³<¿8gÝŸ«ëÂqµ@6&òŒCß2žC…½¦Ch2c¼UÛLO˜Ó?^Œ«C…æZßËüâÄ4ÓƒUÄ`heòÎÍE„:¹E…ƒ^æu@¬óžgÄ£Ì2Mz` gÄE,Ͱ¾ûx¨¡ Rû9ä£Sx&öJ•/ÄUˆLð‹ÌQÏ—"J,•&J1^¸Ú`Í Ür"µäoº/Ò/š·Î/NÒ-” è_Œ„ƒ}"wC M)æÀ¹žýËþþþ 0ˆØªioWM6o“ü¨%‡œ‘WNϺǧ•ß²0mï±;0­}$8-Rܷ㼦mÍÔ‚ï³é·d6{Ò Ü¥qû>8Úz6HW¬Fd›.Çhîè3oíG/–H>t:jº’GO²)Ý4_%O¶è÷Co–ÀÊÊÊô¨Ò3&Z»Ï>áö:”Ý ¹îò…ðy½ïy>å¸éºº™ÌñÚÔí}͉t³¿Ìж Ÿð‰“¡AuþëQ°e²köhXç'8Æ#ãö"”¸†P¸‘|,nºxŸ6‰I1h™Žß+xCýî'/tÅ¢hD“ÉlïJ}´o§´Óò÷tß%ö¹œ÷{ëJýÓ¯6 ÂÿêŠï/ëK£^ï6hC ¿Ñ^lé¶.䉡¸RáÀl«¶®ª¼.•vå Fwz» @%Ì}„úóB™…ÞÛ;Z ëÊJ­…\¶ÿFÌ<%Gl&LpR2Óª¨J #4£ŸkÄ;ÇXYÚ:N]“c3äLšåaW;dò´þ§ÀX¼®½­ÝïÞR»{mÕäfí¯ÃÝd~è“.œÌÌE2h±|¢%õj-kÙßn3½ªaîŽÄ}Të ê‹êfWý(2{¢·#®j{3±¡_Lçû./R~Ž´p&²{†³žÅ{ÕJeŒðŠêž0eÛd)‘¦ ¤7/ªGŸÙjY[±f¿ð%C=™.ˆLÆilÙ*I+ihâRX%ˆJõAKôhp bò¸ôL&Û:Ž4¿T+ÚQm†¿01ikåç-gðÀ¶–Àpª•b2èyÖ?ÒÍ<àéDÐÊøÚ 
:Tj§sžô2îyº~×@ÕdÂ4S{½ð"Hdz€ØMAæbé:i3øõi‡€°&êø‡ÜqéYq8ˆTmâfO™V?HO}¸ôëÄIôâaÎãp„'Úm#Uú”?þÈŸQ[ã{>2ÃÇ´Ÿ/.†y2I¤° g³ÒZLÇõë€c $ÔMìâÃ\½½) ¨lŸlØù/þµàCjþ…=²qw´•K–ÅM!+0VÆbÔþÝLÎù»"ve6hm­ÑnÆ ]*$ åÿABŒ¯y€_!PÞ§D¼;ì5Ž­æ!¬m“,ÝßÒÌ–£Éw¶ø‰ãC+w·\2P¼ÌÓŠFýùc™âI­ù½sõjsEç¾Õ>«íz³¡©ñð4Üv†…a˜3I1ð$S¡\\”¡aÂnN1‰t(ã…¬IÖsŽ8¿z`è¦Ûyk³Û&‹ðºüa¨7áºPúP“ (ºÄš”ñ*<„!Pôç^¼Ç,Cå ÀyÖ[ÏUý9Ôšal²Œt‘)üÄCŸÚal ê¸6±½¨×ýWðx0 ßz¬™N76ˆy<‰n—Cöˆì}–Ù—”ÆàÂÊœ:‘Íî.ÒO«ËCÅÅ&X¨V£VQÒVZ9uáÏŬõ ~ Œc‘ø‘¨ü)Ù@ØœV±¡QÄìX-Òd™téuú† @¡ÒëKšü,4•Rè!hôq5NF‹M I¦„Ý BR0î‡ÌLEËL€üoÿÚq3¢pàê‹/þuÿgÂ=¡F„†-9‚ýim÷Km”Ðü…I úE—©VBŒï#Ká¦ÄÀRSQ™pUÉ7uÖ侚@v÷(õçf@mð®Öô6;É?¡V¨ý,™Âw+&‰éi»¬x~gò¼Â2¡Öì¤3x’Y°Ÿë;þnDZo‘¥ªZí'_‘VZ.ÓV.å‰"L³ÚÉÚœ™ÝCÖã5úxèšÜí;ëtÏÉBÎkÛá·¸5/ºŒ•·Õ~ É{g†m80<»ƒÛ 5W¶ÂvùnŽ~øëÓ×'/À¯Žª`x¬¼#å¶ 2èØ€±ð`ÒŸt²o8ŸóÈ£F*Õ®tÀ`Ïåú°™å8Y€1|ýƒÕ©±ß`ù 7¥H™±bà;"iÖl÷§ Eçl ¯ 3€ò±ë ¢dí MMúWœ8«G®úbFïÉà äæÚ¡ˆã½ã;vȈ*èÑ Š@4`EFˆ,Úb…Waí­K·Ó§™­ÇHÆ«[8ÁJ6Ì TšÕ©6)_3 äa¶&òœþØ£¶Õ(«tðž([°+WTîÃFâ'h¾V–ëÆÁÆ·sAÞ#ÛEM͘¨»Võ2R¬3t:G¿½b¼=—ðOOå™fïÉ1¨sý|³÷|MÏKÀ•B„>g»RVü¼ï­:¸“h³ç²u^Õ¾™6‚ì1YœÊPD¬Éýñ½'‡ôÏÙ†Ë!tâ2ƒÁÊÑ/7o#ºÌ¯§Þ¼he–â_ÂBòIHO&TuÀˆ@²à.a¢ p ›õ&ÀªP¢p¾)R©],äï3™”#PÎq¢z,Û¯ìC*ë{Sú§Ý(ý»•…mW†mã)ê7BBUR‚–³£Êð£þ¿à%\óoz‹µx*”<Ù¤qgòQ³¥ý*?æßÃ#ÁÁ„¤ÅE·zæ¤gI‘é$Z}“®H/=íeÈìá Åd–HJWã$m÷{­ÙÞèñòÆç.ZGZ×È*úò'Cl<ƒX )˜•¦rÓPgRpH &äKG`4MôÄ5:½D|T²UuÓu‚úò­"=œ(ÁgfC\~{MNÄLÞ±›Ò^V<Õ ––— §R9F€¶Ï[œ“~Kƒ†˜x9ÈÙ%)¯YU}C‚V{çë,5~$±š/0]³†ëÿ„,¢@©1/+ØHW´> ‡íÆR\Fgg ©+ÂMz½J¢U+NÁg‰WÕamë Üý ªNUsBÓõpK70yÙŸI/]íÎŽ)Ÿ)ö׼㠨žš…<ÆLŠ•;{“ÑÐÅ(kZ.† ”’¼r‹’ r*#¤ ^$Ïes Õ5æ1ò˜r£Ÿð'²*8°c[;#C¨gJC:x ¸òÏ[®–ûòI/[œ{‹¨žÀÏÈøw`æ 4óÿ‚.xÂøûÍvb*7=ÛwMm„=©×%ð´Ã‰2èeÅ=2dÈ@ ú—øÌGzé+¥ò–VuǰŽ{"÷Æ`4sИVÕ•u\—}¸®¸>Ú¸n¥~ßh<<›rû³®ðöHÖ° b:‹ü‹Ì'÷gÎÅ32cÉÄ{ŠîåCñð~J”+žDk pÄ?fJ?û—0÷|0>vCZR‹\k¬>´'6Ü“ÜkN´çÄ;:“/Ê™ÂE³Z^v&ÃËq$aÆ{‚…©WSw·ù÷f=ÓuaîtVêÀS$åmËôYûÆ/´7û a½`e¶»‚[¶‰0÷•Üaê:sîQ=Q¡d3Éåè eŒÊ,*I6®N]× ¼~QæD–UÇ΃˜€Zd 9iIéÒÂnÒã®apß ¯‘Rû¦¥(j}U3ôÎåj‚JîÃ*/OZÚy3– 3f>ìéÙjZ÷¢£pDù†¦¢ö£G¶¤-å[Ù”¯¶#>Éɧ@’6£péÃUb–BLBõõÂv5ÇêÚéï < Ÿâù Üé¼fu–ªIôýBzCW¯qÈ’7•%ÔX&Z´ Úæe–½ŽIç±ç$Iïç’K ´K‡pRÚ}7–B¶ÓÇ÷û¯ /a!›nŽ©Áçs˜ ƒ kkEà? 
DÛ~áHCÞ,Qj‹:ƒKÜàö‹B.©‹ Š\§ý9ä”§émÇÁH@®IRù J-ä›ÉÅžæÃ¤«Ä´1©Ó‹B¥¡»–‡cåí4ž¥rÖÕÍŒ˜¡>ˆ°%íæ6'O6€§UùIÉFyÉv7UG9«vË{¢ršÈ¥E§›±˜Z:hçÊum”Te‡jÁWÑoƒbÆCGoqsvøjÃ)Ÿð©ÆOúÚ ‡ñŸÈrgÃOnJB6ä"¸½HL/͘©u»‹Â3p7PiLaw¶ ü' ê$"Ò“cµ S9‡³:º¦ªÆ&Hš3èžtK‹1˜xz²ah4 %¾o€Z_j6ßÇ¢ÑnÈgŸ5´]x3—ÚJŒ†Q¤…&l²¿ÀUµW+>–uG’'WS“GptŠÝ[½1žzài„-ï >èKÙVOB_-k)Ú8^7# 7œnâ׸ò]±œ:­Ë+šÿªU.iïkYÓêö×îÕ®j%»–¾ø€]<DÌ1ö2¬´Hà4,a‚Z°¤óZZG›UW!­‡c"6[üí¤’W  ZN.K7B)Ø6!нµÞyЄµÍÒ£º"Î$Á¤@ÝÆTeé:AybèjNI*€-CQÙC¡ÒRNtö¬ï\Q ®ŒÐ¦` TìÂ]Vã¯Úú2>òÇ5:Ùxc´Ò¡3L©¾s,Š/ŒÆfuäµK¬RhF ¦GIì;ÎÕC,àCœ&»¼ŸË ;9"}_Èx{°0Õ|ˆ=ez÷Ì *"ú{WZQ®"ÓÚ/ 2cºÂ[v"#½hÂÞ=úµÌ4o17ûÆŠœ•˜:•XòE?ŒÛêq3öôK‘õanZ^AG kiAžA^…Öxˆ­9HQоêIG´F8­ö€I£Æ¿T¥\DrÒòçºô2ăUûe¯x´•YÇí=f¿’Ž“c§¥¢ÂX[ÝYÒŸ~ãºvëûùò†ÑÓ¥^÷œn‚BsÁùô8a ž#\«ùœÓW~Þ=¸>Ù†aÙÍ]~»{•W—‰óýØÿ§“ôò÷ÝJW£A_¹{óÀ1Ž ƒé•Rj>Aؾ¾ÆX k°.l8m¾1T®Ïàe×ø&7§DTøߪ—5XuÈ {HêÚê›÷+Éáô³3,lt6G»x x.a{¥m¤JÐÂh_9+£±Œñ@ †@Gz‡ñ ÈÎ^ž+)$"G¯?Y¦y¸¢ACi¼Vµ\Z‹v…'}…ðœ‚4…Z:N¸Ø†‚Sg5\˜F8P8‚ð}Ý5g²CÛ †&„bÐ>paV‹‹Øç]³µw…¿„NuüJ&‡·–d”¼ïXX>’x‰£Ù Ò®(ºÎHÃW Vù‰Ä[`F6ì¨ÓëÆŽù Ú(ôQ»}ý š®ëÔ(¸!ÏìÔŸ„ ÞÅP¬¡»¦»§;¾û=aSç5ïÕ±¿ÿª{¾Òj̶~û¢cÛÕsofî»%.4f­©u¡»¹Ï«Zÿâ5·áËåÏyº”cÖuZïlÀíPhV°4“oA,Å=‹0 ÕìöÏÜÉ㯵¨£·ý{ûæD@øDXZÕ&§çnD¶É׌ˋñÃÒCŠà×î®ûa.-„ÑŽ«ýñÛïBÇQäßZ…×>õ^èZ$EÞ‡/ÛA]Þ’º‚úªâ€'¡"hÓä}3ÙÜÛe3ߟÛë°n[·ÝWLP*ú®äÙ=k­0(ªiÇõAò*Ås`:$p#HK|øÊ@€î½fà¯d6lð[ w÷hµØ.u@r²IÃɉ‡¼Æ È.ÞÌÕK¡¶©Ìáv¤ädÆ´ð nu•HÐüH¸Ôp&K§Ñ'²§(ÞÆ€œàF/ 9‹ë˜¿Á±³á‹‰]ÁOÃ2#i®áí“T¤÷#¿ Û¦:„¥cëØI=«"±ç6IZGæ_qYì• |¨ÊòæcPÆ€µtâ5#°·Ç„H¨»ÑAуµùÒñ@5›æÈ2Цå[ì®`S>ÖÝâÆÇkaÛ§=D…Œ¤šg¿¶YcÜt6­òËõ¬crügÈC}§t0[2¿jŽÀR_‰ÿ€im_=ñF ñš]9 ®† ¦É=Ÿ6j¶¤…†OÑÀß–¬ƒõpõJçÎ`UFÛ¯Y"¯…¨æ+ ¤W§krgr1ººy¬zòð=¥90:X¡”6B°`´V¶T¶bÙDò…áNyÍ[©GÛØ(`,uMܸܹÄ|áV¨nHåa»¿\eÉ-!DzwX§§ÛΟ1rZD£Z ‰ ½1ùr/Z3bÆmäàˆ32=‰<åÍb{ ‚ÉË+T0Æ÷tŸÍs S?SˆhÊŒO1oºŒð†«Ï—OM£4 ­Ãرyîà OÚmº ¡ ìýzfj)-L—šnÈ'øèâ ·Û±8ÂG—Œ#QRä±SU'²Ô“_|dEü‹aý=à@Ánd^VÂ((“¦øMøã–/ {;WÉdñÿw–_æ\ägòJëÂàmV˜cØõVCA)ÊP¦ñ–4Ó“ÕJ7‡\'¢^ÄÐÅÝa‚pªµ·cØ×NÔ´7Œ§•â”i¿Îá|?}¹OÉvKC·Y¤*öìy§KsLœ,"nxå‰f sˆnëdh¥È”­±ó`•è=öwÖWà;3ÀwÈ™£ °¶Š'3ME6¡=£½áNàKëgöñwÏ“æ.çöÖ¿MCÅŒp¿JŽ|“Ø­ÙßOýn° .&?MN-)t GUPCS…{eC¡s{äh˜Ù.GŒ~€ê¦V%$ŸÐ‘5wÚ-Múæ A¼êOã–ˆ2Í·BŸçôsfl:˜‹$­N¨¡Ý9§´7‡(.¬ìb!P³²AÎc(@®Téë‰Å¹ÍmñlŸ°Ê8Ž 6˜ªòèÁÚ-d<5¸u? 
±67/ÇÚÁYÁ û“˜®­W·_ø‘(Põ»êé¯B5¡vuÜücˆÆñk*j긋öE*vGCÚO€Ø_ëú²óö€•µÌžV—»MSÙ™ªÎºCs3CØ®Ÿï~,Snû߈cU„ÔÔæ+ÝúwØ5·4›[L(0ªõþŸ ~·A¶–mÑ+Œš&Ýtëʽ‡o¬– é+¯ë- ª¿¯øm—Ú(°æö•/>»Sý©wXo­Q¥ýïR9×Ü=šÙ»ÕÛÕ’ï6Óo&#lÖ‰S‡-D=£³1Á4à›Lð–œ12éûì¼áäèW³u[5®{=_dªk_jåΦI­" ÞaP(„cWè ½ØpCÒCl­Å=ŠªRîÅ܉É2ØefÊ.-š¹ˆ}À9%lëÏêÞ•"p±JCÛ,>†Ú¬ËÂ4¨¼áWD ,-È¥líDiu¿ Àè¼×Šv!ô…}Öd³®zŽŠé¸Ôê‹Ô+8ßø '–…3¯>¶Ðg¾úuÙŸ†i’a,;-¶oN:k¾Ì¢ìZ†—õ$¾JÔfÁ_& §KRÓVÓ_£%jYñÆ!ûª¤GhëìcsïAgÑ¢ZÓl÷q'¤°ðXz‡ºGÖÑ™&Í™˜ª9TІP¾×RS-'i¢ sW«À-!ßÐ,ݪ'N ƒÞ㈊ˆ"ø¯gWf¸5¹hø”j«¢f•<‡ôë.Ñÿ¨éË™³:êØ¦Ìš uÍ:ÇP“ÝŽÁÜçëñOöU¹ÌŒl¾ÃãAƒ> zQ£%ïòG–ê)%Îúg­{å³Aا¬Ÿ=𨛠²sGd«K¤“ÕÓ]óì²…;þU%GvZs&ƒÜ¥5zA=´›~ÅùaÑ #spVc ‡+–¦¸±·4¹BÂQ§1¡»5îòôâÕñéNJM¥â&;hWlu”=¡tؾ¸ÕC}Ÿ×ð‚—ÑòIT‚)Rd*•ɵÉ|rÍðxÊgÅlØžÞ:_¬IWêT|Æ×Ïd·Ø´¶B= Fm¿¦/ ëÉ=IÓŠ´™IyhæÕÊ&èuÊsV±þ½»,¸ý®ØÉ¥;¨Uµm/ødSm W¯Ý›æÒÈVdºLÎÍô¶;¦»¨àΉ#Å6‘|qžöyÅPåG€l—šÁ´ÒyÀ÷Îh9›Ç8ÛIiç;³<—ß“oR(6%³)Ck Ö¹d…Ö’¼: duIí@ëþ)+7Ô44“™+¨·ÇJ§âÖ´#PHWeX‹¨R“?³tnéÐðŠÅùع/÷>bK‹*æ%ÌCÍ-s®r̪4X&…©Ua¯_<<È^à«:R†™á"=­AqM“OÙŸ°`td•“`?×6c0š`Ô/’³T›Œ×%¥8úÛæá‘eGŽ¡bÒ ^­RÞ·óÈfS5G¡9UÕ>ýŠªþýŒð)Tƒ0J© nŒ´|‰˜$(Kϵ},ê¸W$Ûÿ~Ç~xÿ1ƒ VTZ[Ú7:Õ&Ûší"M‡ vBÕñ')ìFqÿÃþÞ·'¯–tÃ,¯î¼žWzP½Á‚a\ÊnOb“Ò\ŸlxwÑî3޹ÚÎh¬’”‡šãsÎï—ùÓªxºÏ7FSëÝUKAC.<ò‘ŠÒÄ7møë B¯„»…ÝkÿfÝÓîë1¬j¿®âj"ýOA¯è¡ÓÁlì·Aeû‡v´¦Po¤Ãí­D挓q Ä ·_er¯¥øhùwcôYqÔÌ}%¥}ÊÇP=[žsœJ£NSNù_wÈ Šy¤óÀ†wtᣳÎ? 
L{ s)Õ=Ç‚ÛÜhÂú1Ís+}øMÁ»©ä@ŒÌ‘¾C[Åärܵ^jÏ)Ó‹Ú¹=KvkC'ì©–ájÝ+hVœšõÕk¸ùIòLël ;m#ó¤k³>“ìMñå­”´)o‚¬²Ž)7s¢–Ô8ŽÁ’ú¨Ïó·r IeØ^r!ÉcÁdÆjÁ÷ö*…©w`ŒU'§ŒUªL·“u2î´²!ÏäÅïÅÐXr­.ÁÐXʱ½$ƒ¤N­Ö ží·ô"&“ &q "!êJ&æñú ê5)ú$Œj7qú(j7©ú,^¢!,äƒDƒJ¶Ú„ƒœ'—ÅŒU¢É/[Á¶')k§æL šWº)úHí; uvÛG/u€Ã[òTùÚ§6Éú Ž® j ×”¯0ŒUü¯ÓÈ*Hím±ôcß<–yÛ{>Ú!(ä»ß´ÁÍ{Ò}0@ß=Ü0Bܹ%Ñ.Í“ôÓÕBzrtÇéÇVMÿÉ›¸ÿ“ÊdéE[½Œ×«}Åý'úC:ˆ¿«W è8jôOÞ$Ø9HÕÌÝ-™MZŸ§I³CŸ}Saÿq›rß׉+Xúïc!Ûx ŠÈm½è)é QŽ*E–€[N.Á`ߣ[°uPΗ¨ÇC&…U¨‹æÉ?îeN6ì«ñ#K~UòæÈý² [2iŒî”§ÇÜ” i„~FÑ5À=ìËÑC\ ?ïú¯{±È3D_áÆcôwd¤÷u~njÝÐT'ú0Ç¿€ÐO£ä<˜µk:¤×J|0ˆ?HŒR¹¥ôÉ’W¶ŒR+ÙBXóè˜Å—¢9åÈ’=ÀD–„öM˜ý>¢ÇÔ+B5 õ\ù=é†áóŠáFFÿXä]ãüN¦@%A˜==åσdé›íâ$­P &ßQüaT’Ò¼Ôû‹—tÚ—vÍ?Žã ;ÅîUÊÞ¡­ÓÙaF“äI>ó§É¹ïkò¬C/¿ûiЍr®Ý©y?x;ùsð‚ìÔ&”§˜À,}‘Mß«âì—î´§IóÇÔêžaõWPsp±¨Ñ"ˆwM°‹ Vþ“/­*€F"~¼ŽšsÐÿ˜€g?F¼é6p$?]¸v-ƃs [2]~\”ä@Õ`Ò ®{ËÚèñùŸ*I3Là­ú_D*qç´½ë|%k]µVÕ–8ÀŠD¢¤^3ŒÄ÷¸‚NÂÆ¡Évdß (3˜c¼}#¾m¤ùeÄ9åQJÿ%BZׂ`v«v¤ª¡W‹ãô£³žã9»`2.#?ñq¦ß±È«~éQ†Íu䬨ÐKÔ‰U’€Ç3éÞ¬„úåïj1:=á+tÖ±Pg™OtÉð{:W™,ž³&Úñ.~òøšzšv’ÅĦ{ûˆYÀ%»è?ØR¡Ža½ÈˆQ-Ré[§Þ ûÑ|‡˜5ºu±£¤ Ùûè’`TMÔׯË0â#ïý¥e¿kÀ2›@E(¬ÿªU}é˜'Q“•_VV@ŠÐ_Û Y&ëí°(1ÑÅô¶Çyp¦ÝÆ14è -\Í»©\Ú‰œxåZjcñÖ“î]ôåÕw–ñW:Nê—†Ç'±—&Kyè˼|s•^nO°bBqÊ"ý¿³šÇËaÎ7)ß¿¬ÝžÇc$Õºö&?Y-¢1mC)hï}Ã:: E6êš¼1º@=࣠¦î î™ßgÚ dâò“xÚwè›FãÔ1Ùx·þ³'IÈ’¼(xÚíª-ò쫳‡¦28Ì2Äåºoͯlã †xñÁÄ¥³A dWyÈ0 3/:G‹cZqÎö;±¶ºM´÷8ÚI‹ Ü)¾óÞ¾\Íb¶³Y’?,¥e«gßq²|L»^]Ø%®'Jô·3z°jœp‹±É_©¯D ,ë]nº‹†|Ç+@¿Ù¥gWò}› ™>ên¾ 2cƒÆmÜæmŠÄÅ‹7~ 0Šx‡n™˜¿aæµR±%Çþí1òé/Kô/‘L©‰û§«6v¾]Ñcr{Éþ0ìI¾™˜S’•{×ßÖ2žÁãØš9ŠÜÝ–ñæad’c1vÀzSPß ¡i0å<`ÊÐÎâ?îK{9iy­ÂAfH4†¨‚¶OÄFmØ7ðÇNJ”ÁX âF/¿:UŸ¹¢#ï=o¡m­ðpl10¤\œ‰B#9EsÜs|ô€Ò‚ßüvøË \stÐ/Î#A!1ñ1²ü‰ãA0@Þ…EÆjjëp‡”TPPøT1ÎÞÞÝ›ž"!ÁŽ ƒãK7F«#;ÚãúúÖ±v3}51?Æ@ÞÚªÞe²øÙʧ¤ŠƒFí júùq‹[›é˜è¸g¸ãßìGÃmMxæ¶“!€ #pÑrkig7b×È ¿Ç:ÁJž·wbŒÉWˆŸÁüÛMfçè„#&úÚ®®Rgg·ˆ Þƒã<<~¨šimÌu<Ë kL-mÌEœ/O/¶d†žø !ñ1?=Gèsq8¸g³h3×רb–œ Úã?zÖâ÷#9;úyÁq[õÕtQS“l,Ü„Nа¤\F˜œã-Eу¸«ÐO®ý1‰ž!`\ ‰å^ùX*k‹‹›ëDO1àžœ!Á5Gv·/?FªRð*1àõõáè%×@ àE>;Êá °À/¤I¸Êñ[¢½ÓÈðý‚¿Å„¿éÍùü€ 7=ùi â~ínoˆ­Å‹ çô,üâ ©7xæZ3Å <™A „¸ NOŽA˜½¢†ôÚqû.ùîÍÎÀ´S!Àfœ!²ûnægø1qH‰4ÊûÆÌÀ–“Szb~âuâk‡LæÆž”Ðq1€æ]^œŠ8õ¨§X69¨­°’ç—ÄL°1à[½‰é.¤^žãa^ýcl.¯/q0ƒEÁ>ÿ˜ËÞŠÅÖyëýaF2ÃXcUÊ]ÝLˆ•ôh°ÖÓ;¥'1S}‰‰ôüLLP)JÐ ’Ècn²ÌsTÚ ¸9ÃÂÍ3Óî 
ôÆ1T0\½1²(J½bYabdTC‰½]o$k‘41‚V"Ûaj¨¤ë± úB™«Ù¯&úë”KaϰðNv#þ‚ ]$´R³ÙÖ`nì½=’|¹y™ÄBu‰µjc†|‘’»¸¶¶ÕæR(oº$¿©Nø*Góg`”?oÄè¼ÆC²¦ÀǨÐÈè´Ïæ’8™äÑ/ËEêþÃOýÛo§ž'Y 1d8H}e`suô<[úÞܳÏ.؈n™_üX@l»/…韯(h  I‚6:˜ ºøs@D߉Uv$²)d•‘? T À ¼‹ëS´Bêw¶:7'©`$°$Fæ.È•øKP^ÎÀ^C#šÒQ!š_äßÑÖÅzmíþ/EfÚ£Ääcšg<ŠZWÕÚªsäØµã»°£Äð“ùž½<ÑÖçgØ×8g=ÚðíN~øå½4üòò©üF´‚/µuoÇ!Èþu˜Eã>3ÎÄrw0Št›î-&zÈ‘ ‰››`?A§½ÂM27üqâµr«H`[ýX~Tjz@Ÿ¾\†8ŒËÛx|4$îúüdo­ÖãïázÙ1ñ”uæB8ÀÉYÝkät”äåÒÈ©—,تCF þ><$8äwU554-”}ÒUÝèb•?”á,„,0Dc·ðžÏ­¯™!cå?Ëb»‚6by?HBŒX\Fy¶Dê òŽóò‚¼ïËb>lÍêÀ{¿œ5¢ýÍíÆÿ¸™7¥ ãõxZ‡*øHýFêÂÓp'ž ÿf“±#¾`’“ÀàæÑæ²ÿ¥(Þ2~…ÞgÌ]ò?Ë<áNÝs6y?,f7ÞŸžÆ{ÑÜ“çº3ÍÏ‘E'Ö©ÈÕ¶eò&~1˜¶Ž‘ùË{&s_‘MßÙ¢J^'¾z¢¶¨.‘§_ G@—©U¨U—}aüD2侘ïbL•È3AX²ìcÓ¤Èð¶kðÞ~!k©EÒœµZÁ{ðêOîüŽŒü 5|êVî6Oßžäqðy`zOÍšn%d›YQ£’åG˜¥ÏŒÓðJp.¤«ËRª¸°ßÏù-ÂÂ] œOåU ’>@FÈ.#ƒwÄËダÒI‹J EìºC^¤e ›D‰S}ö›.¯¥C8ŠÔ«ÖÿòÓÄ¡3ÏàŠ(þséë‰|4øó-בF=„›1—ÁÑç‹÷RÔÉà*c®ô÷n7gÌ.îü7Uó¬ é| XÊ®æ …ë+ïÔý¥”<Ìí¥Uî#Ï V‚.‰OÏ\ÞÎ}7(jìÒg®w&F`lݶ­Ð€ψN7⥲I\ãTõ<«ë€çóÏ'Gã¬ÀÕþ:¯‹iGZŽÞ-„ðË4DuÎÜ«IF#ßxÏÌÍ_Ö>N5™/âÍØƒ¸1R²¦‘ƒ@gPÄ«}‘¨›‚ö±f¤~¸úÏ8 ˜úT®$¸ÆÔÒë¯%¤«]ò O#TµÛ¦’ d$Å{¥ì”n4ÑõÞ?£¿‚ª\<åä"Î/Jàá%?°y»ñµýu¶O¨aç4ðÓšø­~­qOV˜%d3= ë©ób®U~£ }ý…ÈàÇÈxÆ|³>°ìý<]ƒsçA+É?µ62æyÙôéQVyÇ{ÂV üÞRI³jüÉÖ°ÎU`eÚ¸úf5€ênÅQéï ÉÖ½¯Ž“Råa£|Ñ*ÐÓשÎöÚ!‹ÝèÄ|‰”Àð{!ìÅl…þlˆ¬ Š4¡ BÑ ‘:Ö¿C¸@át1ZSîç©ö­¨ÊÙT‹m%#ý©ò І‘hˆÑμ9ðï{6G—òX•QRBAãí¯)ÆÒÉ< Tpä”ö/iRQÈù€È¦^åG(4g5î4|$µš1ÀMª°)òŽ ^„.ûÌ ¶ër h ˆp­Nº2X‚=·;ŒCÆ3à§@-LlžJéÏ™˜Ã&FV Ùñâ÷D¨vB**ÅÏ**ª¾ŽC7£ BÙ à÷Ç8<úðÓ Z‚5Ç+ê$ðKÀÄ1„ÒÃyèÎ{À u ÙÅÅÔ{ôñzئTág rkJm×®ðœ×éJ]Ÿ¼Q¾R¾9~Ñ`60gïUÛh@wÀ§²]…µx5_¬.³žoÈR™c<º]+«”{Õ¨®¨HÐeëÿòa²¤á+ã,3‰YÆÀ'Îò{b%FÅÄ^b/³?ðyQ›Ïhµ\3œ&ËØµ1‰ív8HnÕÕEÉ"wëôCÉäQ>Ö¼ÿ³i;‰;jO£Š¢¹àBç‰T¹·Î® Õ‘g¶ wñz®þPJ£ùD]Šö4%ð½ýaÿôÓ¦ÃQ“¡€£š­Ô]¨ÐR¶¡Xá]XÑ©L©ƒ£ºZ!œ|Ýáqb¬w±+d0Ç;§VÈ£‡WúØ´ûŽ_×*GîNà¢_£ ´óÏAB·"Ü ®¸%þà_È Ð½_}H!ƒ°}AÁ0‚/p<¸lh†Ãürx&eÕᮿå^e^á ïˆQ^á5߈SPðEørŸ„Þw:Ent€¸þ¼5Õ8áÿóŽih‹ýŽÙ‰íH&[ïÚq2üòØb@M#HÐ'2cÂ'1áW_Þ‰S8çàì‚ä€ û‰ÔRmL‘­+éé\zÿnˆ ‡Ë [ÈŽžþ{;9:”(Ê$dJˆ 6¢¡ø f8G®òä°î4Á°òØ‚ÞØ‰È˜A_7ÁöšCB¼|)Æ7~³×ÒÓ#óŸ ägñCÛ— êecK=«¿ˆÍ8C )ŠÊ&õÔöºã¾Ý‰~P§o ÞSÍ|p§§ x;DçÞ¬æÞøçÔJ Uòá™{…WÍ{„˜ƒ£jƒ£øÀ1zqw°yý}Cçöј€FvÁ¢~1èyQéy±éw°éuàékàÐWÇ”8ìu¡ÌsH8AÁî•ûßðîCîqû×(Ñ+ùG}ýó̇tóß«™é¼©õDlÞØr rsY‰®‘ 
gEETRg*÷ÉÔihÐ};8 …U:Úa!e;ޱɧ)ȦÙ5F¢àÃL¥ÞêB’BœvŒçªS!¥9oɓλ$|gK侘¶¯ƒ¾Zá"^¶!ÈwH†]ïtåÙ€÷øJ;{AÙä`ÓQ„ÓD󲤨V_ .Þä•ç²Dó}™²Á†öÁÝ9ä¸ãf ì{•¼x²G…ìe;¸@‡W¶Ù„î—^)ÈoÄNŸâßmD9œr6å‚RÏMàœàŠ3Ç¡ÏOØ‘ÎOÈ]J,}€…m;N1:pðFòÖÛ°Žå%¸¶Ý纕wİ&ò·Ñò8œ˜ôőҋfÂ°Ž£™÷<ê'ïô9#Óf¨€›2œt€#ŠO+·åÉ)8ú µrfÞ1íj¤tðSÇN¹ØâÂλDÿ„˺ž>Á;± îù—A»+SUäÿU_pŽ”<…ør­Çu^Äp&¸e˜íëú­J:-y‡‘yôô?ÿ5ydý‘N´«:Þýƒäsî (§[ ·ôØ;Î5Ü»…³÷ËGÿî×K'Ê—“qDö„»_–ùómÍí•õGŽ-{_=åo*݈>ª§ÓÆWS'©—…QÜìÿ–̲ï¯êqô}ª(í¡¨ŠHFíqèc×—ÞLðW vC\âí4Wè­´U(ÃøÍµ—èδ—'EñÛ@Þ=–Ä9®Šë"NuU>UÇì~¦àY ?âܤ:&•wÀ8ÊÞG8Þ}æDï ?òÈ'~ñPÚ‡øsñ…õEô…1¼mò~³îµâ&³ L)ƒ94f¶H¡¼92¦H sX}<Ÿ ÉnJk9»B2±ª\šáÞf¡n‰(ô ½ 欞*S%5]lMŽÒl6“‚6O ‡ï£~”­Õpåh)çü…õÀ›'V@¢Z«Ïdct¬¦já2Vjû÷¬vÄÂèL²{l ûäЯQ_Q„‘wÕí=ãEe¶¦ÂÑ Õêæ…õ¥;õÎ,6VêÃþ³§ª ׯ”e,ÖX¾+k:Ò¾¬Vçg7Ú÷Y€XF縉‘Â)…&ÞÏ´žG1®s3&d˜ez.–DÀ¹4(9À(ÔüýÁ¹-»¶AP8$bú gdóÈ ¥^ôðnØUÅ"Έ'ó~î< Tdq³ Þ$Q´)Åyf4.ìð>©m„‡6v@ðP¢á&ÏïôìòÏÝ¢_¢íDôÜsºh±¥“œú’µsmpFÈ›÷›!÷gÔYz̹Îöè“¿ ?ÞfîZ›”žAü²%>ñ”~ÆÉ @ìŽÊ,c×r¨ʰàÙòB´ÕbûQ])õß$‚XµK¨W ›òÎïÏë¼zžÛœ÷äs"âÄ$ÐÌ>þ±_häSàÂA V/€2,4õÓ/wL‚×%¿ûÁ¨±ø¯üiþÿ u66V–ÿÿ¿© ÿ×~5‚¶VFÿavÄü¿z±°°ÿ¯#Ž{±üß"Õÿ§#–ÿÚéˆõÿDª³þWHu¦ÿ©>ù/¤ºð«Wpê9! 0&ÁqðP\ò PO‘ذZõÛ§‡É‚Ê SHºgû;ïJ鯋ã×…èvuÇ&Ü¢Û#æHMwtbÉêcwA+®Ú°À&õ"´ug=yÁÍSÉ:A3Û!nÁÛ0Ö]wwÄòh=„³£ã h]£XA2hÇÞãRÇ©‹_šÒåF Ý%þ;–›.6¸pó¹3}»J í5J¡¸„¼’2†?˯¥ñxñbn5\!cé8+ÕUdÁ ŠÃD—ÀÎÃ,‡aMœã Ó‘=e¼kÉ,c„×Þ»èÑlâoxoâËT‘êä/ Ò‹x‡¯‡àɇIbßðW£éÝí;ÜX–®¾Éø;Š»Ãw!PW.2.É͇©°ànÜe>”€gôªÏmNÅÙ;¨ßl¡ÜUvÿyêÿ]™ý×\õÿlŠÄÌÌôïã_ÆGÿ2<ú·! 
ë¿sÿºþSÿÑÿ³$ÈÅ8þ™÷¯>++ë¿×øWì_¦JÿÓXé?›'±þ'¦:ëbª³þg¦:ëËTg`gúßÍ“¶=!Ô”V×ï8^·]h7ävÏþŠ4¡"U–Õ¯z®Â ÚÑ Lb"óI_ú¼Å‚C”';KZˆ®à‰Š1HM&\”L¨R͉¤Ý “?øS–nßß¼Õ˜ôV褬XÀ+(èóU”èƒÜ[é€>‚Ul£¼ÀmÖ6÷}Ñg®®©.]Ï]¥ÂG0 h•7¯²çj™t<÷¥Ë¾0[ ëQ™Ì^^d;cqê^¸ãfˆ© ÞéóÕáMZÑÂË'°hdïè==ÂÑ§Óø ôäëï#ÄËÏ3ÔKhy À[ÌéŠFI þRùrÎe~yÂÛ†z’è®W î–eûŸ†^öÙм¡GÁåé´Õˆ®ÖåtEŒ–!½Ì ê9cmLºÚ忉8jó×¾ê?AÌàסéòÛ¿õµçBž­c´ÕfÑÆ ³œ™qëi0o›Eÿqͦï¥É ]n'}»â—%b‹Â0½ôÿ…›‡*ÃÐå ¦2ãŠÍoìƒþÑÁlÃÚ– $é˱ꜷéÂu&Š «Ãŧßëûó“ãØ r–äJçÝÛ¡ùÆÉÁG¦qÈŽ 8¿þÃë/ |x¥<1tDðÀ»ƒO69ŽÒ$ÍoÄàŸØqŒß$è×2žÜ&®5ÅW™æ5à`ÏÀVógÆádêЯ±[Ë H^¢H°Þ¨&¨¹û«cÚ3YúÌÚê©G;Ï-#M´¤/QÙ`#üep&?#h‹§¬sžÒ¼ò5vÀ4©)¹ Ч†[$-ê2¹ £­bÃ)›×쯤ê<èà]4se¼è…98N2I<8: EÉ`Ë®ýšË®Öq³»}aÂM%)Ì_ë{ý^ÒâÃËÚÈÓˆø…®A…%}ƒe\ÔjˆÔžëýbyq¦ˆSJvf6QáÌNv0TÉÓ¸i=ŒGGéJ³J5˜uñ@Y| F±Ñz º0òâKƒY3£Ž5Çz2–9lÓ–‘ôÊ¿K—ñݾ%Õ˜¿tµË“ƒp{`’Õ`‘iòñ4r+èàŒ‰¬¿2ÛæÑ:)ñ+kˉ¬˜ÅZ=;Ž÷4Ž!DË&,H¬é(EzÃY ÷èŠë!Vì›FLh…ZV”¬š¨{FYðt`™HS†·Ã† ‚‚ÃϯUS¾7g’t$¹Z¹VØæ¸÷­6ºðh½½^Ò_ðbÅf“«x‘‹~X@Â}²‘ Íè\4N)~Ø=lSNÝ¥©á _i5%øðÈŒ¤ÖZkCùæ¨âbahS¤‘rËk†4šxžV—Ét6>Ç'Òóž·.½¿>¯·×Žù¾©g Ü4 dWm¸MyÉ9ýÍšh¦t@ã Ç¢d-c/šAqÎYÕ¿¥ÐÈœŸe.$&Çþx?iåG0˜8jÙea´¡#eÁç¦ÙÄHÆ2j^—B¡±æt:Ýà_“Ÿ[·núÊõöeX¿éı$ôÞ¤pã_ª)™bñXÅ…ÉØ. 
Ç„ :‡Fä ÁFCGs0{}Èñ¹§!ŒÊé60ÛÙ+9tœæìÐ"e/wX´õÑ™_j^\ÜtXtdÄïk‡ì*eîj˜Ww©4²ÚYÏmèaÐõ"!yu@I‰½ŽF=צ%/¬±æ·5Ù í‡d«ÿ6K¦Öh¶:o¤ŒV]JætØí—D ÁÚöŠó -Ök9‹\$)#ãvü©íKœÙEàyº£ÚaLžºõm_u@ëÛE‰`¢*^Þ'i˜™&ƒ¬—ü ºöQׄ’Y»×BßlÔý™w ÚpªA$kóó~üô4§Â>»=×Gxwpbw÷»@h«Ç}БTßë}x•Ô¨‹Ýx¦Ï-Ìú Îô•ÏÓnÌô­ 0wŸ|Ußê…ËXpz¿TP#¬\xu\p0œ‚ÐèI¸(\§×¢Ã¾5Néš[§Iæ7qCrwT…z§µ“Þw4 ‡ëgë ;£o8‚(Ag<ëmÔuF»ãTGƒóܪúÞ¿gd­€yþo©àúi7x6nqÞ$Rß‚œØ‚ Öœ‚fZ“˜Ö‘ÍP}cârŽýŽÆ6Y ; àÞ¡L¼ÛÖça•Òu¿×v£î>^”Õf­éùå²ÁGóûƒêƒâ;«‡l×íµûµ“Îç·ñÃj ^š[ȪŽÄ„Ð áI­S OÝ#š?.…7ÎIcìb’"ƒ0]¡ߘN7‰-ë™øå$(õ“tÁG²Ú؃†¡ùs‡¶ˆ`³~Ç_î,b‹ÐËI.S¼<ÿØê>(ÀC'vØ¢!ˆµ,z¬O 2BXvÛ›¶ñíÖæÔV !Ï_@ÛoÜ·ZÀè$4"(‹ÖãàOpÀ<¦ì9¨†4A =~ ¶ÆñÙ]:ü¥Ý]ºIä³Ð/gš &“ßEE¿\@fÔj’]>6xäÝácFm™Î Ö7âN¢yq_ zV25µ„HxéI©€•ªæ ¬ÁoÄo$ Ò´Çé÷w19µpL_nÄR¦×~C§¸Ê2Ùê0YŸ#ÿQËO=JÎ@¨»%¦¶Š{âoýý›`mýÛÃ"L%Ü?’<À) ‚©…yã-AvK%«Í꬇¬ß³†Ó!wküÕø¿ið4M°Ãj ‡jáçÁw "ÝÏ9ª˜*×ËÿzYnù9¡ …â}|Ù]&Îzý¬Ç®ÔzfÉŒŽþ|<ï\ZÔ°˜–Áð;YXU$`¦bîÆlØwÎÕØ²]Á™ÃYÃYÂÙÆÞéú}2ywµ÷´•>²Ê$ÞXïÔÍq¼—ÿj©w?º-H¾~\Ëî~\àéîEs¡ætô>¼m4&"¡èO 9Ït@† mé¸öÕŸ%ÊŠÚZa²µžÍ%$4áP»xôŠæ ²æ5¶„oÈe£RV‘*…0bI[—Š@&ç'A>ŽPÓ@›7‰ÒYæçLpROr¯‚;Á•ˆ,Ïõï€öÔÖ1Š.Z¤&¨`‰ëo8éW1Ëd’)AF0Ž—iX³C  Ùq"•àˆ„H 2¿Æyâ·ššX–&zMs»ç˜P؇Ӯ›R‘¶‹#Àb²Lu1XNAðd¿ò-”£{Ä<Ðd;ÈÝRzåÌôf“U*.EaRÐé0£–<’…ñ°Ý..Šðe[¥ÑޑӅ™zì¸6Šâ±úD­k‚¨#H„a溕šoÃṟÏÏò>m1½aìŸ]Ÿ[Çö¼)æŸW~ϮᤪùÎVbáÂÈëqF¼@±cÖ7 ag 1«LÝ™‘¢;é2ºíÊ-™5¾c€ø ™YE©™=å3Ô3¨Ï3è{IU¡GeÜS0”q¼)©Eª[Õ»d¸ÂD®`±”‘+¼Ì®Ž Ü·æáúýIU/@¡Bêî¿ Iâ"nMâ›úâ¤Ã}]›“C•”êË?'ØÈ%|ãzp‘÷<¿P†ëb÷ž\ÉÙMÂõ¶oHƲ5s|ý>ëf;ÁƒgÒ“é©uyþ 뮄Ö;“’­MkuiÚÃë4œV =Héû$ìÿÕÌ[lkh*a¢²ƒº#Ì)Öø³P±®ÉìFÔg!†þ¾Wmys†þuZ:Ë.šac›hÃ×Ýv€x`jGÜCã\ †Y²©P(ÈB,ÉNª¶Šˆ®Gÿ9<òWTX$(8XÐòî•IððŒÇVdL â¼`ƒS' x›¨S°ZZRrØHj‘”=Æ i*î'ØÆ0êNkFµó’ó»è õ»¬bÛy±¹6B»+jgˆBYͳðªdIüÚ ¯ù¶DÔeÞá‹øóf ¸ó¾g³v(¯ÒeO›n”/É j‰ €Ï‚[2Fâoˆ†%ùÜ{ ÓsÏñt\7Ï;ó1qU»ïŽmí×[êÓëÑÖG`è«·ôÏ;ú[1ï_‹ ç½Ô“Â2mx†ÇIZéa»¼X×O(ýïEp(TL§‰ù?–î…ºbÈ]2VÞ‘šÂg^21?ˆ\‹/”œÒMeêNÖfm ¤e1j@BN4 4ÀfR}éú¥¢Ð0¢ öVõCÒXûÎkMÌ;,Nî‹¥å™ÙUŒmJ@³’‚½–2¨jÄpXv.MæÛ%¢­Ö:å‡d `“›¹kYM%—‹²šŠœAQ޽ß×펽¾±>k’ž…„±¤d×|UÚÝwÜ–u¨²Y<¾\‰(+xÐÁ Öúq9€N4ý€œD¯AD‘Æiò)D N\<ÞÓ3y;&ü³LÌèØ©M6//l™¶¾YM¥ѽ:î‹õTí…Bbò™XúÂãZœÌܹóŽ­ hîà D¬›0UÊsç꽆s‘ e‰Äò°Æ`r·þäKžø8vøœßÖ`—»>ø9A^`…=1eÃyH0õ䨢PÄЩNñàBU ¾ÄăÇûÀÖ BèdÆc â#ZÖ=´³ôÛìš\ªl£ô³jmbÅg ¸K0“AÉ ×pNÏ!òvþ8mÅjÞU>Eô 
ªmªE¹%¹¥¾rvâÊʘ Ä5ª|sµó¡§Ç\|ú.>œ%ß|/¹¶8?Ãñƒ;Ç‚+×2 Ç•.ù/6%ã€sG sE<–™ Ê2DBÑH,W¤ásMñrgx0ùÊá–Ï:1žò4µÉÍP?$"5œ4—âãj“ÕI®.®¨ö›æl¯;Agr1kfÑËJ3$®¨¥1ÕýѼë| ØUøt³ªW‚8 *2c–Ùí œcl6t/Fp–¾QqcB#"mŸ]À|‚eJ ô(|Ä=ˆ§{$áډšÂ3›Ô7œ6€9Bò€­üðo:h•8À½,³Ô%Zcôé”ò|³ÅrG‡m÷ì»kq± 8Kv/xÇên€« ì£_×C É©š`õ÷ˆ™a±RÆ¿z[òTš!Æ€M½¥ÌºÂ¡ÃCÌlgË¿X È\¿ircÜè(¿i˜­uºPÈKc…GËM¥¥9#qã›]¢°‚O=±bëœDwbš_{UZ”??×ègò?÷úMíº©6#Tø2’¿WΚ¨èèx—ãÞÑFl™î&eB%Zƒ%UÀqØ›$ R–BX‘Âùqc XH]êÝ‘c2¼G E £ BFkÒ¹]?¡^\:–x>W8͉/­ÔGÕúdóód»i¾ÔLÍ#Z‚ûÕ½Ä줊°C   BÿÈÕŸWÜn¯W–·¸*ÊAÊ ¬U#SËÌ™):Q| "3ƒÈ"¬M†P, »G˜D¸DØDøD䃣ìæ‹é©¡î ~ò6ÁÝ"½Ä{ðšçvÉ6ÊvʶÊöÎÚã „ ʉqã-`/à.à,à1fý%»ß'æ ÈI’™ c!k&s&{&ƒq߉’ˆ FD<ÎÞæyÛ„‡QïÛ±-·EÀ¿©8O'é÷§ƒ¸e•&§H£7\99©ŽeeãûxÆÊõõùî°sö²¿ƒÐù€-`;ãbG‰<´·àÒXÇ[@qcòA¯íÏ$P­ :²§‚x];2^B}PLÔm¼—WÔìÚB]ÆìÄ$GÂÆªd ÕîNÍ_{êéÿ t xí>ð›’vs¯OswdzÄ(T[“çä•'×l5íˆÎdÓÎx8›R4U„]ù¹±ý}B­‡;7}…ªÍñùX›õ}ùÚßÔŒQ=`ǾQ‹x^ý…º<#Z’f“—¥~”x)qæ¾ã†–&‡´‚Za»„}k¯Ÿžð/k RqW½kD€€ŒgŽ{[Ã×¾² + }V*ê’7%#zö–ö]LI‹‹ÛyÝ(áK sÕùX&¹¥-ÿR~÷'u¹ÙÙîrýqÏV–¦;í -¦!v9«”ýúh"¨-µÞþÔSYqSÁr‘’ßù=®(ÅxuœW©ׂe@1mCDˆ#æõ-‰Ö'Ä„ìg.epëcÔ×ã„9w«A]- êrH¢åæGë #{dcËÎ 6cGEÃõû«¹”çÃ¿Ï ÂLK/-«æ*ÖèM‡{.üsÂrȵÃ+XŠK†ºûzÂè‘ó¦uï †–Íãù º>³ß;Ú©:¾Y—Ù$…çjcÂË#ê ƒ»ŠƒÚŠ·—À œpógÓJPöÜ®ó))=AØœh|LGÞ¿­ÊñèÙÛ)à•ç â‹qÚ'ädé6Ã1öi¯QeLbV@Ÿë¶É˜ŠY/\DHwE۔ăƫ^YöiŒ_ðI=ÌlãN¿Aì¼I®A-¶"Qá tŒÇâùüú¾ùÕbýþýþ0-äα$Ïú‚œ!‚3ºj;¬\Ãå†ÌþkéUP™WÌרקçå%úâQaÀ:žÛ×’x‡_•àÙ;D%±U÷“¯i²é–àkzN©q8¯GHRk‹î§|IÎmWìÄÕ©.UH•×{°=Ôj§.¿Ý}0dú¨+î{6§ºØñŽgOׄÛ–¥ës ­s×7öÞø)èë,ÏQ¦%Ø2–Ð0ÕŽ“JTXA¯¯AÉÏäŒeŽO¶ãªO‰dç³P»_ .N¦üuõiåÓbÔʼn_2¾Ák±¡¶Nl Ô!K¤p¼ÅM g¾„-Æâ)¯çíz;Š€|æúÃÍ("Z߸N²°gK¨³OÏÊ\-¨`Ÿ™=ŒÀebÕÖ5:+ñIժϷþËjhôŠïýø>w«ëâS c™¿ÖgC跆ܶ4š·«÷;l¹¢gVo™úáÀH3€”µKoÓdáWihúL¾ž¦h¹i/ñû€ÅÖ(¼¾ÌÁP7è{ý•¥R`Øs]ﬥËH71VMäK™r2l•¯¨{5Ï TÚ7˧íæ~”@ß&EÇ/HŒ¯ —=eîÇV ÚK'¼ud§æ…Dzç|ÀݺtfH Ñ&Oæ×ñ¾^Qõõé÷>M$òt_*hoÈ™¤ÍÒÂìɯ—Þù ,òTÅÞZàrŸ\@®ôQ¢ r¦0ÛŒµBlt)p¦Ï‚­ ­ 'FÕÏ ddNö:Èk³Íe&˜\$§f>ªržéL¶ óW+^…6Þiá¯Lä½Aæ[z“ÚG 6ulÜëxïrÑàhz»Ò£îô5žSqG¨óí…ðweû ÆàbÖû!ê¡;ßxíÒ­;í8yì7èÁÖ>ñôüßLOÌ€ïÏ£ì52.ÐIY¥Ñ¸|ùpNÒ:^QœHz—JCŒyRgêÀ—2C áÙJCšQ_¹Qе͘o'5£­0Î4–3¡e‘ògèv_-$5¾Êaô]¹QDH&µåú¨ ÒJC «»2CšF­æë::_™Š3{å\×XV¸Ò”[È…(C«.((Ž’KK¤Tö/ÊÝ6­S¼7£ŒK˜87+ 
äqK„/Véž8¿ö“7f?»/é\Ê›tXn“Ü`z^·ò|­HÏX”n*‰ÅÊ\êãCDu`i¤ŠÖSÁ<»„6Žèô*٫ñqg*³0/IK‰«÷x¹;´ß 8wžxž÷Éú²‘¼ÿj§|=ù|˜zGOÚ·[bx%7î@<¥,mZbeÅ7ìœaQU\=Kf‰KjÖ¯²R8ÏxÀYÙ¥²­i7×lð—¢›ÆÁ(Ë/hCE­Sñçpþ1ý£ì×½¡¼_2 9k?.TïÉ}™ŠsFë*lÝØÒ¿Ó÷Vî¤ÌÅ3Ao+'± 3yý<ó¶Ÿ•èaªÝ½9¯¶d†M ?­¼³¶wÄ ½WÔfAU…•x\BéjD¯x¨ÁðæQË `]¶—Î..(©½ª¦sý D“?dVÒv‰‹$0G«§Œò »ÅÁ¯9мWƒ¤¡s\ó~j21ò¡v„…c3|(£À} ÇI…¹¡:º¼|s,•éZ=8|t'eJúâðÊH–Ú)™Ì+ZæÌæGÀ½£l’Úáƒwǫȭì_¹U~ƒn¿Lù"ÐaÙÏt‘ŒéÚýX@#í<#Á/ñRÏòæÝ._fÏ¥•L{bõrjLJþG£s°¿øµ.luýÒ¿qÄ“‡ìØÎCKÒ‚ƒH²"ó¢‚Û éQÁÿ¾‡7†ÛUáÁ#{°’Ùóy¡ÌÓËëÉèÅ¢2y_Ìþ"ûJ7Õ^õŽ^™qóMpÂçô‰±wKûöŽüÞ{}Öì—üžÚíP¹Øßr©ŒÂU›¡+ZŒt€±µ2–å44 kXGLÀ„®Ö¥Lçà'¤¼\ÙNÏiv2îòÕ>tAíRN=/Kó ›Ñ§Õ˜çÆu{ËiJÎ¥8Ã! O/»Û Ü" ô^íòf’`w·4>`Dг’ªÛJzRù!r2i¡ «#¤]§\AK™‹ŒM4|–:ØÝ+‘¬×Jc¾Š_‚jò„BØ]ÂdŸÛXÄG_&!«µ¸µI‰‚RþÁíèt@ÃGߥ¿*ëk \õŠŠšRòÁô$`¡°GY89Ws"nzŠf<tqªê+à«Á]\BÇG`0LŒ)ù`*¡áëJ$Dâ¾*á `A«/È êêQÂOâfJ Þ e–j<—³š‚5¹ß AA%!=‘‹©ÑHñ°ü/­<»£{ÒÖº•ãyÓT UoUS1›#i ñFº†sâ¹™Š¾Œ‰º|~B2T‘ Ê‘‡;|ã£))G:SéIÊ ’¾^û÷…VÉ êIŠ*1Eñpûú»Õ)ºñiÚø<Ϲ¿¦leu̯=êuìxõâÌÛ³€Áã z׳˜h}Â&ï’P’“žëÿyƒÌlâãÔãtÆÔGÉÈS9uãìPOãQªe“VC+=ç8†]*OŽLPÿ (ŒŒ°éµû×”Ù­°Ðÿªne» >›Ìß´ïý±ánFXí Ý[„×ítoÜÚƒ%— Žö R~ Õe$3„“¤€'²IQJéäæÞÉÇ^i[þ8­WbX[áXÔÙäö¶‚…¥ÔÕ¯ªtíO9~®£ˆèFP¹Ô¦ZNЪM©â"ÑSº9êÝ9/Œ8Ö8©²‘]óPYe'«ÙpõަäTÕü«Kk-—bw ¿¥ÝKŒa,”©š!ÇãO2wņ#„bƾµ˜×/-l¨§ 9˜zäoËã@ÈU! 
^—kÀ‹!’>w¡:Ø]—:›eç:clçüçˆížÜnNÞ=y 9å6 ¢ky ¸‡ÅëywûýVÑ!:Á¾*o¾ïÚl®Ÿíþ­îÍî>êcg«ƒÝ˜Õb Ömàe¹°¸î$$p©wámûî3ç»·±oÁ×v’qí×¼æ…æÿjï/À¢ÚÞ?pTE@E¥a$¤c†aN)Eº†înPJ¤»%”)%¤»AB¤;î à‘ç|ë÷¿÷>÷¹gëì=³öZëíÏû®=›=Sl†lB˜Ê‡/¶3¶Ai4£·%ÞÝ[2àe‹võ ç\±a"ý¬x{º¹¹$óÇÞ<«ŸhÕMy‚ï7tJBÒøº‹1u‘¦_µ[X¨–в)ßäqÈÙcbÒcüÙmöÁ%¦à=&ÿ ÕLaŽ"Ààüw[ÌéŒ+›}Ïvîlç†ÅO–öaÇ9ÑÀdšk™öìÒ>'¾Zu µôeƒ~uŠùÂ^üé¥(Ùt^jXüË=e(‡ñwÉ,1|·‹òLÀ ë³ZÐ-W“ɽpƒí/ÇíyÔÔ°A¡ìšäË=° |ɸ¬ùKï£'6?/%›½“l¹óY„Ùz·ûE±Ê{YT¥£Ò”Mý–Fc•8aÈИõÁmúéÝôÕ;ŽíR×™»,M7–9îíÓ#_yØùÅõ’tÕŠ= eÔÛ¼8^ …œ¶ø°TF‚rSP/âS'†ü0‡ðú L]u®·ºÒøî¦€JÕîxŠš!ÒÚçÏ/г3”»WVÚƒÇÇ÷FaO=ú£@s%8%ÏèÕހ´˜R´^E·]…äo¨ìE¨Fîj¥IÏŒ|ŽPÀ{öreCÌåì(}Nñ[ö&dÊØ ¯ÒÙT´R†¸Z6É>ä!»ôVD&%»³>ÅfIP•d³½ø©p¬3uçgåiÜI}“´Ÿ­Uêì¶s&eïX?Lí)tÏw£ON:v£WÏ+­¬Û§0š,e’J€G¡¨úNаoU[u£)¬µ[ /¦1P~f”­ØÚEBlЖÒÁ?86‡‘¶e&ªtݵ—»¬q² íhÙóŠàÄùïYZ6±*1/¢ Œ4nÙ ?¾Ûº‰Vù\†8f(ªbX®¬c˜Þ//Í—ÐàcèÏTm—M”•Jì<âXÁ¥ÜsŽ·¾²{ÍU¨&«q5‰¶RÌT›•H Kœ~.ðRæö‰=Ë®uU$¡EÕ7Äÿö#ß[ê~nixH´ º¬»ó1•ÇTN ª¬eñ æìMŽXá)B\ Þ½º_¥¯Èy<'ÎdB‹ËëGwa¼ªldÊÙ¨?y¥ð †Bµy«[÷•âé÷fF¬™M¯ ’‚™7:hÝT´9<ú¸jß„u2ËÿLcÏp”h¯ëréP®óñç˜ÖàaPqj_ñG¿VŽÎÇÙdcM(oѶ}²Z^=~±x¡cQ>tÏoõ{‰\¨rËËÞÛªosÇTÑóñ¾È|Ï𱬢 yç/¢Ì÷@c´5½å{†j‹· mž9w¨Ö³0U®ðCÝ-i†«8Ÿ…ÇVêDñŠÄ|ßÛØi‘y Ó‰#i_Õ¶à¼cަ°×IÚ©ËK¢Õp)CËò°ØbÉ–ò8; £Tt¼†µÝv ƒÄýr…rö»+*hMÈÛûw?pOY*ä=v0w8™ÇFFÊjÖÖ -ÇLŸzÏÒ` ‘G3uc:ÜKYV<-×ÁlS¨Ü¸ñl^g«ÿÓëçýfÏ: Ïo“… ‹|Ä~£4Óy(vá­oódŽèæ&Œ>:½0ŸtÝã*©yÓ(üü#²e1÷ÝtÂÐMý µ‰ÅVµ© ší´FgÌìI¤‡ ‡Öꌑ,€iŒô²~;Ϭ¤lçòŒ#t–a{Ù¶WFˆ++±7­ÉF—8io.¸•énmºÅV0|UzóÍâ0¿²â¹¤'˜PãûNkͬ…¡ŒíWã î€ýôçtû¥ÄæŸïelcqçí·ž(´ÔÝ4«ž4”õï~{(Ѱa{rÓ1wŸö ×òyï;]:œ^¦yÆ geõ¶ªó½×;y(œe-=»÷Êý`Q¯—Ä"0BÑ¿%(5ï½ÑJ>\$|ÁyÛ¸|ÐYØYDþ«GÏ7Ÿêªô»·Š³}«fóW£¯¿""‰Ú_ZôÈà&Ù#Ú1z¬ÿÕIÜäS&‡$êš;ÑiNµ9mù ìHg‰ëå'X!±íýYî£11Epà`†#…š´­ïþÛŽxg\çÍ:2…ZômÓ‘pÊa£û^oH6¥(d*€€uÅt§®àÎ,IQ}ÁUµàý˜ÀÖÎúnÀj‘lݦTGçʆý`ï¸îÓ†49ÂŽ´¬Io×ý@ÉÃÛÔ/øÔHâ¨KÊ‚7Ľ sFc&uF§$)ä–Z½OARFB÷‚§a¯j²\vuoY¬2mÒVéfÇÏW ¦rA¶”&áV’ä8Ô%ËåV/GgìEG&‡Ètf•ïìX_³ ,Rlˆj–‰ŸÜèž‹ª¦LÊG/RZK[xv—ñ5P¶¥yÍØæ'\æ¨ñr}Õ|uUqå‹y†•Æe( ÛO©(ªj@A4Èœ‚C E+J*ªg¼Ü{#x¿š÷-[}ëAìT a­Ë&jß'pšjç;aòd¿î~Šng=èp’µ©CÒ#Igu"MX„[U<–Xð0(ð ˜CPd¯ÂìÝ0ï&§~à¦{¥ø$÷º#Ås—zbQ›½•‡A)*Á óTf…äs™+¤L^_΢|DÙãíÇ­B¬ÆC\äõjçÅ ¬â€Í¾I´ð%s¡v 0k½,’þK÷{¨ú()`¥2\÷Àæ`£D­ ”¼ü€À›:«ÎT ’ ˜Í ˜b ì¢=ÙCbUú¬„ɱn-kôËû 
¢¢¡Os}kQ’ºpyð2å–S²âŠÔP·kÞ'Ž’—làî’ϦÊJ.Ô —Q5ð0 M @¥6O x¯¤Õ`þDÄ+è6*ÅÝÀ'È?êÉCê°R¼¡Vh¨ÞAÑOàVøÀ,Zòl8àÕªxé"Üz“Ô’*B¼!³ÞL_Ÿ:)98ð¶è’’¸755©{pÓ—¤äˆà;„” €ÙXÇú 9÷ éÏýX‹hjµÑ{×É*ºŸl 7É ýA±Zº)F7–°ŒXcÂCðÚ"²ž.ê¡úkaÌHtP¹Aƒw’´Ö( 1Êîé<õa×÷x<û™5Š¿,,°ásõ¤’ÖmŠ-t2lê†KȺlõní·:Üð´8(&„E&¨DâxkS…iêúÍøEð/¥J3ãû½¼»EŽ,D #WÀ!¥ ¡L ¼’"Í«%ãù¹þADÓ“'…Bo2aù”€`ŸKõ·d™•én¦iáRðS‹ züé"Å<®ú_Gº’l':,ýdÈ Ñ‡4ô]êmyd U›ñŠõz³Û¼"l°`·+m·osˆ¢5Vñ> ¶»s9€Çå:é½>Ó„/èQy¯¯S…7}™ìVq©”F»¸¬:¼³7KáL¶²5ׂi5Fé_Š#ô7ÂÔçfð ïmNŠOX‰à[7)˜ãC©^]»oáß;¼G•xÛ¦‡IG7|Ç>MýÕµü|-„…åwóúóµÂ'7Úòé[L,«m{ 3±751¥?¾íö×MÝà?ý€-€ž°2± ÌlwC7üÔÙºAà³7tŸüËû¹Ïÿr-ëÅ_®eaþ·÷s‰ûE1ayüÜG#)ò¼k÷X]ÎÛ?·Aè[‰A!·‹K):éï¬V¹Ö!;wyÝn›•¿‚s¾fj¾ðÖ‰uúþzûhíó…2èN#3m*É'{¼ÙÒÒ§¡iC¬{ó÷£ë6Ø+ÓJ I¦ô;ý(¨+kGêî+7uú-}ý¶ôw|àjJ­¬]?Œžs4Ic\잎Üpœé\gâŽ}ú­™Ä¯ ='E²yÈZãg\¼m¤rÇWáç–øÿѨ²ó«%§B }¾¾tWù½˜6>aߟ”Ð}¦ÍE’û–¯L[г|ÃÉéÙ½¬ØÁÝmßLëÂ!±ùŸòΨNºûwËt c Õ³‘;+:+…^ˆ9Û£vVÖª†Þå ?œXÇa“ß•SZÇ$;RœxÃÝb2«öŽàÀЗñðÎQ´û!Æüé ƒÏÙ3ïÆüýôíݰÈÒí b·Ã”%Á#´QéнÞÈuÛbÚ„ÊŒ|Xh Ï||ÑQ~òì0Õ¦‹„g‹ˆíêÑIDíu…Ý}”R‹Ž»áÐÿòtÿ#þËÛÅY~ß.ÎÊw»ÿý…öÿñqûù…ßîe½ðÛ½‹¿Ý ù÷¿Ý f‚0Ÿ ®…r{ÇgÝl[\ÓC–­1ÓÔסÈÚc þk–||—Ȱ´üãÜ&=”ݛN”%7Z11+^‚üõå!ÂÂ’s¯ eUß$ʪڿx>9òs`/=}±èç“ó>W¥Ÿ×ôB'ü_j ×þá~l>”ü[æô•‡Ûb¹èå„®ðõõ§*î /i¹HÈ% MÔJv7ù îSß‹ Éèc~21¬·º—ðŽ[ÿ:˜…Óf˜›/oé%Wj>OU-y'¸ƒ‡r?=_|XÑ«æ¾W_™ÿ>+ÆeKÍ—‡ÉW¡o]3°YDçÜ `©C²/)€YëaÍùÇyK©š?W’ÛzHòn°QÁ3€´JÆÝØ!º¦0^Ãx¼”ž`_½òJ¨K”½¼JY)“µ´fÖâyYEñ¶›ˆW.7µt`dhl@DHLPT˜ßáÑ&!ïÊBÿ›ñ™«Žob _ŽÍˆ( ?]¥Æ¢½¿™Î4#²Ž!QÇŒÿEí>„ݯ܎9ÒÕnùQ£Ë«%AŒük‹_Xƒ}”õ€‘®özAîKŸ1…z¸yÃâI„[¿+%îÅJ¬3³c‚¶±ý¤‘¨‹8IX»n{XÝB$<Šùx³íVVÈ6 (ë¬ûN–ÅîN/M}b=¥‰fîÍ|¸’´:s£ýöpÕO¬·b’I¥myòØÆ>íäÁn…™3ƒí˜R4ï¹%«£s—™¸ vÚ­gX­H6-‘¬wñ¼Õ‘ÞMиMÃ’¡z× Ëõ<\‹…Ø¢½F°8ÝYêßvÀ$`ëת-’Êî;è¯sZÝe÷qº9ŸRY³ÿl_uìý#…õºvœlNWëmèþ+)À¾ÙäÔLM{éÓv­øz\]äOwË]TÜ%X ñýz÷}öi£]´£Ñî8GÙ58äÆ>GúQm0¯^È©™C}KÈÛš˜ÄÅ=ÿðÙ|›¹ìʨû‘z²WY-g=ÑäÈ$ðˆ­í¡Ë¡ái«k›k?Û å]×ÌoVÜ+SÙ®Û¾:KÀhGïL{æ&M’¨xÛòÆé-G½'ƒpoÒMžÁ¸D^+¬ƒ‡p=€°{ÒÛјTaN89^q"˜ ²É âujižXWýþfoßzÿçö–Û û¢†+nÉGJ ÛÉé€Qá8¯Cb­º«i‰4ñ3Í€»~‰Óím{¥3/Öui¸ÛެÊÐÕ’¬Æè·‰÷-÷i÷ŸïSgÞÎ%gŽ‘©N7NÆž±ºQ5„”T‰RYWÿ|òŪÜ*”ÒLdèN¶ëóðùibÍ!á2wÉ–; ûŽNx$È-w®å¹¹WkVcW¿©Æ—*Ò™½#_&8”|PÐÀ-(•LÑA‚wµ¿û¹ fðöu‡«“‡€åïžvgO¼8Ý—7¸êô™~ÞVvÏXQv¤¨À÷\¦·ÞªÀä~MR%Y…¦~SÀ­D7À"ןÅ«_€5ˆÄßuù¹[™èȃºýð’¨f 
CZ&E>¥Þ†î`+ËùqžþÒâC†CCÔKšK,fÍÍ’ãÏ·çwövÊ÷y·ªáöÝ~!`§Š{ωQ‹AïumšWï¤~v7GêN^ú­´×àqÙñ•Ê‚E6bÝê¦^¯ì`˜¤{¯]G]$ùy¯LàÉÆ";Ê_+¯”ÆÃ¹\®ÇÙÔ,RV£ap§‚d¹t%/ýÚšq ÐùÊÍïÜØûŒ 3ôÊ~åûªÙ϶o®$Qò,ZtWЭüƒ)&ïcŒa®hÎðûoÌ;-ñI~ï´Ã¯¸Ž3qsF||üÞ¸x:Ëîµ±·éï k:.&SQmà#åã·×…×E&)Ë*o­wb \'|qû… ‰°ìþ.ž#_I/â!ψ)¿Ì|w×ॽöøZ!S`+Å;9ò–ÔΜì®qׄyu\pÞ½¾Æ=vt›þíÔg Wb×½¯tªÞî`%ORMê/ôͲ·Q Ì0lcîr3¹’e+—Ͻ+µÞ3óI`—þ»“{ÞŠ\1¦Í,õàDµšŸ13*eD›c™G1w_³$ÀÖ©7¿ßQ¾´7–ÿQ¨ïþ6Z%–SZz׈ñU¯CУ< Ù¶×N~\‚jêô˵or¥±¦Þª¸“EZ0$-óòE:ѦûÌ Þœè§ø0%Ì*'.´çè3^íŒmæ?Èv]›0Lˆ3ˆ_³Ð+Ge³£%v |Ð61óᣠv¬è'²ÃÀuECÜ)¸ânI=ü6rÓ!á6ãËŠ ¹Ä¸0´÷U¨.HžR×õzD—F]Fú^é|'ß}Îð$2ໞ.°}ãÕCfFö·’$uÍD©ÔZ •WÕïéÈ×]³ßI§²@n»é¼*(ÚŸ,'Ë\PL%/Ïåf„P>¤‘qÒƒ) þ“aØ”_êvʈ‹g?fÞËkÐk›'ÏJ¿e÷V=˜$õŒq+­gkíÝËþ±'ï犞UµÙ^º•ó ¿$ËFŸ»â¦Gð¬èÌ{ùÈ:»†?œ¨™ÈÚrÔ}M•¸?iÄ8ž~Š!, }l?˜˜rÙÙYA9¾0ùÐɸTc8^. §wR‹ä£¼¦§:s†KÛ­k¾ÝÀ×à4©»šÁ_­„^BXIè |°xʇ6™ü,s×8EtùñÏf•2Ò•Í̾¡O÷î½í_ôâ-Ï•+iÜ¥ÅɼôíÉò\Âw’-˜Hq£méjoèê<¹u‹Ö%©NL¡‚Šòy3VÄm½¢zÿK;R7Û l‰ýG¦¡p%ÇßõŸ-V×§cLºkŽô¨¡ñý"åF¾Ž]~mIxaÿLr­—.yjQì›qýã¹­F=~*GÉìw2>¨â¦«~4zÇa©T.:8'ˆóCcãiÝœLú¨jNð2q6Ù|öòš‘ 3ÓO¿sßÄžKuÂp ¼«^ò~ô ÷ÃÍìîƒ#,ÝG]<øÞ± òtxÂOuû¹éçÇ÷×߬â4·|ôuÝY}—ìОke ¥§§-¢PXHØŽ),³¼÷å¦`-”É»ÉâËav¹„Üç[—®[ÕàÙñ¡³Ç{ `³Þ„M ª„ëeÈõ†==Q —Ÿ69í 4:ʇê«þÕû°Â(÷¡ÌfHy>´—?(s a§øÓh…Ù,,Ù7–->,M"ë«õBZRÃmë®»¯fÔi°Éÿ¬n}UD6>MšFöSøðöNÙä+Ï½í šÃн¨Zßî(|V»LœO„Š9Ðl”¦1 wU k'¼3+ƯöêÆ·k*ö‰¦ŠRmzvI#É9XZ½Ë6ß =}óô}Ìn[´¬åýẢ¹ Q…Ÿ“-l9ØR¨AO\Yž­½’(¬ 3ó®#Hî"¨Ö÷ÇØdnîÿÜ+;žvU;3!²wåIå ›Ç*=C·ËƸ[ëË"¾ùåæ²³O+¶Ä h§Í¯j¬×Þ6¼ž; jDqéõƒFô×85V¥†M䇹o®÷^2åNÔÅz ýÌË‚Æ]O‚‰”Æ-ûXÈ0bXc«ºqŠ)5Ï ]@2ŸÑ³T4x Ïbþ29ŸEV¬Ýâ]÷²¾Ç;Ø›ÒE>#?ŽÏÇ®Aê\ɃћULÞ°+¬)Gß÷÷y¦ ‘±0³w]HîÈã! 
Ũ[Î6ïG©õ;ò¨”-+‚&n=ôꜽFžÒ· Øðê–—˜a¯‘Û÷LvNcî» @n2&¾G=‘77·\CVCV«qëpx)Y£íôÔt ²p ÖóŠ˜>Ïúͪ膚Z)‘zÙ)ÙʲuCºó ,CUÔ$oÐ's@‚›…Ùgá _Ô+¿n>2ôüÔuæ÷¨«‘/ËP­˜ïJËèná<&•ð²zi›ÑþÙéÊ,U,s Uxd(þ÷L'p~ŸÃó9¯æÆÐÑŠ¤¥1p˜ —®ùîîÖ²ýos¹Î•˜ƒ»¹:t2=Blfr\!›C©IDBôcjo÷•ý³£Ÿ•ÈßÂîõødÊn¹Jï;%ÀíñúàÉäb~ý[/ìîWý(¥3±Ùæ–çú(ú´ìÙíÓ¨rÚ§¦’[&£Ä£cÅrŽ•oAz,BibÖ)úÌÑ#ªû“ì×rk——Unˆ|QjÜFV%Vkãø7%è*&^rÝšÎÝ ”ï‘^ÆArûtêaù(½>c@¼v"à²wÃ]–í)TóÄ6[˜kÀZw¼Ã&û/ Zmm‹J EELÊÄšìFÑñþsS s-Ú_-(ßñ^¹òLVÅÀk)/ˆ•抓Ó?Ìå¡d³§´…ÕvD±Û÷ÄÉT­'aÏ”Pq(„ ÅOú,‹‘ŠG_v¦>¦¦»4b‰6ˆ‰.Ù©wEä`õSï4Ž`üGê4÷$<߈ ®“zë”Xòw++‹N>-œÔ9Ê\ÒãØNRSQí”-¿¦úÑ99žÖJ—?ÓgIRœf3 HxhRÊQÑ—‘õ¿/wµ{…ÚbÿUD'Ù‡A4ý”ΰ;É÷(=]hÞðâãÁ/›áFk.(T”{5ºQj²š¹¶DZŠÎ¨¤ºY¦i?¸æ5…½H¹)= Þ|·ù®¥Ñ(¬ä=GÇGÞhZ›»Ï‹UzËŠ‡Ëº÷Â?Ì\ôc2|EÖßÍo¾År_ßËö‰ŠŽa›ÝÕµG¨ù ^¦>zi“n†º·Ú•Ëhj+”|Óê· ɽëªÄ†Eo¹ÜÄ©b€‡§¶©OC^Ü¢ö¥PÓà]ËX¢³û«¡2}ášÞÅRÂçØ¨¢†#¼ÊKq>ëŒJtà¬D_µ¼é|@Ü=H–±­ò1ŽŽŠ˜3~R¢ µ[ÉŸÚçj2D8Aø\{(̼ãvóm•,c/C¯4ÖÃèmíè{Ê($Xê¨)îÃeÉÕŒ²ê„¢O~ë eETD9Iw7­¥Kf–Ô‘ÛÑ%÷iÐ÷ÕƒÔYú‰t<•B×ÌÕŒÉ-ÜNt‘Ë@¨HÃs¦u¦evÃLÃÐøPðŽËÚH“«*R­ï¶ÄQ숤u¡µ§ø€á+ɵOÁÛ_Ôo8_bVHßèMŠà{rG’ékXóFFvæ‚i݉2£hΆ8L Ø}*`ên2s(v$M :©iÏÀ}ÑÄêláôORBìïw}àôíÚŽ䚃RjfÐqÇ9ÄùV¶5dìã¡]±IõÞ¾ù° ‹¿Õ¨ñÚ³Ù]|’Ígý_sØ·ýØ’$U„‡`ÁwÕàÊG ¸¦Š4°8k0¡¡[»òª^HßÙÉbeB¬âVb“ÃGwl¡ Ånµ[s,"ßT}=ûmªí¿‰)úIL‹šâÑSŠ*6Úb”µ ;2.W¨½j·¨Ò€JÛmMm`ãˆ#AÌŒº­ £-‹§Ò™ÝšVð+/d<$*ŠR¯–÷0 Âz¤¼/ÍʼniÏÌ—qŽlnüaúe]nÊ<½M£P|úè&ï[d² yÝDËOåê¢Ë'g2¼“‹B":Ç­¶üs¾|g –2û,uåÎîºKÅUi­ewCiáί_›l¥±è8®õÙ2Lt\ÒmKí>‰X‹smö+ÈᤱE[’Jª–cŽ>q®º[ït:B¹îVJ•®/ŒŽUÙ+ÈÑIøµÅ`n¤ˆ-naIVX %Ò25à:*l,r;”“Ø´û}~ÐýJÜ"á2E£¶ùur÷P<; Ém1:¯WbˆeÑË^ÖÔ°•7¶iáeê,¿Nˆ{/¦æåKh{ŠEªaÈÇu¿`‹äOÑš¢½^úvk~!ÙøãuÁKú¯…‡š8;ÞÎÑÐJ]/Ó”BçÕy¾óEÒ:°Ü¶ïVOm_Eñ,Ë=àiu@¦m«É¨Åt½ŠÖꈶ`ˆ’)Éú½›Šæ"ëwLo{\ïjÝP¾7¬Z&9D¹æ$;Pì`ÈéùdðBì¶œ¢’2}Eˆ×Àí'‰¤âÀ–û–, š ÕÙ¢ÉgÍbbµ¶ $,¦¢¦øY÷üîP³Êµå >f™ŽðRxmyûöÛVÏSb„½ÏLËØ–ü¬¤ŒÙ3\¹rîöŒ])j4¸¥™-¼rÇöv±ƒì<–¯O3C»dž,÷ <´]¯Ø°™¤µÃé)¾ýj톦“ãhUýꣃ„u£­ïq„;";‰Ý·¶íy©ÕòÑø?nóAë?+Çó¶Ê3îÙTÌ‘C¾˜úQ>/®s/Ë?tkÌ‚³uO5«ñ¥ó÷Ý—QÛÖB}&¥qzù¸Vül À&ˆÆW»´\ß>°›—†Dþ4o¸@˜Ñ(6åg•Õ|÷0˜å“Owx€•”Í}¦¸¬X®UÚЊû+-_†½ö°ã$ïÜ–|]7M9¡¥:_\ŠC"<Ò$yÖp•÷žoÖgËÆ9¶Uº.vqÍwòëø{Þ.Šã åܼ’×È+PÖø–’üÑe‰îžêKc±Ø7ôµ-Èr›6ªÀöËÅh^ÔF@@qw¥òÖ€8í¨|%íjûüŠê¢%EHÚó¤5~ÇÚbQsy¦ƒ/ºSŸ)©Îx?_*Ä!ÕåJÞ¤w}„\K4þ&#M+šø5ËZ$/?|ETßK;M;lƒkÚ5»º‡w_¿ŠönÀgyg[15Ù÷ÒÓþ 
ž˜¸É£Ì;®ýZÀî¨× šIGXTvÊzš:@^Œ\j²P_žHrˆÝ¹ù¼XÏï¿Ò©j’÷rrƒ³[£ÁáÖŽy,èù¤¸"×;ãŽ%‘}ò<µ ÜfÞ,¯ïvî qVoÂ'qÞNZ ¿À%ª5ª ¦åWLüÒ:Øß„Â)>V‘Ç»ûJ à÷w™§"%‰·¾÷p»HÿÒ&‚Y¬_Nںט›ÞžÒ“–üö¦éÑìé^ð7òM™]S!|—ÐjßßQº7*­r/õ¾zf*}|ÿ1Ú{â¬5XrSøÐA ÷ᛯqä-÷ÒXm.‹Éb`†˜°‚ôçKϸŠù™_äH…M¸>|râÄÙ¨e%ûrĽ ¤xæB­JÅrƒóJ˜®’°`Ð.F‹•¿>Æ#ýe0ÞTM- i¹¨•FòÄk¡°·äo‚ ÄÊ&ÕÉ ŒöŒëŒÊš_=™âlçº}y_RÖ“[ÃFüµ åÚž¾X“ItÆZ §‡v“°'‘Ô¬§ÉDéŸFË?~ª|“uç1r\ÏÏ¢Ú2Å¢z#Ï»[/çid”åÔžÿÈKU ¨K[ú”;³6´ú"àè8ÛåÀQƉî«yŠÁ\òœˆØðØc0? ½¨òU¿èAËTá×¥NÎÙŠu‘þÝŒmÁ惂ý›û±&£÷;ï7ÒÖ†Ñÿ”—Px¬\&$”Ã_uß(ËGeŠßnÄ«´áø°VB²Ë­3ä±Ü[¼!bºçaüæ‘Æ×E‚µßnO&ï“NŠÏwÂò¤Ã0o c•h¶U:á}Õ 5¸ÜØßI`³Oaj~eWäó¾­#àmæ›mšvåÉ";)×d*=ÚØ'Tk&àk…}é Ôãßêw?Õ/¥q{/<·Ø^ÆT­“U'²g\ñòetÚ^±´n•õ/fò5~qùƒãÿP¾c‹g%µªugI¥pÅ[GNJ»«…$k»¼´OFj]A ~©”ñ|»ŸjC¬¦8kèOÕ-FÌE…Åæûúj,ˆ,3^sæªzïÕŒ[,¤ÈHúšÈ6)Qí?Èn×rzæ>Y…VñÕäS@Ãnòl—é¶V=™HJ–7´i®j_¥¿RÞs ÄÂÿ¸¿!§ÛÀ NãÃÞ—½”µÀ..ý rfßø©PבÆbÑíÑŸ“ê÷#b…t¼E#Õ[M·0á=U Òa%ÁŠSØ— }«pˆñ½m6 ÃOÄ•%FdÍrï†a5¶&Йnòªé%ÜñÔ—áÏ9nOª…‘Èo¢Kí}}©ôÑöK¨œý‹šPqÚ(Vn«gô®å†Õ#Ï_ÿàÛ«hT"9èav²Ñ8~®ù¨zËšæ%u\+Ž:I°Í£D”äîÓ¯_çßæ´¨Ã®MS±ûRM8æ_®Ì“m‘áhÊu]þ’AdÏüÀ©ûF(1à[×@G‰z°è$c˜eAnSÿ 5êcýÏ«6ŽXGf?ç•ýŒî&$œ%é’eÍ÷"­´A_ÍóÆ_û ÕïxoÏÖÎn&G8ä p–ˆÕZÔ¯P¼| ToS>‚^&E¦%†2ö¤9˜e¿y›­ö©`Ý÷TÍo·8}ßjîcé5I°òøöu}ïÓ«lî{ÞøùM@}ýXUá÷hkë"E¼ODÊ - Á Ÿh¯2Ûg‹üäkí;‹¥qlëË~ízZM›õ|ñ\™ZmD¤]×´’ÚÑoÔ¹U^çK”b4ÂÚçqI±Àâ²|‡ë»„|-¾²GfVI¬^îÙÇ%Z—L—)ÖWq¹b@®”ƒêC÷ gwG:¬(¹eÑäžÜ/_žÍ÷ .u¥ kãŽ:cR•k³QDw8å/t†îGM… „Ýäó³ÉÐ Í{òm~ÝDÔ•‘]T@í…òsB/ÐN» §´N öv1;}3`†éGýÒ<†S/) ˆI֮;(D¼ËHèS¶ÛïGªfT³ÚÒÓóü§±ñ°HàÒçHã¹ÃMµ5o®Í :âùç39uTÆœß?9^Ur÷~EÍÑ2©ýÆ(ëÄá*ýQ™'›ŠìøÇ’Ì¥x m|è¾Ü×ov(S&鯿Tñ—pÍšê»Ý,‘ËS©‹uðz¢MÄ]{`gŸ>M¼hR!LÉËúÚšw»–$ÅÛé­àR†¹òGñóªÏÃÝ4…èn|yëTW&…:ø…‹/N_MÌÑy‰çÔõöü>çXYB7t‰î¦1MI¥®±œÁ×”°;凓]üŽÍ‹Q÷TjQ+HºëC­nªÍ~¹±×6ø­l0%ëmVöð W3mGÔPÔn+”ZP’L"ÄË&Nþ¤ÒæmÓ¾XX:ŒG$vë‹Õ˜,lf‹¶\Êcº†¡´šïø„H—X);ì°–kÔª©ÅœÐ²ñÈw%x­MPŽAw ”½eÒy{òåÓêº ;]Ÿ×͉ጤ:æó8ªZÍ2˜>Š9oƒrž*]š“Á–E5Á ñŸ•Q³É²øFðèl3G³‘­–¶’:ûUõNX¹zd°v\¶ƒ¹?ìõ,™ËÅì\õ “uü¹d¸›Áy¿C³:«û°Û´Þ‘~áÎ@–@é`ÅãÁÄžv„](ô!¢k”÷xk®hfè1 ;pR9¿zªò¹öšpB³¾>ÓK.|&ü›š+9—ã€ê5_WVÂ3,¥_s¬Èäº~bž¢±l!ÞÔ+$AÁHÑ#x½hÚÅÉÄðØ™¥@§“‡<@òÅŒº`c*ÁŒ(vIP©‰«ÐvLàe^Œæ¦ÎÇa͉½^Ñ^C¬¥H¥Æ_^rF“Ï ¸zW‡”¶ÿœš»gci‚ux™2Í 
:»õâã”òà´F~*LFÔØÎÅ€¥±'Ã…_[˜ëLêœPå~öÍô±M±M`ù׌ûí`ŒŒmd-¬>VC¯²ZþA&lŽ~¸§mÜlÑ·a¼›l®žðÉvŽÉèŽÍc¥…žt˜XJF>Xƒ¢s¾s¼“a-³xeu^l½£¢Iç@š;š¹]¼Å|§)Ú‘Ñiñ ÷#ä]Ž+#&4M·õÔÛTój½¡„=Ý{”m/D{|N)ˆý+€ëéw§ØéòOdvŒKüIcøåíŸYX¾4ÝEB1ø’HÝðÕ]Ù`² ©Òô+ù9I¹?·Ìoûy‡eÜć~ïjÝFèÙóˆ"+xîØ@Ív†è[Û¾W|÷ ÉC¥XXO —TDËi‰ Vƒ„‡ëZ¼ Élv³êˆØT ˆ×i,™0)W“Ö¾ÑSLA§h¿nôâÏ\Uûî?[œÜ©f¿æhýôñ³Tš¼ô‚G²ž 0ùÔðÔp݈ççéLgÖ S FÀו’éG±Yïó~.jëÒÐúT·þ4H=Ðuáf^•¾Ué“…{¨Ù¶ëÐ×5/µ¸·[¾è1Cî¶YŠâ3+ú|ÙÖQVŒKUeBSÇÀQƒHò“¨>ÿ 6_¨a»ôòµ™ÎEªò{.øÉ·ŠÑwjúb¬Ó:Ý&¹enYs8J…ƒ3Í/¯lÚËZ• Ùp‡‰]¹œâÞ·@`¬N©¿7¯D æ˜p˜jˆJÆm_jÙnNq¶›èØÉ ÂŒ½ý¶é¨™Þ¯›{c“’Ô¹V[²kW˜x|Š‚1ƼÿÊËUN¾È=žŸÉc³žÒ(Ë…e—_ÿ@—ðmÄšäÙ³{õR”ùéÃΧxLõʪµ|ï¸45k—|âô7½Ÿ£KƲ®ë6²ÀøðìñÀR+޵¨x‘è(Š(;¾™O°ÊfÄÌ€ô»Ú‚oº#ïáÄE¦"íé&d~ËMt 5ABux-C­£¦lÀÇ\ðu¡mDʆ ÙæÞ†-*–èSÉúï¿´OãídI!Œ}bØùJÿ[¾Ø+{ý)/ük#:ow²¦šS‰K4J¾I]ÑY¯ ~ë­±o-ƒ뚎×g‘Äaµïï¯ré²®F5d£…΀Ù<4æoXÔ IëÒ¾Ø@ø[B¹°y`Ñ‹b{]§n2¹-ŽÀX,hˆ¡ºƒ˜ÉdWr.õzhÀSÇ9?FÅ2»öÕ ÚŸï(•ØŸïµ\»ÿ “¡1{Àê¨ù»Âý>4$½×W•Séœ,ëmÙ– û´±š±š#š\Æ‚Ê‚Ž‚Õ·ºî—Ǿص˪¥L²Ï1eU{·,»ß«ešëg™²*¶Më9NìíLSÛ¯¸šåët3 ûe¦H_ðG²±Å«ô{2Kl®×¿h0Qʸ´£.ÿàygjù’â&æm£K¥×ÉñW۩–œ6¼ìKc;]Ì‘~J^¶3úJ ³‚o+sêÑ]«4¢oòޱ¦ðDͤ°Â@Ò ¥z výCAð5EÚdÙÒY»|Œ_í«näk:hªp&iÆ]¿/Ãæ=žH)ÅÁq½{ëú°’5VšxNæà\Àp°–:`Øèc†àˆÙý掭~ÆÜTwƒ°B=2’ç9›?l[[#•p¡Fw÷úÞ-õYLÔoÙÊŽâ3ã܇Íi’`Œl*n¥ QÉ÷ï-o,˜ðǦeΈ÷Eó¡hîMúP®©o>riLC~EûÙ6¤¤G:*—\SDÖxÄzj­gÍfÐJ3ÁÐÁfSÓñ,F5âE-ÓxÏVfáˆz—.¾þƒ°ÁM–b•2J¼ŒQ³û> ßÞÚ lxÈGÆâ­ÅðxæYY0Ù8R@'úBÜ~\Žòšb‰ ·‚Z½™‹I¬ËhhjK{¥{yÌgrc×ÑV¢6†„AÍ.¾Pd¿V§Ñœg/²ÕÖÐþ]g2óS‰PåwRV(ëö%¶÷ m×;M‰ƒµƒu)⿊QÚ-Ô~„.\KËlÒkA›Îaô0©úA\ uµV–ÓÆg‡w'È%ZñN¾äòD1ßn`ÏøRóƒåAHÖçí5ã/ã)‹­#x;{qŒCŠ•«žJŠ|E²)âÖ«¥P³í—Ò~:Þ˜Ïù9½¥¾1`(†yÔ&`òb¿óh²½‚)']û›–Ú óûäGeñ¯“ÅÆŒ÷ Mæø]8$ßÍÏßz““$«é›3R_@Á÷2j‚lâyú–Ìh²j†/QÄ÷p¶ëÂÏ#XNôp]ÄÂx.Uý5û°Mk®u;çjÁû¤Ê‡æìï¾ÌL©+ÇRþP{%Ìú9’áÓ—#"Ç-#ÇÇ%“­©Æ¤vû9Øf$ÌÚbb3ý±Död‚mÒ* íª§wÓ³ÒK¼¿™iL{0¶Ò_~½ˆraÀèòf;ðÑSŠÉ drúkeþë|.?Í]ж²üÎxHßСˆDQ«¯ˆJ+s«¸-¤—ß]L¯ïgòa%‡¼49È'’\üsÆ¾ŠˆÆÝÉœç^Â+ߪX1MÖ6±ZoÝ~uÀ™‰ëHáeÎÆPW•пpc”0ŽÐçÅhFÝC3’¥ºù7GArŽ÷ˆ× Ó¦ÒZÛ6úÔ”] ìÎ]ºƒ{ïjóÞÛY¨Ê¹ºÍ¹‹a´ è÷]BCïÈ”xÿ¹oº,('4O~ðª.MóÉí62†±'ñu@ðA£KŒÓ»‡—¯Oª}l~5¢~+¼Mô¼)AsY/¡I>q­­.Û ,óÍÝ8ËH“È'áìžf ,ù3äÍbÉŽûï÷gŒƒ®“Puô1¾Yòí’tiª‹ÏiÖ–ß\qœV S&æØÛˆ‚ì/š£WãBM*gS£¦'µ%¿|غ%Ø*Ìå[æ;êWiØø ¹ j^84+ÖÃÙ}Ø8UyTÛ×.,sûqƒ¡ê 
«½]nUO`·çÖçØJµ{Á¯Y©Ügù>™„>üâ‰Î—Zú…sðÙ¼ùuŠ%jó¨‰%›9wÚòGºE)áôЦNŸ<5Šfô†Rû »9 ­[Ø•Ý(¼n$lB¿ý˜­©Z†fßÛ_Ô¾Û”Ix¦'T¾ì+(»^>‹b³G¸üø ™úµÙÒTe%/b¶»¹×ªªªøÉ`,Eò†ðøS5ŸèÝ­4ÓEÃÌ–Ç“jeRZ¹ô;Ý/<#|êÈ ÍØ¾¿“Euù@žº0$08Cå[é#ƒOKöÕpÛüUÏ,FZÁ'Z_±…‚%ÅÑ@7ÎOœ+Ó)–µŒ„(´Ì_¥9›æcr&(ÚÚíö’+¡Hº ´ëÄ:bî­PÔÞY.¹nËØâ41'(ëé°CJçfݼùñႱĥgÞYø©\š¿ôÃ/U+†"CÑãqôΟt ¦<>¼øxÇ2T›wµ-'ÕÓŒG˜5šU]£d(«S{fŽêüi/àð“KZŠ€“"úŠŠ†ùÌ-íXØ&WÍ‹ªoƒzè:8 ST£ú UæËbFß[Egh¾±|óüÍ´Mo¾>Qb«þÔ‘8M2—Âôm}IǦkZzÞÌdŸB>é4%Õõë‹× °wo³XTG\ T™çÙ¨×Ý][µ½QIxc%B ËÚ‡èÌÁ5½…˜.Un­û31ÌӸɮKöξã^e/Ó7™~„¾„o3낸Ù;3þä^kº¾:åµ7€š›*¨H†)àD¨ïº\ñ£ñ ~¹Ù¡¡(ªÙÕmÑå.«ú×\R¢è(\{ýµåõŠ[–u]§òoÚáÄU§è|`^si:öSÁQ|¥ïX9$½FGI2¿Æ²­Ñ›8KaL¦K^†á…Öç§éo/Œžtûûý/k..Ë®Õ1ì™IqbÆ÷Ì‹¶Ë꽸,ñúy®­D¾ýÞ¸btï~ìB¼\wßuÚ¼hz¢wšc×ÂÇú8U#XQúÎÕXÊ«†}_í“A!¯7õ)›RèÕ${¢Ù±Si~XI©éÃyȾl?ýv›ƒŒŒëçkÏáél‚Ò«¦ÒÎýn}Ï–ÂË>*Q=²Éåeh›¿qb씄ªÌæ­“öš¦CZ”êü¦%a¶ïºæE“m«§¤ªÇ/-’æþÖ~“cLùþ½beGýûÍ¥r>D®{€9˜¬y(¥÷ƒ¡¼œ³*¼×ß›iwomì!Y¥/iÌ~0æ!«iE‹dÅ[ÇmÕɦRÉ£eÁÝóªmvÓýa!l]Ézn5okˆMB]äÐLo|ÑF>¦jáÃ|Öôh~7TÅ–âÁd¾Ê£á”±©_ÔŒêèJ™M†«FC†“ä.3âqÓ—à]ŽÊ–‡¯³í{+v«Éöze”A¥é’lã«••ÎkR}s„öžÎÀ'q¥‰e©_Ó‘® Ï÷_J{’z×ÎןôÛ€ãâÚf¾c¬g¨^Áë‰Yòä»,k`KdT‚€m­±3öúã¤VØ8UÑý[+Ú‘îO•‚ê†gu[–Öë­:XVýÍ&'Û×Èl¥ç-(Ù>Ç| ™í{žUPZöܽ| N|oÁSÜ3Sx±¦§wA(é«¢~ÉÈ‹ý°EÓ}®7ªHVÄoˆ£($<ü#¼çɱêŠ9u4¹1Ëhh¢9µoË&k>‘O¾ÏálÆ]ަó„4U‰¶úóβy×ÃÙmÇ×b/¨c'XMW!KD1¾îÛº°¢¹›Éí `öÜg €Ñx߯¥òI–]ÿE¦kõ†ÆÀô£ÉÜ»Åa·à¡èÈò'‰±j¥ºªØ^"òâ~µÈƒ]!Ÿèkܤºfô#Þ?kÅ[U·µæË{M+×X_[$â,sUø]+m8bîý˜Ò îíöZ’g¤T®ªLR‘‹ÏÞR²¦©˜´2hºt¹¢W U¤s¿÷RÈÕýŸÞ"ã,ŠŒÓ=«¬ôc<* ¼Q±ÃE©Î¯¿¬ÛOU òfó\åê²Pä¸òŒŽ×#ÍÙõKg%ïø –9 áEo¹öÞ™ù åR§¤ØõYÏ òœŠ"‹2u$7pPR^½IvÓ RïöEññ×ÔøšÏâÃTÝ:¥Š_ùø_Àc}Þ,½Œ¬1]ò][åõÃ4‡¯ ì©%§3jÜ(ýFÐÅàö¹¤øŠÄØ6Íkßµ—E$ÖnoEnrÒSZÚ *ÕöÈLŠ ý?,NÇöÎ ÚLJÉß!G+À¿¿†öJ •±]ú*EéçkS( ¤v9=Ï‚´ê«ÄÄÚùÙŠÂÅ;¾Ì&KE ßY1È&$—ˆÓñÆ|-ùº’âg _õŠX¢$Ùì¸ö¸¢ù›mòö6ì©–œú뾇æ²ôÎQ‚ššFå˜ZÔ…Â#xPa‰Mùe“EKšY¨>täqu2ækÕømÍWO›ïHL­VQgÜ¡[0Ré¶?kÊyex9(BÙKõ:å¬å‚>©(—¢½k¡Ö¦…åì´â‡Ðƒ»GdÎÔD{½ ‰8¤7X¶^bŠ¿ àØm%«âGxÒÔ¹šîó_ ¯+qYጺ?®¦Õ£L¶ü®€Þ*{È8ƒºlDI1cR?´Oærë³Ø÷ˆ‹úöÐ0VCÑ6´ž%F½[ýËF²krþ/ꑌ .UÜø.ðÙþêÜRü„×u=ÎÉÔÄÍ7ÄÚ´ýGv2â[ÓÅÜe^­Ó¤—â|³½ìH1z·üÒâ’ëá'u¢§fœ"ä¦f?W ‘ƒÌÞ èßþŠSú¾EñN…=aïQ¥ÌQÁ˜E˜PÞ j­ZdjT,˜|ÔòDwÇëGkÏs£¬M áØ §éŒl1•ÖRÚ¡º²è-¨ZîP VØ­8bî8ifš«¡í$WÌõ‰ÑUg9Ä‚oQ)µ¨ùù+’ÀLßJ+×YÖthd¡YêQ¦3ù 
º#OVAÞDùþ\F^€qŒò™ü›'"¥hbzýïªÙ%daœŠ÷Üo¦+(½«jæ]äºËšÑÜëÓðeíÖÝÚGƒÓ™¨œShšŒÑïNÝÿhð vÎð™<Òòƒ»÷ÚÑ1îö–þøIC—!Ëën\å6ß®µ‡­y¤ñ4áÀÆh2ÚDcÄW²ÄÔtu-xîÁÃÝ©€Ö*WdªÞ’89qÉÐfy¤ø[¨4MdQ{ü(o‚—epíÆÞ2ãò‘k-÷= &–$u¥ø¦&™ƒ†ä\.Hƒ~`×(ƒ»=“RÝ–f[„ht¥Aßß8”uY9R™Î´Ü[$[1úŽi²$zåð›*bŸ’;Ä„¿MnT'Jy6àüŽ©†¾Pˆ3x°£œ¦)IdVµ­ ¹Uî¼{%*}2ÍS&\xe–½ð£ÓÒØ@«$žQ‹¤3¯ûË©ÙL}¼ÓA‡Ùh5Ž’À¾ë?Þ¢gVs‚¢ÑmÚ7ñà¬.‚hp´çe ª›†¼ô)•ÁÕLð¢P¶íhJÁÂ%kAèZ‹jÏ`‡7Q(ŸâÕo ¸ó¢0séš=H‘L•vÉ‘c?%%î‘P½O•©½Nâ ;Œxì”o¤qC&<”1íãý§´ÎÕftÇGy÷Ùb¿$A="áû]>øRdÉÌ„±‚éîU–ÒîiJÆ®`=þ˜K¶HÁž 7ðDe/;1ð„d–Á$í5AAí|é îµV¼ý„4'Ä…(–£ü,dÒ]BB‘ji}¸Âäiâa¼Þ‹,I©ˆôó^÷õ›m(I³JkѬ ¾=øŠÕ³øQÖ÷}"1+™E=¹ª™Š$Òg_¥áí~ÛnQ=”&™0¶|ÐdØî¶š†«M=ï×ío“8ïóv³2­=å~¼,îTWó”>9lá¬ìsô}é›5aÒ4¦ eÓ5}Í>›7ÛbÇÝãåDU=Dž7¼…~ÝB™è€ðZ}|öºǸ·Q§ÀÂhè#M9Ô§ÑXÏ}Þ>3Õ]:ç²N'ë–„,¶Nà'¬N£àÛ/É4õO£D I«TÃì0æ|)mài˜ÐÞƒ0™)Å~yì‘ôÍîÏSB@¹<âÙ2%õýîÌ:Ü mÖyVÕ¤o¾ò7c¬Ã¾ù¡ÖâŽ|z>Ó7•ƒâjäÏjtà* Syâwíl}l×b­þ¢àkÛá1õ\a”;ùe¡±æ‹¯µÄô~Ò¥J)’hÞ©ŒôÞ|áû¬Ú¿v×DFÌ7å€o£Á`IŒOb.W õiƒ»†VnB•BDàÌl–B}Ì =Ðü"ùY6•ëÁ®7…=âP‚iœ ŽgF6pZIŸ èóMÀó¦e˜ï³ŸêÆwTès* §$±jh~ÐJ:~ª©¦ÁTê«0”ÓN¾9b%üAúÑ˺5(®ò>Ø×°U~Ñ 6ÅŠ³Æ¯9rs°Þ Ž‚žÕ;Iöá Å|­,݆¶3"¿â¥tâS"ץܻ¥x‡3>ÂÏMu™ó#g›ÛK~0öÞ´îüœ:Å[¹ +ˆL‡YFØ(Þ¤WìSUDn3pQÄóä£ëuá½6ç°ÍüqTJ¶ÎìåUpSW›XoWP}+ãû»K IËŽT)÷âÑFÂòV#¤r±FdXu_R„_¯´T˜%ÕÒ²zæÖ}°"þS7Ÿ£PR㎱œ>CÒQä]‚é¦t!âkh²¥[>¾ýl©ƒ«&~ çp{“rd­uÂ>¥Éåªòi²,õ(Z³|*íMÿdõ5ìíÛ Izÿðy]þÓGÃæ Àˆ%­Ý$¥áÖæ÷N8åB¶÷Säõø'J·'Àïîù‹~GKÅÓÌSM°H}¶¸SÊ\+k LÑÀß|²² lMЭq³ ÇØEZî]×Òëznü¨'ÎÀ`Ÿù~ëÑqÁGÌv•‹ÃŸuºZ¨d9cœÝ§siÞda[ Î˱E=)åZ®WXlÊÏ·\·xhRzM™YÍ;þñ~1ñÐïèƒæPôŒçÛ¹º•Pã6Ö©„M&ïÎC¥Úì¨^YÕ,ƒ.ÜÍÒ÷*!~æÍÏsÐòîú6ÖnšZštm%²R&;èåtÙ/ªíî1]Ž_#X'þYòÆ>ƒ³iÞ¯?Ü™Ä.Ï8z¸%¼7/sPùÍ1 g˘«6¼”ÚËUYî' íY¾©9òÓy(ñÃÔÏ,§¼ÒNè~÷›E”AÐ’¯ä7iºÛPI¸^{ß? 
ãij€b©!+ J ßK2(Ðæê’ðˆŒGÃág‘mú˜Oúy†ÿùQ\ÿ§£rm¬Æ=”Ú…±DëÅ„)Zÿýâ²t òVcCãyҒĬ¼ò¬¼„‡C=4E šBh >2ú®êé®V>ãÌO‹íD³ ]µüþÌŸ6ìƒ"…Ú-ÕþŽaøaÍ:û:8ÄÖƒ@¶YO©›e`šE–´3Q_Œ/F%¾zhßú¤8'/+#«ä¡fY×{™,ž¯«üÞ°Dòe°Œ<šmúÑÒöÍLÎ6ÔEî©ôÃDï“]‚rg}Ú·7SßÖ¢¢,xô<€æ¿§í¼ÒlQçð \”µ`¤gªÉ¡"—³a]÷P’{2Ää®H4F¶âdêë(|Ÿ@µ9‚g‚©¿Aà ¥~Ÿçý;仯Yà ÝV&1ó“Ù(ñ¤X4Ct÷OÕ´Jy?úžñãÒÈY{L†ÆvßÌbzQ’“’—p+ôr„™&NfRÂ3OÖV0Àó‡tL]¸ss®c3¯ðãbkïA3mqF;ñŸ× Óû g DR±ºÃ†™„½z ¶¦é _õø^Z·Ên ÁKìqc'pŽIðXê–¼…ÿÜeZÅŠé[eŸeëûg—rKª—3ÕýÍ ŸJÊA” ä¶>=Kð Žï¾nQ”{/Ƹ—·è&ÓQ-1.UíçT³šo¬Š£ƒWX”ø2^’¯~[ŽÄ2üü¥‚WyÒ_ÎÓÅ&kLHo¤m„ø s!«‡º?nIM°¼…=޵^7;^Ornž÷f{™w@‘Òð#•9;_—Rò-«—€ì3ZÏñ©~L±Ê3¯_{3·”jqEì'úåÑR^qæ±þ*ÃsÇvÍh­ˆæLšÞÞºÙU­²˜ßÓ~ŸÊÆ0½(£<(E 7 é¥oÅx$e‹ µ|Ó-)UÈ#iWU/CÍwgÁc‰Dóh¤x{ÈÉýſٯ÷åiÚ½øU¢5ü.é—H“ÜQÚXÕ;YÕ›ÿÿ²&_n1uÌj{àWJ\`Y®‡ÊQzy*Alú’Ô2R`ç KÕ¤à7òk¤æ‚[éÄ^ŽŠ¯¡ã?FÖ.a`!!¾ô“Øê9Ú@ìæþ'¹¨œæw^ð0_šª^J|³ú §U\#ø‘úFI7!†[°U Ÿ'že¼#‰û)GJ ¡‡²âÎi©GÄbI5“}ŸDô€^}‡DŽØ6gW{¨=É"ÊkÝƪ.òÀ­z¬dû0B!mbOGìdÏûônëõ.G½^üZzX·Ã÷\‘„pTèv~ðÈÆ¢‘§¦»<”@6Ê~µzg•ÔþòW~œŸ<‘è³7´p)xw9’‰yŠ@vÁÉ’A¤,D®»·íß~c±÷ ¼ñõ±Ú©ø&¦žÞj×%ï«·ãÈ$(ý1#ݨ)~7SPFƹ2Ù[a’“|³ž„•݉sz1Ã&àËÁ,õøa?p9:Ú.|ððŽdN`â$Õóâýàýè}ÎèhÒéú¯«ð…6ŸÓë«b#ù\¢õ¸ë09Ð8Ö÷\®(®t¼Ú#Ð+Ø&d·L'YDç[.ô鎅Iá•Ülöôä!™ë¿Ô*dg"ÄQöÁsÅÎÄmúlÇMILŽòx®ËIDLð] ,Iè¡:bW£§›´õ™8w9‰˜ ¾‹…u! 
qª#v5ñkY¨>ÒH¸`Ä= …Éàb‡+tS½ë’ÐÍ©Kü—Ñ‘™îx$\ò¸‚‹¦N…~ú9wDZC'#aZ½ŒØá q©O" qÕø_æGAÇcÒò¸äŽË¬^~úvý†ºü¥šj&:—“÷¸î+i¨ÄÞæW˜$.Õ^ÿÌVuz¼ŒK‰{z|ôóò’ü·K?pã ‘‚5ÑO.> 'ÇÇ*2”Ë9 ˜­(v¬Õüæž—mqÍà»ð1 ÐÊÜKÏñM$!nõuø®fÕÇé3bçÑŽ‰{àÒÞÊ´{) ±óƒ­_"QGìjf4_ "a~ÝŽ»ñã(<©S&s£6Äè ݰ —欃Ut $‰®üÖ"löæÌ½PU?o…7OÐrò ( Vx4rJ†iÔ¹8Çô]u!{cöÄļªÖ.sH?I&_Ñ1¯(VbäbDÛûŠM 4œ2´1Ü4Œí½¶¦d`ð#Jæ-ˆØÿ©ì7àèéÀݯùÆncƒ4‚XïPi(à=Dm~£¾) ÉèÆLâ¡,ͼ-u¿@2#8¹[U^î]G›€'‹~‹¾㵬(™¹üÙë‹æs´Ï§e†;Uûâ´ŸŽL-Úîý\*ÆÜúþðQiXˆ¯íì<Íe¼¡¬¹¹ù;/"+Sg&îÐZwPàß’c·ë‹ëÊ‘ §ìÇ ÿ¸:dv¬gD©I2Vór–'“^„Ësuâ>”¾Ñû®Éã?FÕ¬»R¡¹yŸéЧ~é.Å#¸ç[YÓÈŠ{·ûΙ;<Øk/?_"н|etëû;Sº\e‚aR.”vJ³)âË\›ý nÎc”qœ¨}ëýTŸ­GÙ˜¦ *ó”}T)è^ ê°!U˜è’\^ônE¹üxôéCd>vÔwü6—7„ÞmñQ4ëN\:à*u1rǵ×§y3E„ÄoôŽÏUµ7;¡Ç|ÓûEófüv\[Ç¡”ëµÏbDr/ßä:ì>L‡Ç½!%YÂTE‚Žc&|øø3žá½iŽi?Þ&ÕwUw›×®Ç Ë!ÍX{kê*@ËE˜•Ô;>a8Žhšë{<þDØ!ÿkj|5æw¹®ÒdÔD/œþ´& ”Òøžç†#?½åäâÛØJp~êࢹoð >FVôPL5hz²ŒƒÁТr?{ˆÒ–VEÔ³#;Z*Ñ;1Ÿæ–à“×¾?£¶´mi³;ÅÉÂ…*ø«eÐFÛCÒIÝâ r(ž?4>GÉÁyv¸ås¯m&Äp?<4­¬ñÁ¦Qld‘g$n ¯sjµ+zBz· ™1¬âU„õ•O~Ù¨åÅx¹Â ®Ý#ø 단Ùb㜶¹+F½ZÓîoÜ48’qæúÈìëú9k¶²í×q…ù©ëT¶ìL?³g§þêíÙõö;{A½ñ¬–9þAÀ 2Ž^é+”¤"½/<—")i:¾; ãG'ù‡¹\bÄ'©Ò|…“P¿fÓ¥QõôYUûõþ[ÑD1„cŸ_¿ÌEfŒ¸C'ž‚‘›žô.æšHZë¨óê ò€FCæõJäN9/ÞÃdŸëY÷v=jø$¶>_N{~‰mêˆ#tõA†\h‘ùÖ¬}VñUÖÖÊ2KPËOú÷ï‹”õÄb¢ÕÊ̱Ía^rù)WÔzÐÞCʆÉË|Ç`YßÇ3'Œ•Äûð n EƘYu(B»,Dʰ¾ ¦‡4ƒ¯O3E fü¢5Ôv—]¹÷vºîé#bÛµ;ò!o…aè­[†%ëEÛ VK¶ÜÚÄ-†¬v®Ó·jªãŠNw@Ñ«Žð<Ì߬QUpÄμ‚$`˜ÙìÔ€µ®šù¡DQêdCÙdŠ Úö£C–±ÆØ¨Gh>“82Þpm•~@ æüÖüV4åIu²F5]ÿÔ;¼¨¸"2-¼ä© Û-„ÛŠŠn­††´({Œ=>2=¼LºO”Z¼«3ÅB{”zµ¯§¡ý˜›­tzùp[WÉ/hi—_£‚n8ksžÍ™Zγ–Ïm‡:Ñ™øŠö±Vùù·O¾ä5dÂÜbG›%õ$ˆ„{åöC”²G]³—µ¼l»‚yoŒrí¯EÜ›á«&ƒòsaxìèTï™7ŒÖt‘ %J$‘¼*•(j{hm5Ý®;[c tvž¢ó+ñܳ6+矷=ò1ì¥ê®Yc".¢;º…‚#ä,‹YÉêÉÆ54µãWÊV™Nj¨.~:4Ui‹&Ö3áôºqÂÂÙ‡úè ×RÞS¡ÚHš–âù¡ «Ã…\ké|ƒòÁ‚ˆµgC›¯ÁÝqßkêI÷ô_ƒH~\C1r7¼™]OQü Ò…`JöûÝ’KÜ×£ä–AÓ[î%´ËÀÛ•wÜÕnWbg¬d§}|„yXx¹³²VÅjµH}W®ºAÍbVÍôóú[õÝ^pøá=.ö ÕÈCß…ˆ}iZqgZŒ݈I¿H‚Ý0fÉ#üÃÝ$ÀõfÙ •_<o¶0¯™±®3RxÀLjݶ­Òåw~ó0r’Ýù©4ìðÞý, »ú» 6»jô™»J×r^ü¨z4š¨)üó°â}dÅ(e.ñ Ý_95ø¿{æ_;ñ_?ø’í_BÀL6³ ÌÌÇÇ_/V Óñ‹ùãó¯÷§ýÑG·#Îýó«/â‚Ϙû×{ÄÄ9Äx6øØ_s÷aeý£/ä”Ö¯#â¢ý,}Ä|„yùzæq ýùA˜¬„ ùwÂYYΆÚâS1Clé;‡S"¾›·®s±Â®<t¾Ë'µº¯JQ‰ô΂»}=Gu“þÕÎâöœOú X·”ïØªÓ¢@/ߺ…£ Ãü@p›ƒiËiÔ¶•skÈriÛ¤†çŠ’ü\¨¬äõ+z?/åÖˉ]výØÊ›Þl1×F2×ЛáQ¸’s_©ÊÓÆ=­ü¹ù*xuöýÓlMzÞ 
A.öO¡|o¥[xsò›n]9|åj`j>Xi¶Ôº-cËÁíN)Ì1€Þã*€•êùÔM´þHé:é€K—ì·Ò gp„ˆj=9Èøzyèk~ª£s˜36î¾.˜Ä¨}éã0;ÞUàîã°Ù$£À@â4ºdUè.s´ÕÕ-¼ÕcR4¸×³èñì^K¥Ãö`EÝb…Râ­;êOSò­·”· H›;ë¾{ã_UçÃ+nkÇge"Û'±\Á¼…-~Ëù¹“æGÝ+?QÍ;\神³W“f0aüvC®5~é%#œ ø„sßEãDÞ¹ÚOº$,¯.ÁHF8¹ÎZ}{£ÞõÖÛ1BÓŽ:´ 5Ðn&{Xcs†»GC¯UïJd¬©|ì'­ùþO0œHÙ œ¯hM×P“€`͆ÁÛ¬©¼!©VM«aášíá”4ñ—£Ö˜ ”-Ç>óÅa"ÝÓ½+¦ÜðãÙ|—Ñ’žEŠ€$v† :ßĈ4qR>éݰC­ª~{¡VPœtŒ‘ï¡CÓ€ÏGnÍȵ¿ ú_OsCÀ¿ÿÓg9 œ@€¤™¶ÉS{c S£¿Ìb@žÅ€Ìð<ÄÆ €Yÿ.‰A Ð³IŒ‰éOOo²þM;ÿôfÈŧ7333ÿ»æÏkèÆ„åþsç®å'tä–·viJ!7àAÒÍ.܉*çQ ¡ÒÌý—a[“âVƒœÖ“AN?ÔŸ?fr°ð“ï'Eg³¢Ü2h•^6Ø/â¾XD16ÔŽ;§8 Y ýBM¬‚»œ¹îK”›v]‚Ęñ“šæ~55oB$ á[‹Ö’-·ÊQlþ¡we—½×“þÂîlÿåCÿÊÒ õÀ?ÌGT(Ë_¡å…ÇC.<6˜íÌcƒ…€0Ó RÂÍ >y°¶±XOÛÀp˜>yëÓ³ð”Àtaræ3“ÃySƒs$/©a ­io1f°œ:£”…©æSm+%F)!Fm;+øi˜®6ÿÉAääÀ§r–ðœ~¥à=à; „k³œ0ùDÛÒÔÚBSÛÀvò@dqm-}"d ƒ¡` ³ £°…©µ|š§p³ÀL,ÍshÚ3ò?…‡Š¾¦öa>F€Üd\\Œüpià$,Ìç‚â|Hÿïü𚘘ZY*¡1«À >9°ž '¶“ñY ¦“ðäÀ|rNfÌ:™t2 èÄO@'³°œÌÂr2 ËÉ,pçúßô=§àÿsûCA'²€þoœÏ›Èòÿ8«,,ÿGÁçy„ü?ÆãÿÈÛ9Ž˜Ï#ÞÿÇ#òüZ‹íÌr‘ßÔÎz‚ªp2Çä¨ ÏšÌð‹åüHæ_Á¿²±žÂ¬¾•‘6‡ <©2€'GÄ‹ÊÄÄ*ÈÄa¿à ¬ð>¬ˆ÷pôf´³BOÛ¹ŽÓ³•Òq°Ï&è4¡ 3ÁþÁˆü ú|ÇBœE΋(ÈrÙÀ'Ït¡òÚ¿Ð;øo¬§ìž‚>äÐÿWâIg£ë²÷ÿ³þV|N¸i!`ðŸ öħ:`9¯ƒ ú?ÐÁYd`:“A¿ác™ÿ\çù„CòŸ²×)Ÿ'v;“˜¡çÓô?IÌçy> ¬¿õÉü Ø~gµ_‰ù} ~þ{³°ü9iüI– :¿‰¿ r& ÿ&XÏåè_@ÆzÊ5äŒÿq0ØXXÿ.˜. Â…üü/ Áú•BÑÐÓ è—ñŸ²yj°?> ž`1tÆhB¿ ö߉:îW_þ A9Tç@æ"˜\Èþzƨп1ê÷ƒOk3Y‡þøK#Ìç²Ó/í@N5óçòæ·$'å |ù»òæLí'p^Âÿ¤xýL žÑºÀ)Äý+Øüï!òg'f;ñŸŒu¾T;îßÉÂòŸÛä¯ç áß¶ù;{€XÀ¸3ö€²µÇù²á¤åß q&Ý‚Ï¬Ž•Î|ÖõO1Pèô<ËÂ<ÂüWë©cæOè÷.0]('þ‚{Ös`>-Þ˜~«òG¥ÃÌtnµw¾0øwçÕÉrþk%ĺÛíÂZí÷bpfµ¸&!¦¯e©8¹îsra餸œ ,xzщé×EŒ¹YÏ®þÏ5{|åã žõM ùL-´´-Ž—úpážhkZ)±!ld@|Õd`ÙÀ*Œ¼L¡i¥ojÂø”ñÙÄ‹JÏÊÊŒ‘Q fÁ`¤obmÇ c¡­ ß1j™j2ZŒ͌´-õM´´íô¬Œ¨áüeñüeÿŒEÄwS D–DìáÙŸÊÀªò¯ w–èyëüçD„~e…—*ÿÚ—ÏeýŸ‰"ý" 7 Ûßýýg‰ž¿"õŸ…0Á‹¾cRp²p¢lÌ Ð‹Dÿg‰ž¿ìúŸKŠ ôKR(ËßÈù+ëœ% ýß<Á;œÐ)IV&0ð"Ñ?'í3TA篮ýÚ=¦u¬]V +³Ê¿® Î=Íö¿õ˜Ô/a™!  
‹dÿ\‚ž¥Ëü ‹ õ‹.|Ïòwt•ñgéþ vLAë]xè‚ÿŽî¯eÎYºÿ2AëĸðØeýãþ•GýoÈtBAê„(½UþõŠý,Ñÿ ™Nˆ'Šc¢ðØeû¢¿®fœ%ú@¦cR'D¡ ßÈô7WƒÎýßé„(‚Ô1QøoHþº~v–äÿ†Lˆ/˜~¥7KôÃ$„r„~©­,£Þ_ùÏýß áŽp`e!ü…‹Â…ü6ë¿~–ÿ @ ã’…W1 <ÔÁÖß8øï3+鼸ü0+˜‘©îII{ú%#âKò“ÚPé¤L=žÕÄÚÈèdǯPeõµmµ-à%³Ž6âË#mKø”ú–fF0{SÍãzýø $DÈ(im¯'•òémFa&ºTÚ&ôÂ|ÔúΙé¬^ÎÔü,'XÎ-qÕýÌ‚¿W*Çë¦?_Ýùc™þ}u‡òûšõÉÒÎ)¿…6ÌÊÔâ”,ò{µöëÛóÉ÷éZÖšÚ¿úýºvüëÛÄçãUì¯ë§¤™Ϭ!çWÆ¿ÈÃõ.³Ò¦`G܉ȃ´L`J%õ…ÙÁõÆO|`4¦?6xHƒáÉGpÜÁ‡ÂáøŒ à~@è…6+èbÛ…63ä/ÚW³ÎµA—϶ÁyaežïÇÂd:߯2Ø.Œ…²\à¡Ùóm(r~,+ù/úAÎÏd‚Æ2³— Þt^@ó_Ðe§ ïw‘.\õàscY˜ÀÌ,ÚXÙÎÓ`abcºØÊ|^xøŸ×) ¾@È 9o7 ˆíâX0Óű¬ÌÛ ,ÛØX/¶A é?·13A¡Ú˜¬Ú@çíoƒW¾ÚÀç}ÞÆÊvA^¸‰.è™ ¼ gx5tQ÷ ó~°/øÏpëÁçý Þ9«`fÐ_´A/ľf<ïW¬6Ö¿hƒ2Ÿocc:ïWð6èy‡{ßyabƒûËù˜†·]àŽaçýÞÆÆzž6¸ÍÏë ÷|?xÛ‚ÿ¿à»?os8Ðy¿Y/Œ…·×¼ zÞ¯€Ì èùxƒ«å<ÏðЇ2ÿEÛ…ø€,°&ð½ VÖ cYXÙ.à ä/` ëyAbf^ÀgV0ä|\‚XY/Ä^òŸ×)Ât'ámpG¸Ðv!ÁÛÀ,èB çs¼ ¾ /ð|„·˜/ÈÆ¾€ 6ëÅ6èEÝCçs¼…ù]¸;_Ð3Nø¾0]Äl&æ‹ ^—îXgôgeÓ7Ò¶@T[Oõ´uãSSÄeÙÓBMÄDÇpR–!> ”|Bp£²Â០bò €Ùxy…x|‚ðt"$ÈÇ…öï» .rÂëD~=mMCKkc#+/`áñ °ñ ùÁ,l¬l¬l@V´ã{]aVÇ%\V … ¤Ú¥¶ÿÿÜN®H3hêèþ¿"TXXXŽðíܯÎY/Á¡öðR€x Žû` Ë%ÓÿûXú½Y#"¸dÙÕïßÿÿÑ ðôØš¦&:úºÖÇk'€Ó:¦q{>˜¦¡µƒ¥€‰„F†FÑ;>g ³˜ê¬ôô-ûÃÏÀ7¯‰=¼ÍDÓ±Ò¶Àdx}]S m-†_½ž™˜[›Zik¬`–˜‰ÀÒ †ø&f¡ý«3@ÛNSÛÌ ³ØÂWòKm3˜bmiùÇ4‚ÆfVö€“•ñ™‘œ‡¯agNó{Â=€ÃPÛ1-;€C Þë÷ møê] !\ˆþ§=éN;Œaö m¸h–úǾ o¥m 0EÈz,ý)£p Œô-µdj¢è`ljqÒû·":¿f…k鿘€Á™ @¦©wBnA+˜¾ ‚íSUÃÎç %@ÛDÓÈÔ>§¾É/ÆmçŒ ldª3‚ÕÖÔ×Ñ×—<M­­Ì¬­ŽŠÊ’`«§W·&be·-àx ¹´ž°ÌJa+ø`JK½&#=ýIçSÖpj'<±m`ŒðŒgœ›ÞAäÔ™³¤e ×¢ <™kÛ!Ì ouø0!¸Y´íŽ£è/Äü;rð®p3Ÿš0Ä´ÖóX™tµM´ö?ÖBFx¬:ݯ.ðùMmàñei ×Ü®V÷Ђ!þ A  qÊܯè'š¶ü5Âb¶úVVpïÏ# +€™Â½ÆV®E„:OF3œÕ ,í-ŽwÂͱ™¬Ó$%è¦ðq¶úp"’BBµ‚èÈŽ8…AÁœy­ã{¥ !ÞœàÜÛw9ÀÞÑRS‹‰ ~¢ „,ôµà’œFª¦©±1 !‘ ”Ùÿ˜xÌÙ±ÕÎ8€•…¶ö©£"À©£éÀãôÏ}ŠÔ45³?U(Ý/¨Ò6²èX˜ó†píߪÿmNºcõž²I‡˜¦¥uªn¸jZkÃCû˜õá1l} MÚ–VÇ``biea­yð¡JpNÙ>æîØkaffÚp¬M´àâë[ÁÅqÛ̱Hpµ™ÁÆÄ `‚«Se"¦A›µ%€QîfŒðÿp®à!ÙÚH !€þIß?NüVÌ™YàÌHšüá@ÇŽwxFm+MF#ãßcèNœëœrÀhd ³·<ç·aà¿2 ;€ò„}MF-F1xÓÂÍc0¿%#¿%ÂòâÚ––ðÈA@Àøä=‚ˆÖÉåX¸œ:][X›ã+ÌÄþWˆ˜jXþÒ5Ü– „¥ž©í‰~͇ðK˜¥áIøšÁíg 
4Aøâ±ó˜Xé›Xc¼4¢ÁÜZßê¢|§lÂ%z¤O€SØÃ]ÑTß„ÓèÂ]ôàèlSÓ_H`yÌç±›ÙÃîSã½<ÅÙèkYÃ{ž‘åð%Ϥx…dâƒ'œã\‡ö“$;3Œá¸0<Åm„ÃÃàZø%'Sç#¸àžñGî<—3„Ð-é_Ú‘”î+aìÖËŠÿEárRNœFœš%"r4áú6Õµ€Ÿ0*ˆäD,¸©tá!tB›ñ·nˆ`'P!Zà™èx¢“ù#Ã~Ï)~RwÀõz¢VÆ_ìHÁHÛòwǧÖô²'Iöí³¤Ï¥x)¸Î̳ý¢ÿ{RÞ“òp±†BÌayjC„ŸÌe¡ 5 mÆZž™æÏ÷ÇFÃÈ`dª 3b´Ôƒ'RÄÿÀ+cx£±©<ðaŒ:ð©tLíiùá¶Òþ%¹ Ý±ÎÈ.t’ÙME.âÔŸÅ5†ÅqåáâžöûoFÌòØé¼ßÁñðoÝî—¼OM`fp„Ã⢕˜>ߟ;¾PrbíãjKG„³åé@¸CÁë1„[œÆ"Mž–•ä¹ãÉárêÿžû¸¼GT§éóLUlõ§ró/ÝýûÇ8ó{Ò“ >“…©œ±§ð‘0 xíq̹–6œ€‘åq¨è[ž/Ó~U(ÇÔ•ÐÚVp°×ýƒÚïäó‡EÏä D"9}œ^N3Ññuœ_BÁá?…Ὺ0àö×?EÉãyORâï®Å ¼n|VxR0ÓƒçxëšÛèkÛ‹+Bgš¬ô¬5LV>n´Ñ±ü{€iÂÓâ.FÄ_™[ê©IÁ븸ˆSð‰5õ­ì-áZ8.×þ7§L`ÖVúFÖ–Ç'6FÄžæO'-á…¸ýé”ǧLÍàkFMÄ4,¿?›šiþšøÏâÈê[XÁ‹>8qqxÄZÎů¶ ýÂ.v ƒ–>Ú_Ä6;ÈDÇÄtü:k?9y)5x´ÑVC‡ÿ”¤Çˤcá£/ÀÉHÒ? ü›´3 ŠßÔžiX›XY#®¾þ+Ÿ±´°a<#óÉ FÄÙ¿ @û]rIi#bBÛÄÒÚâ$žOÆlNfƒ‡þ±úúú' íW¹hf¡ozìð¿â¬'`y}ÄŠí²(ÿ¹ ûÏöÏöÏöÏöÏöÏöÏöÏöÏöÏöÏöÏöÏöÏöÏöÏöÏöÏöÏöÏöÏöÏöÏöÏöÏöÏöÏöØþ_¹ÓÀ]èdar-2.6.8/doc/samples/cluster_digital_readme.txt0000644000175000017520000000057613476543220016703 00000000000000What follows is an extract from several email exchange with Roi Rodriguez. Denis. ---- "[the] remote copy feature needs to use a ssh authentication method which doesn't prompt for a password (in order to make it non-interactive, so you can run it from cron. It's no needed if someone plan to run it by hand, of course). I've added this comment at the beginning of the script." ----dar-2.6.8/doc/samples/PN_backup-storage.sh0000644000175000017520000000015513476543220015300 00000000000000#!/bin/bash dar -c "/mnt/storage/backup/storage_$(date +%Y-%m-%d-%H%M%S)" -B "/root/backup-storage.options" dar-2.6.8/doc/samples/README0000644000175000017520000000011013476543220012307 00000000000000Please point your web browser to the index.html page of this directory. dar-2.6.8/doc/samples/sample1.txt0000644000175000017520000000445513476543220013552 00000000000000#Preface #-------- # #Here follows a sample batch file submited by Henrik Ingo (Thanks Henrik ;-) ). #It is complete for backup but does not use conditional syntax. 
Over comments #(lines staring with #) all commands can also take place on the command-line. #Thus, this is a nice way to discover DAR's features. # # Denis Corbin ########################################################################### #Execution file for dar (Disc Archiver) #Sipmly use 'dar -B thisfile' to backup #This backs up my home machine #Where to place the backup (somewhere with lots of space) --create /mnt/win_d/darbackups/my_backup #General settings #size of an archive (one slice). 650M fits nicely on CD-R (and RW?) -s 650M #compress using bzip -y #verbose -v #Files not to compress -Z "*.mp3" -Z "*.avi" -Z "*.mpg" -Z "*.mpeg" -Z "*.divx" -Z "*.rm" -Z "*.wmv" -Z "*.wma" -Z "*.asf" -Z "*.ra" -Z "*.gif" -Z "*.jpg" -Z "*.jpeg" -Z "*.png" -Z "*.zip" -Z "*.tgz" -Z "*.gzip" -Z "*.bzip" -Z "*.bzip2" -Z "*.rar" -Z "*.Z" #Define directories to be backed up #First give a root --fs-root / #Then list directories to back up (relative to fs-root) #If none are given, everything under root is backed up #If something is specified, only those are backed up #just/give/path/like/this #Exclude directories/files with the --prune option #--prune not/this -g etc -g var/lib --prune var/lib/rpm --prune var/lib/urpmi var/local var/www var/ftp usr/local -g root --prune root/RPMS --prune root/tmp --prune root/kino --prune root/Desktop/Trash --prune root/Desktop/Roskakori --prune root/.Trash -g home/hingo --prune home/hingo/tmp --prune home/hingo/RPMS --prune home/hingo/kino --prune home/hingo/Desktop/Trash --prune home/hingo/.Trash --prune home/hingo/nobackup #Be sure to add quotes around tricky paths, or why not all paths... 
"mnt/win_d/My Documents/" -g mnt/win_d/text/ #End of file #Use something like this to restore everything: # dar -x /mnt/win_d/darbackups/SIMSON_backup -R / #something like this to restore something (etc-subtree): # dar -x /mnt/win_d/darbackups/SIMSON_backup -R / etc #And something like this to retrieve a single file to temp # dar -x /mnt/win_d/darbackups/SIMSON_backup -R /tmp/ etc/httpd/conf/httpd2.conf --flat #Really looking forward to having ark support for dar! dar-2.6.8/doc/samples/automatic_backup.txt0000644000175000017520000000651113476543220015516 00000000000000Let describe this automatic tool by its author, Manuel Iglesias: (extracted from email exchanges): ------------------------------------------------------------------------------ To make it easier to use I have written a shell script with the following features: -It assumes all backup files are in an accessible directory. From there the user can copy them to removable media. -Easy to configure different backups: Make a copy of the script and edit 'BACKUP SETUP.' in the new file. Configuration file 'darrc' is not necessary. -Mounts, makes a backup and then un-mounts filesystems: A mounted file system could be an external H.D. where the backup files could be written. -Decides/recommends which backup mode is the most suitable: Create FullBackup, Rewrite FullBacup, Create DiffNN, Rewrite DiffNN, ....... -Fully automatic: Use '-auto' option to use with cron. I have studied my system (I am new to Linux :-(.) and cron only sends mail if files in /etc/cron.{hourly,daily,weekly,monthly} exit with code != 0. I have written some shell scripts to handle cron jobs. -Different backups can use the same 'Destination' directory: Backups are created with base names made up of the shell script name: Shell script 'LoveLettersBackup' creates: -LoveLettersBackupFull.1.dar -....... -LoveLettersBackupFull.N.dar -LoveLettersBackupDiff01.1.dar -....... -LoveLettersBackupDiff01.N.dar -....... 
-LoveLettersBackupDiffNN.N.dar -LoveLettersBackupDataBase -Creates and keeps updated a Data Base file for later use by dar_manager. The backup mode algorithm is the following: -If there are not FullBackup files then create FullBackup. -If there are not DiffBackup01 files then create DiffBackup01. -If the sum of all DiffBackup files is less than %OfFullBackup (% set in 'BACKUP SETUP.') then rewrite DiffBackup01. -If the sum of all DiffBackup files is greater than %OfFullBackup (% set in 'BACKUP SETUP.') then rewrite FullBackup. -If DiffBackupXX is less than sum(DiffBackup(XX+1)..DiffBackupNN) then rewrite DiffBackupXX. ------------------------------------------------------------------------------ In my last E-mail I forgot to mention another condition which the backup mode algorithm takes in account: NrOfDiffBackups. In the copies of the script I sent you: -If NrOfDiffBackups is greater than MaxNrOfDiffBackups (set in 'BACKUP SETUP.') then rewrite FullBackup. In the meanwhile I have decided it is better to rewrite DiffBackup01 in that situation and I have modified the script accordingly. The backup mode algorithm is now the following: -If there are not FullBackup files then create FullBackup. -If there are not DiffBackup01 files then create DiffBackup01. -If the sum of all DiffBackup files is less than %OfFullBackup (% set in 'BACKUP SETUP.') then rewrite DiffBackup01. -If NrOfDiffBackups is greater than MaxNrOfDiffBackups (set in 'BACKUP SETUP.') then rewrite DiffBackup01. -If the sum of all DiffBackup files is greater than %OfFullBackup (% set in 'BACKUP SETUP.') then rewrite FullBackup. -If DiffBackupXX is less than sum(DiffBackup(XX+1)..DiffBackupNN) then rewrite DiffBackupXX. 
------------------------------------------------------------------------------ dar-2.6.8/doc/samples/etc_darrc0000644000175000017520000001176413552573443013326 00000000000000############################################################# # This is the default system wide configuration file for dar # # This file provide a set of options referred each by a target # name. They are not applied unless you specify that target on # command line or included file. For example for par2: # dar <...list of options...> par2 # # This options set are available automatically for dar unless # you define a .darrc in your home directory or use -N option # on command-line. # You can continue using this default file even if you use your # own .darrc file, by including the following in it or explicitely # command-line: # # -B /etc/darrc # # In the following we are using short options here because long # options may not be available everywhere. ############################################################## # target: par2 # activates: # - par2 file generation when creating an archive # - par2 file verification and correction when testing an archive # usage: dar par2 par2: -B "SOMEPATH/dar_par.dcf" ############################################################## # target: compress-exclusion # avoid compressing types of file known to already be compressed # or to have very bad compression ratio # # usage: dar compress-exclusion compress-exclusion: # here we define some files that have not to be compressed. # First setting case insentive mode on: -an # Then telling dar that the following masks are glob expression # which is the default, right, but if sooner on command-line the # user swapped to regex, the following mask would not work as expected # any more, so we force back to glob expression in any case: -ag # Now follows all the file specification to never try to compress: # Compressed video format. 
-Z "*.avi" -Z "*.cr2" -Z "*.flv" -Z "*.jng" -Z "*.m4v" -Z "*.mkv" -Z "*.mov" -Z "*.mp4*" -Z "*.mpeg" -Z "*.mpg" -Z "*.oga" -Z "*.swf" -Z "*.vob" -Z "*.webm" -Z "*.wmv" # Compressed animation. -Z "*.mng" # Compressed image format. -Z "*.bmp" -Z "*.gif" -Z "*.ico" -Z "*.jpe" -Z "*.jpeg" -Z "*.jpg" -Z "*.mmpz" -Z "*.mpeg" -Z "*.png" -Z "*.tif" -Z "*.tiff" -Z "*.webp" # Compressed audio format. -Z "*.ac3" -Z "*.als" -Z "*.ape" -Z "*.bonk" -Z "*.flac" -Z "*.m4a" -Z "*.mp2" -Z "*.mp3" -Z "*.mpc" -Z "*.nsf" -Z "*.ogg" -Z "*.speex" -Z "*.spx" -Z "*.weba" -Z "*.wv" # Compressed package. -Z "*.deb" -Z "*.rpm" -Z "*.run" -Z "*.sis" -Z "*.xpi" # Compressed data. -Z "*.7z" -Z "*.Z" -Z "*.bz2" -Z "*.cab" -Z "*.gz" -Z "*.jar" -Z "*.rar" -Z "*.tbz" -Z "*.tbz2" -Z "*.tgz" -Z "*.txz" -Z "*.wsz" -Z "*.wz" -Z "*.xz" # These are zip files. Not all are compressed, but considering that they can # get quite large it is probably more prudent to leave this uncommented. -Z "*.pk3" -Z "*.zip" # You can get better compression on these files, but then you should be # de/recompressing with an actual program, not dar. -Z "*.lz4" -Z "*.zoo" # Other, in alphabetical order. -Z "*.Po" -Z "*.aar" -Z "*.bx" -Z "*.chm" -Z "*.doc" -Z "*.epub" -Z "*.f3d" -Z "*.gpg" -Z "*.htmlz" -Z "*.iix" -Z "*.iso" -Z "*.jin" -Z "*.ods" -Z "*.odt" -Z "*.ser" -Z "*.svgz" -Z "*.swx" -Z "*.sxi" -Z "*.whl" -Z "*.wings" # These are blender bake files. Compression on these is optional in blender. # Blender's compression algorithm is better at compressing these than xz or # any other compression program that I have tested. # Comment only if you use uncompressed blender bake files. -Z "*.bphys" # Dar archives (may be compressed). 
-Z "*.dar" # Now we swap back to case sensitive mode for masks which is the default # mode: -acase ############################################################## # target: verbose # show both skipped files and files being processed # # usage: dar verbose verbose: -va ############################################################## # target: no-emacs-backup # ignore temporary files or backup files generated by emacs # no-emacs-backup: -ag -X "*~" -X ".*~" ############################################################## # target: samba # take care of daylight saving time for the samba filesystem # type samba: -H 1 # samba filesystem need this to properly report date # and not lead dar to resave all files when changing # from summer to winter time and viceversa. ############################################################## # target: dry-run # an alias for --empty option, that get its name because the # only available option letter was 'e' that leads to this non # intuitive option name "empty". 
# dry-run: -e ############################################################## # target: bell # ring the terminal upon user interaction request # bell: -b ############################################################## # target: full-from-diff # rebuilds a full backup from a differential backup and its # full backup of reference # usage: dar -+ new_full -A old_ref_full -@ diff full-from-diff # # can also be used to rebuild a full backup from a decremental # backup and a full backup # usage: dar -+ old_full -A recent_fill -@ decr full-from-diff full-from-diff: -/ '{!(~I)}[Rr] {~S}[O*] P* ; {~s}[*o] *p' dar-2.6.8/doc/samples/dar_par.dcf0000644000175000017520000000140313476543220013523 00000000000000# configuration file for dar to have Parchive integrated # with DAR # to be passed to dar as argument of -B option (-B dar_par.dcf) # either directly on command line or through $HOME/.darrc or /etc/darrc # file create: -E 'SOMEPATH/dar_par_create.duc "%p" "%b" %N %e %c 2' # 2 stands for 2% of redundancy # adjust it to your needs test: -E 'SOMEPATH/dar_par_test.duc "%p" "%b" %N %e %c' # note that you may need to set the path to dar_par_test.duc # and dar_par_create.duc, at dar/libdar installation, SOMEPATH # is substitued by the path where these are installed to # fix from Sergey Feo default: -E "echo Warning: dar_par.dcf will not be used in this operation. Please review command line options. 
-c or -t should be used before -B ...dar_par.dcf" dar-2.6.8/doc/samples/dar_rqck.bash0000644000175000017520000000111713476543220014064 00000000000000#!/bin/bash MT=$(sed '/^MemTotal/!d;s/.* //' /proc/meminfo) echo -e "\n\tyou have $MT total memory" ST=$(sed '/^SwapTotal/!d;s/.* //' /proc/meminfo) echo -e "\n\tyou have $ST total swap" P=$(mount | sed '/^none/d' | awk '{print $3}') for p in $P do fc=$(find $p -xdev \ -path '/tmp' -prune -o \ -path '/var/tmp' -prune -o \ -print | wc -l) echo -e "\n\tpartition \"$p\" contains $fc files" (( iioh = ($fc * 1300)/1024 )) echo -e "\tdar differential backup with infinint requires $iioh kB memory" done echo # /proc and /sys (and /dev if it's udev) are excluded by "-xdev" dar-2.6.8/doc/samples/PN_backup-root.sh0000644000175000017520000000014513476543220014616 00000000000000#!/bin/bash dar -c "/mnt/storage/backup/root_$(date +%Y-%m-%d-%H%M%S)" -B /root/backup-root.options dar-2.6.8/doc/samples/dar_par_create.duc0000755000175000017520000000200513476543220015067 00000000000000#!/bin/sh ### # # this script is to be launched on dar command line when creating an archive with -s option (slicing) # you need to run this script from dar, adding the following argument on command-line # # -E "dar_par_create.duc %p %b %N %e %c 20" # # note that 20 means 20% of redundancy, tune it to your needs # ### # # if you prefer you can also add the line above in your the $HOME/.darrc file # under the create: conditional statement (see dar man page) # ### # # usage par_script slice.basename slice.number extension level # generates a Parchive redundancy file from the slice file # ### if [ "$1" = "" -a "$2" = "" -a "$3" = "" -a "$4" = "" -a "$6" = "" ]; then echo "usage: $0 " echo "$0 builds Parchive redundancy file for the given slice" exit 1 fi # change according to you need PAR=par2 echo "creating PAR file for file $1/$2.$3.dar ..." 
exec $PAR c -r$6 -n1 "$1/$2.$3.$4" # script returned code it those of par dar-2.6.8/doc/samples/PN_backup-storage.options0000644000175000017520000000103413476543220016356 00000000000000### Options that are appended to the dar command: # No warning when not run from a terminal -Q # Don't try to read darrc files -N # Be verbose (so everything can be logged) -v # No warn on overwrite (should not happen anyway) -w # Compression level -z1 # Keep empty directories as such, so they can be restored -D # Blowfish encryption -K bf:secretpassword # Directory to backup -R "/mnt/storage/" # Excludes (must be specified as relative paths to the directory # that is to be backed up) -P "backup" -P "tmp" -P "winhome" -P "ftp/cisco" dar-2.6.8/doc/samples/pause_every_n_slice.duc0000755000175000017520000000200113476543220016153 00000000000000#!/bin/sh ############################################################################ # WARNING: this script is now obsolete, due to the -p option new feature # that provide the same behavior. You can still use this script or tune it # for your own need if you like, this is why it is kept present here as sample ############################################################################ # This script is to be launched on dar command line when creating an archive # with -s option (slicing), in place of -p option (pause every slice done) # # -E "pause_every_n_slice.duc %p %b %n %e %c N" # # will make dar pause when slice N will be done, when slice 2*N, 3*N etc. # will be done. 
# if [ "$1" = "" -a "$2" = "" -a "$3" = "" -a "$4" = "" -a "$6" = "" ]; then echo "usage: $0 " exit 1 fi toto=$(( $3%$6 )) if [ $toto -eq 0 ] ; then echo "Pausing after slice $3" echo "Press return to continue" read junk fi dar-2.6.8/doc/samples/available_space.duc0000755000175000017520000000171713476543220015240 00000000000000#!/bin/sh if [ -z "$1" -o -z "$2" -o -z "$3" -o -z "$4" -o -z "$5" -o -z "$6" -o -z "$7" ]; then echo "This script is expected to be run from dar this way:" echo "dar ... -E \"$0 %p %b %n %e %c \" ..." echo "where %p %b ... %c are to be used verbatim, while is to be" echo "replaced by the path of the mounted filesystem to monitor" echo "and by the minimum space required to store a full slice" exit 1 fi SLICE_PATH="$1" SLICE_BASENAME="$2" SLICE_NUMBER="$3" SLICE_EXTENSION="$4" DAR_CONTEXT="$5" MOUNT_POINT="$6" SLICE_SIZE="$7" FREE=`df $MOUNT_POINT | grep '/' | sed -re 's/.*[ ]+([0-9]+)[ ]+[0-9]+%.*/\1/'` while [ $FREE -le $SLICE_SIZE ]; do FREE=`df $MOUNT_POINT | grep '/' | sed -re 's/.*[ ]+([0-9]+)[ ]+[0-9]+%.*/\1/'` echo Free space on $MOUNT_POINT is $FREE KB echo "Waiting for disk change... Press enter when ready to continue!" read i done echo "Continuing with slice $SLICE_NUMBER" dar-2.6.8/doc/samples/PN_ftpbackup.sh0000644000175000017520000000744213476543220014356 00000000000000#!/bin/bash # ftpbackup.sh - Version 1.1 - 2006-01-09 - Patrick Nagel # Carry out backups automatically and put the resulting # archive onto a backup FTP server. Mail the result to # root. 
# # Dependencies: ncftp # Change this to your needs ########################### PASSWORDFILE="/root/ftpbackup.credentials" # $PASSWORDFILE must look like this (and should # of course only be readable for the user who # executes the script): # ----------------------------- # |USER="username" | # |PASS="password" | # |SERVER="hostname.of.server"| # ----------------------------- LOGFILE="/root/ftpbackup.log" # The logfile will be gzipped and be available # as $LOGFILE.gz after the script exits. NUMBEROFBACKUPS=2 # How many different backups should this script # carry out? BACKUPCOMMAND[1]="/root/backup-root.sh" # Backup command which carries out 1st backup. # Each backup command must create exactly ONE # archive file. BACKUPCOMMAND[2]="/root/backup-storage.sh" # Backup command which carries out 2nd backup. BACKUPCOMMAND[n]="" # Backup command which carries out nth backup. LOCALBACKUPDIR="/mnt/storage/backup" # This is where the backup archive (must be ONE # FILE!) will be stored by the $BACKUPCOMMAND[x] # program. MOUNTPOINT="/mnt/storage" # The mountpoint of the partition where the # backup archives will be stored on. # For free space statistics. BACKUPFTPQUOTA=42949672960 # Backup FTP server quota or total storage amount # (in bytes). ####################################################### # Initial variables and checks which ncftp &>/dev/null || { echo "Missing ncftp, which is a dependency of this script."; exit 1; } STARTTIME="$(date +%T)" # Functions function backup_to_ftp_start() { ncftpbatch -D return } function backup_to_ftp_queue() { # Puts newest file in ${LOCALBACKUPDIR} to the backup FTP server. 
source ${PASSWORDFILE} BACKUPFILE="${LOCALBACKUPDIR}/$(ls -t -1 ${LOCALBACKUPDIR} | head -n 1)" ncftpput -bb -u ${USER} -p ${PASS} ${SERVER} / ${BACKUPFILE} return } function backup_local_used() { du -bs ${LOCALBACKUPDIR} | awk '{printf($1)}' return } function backup_local_free() { df -B 1 --sync ${MOUNTPOINT} | tail -n 1 | awk '{printf($4)}' return } function backup_ftp_used() { source ${PASSWORDFILE} ncftpls -l -u ${USER} -p ${PASS} ftp://${SERVER} | grep -- '^-' | echo -n $(($(awk '{printf("%i+", $5)}'; echo "0"))) return } function backup_ftp_free() { echo -n $((${BACKUPFTPQUOTA} - $(backup_ftp_used))) return } function backup_success() { { echo -en "Backup succeeded.\n\nBackup started at ${STARTTIME} and ended at $(date +%T).\n\n" echo -en "Statistics after backup (all numbers in bytes):\n" echo -en "Used on Backup-FTP: $(backup_ftp_used)\n" echo -en "Free on Backup-FTP: $(backup_ftp_free)\n" echo -en "Used on local backup directory: $(backup_local_used)\n" echo -en "Free on local backup directory: $(backup_local_free)\n" } | mail -s "Backup succeeded" root return } function backup_failure_exit() { { echo -en "Backup failed!\n\nBackup started at ${STARTTIME} and ended at $(date +%T).\n\n" echo -en "Statistics after backup failure (all numbers in bytes):\n" echo -en "Used on Backup-FTP: $(backup_ftp_used)\n" echo -en "Free on Backup-FTP: $(backup_ftp_free)\n" echo -en "Used on local backup directory: $(backup_local_used)\n" echo -en "Free on local backup directory: $(backup_local_free)\n" } | mail -s "Backup FAILED" root gzip -f ${LOGFILE} exit 1 } # Main rm -f ${LOGFILE} # In case the script has been aborted before { for ((i=1; i<=${NUMBEROFBACKUPS}; i+=1)); do ${BACKUPCOMMAND[$i]} >>${LOGFILE} 2>&1 && backup_to_ftp_queue >>${LOGFILE} 2>&1 done && \ backup_to_ftp_start >>${LOGFILE} 2>&1 && \ backup_success } || backup_failure_exit gzip -f ${LOGFILE} dar-2.6.8/doc/samples/Makefile.in0000644000175000017520000003432413617552304013512 00000000000000# Makefile.in 
generated by automake 1.15 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2014 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = { \ if test -z '$(MAKELEVEL)'; then \ false; \ elif test -n '$(MAKE_HOST)'; then \ true; \ elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \ true; \ else \ false; \ fi; \ } am__make_running_with_option = \ case $${target_option-} in \ ?) ;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) pkgdatadir = 
$(datadir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ pkglibdir = $(libdir)/@PACKAGE@ pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = doc/samples ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/iconv.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/nls.m4 $(top_srcdir)/m4/po.m4 \ $(top_srcdir)/m4/progtest.m4 $(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) DIST_COMMON = $(srcdir)/Makefile.am $(dist_noinst_DATA) \ $(am__DIST_COMMON) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac DATA = $(dist_noinst_DATA) am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) am__DIST_COMMON = $(srcdir)/Makefile.in README DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) ACLOCAL = @ACLOCAL@ AMTAR = @AMTAR@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ 
CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CXXSTDFLAGS = @CXXSTDFLAGS@ CYGPATH_W = @CYGPATH_W@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DOXYGEN_PROG = @DOXYGEN_PROG@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPGME_CFLAGS = @GPGME_CFLAGS@ GPGME_CONFIG = @GPGME_CONFIG@ GPGME_LIBS = @GPGME_LIBS@ GREP = @GREP@ HAS_DOT = @HAS_DOT@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = @INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LD = @LD@ LDFLAGS = @LDFLAGS@ LIBCURL_CFLAGS = @LIBCURL_CFLAGS@ LIBCURL_LIBS = @LIBCURL_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBS = @LIBS@ LIBTHREADAR_CFLAGS = @LIBTHREADAR_CFLAGS@ LIBTHREADAR_LIBS = @LIBTHREADAR_LIBS@ LIBTOOL = @LIBTOOL@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ LT_SYS_LIBRARY_PATH = @LT_SYS_LIBRARY_PATH@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ NM = @NM@ NMEDIT = @NMEDIT@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYEXT = @PYEXT@ PYFLAGS = @PYFLAGS@ RANLIB = @RANLIB@ SED = @SED@ SET_MAKE = 
@SET_MAKE@ SHELL = @SHELL@ STRIP = @STRIP@ UPX_PROG = @UPX_PROG@ USE_NLS = @USE_NLS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ bindir = @bindir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ datadir = @datadir@ datarootdir = @datarootdir@ docdir = @docdir@ dot = @dot@ doxygen = @doxygen@ dvidir = @dvidir@ exec_prefix = @exec_prefix@ groff = @groff@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ prefix = @prefix@ program_transform_name = @program_transform_name@ psdir = @psdir@ runstatedir = @runstatedir@ sbindir = @sbindir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp = @tmp@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ upx = @upx@ NO_EXE_SAMPLES = darrc_sample sample1.txt README automatic_backup.txt JH-readme.txt JH_dar_archiver.options JH_darrc cluster_digital_readme.txt index.html PN_backup-root.options PN_backup-storage.options Patrick_Nagel_Note.txt EXE_SAMPLES = cdbackup.sh pause_every_n_slice.duc automatic_backup dar_backup dar_rqck.bash 
JH-dar-make_user_backup.sh cluster_digital_backups.sh dar_par_create.duc dar_par_test.duc MyBackup.sh.tar.gz PN_backup-root.sh PN_backup-storage.sh PN_ftpbackup.sh dar_backups.sh available_space.duc dist_noinst_DATA = $(NO_EXE_SAMPLES) $(EXE_SAMPLES) dar_par.dcf etc_darrc all: all-am .SUFFIXES: $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --gnu doc/samples/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --gnu doc/samples/Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs tags TAGS: ctags CTAGS: cscope cscopelist: distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for 
file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am all-am: Makefile $(DATA) installdirs: install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
clean: clean-am clean-am: clean-generic clean-libtool mostlyclean-am distclean: distclean-am -rm -f Makefile distclean-am: clean-am distclean-generic dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: @$(NORMAL_INSTALL) $(MAKE) $(AM_MAKEFLAGS) install-data-hook install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: uninstall-local .MAKE: install-am install-data-am install-strip .PHONY: all all-am check check-am clean clean-generic clean-libtool \ cscopelist-am ctags-am distclean distclean-generic \ distclean-libtool distdir dvi dvi-am html html-am info info-am \ install install-am install-data install-data-am \ install-data-hook install-dvi install-dvi-am install-exec \ install-exec-am install-html install-html-am install-info \ install-info-am install-man install-pdf install-pdf-am \ install-ps install-ps-am install-strip installcheck \ installcheck-am installdirs maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-generic \ mostlyclean-libtool pdf pdf-am ps ps-am tags-am uninstall \ uninstall-am uninstall-local .PRECIOUS: Makefile install-data-hook: $(INSTALL) -d $(DESTDIR)$(pkgdatadir)/samples sed -e "s%SOMEPATH%$(pkgdatadir)/samples%g" dar_par.dcf > $(DESTDIR)$(pkgdatadir)/samples/dar_par.dcf chmod 0644 $(DESTDIR)$(pkgdatadir)/samples/dar_par.dcf $(INSTALL) -m 0644 $(NO_EXE_SAMPLES) $(DESTDIR)$(pkgdatadir)/samples $(INSTALL) -m 0755 $(EXE_SAMPLES) $(DESTDIR)$(pkgdatadir)/samples $(INSTALL) -d $(DESTDIR)$(sysconfdir) sed -e "s%SOMEPATH%$(pkgdatadir)/samples%g" 
etc_darrc > $(DESTDIR)$(sysconfdir)/darrc uninstall-local: rm -rf $(DESTDIR)$(pkgdatadir)/samples # $(sysconfdir)/darrc not removed as it may contain system admin specific configuration # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: dar-2.6.8/doc/samples/index.html0000644000175000017520000002564013476543220013443 00000000000000 Dar - Scripts and Examples
Dar Documentation


Scripts and Examples





In this page you can find several script and configuration files, that have been sent by dar users. They should all work, if some do not, see them as illustration or examples that let you have a base to your own configuration scripts.

You will here both DUC files (Dar User Commands) that can be launched from dar thanks to its -E or -F options as well as scripts from where dar is launched:


Description
Author
Type
Download
 script that use dar to make full or differential backup to CDR
stef at hardco.de
Script
cdbackup.sh
sample /etc/darrc or ~/.darrc file
"(me)"
DCF
darrc_sample
 for those who like to learn with examples (a rich one) ;-)
Henrik Ingo
DCF
sample1.txt
script to create PAR redundancy data for protection against media corruption
Denis Corbin
DUC
dar_par_create.duc
 script to test and repair slice with redundancy data
Denis Corbin
DUC dar_par_test.duc
dar config files for dar_par_create and dar_par_test.duc

DCF
dar_par.dcf
pause every N slice instead of every  slice when creating an archive  [This is now obsolete as the -p option can now receive an argument to tell every how much slice to pause]
Denis Corbin
DCF
pause_every_n_slice.duc
automatic full/differential backup script, with automatic mounting unmount see comments inside automatic_backup.txt for more info, see also this documentation file (same Author).
Manuel Iglesias
Script
automatic_backup
perl script wrapping: dar+parchive+growisofs
Matthew Caron
Script
dar_backup
 bash script for Linux users to have raw estimation of the required amount of virtual memory to use to be able to save the whole system.
Bob Barry
Script
dar_rqck.bash
To save your home directory without worry (skip trash directory, make full or differential backup), all is explain by the author in this tiny document.
Jakub Holy
Script
DCF
DCF
JH-dar-make_user_backup.sh
JH_darrc
JH_dar_archiver.options
Local or remote backup script (using scp) to be launched from a cron, doing automatic decision whether the backup has to be full or incremental
Roi Rodriguez Mendez & Mauro Silvosa Rivera (Cluster Digital S.L.)
Script
cluster_digital_backups.sh
Shell script to backup to an FTP server Patrick Nagel Script

Note.txt
ftpbackup.sh
backup-root.sh
backup-root.options
backup-storage.sh
backup-storage.options

Enhanced version of the Script done by Roi and Mauro  (see cluster_digital_backups.sh) above Jason Lewis Script dar_backups.sh
A very complete script that:
  • can perform Logging
  • uses configuration files (see attached sample including usage comments)
  • can use Snapshots (if fs_root is on an LVM volume)
  • do DVD formatting
  • can write create a dar archive to DVD.
The design requires that each backup job fits on a single DVD, optionally writing a directory and contents to DVD. This allows:
  • copying system documentation to DVD for reference during system recovery
  • Writing dar_static to DVD for potential use during system recovery
  • Writing /etc/lvm and contents to DVD for potential use during
  • system recovery
  • Options to restart failed DVD operations by skipping to DVD
  • writing and to DVD verification
  • Extensive error trapping
The script contains itself a very detailed user information.
Charles
Script
MyBackup.sh.tar.gz
A shell script to replace -p option when one need to pause before dar lacks space to add a new slice on the disk. This may be of some use when using support of different sizes to store a given archive. You then need to  choose the size of slices (-s option) as the biggest common divisor of all slice sizes to let dar handle this situation quite nicely.
Denis Corbin
DUC available_space.duc


dar-2.6.8/doc/samples/JH_dar_archiver.options0000644000175000017520000000201213476543220016061 00000000000000################################# # # # DAR Archiver - options # # # ################################# # -m N - do not compress files smaller then N [B] # -Z pattern - matching files are not compressed # -P subdir - ignore (don't backup) directories matching the pattern; relative to -R # -X pattern - exclude files matching pattern; it may not include a file path, only the name # -R /home/aja - the directory to backup # -s 700M - cut the archive into 'slices' (parts) of max. size 700 MB # -y [level] - compress with bzip2 # -G - generuj zvlast katalog archivu # -D,--empty-dir - vtvor prazdne adresare pro ty excludovane (s -P) # -M - skip other filesystems (tj. namountovane FS). # -v - verbose output # --beep - pipni kdyz je pozadovana uzivatelova akce # !!! The option -c , has to be on the cmd line # !!! The option -R as well ## Obecne volby -s 700M -m 256 -y -M -v --empty-dir --beep ## Preskocene adresare -P .java/deployment -P .netbeans/var -P Trash dar-2.6.8/doc/samples/cdbackup.sh0000644000175000017520000001060113476543220013545 00000000000000#!/bin/sh #script for doing sliced full and incremental backups to cdr #stef at hardco.de, 2003 #full backup: "cdbackup.sh full" #incremental backup: "cdbackup.sh
" #Reference archive name is the filename of the first slice without .number.dar #Dar will also search/ask for the last reference archive slice. #A plain catalogue file can also be used as an incremental reference. #backups everything starting from / (see DAR_PARAMS) to iso/rr cdrs #Archive slices are stored temporarily in ./ (see TDIR) and get deleted #if written successfully to cdr. #The first cdr will also contain the dar_static executable. #If anything goes wrong while trying to write to cdr, you can try again #or keep the current archive slice as a file in ./ (see TDIR). #For backing up to files only, simply accept the cdr write error and #answer with 'keep file' (or even better: use dar directly). #Slice size is for 700MB cdr blanks, see (and maybe change) DAR_PARAMS below. #For (slow!) compression, add a -y or -z parameter to DAR_PARAMS. #The archive slice file names are: #- for full backups: YYYY-MM-DD..dar #- for incrementals: YYYY-MM-DD_YYYY-MM-DD..dar # The second date is the name of the reference archive, so you can end # up with names like YYYY-MM-DD_YYYY-MM-DD_YYYY-MM-DD_YYYY-MM-DD.1.dar # for a four level stacked incremental backup. #Files which don't get backed up: (see DAR_PARAMS below) #- the slice files of the current archive #- the slice files of the reference archive #- files called "darswap" (for manually adding more swap space for incrementals) #- directory contents of /mnt, /cdrom, /proc, /dev/pts #hints: #- You need at least 700MB of free disk space in ./ (or in TDIR, if changed). #- For incrementals, you need about 1KB of memory per tested file. # Create a large file "darswap" and add this as additional swap space. #- If you are doing more than one backup per day, the filenames may interfere. #- Carefully read the dar man page as well as the excellent TUTORIAL and NOTES. #uncompressed, for 700MB cdr blanks: DAR_PARAMS="-s 699M -S 691M -R / -P dev/pts -P proc -P mnt -P cdrom -D" #temporary or target directory: TDIR="." 
#I'm using a USB CDR drive, so i don't know which 'scsi'-bus it is on. #Cdrecord -scanbus is grepped for the following string: DRIVENAME="PLEXTOR" #Also because of USB i have to limit drive speed: DRIVESPEED=4 #used external programs: DAR_EXEC="/root/app/dar-1.3.0/dar" #tested: dar-1.3.0 DAR_STATIC="/root/app/dar-1.3.0/dar_static" #copied to the first cdr MKISOFS="/root/app/cdrtools-2.0/bin/mkisofs" #tested: cdrtools-2.0 CDRECORD="/root/app/cdrtools-2.0/bin/cdrecord" #tested: cdrtools-2.0 GREP="/usr/bin/grep" #tested: gnu grep 2.2 BASENAME="/usr/bin/basename" DATECMD="/bin/date" MKDIR="/bin/mkdir" MV="/bin/mv" CP="/bin/cp" RM="/bin/rm" #initial call of this script (just executes dar with the proper parameters): DATE=`$DATECMD -I` START=`$DATECMD` if [ -n "$1" ] && [ -z "$2" ] ; then if [ "$1" = "full" ] ; then echo "starting full backup" $DAR_EXEC -c "$TDIR/$DATE" \ -X "$DATE.*.dar" -X "darswap" \ -N $DAR_PARAMS -E "$0 %p %b %N" else echo "starting incremental backup based on $1" LDATE=`$BASENAME $1` $DAR_EXEC -c "$TDIR/${DATE}_$LDATE" -A $1 \ -X "${DATE}_$LDATE.*.dar" -X "$LDATE.*.dar" -X "darswap" \ -N $DAR_PARAMS -E "$0 %p %b %N" fi echo "backup done" echo "start: $START" echo "end: `$DATECMD`" #called by dar's -E parameter after each slice: elif [ -r "$1/$2.$3.dar" ] ; then echo -n "creating cdr $3 volume dir containing $2.$3.dar" $MKDIR "$1/$2.$3.cdr" $MV "$1/$2.$3.dar" "$1/$2.$3.cdr" if [ "$3" = "1" ] ; then echo -n " and dar_static" $CP $DAR_STATIC "$1/$2.$3.cdr" fi echo DEV=`$CDRECORD -scanbus 2>/dev/null | $GREP $DRIVENAME | cut -b2-6` CDBLOCKS=`$MKISOFS -R -print-size -quiet $1/$2.$3.cdr` echo "writing cdr $3 (${CDBLOCKS}s)..." KEEPFILE="n" until $MKISOFS -R "$1/$2.$3.cdr" | \ $CDRECORD -eject -s dev=$DEV speed=$DRIVESPEED tsize=${CDBLOCKS}s - do echo -n "write error, try [A]gain or [k]eep $2.$3.dar? 
" read ERR if [ "$ERR" = "k" ] ; then KEEPFILE="y" break fi done if [ "$KEEPFILE" = "y" ] ; then echo "cdr not written, keeping $2.$3.dar as file" $MV "$1/$2.$3.cdr/$2.$3.dar" "$1/$2.$3.dar" fi echo "removing volume dir" $RM -rf "$1/$2.$3.cdr" echo "backup continues" else echo "usage: $0 " fi exit 0 dar-2.6.8/doc/Tutorial.html0000644000175000017520000013247113552573442012500 00000000000000 DAR - Tutorial
Dar Documentation


TUTORIAL




Introduction

This tutorial shows you how to backup your file system (partially or totally) on USB key (thing works the same with harddisks or cloud storage), but we will keep USB keys for simplicity. Most important, we will also see how to restore your system from scratch in case of hard disk failure (or other cataclysms).

Note: This document has been initially written circa 2003, so don't pay attention to the usage of old hardware it mentions, the dar usage stay the same with modern removable media or cloud storage, and the document has been updated with recent features as if those old stuffs were still of actuality :-)


STEP 1: The FULL backup

We need first to make a full backup, let's go:

Let's assume the size of the usb keys is 100 MB,
*** here comes the option: -s 100M
This tells dar to not create a single backup file but to split it in several files with a size of at most 100 Megabytes.

On your first usb key drive we want to copy the dar binary outside the backup to be able to restore it in case of hard disk failure, for example.

IMPORTANT:
dar binary relies on several libraries which must also be available in the rescue system or copied with the dar binary. But, if you don't want to worry about needed libraries, there is a static version of dar which only difference is that it has all required library included in it (thus it is a larger binary). Its name is "dar_static", and its main reason of existence is to be placed beside backups in case something goes wrong in your system. Note that dar_static is useless for windows, you will always need the Cygwin dll.

You can also add man pages or a copy of this tutorial, if you are scared not to be able to remember all the many feature of dar ;-) and find the -h option too sparse. Note that all the dar documentation is available on the web. OK you need an Internet access to read them, which might not be the case if your system is to be restored, though today we have no more a single device per house connected to internet (tablets, smartphones, ... can help accessing documentation just in case).

This make the free space on the first usb key floppy a bit smaller, I let you make the substraction because this is subject to change from system to system, but let's assume dar_static is less than 5 MB, thus the initial slice should not exceed 95 MB
*** Here comes the option: -S 95M
(Note that '-s' is lowercase for all the slices, and '-S' is UPPERCASE meaning the initial slice only).

We need to pause between slices to change the usb key when it is full
*** here comes the option: -p -b
telling dar to pause before writing a new slice (-p) and to ring the terminal bell (-b) when user action is needed.

We will compress data inside the backup
*** here comes the option: -z
by default -z option uses gzip compression algorithm (gzip, bzip2, lzo, xz and some others are also available). Optionally, if speed is more important than archive size, you can degrade compression specifying the compression level : -z1 for example. By default the maximum compression is used (-z is equivalent to -z9)

Now, we want to backup the whole file system.
*** here comes the option: -R /
This tells dar that no files out of the provided directory tree will be saved. Here, it means that no files will be excluded from the backup, if no filter is specified, see below) here "R" stands for "Root".

There are some files you probably don't want to backup like backup files generated by emacs "*~" and .*~".
*** here comes the options: -X "*~" -X ".*~"
Note that you have to quote the mask for it not to be interpreted by the shell, the -X options (X for eXclude) do not apply to directories, nor to path, they just apply to filenames. See also -I option (I for Include) in man page for more information.

Among these files are several sub-trees you must not save: the /proc file system for example, as well as the /dev/pts and /sys . These are virtual filesystems, saving them would only make your backup bigger filled with useless stuff.
*** here come the options: -P dev/pts -P proc -P sys
Note that path must be relative to -R option (thus no leading '/' must be used) Unlike the -X/-I options, the -P option (P for "prune") can apply to a directory. If a directory matches -P option, all its subdirectory will also be excluded. note also that -P can receive wildcards, and they must be quoted not to be interpreted by the shell: -P "home/*/.mozilla/cache" for example. Lastly, -P can also be used to exclude a plain file (if you don't want to exclude all files of a given name using -X option): -P home/joe/.bashrc for example would only exclude joe's .bashrc file not any other file, while -X .bashrc would exclude any file of that name including joe's file. See also -g, -[ and -] options in man page for more, as well as the "file selection in brief" paragraph

More importantly we must not save the backup itself:
*** here comes the option: -P mnt/usb
assuming that your usb key is mounted under /mnt/usb. We could also have excluded all files of extension "dar" which are backup generated by dar using -X "*.*.dar", but this would have  also exclude other dar archive from the backup, which may not always fit your need.

Now, as we previously excluded the /dev/pts /proc and /mnt/usb directories, we would have to create these directory mount-points by hand at recovery time to be able to mount the corresponding filesystems. But we can better use the -D option: it changes dar's behavior by not totally ignoring excluded directories but rather storing them as empty.
*** here comes the option -D
thus at recovery time excluded directories will be generated automatically as empty directories

Lastly, we have to give a name to this full backup. Let's call it "linux_full" and as it is supposed to take place on the usb key, its path is /mnt/usb/linux_full
*** here comes the option: -c /mnt/usb/linux_full
Note that linux_full is not a complete filename, it is a "basename", on which dar will add a number and an extension ".dar", this way the first slice will be a file of name linux_full.1.dar located in /mnt/usb

Now, as we will have to mount and umount the /mnt/usb file system, we must not have any process using it, in particular, dar current directory must no be /mnt/usb  so we change to / for example.

All together we follow this procedure for our example:

Plug an empty usb key and mount it according to your /etc/fstab file.

mount /mnt/usb

Copy the dar binary to the first usb key (to be able to restore in case of big problem, like a hard disk failure) and eventually man pages and/or this tutorial.

cp `which dar_static` /mnt/usb

then, type the following:

cd /
dar -c /mnt/usb/linux_full -s 100M -S 95M -p -b -z -R / -X "*~" -X ".*~" -P dev/pts -P sys -P proc -P mnt/usb -D

Note that option order has no importance. Some options may be used several times (-X, -I, -P) some others cannot (see man page for more). When the first slice will be done, DAR will pause, ring the terminal bell and display a message. You will have to unmount the usb key

umount /mnt/usb

and replace it by an empty new one and mount it

mount /mnt/usb

To be able to do that, you can swap to another virtual console pressing ALT+F? keys (if under Linux), or open another xterm if under X-Windows, or suspend dar by typing CTRL-Z and reactivating it after mounting/unmounting by typing `fg' (without the quotes).

Then proceed with dar for the next slice, pressing the <enter> key.

Dar will label slices this way:
slice 1: linux_full.1.dar
slice 2: linux_full.2.dar
and so on.

That's it! We have finished the first step, it may take a long time depending on the size of the data to backup. The following step (differential backup) however can be done often, and it will stay fast every time (OK, except if a big part of your system has changed, in that case you can consider making another full backup).

Just a little check on the archive you've just made: suppose you want to read the content of the backup you made, you would have
to run:

dar -l /mnt/usb key/linux_full

It is recommended to either test the archive contents, or compare what's stored in it with the current file system before relying on it:

dar -t /mnt/usb key/linux_full

This will test the whole archive structure and content, while it is recommended to first unmount and remount removable disk, this to flush the cache. Else you may read data from cache (in memory) and do not detect an error on you disk. dar -t cannot check a single slice, it checks all the archive. If you need to check a single slice,(for example after burning it on DVD-RW, you can use the diff command: for example, you have burnt the last completed slices on DVD-RW, but have just enough free space to store one slice on disk. You can thus check the slice typing something like:

diff /mnt/cdrom/linux_full.132.dar /tmp/linux_full.132.dar

where 132 has to be replaced by the real slice number.

You can also add the --hash command when you create the archive (for example --hash md5), it will produce for each slice a small hash file named after the slice name "linux_full.1.dar.md5", "linux_full.2.dar.md5", etc. Then using the unix standard command "md5sum" you can check the integrity of the slice :

md5sum -c linux_full.1.dar.md5

If all is ok for the slice on the DVD-RW  (diff does not complain or md5sum returns "OK"), you can delete the slice from the hard disk (/tmp/slice.x.dar), and continue with dar. Else, you will have to restart the DVD-RW burning process with the same slice.

Just above we have "tested" the archive. That's a good habit to do it. But instead of testing the whole archive you could also compare it with the just saved system:

dar -d /mnt/usb key/linux_full -R /

will compare the archive with filesystem tree located at / . Same remark as previously, it is recommended to first unmount and mount the removable media to flush the system cache.

If you backup a live filesystem, you way prefer 'testing' an archive as it will not issue errors about files that changed since the backup was made, but if you are archiving files, diffing is probably a better idea as you really compare the content of the files and you should not experiment file changes on data you are archiving as most of the time such data about to be archived is old steady data that is not likely to change.

STEP 2: DIFFERENTIAL BACKUP

The only thing to add is the base name of the backup we take as reference
*** here comes the option: -A /mnt/usb/linux_full

Of course, we have to choose another name for that new backup, let's call it linux_diff1
*** here comes the option: -c /mnt/usb/linux_diff1

Last point: if you want to put the new backup at the end of the full backup, you will have to change the -S option according to the remaining space on the last usb key. suppose the last slice of linux_full takes 34MB you have 76MB available for the first slice of the differential backup (and always 100MB for the following ones),
*** here comes the option: -S 76M
but if you want to put the backup on a new usb key, just forget the -S option.

here we also want to produce a hash file to test each slice integrity before removing it from hard disk:
*** here comes the option: --hash md5

All together we get:

dar -c /mnt/usb/linux_diff1 -A /mnt/usb key/linux_full -s 100M -S 76M -p -b -z -R / -X "*~" -X ".*~" -P dev/pts -P proc -P mnt/usb key -P sys -D --hash md5

The only new point is that, just before effectively starting to backup, dar will ask for the last slice of the archive of reference (linux_full), then dar will pause (thanks to the -p option) for you to change the disk if necessary and put the one where you want to write the new backup's first slice, then pause again for you to change the disk for the second slice and so on.

STEP 3: ENDLESS DIFFERENTIAL BACKUP

You can make another differential backup, taking linux_diff1 as reference, in this case you would change only the following

-c /mnt/usb/linux_diff2 -A /mnt/usb key/linux_diff1

You could also decide to change of device, taking 4,4 GiB DVD-RAM ... or maybe rather something more recent and bigger if you want,  this would not cause any problem at all. After some time when you get many differential backup for a single full backup, you will have to make a new full backups, depending on your available time for doing it, or on your patient if one day you have to recover the whole data after a disk crash: You would then have to restore the full backup, then all the following differential backup up to the most recent one. This requires more user intervention than restoring a single full backup, all is a matter of balance, between the time it takes to backup and the time it takes to restore.

Note, that starting with release 1.2.0 a new command appeared that helps restoring a small set of file from a lot a differential backup. Its name is dar_manager. See at the end of this tutorial and the associated man page for more.

Another solution, is when you have too much differential backup, is to make the next differential backup taking the last full_backup as reference, instead of the last differential backup done. This way, it will take less time than doing a full backup, and you will not have to restore all intermediate differential backup. Some people make difference between "incremental" backup and "differential" backup. Here for dar, they look like the same, it just depends on the nature of the reference backup you take.

Of course, a given backup can be used as reference for several differential backup, there is no limitation in number nor in nature (the reference can be a full of differential backup).

Yet another solution is to setup decremental backup, this is let you have the full backup as the most recent one and the older ones as difference from the backup done just after them... but nothing is perfect, doing so takes much more time than doing full backup at each step but as less storage space as doing incremental backups and restoration time is as simple as restoring a full backup. here too all is a matter of choice, taste and use case.


STEP 4: RECOVER AFTER A DISK CRASH

Sorry, it arrived, your old disk has crashed. OK, you are happy because you have now a good argument to buy the very fast and very enormous very lastest hard disk available. Usually, you also cry because you have lost data and you will have to reinstall all your system, that was working  so well and for so long!

If however the last backup you made is recent, then keep smiling! OK, you have installed your new hard disk and configured you BIOS to it (well at ancient time it was necessary to manually setup the BIOS with the new disk, today you can forget it).

1. You first need to boot your new computer with the empty disk in order to restore your data onto it. For that I would advise using Knoppix or better system rescue CD that let you boot from CD or USB key. You don't need to install something on your brand-new disk, just make partitions and format them as you want (we will detail that below). You may even change the partition layout add new ones or merge several ones into a single one: what is important is that you setup each one with enough space to hold the data to be restored in them: We suppose your new disk is /dev/hda and /dev/sga is your removable media drive (USB key, DVD device, ...) For clarity, in the following we will keep assuming it to be a set of USB keys, it could be CD, DVD, or other disk you would do slightly the same.

2. Create the partition table as you wish, using
fdisk /dev/hda

3. Format the partition which will receive your data, dar is filesystem independent, you can use ext2 (as here in the example), ext3, ext4, ReiserFS, Minix, UFS, HFS Plus, XFS, whatever is the Unix-like filesystem you want, even if the backed up data did not reside on such filesystem at backup time!
mke2fs /dev/hda1
copy and record in a temporary file the UUID of the generated filesystem if the /etc/fstab we will restore in the next steps relies on that instead of a fixed path (like /dev/hda1).

4. Additionally if you have created it, format the swap partition and also record the generated UUID if necessary
mkswap -c /dev/hda2

5. If you have a lot of files to restore, you can activate the swap on the partition of your new hard drive:
swapon /dev/hda2

6. Now we must mount the hard disk, somewhere.

cd /
mkdir disk
mount -t ext2 /dev/hda1 /disk

would do the trick

6-alternative. If you want to restore your system over several partitions like /usr /var /home and / , you must create the partitions, format them as seen above and then create the directories that will be used as mounting points and mount the partitions on these directories. For example if you have / , /usr , /var and /home partitions this would look like this:

mkdir /disk/usr /disk/var /disk/home
mount -t ext2 /dev/hda2 /disk/usr
mount -t ext2 /dev/hda3 /disk/var
mount -t ext2 /dev/hda4 /disk/home



7. If the boot system used does not already include dar/libdar (unlike system rescue CD and Knoppix for example) we need to copy the dar binary from a removable medium to your disk: insert the USB key  containing the dar_static binary to be able to freely change of key later on:

cd /

mkdir /usb_key
mount -t ext2 /dev/sga /usb_key
cp /usb_key/dar_static /disk

where /dev/sga points to your usb_key drive (run "dmesg" just after plugging the key to know which device to use in place of the fancy /dev/sga). We will remove dar_static from your new hard drive at the end of restoration.

8. Now we can restore the archive. The stuff has to go in /disk subdirectory
*** here comes the option: -R /disk

9. The process may be long, thus it might be useful to be notified when a user action is required by dar.
*** here comes the option: -b
note that -p option is not required here because if a slice is missing dar will pause and ask you its number. If slice "0" is requested, it means the "last" slice of the backup.

let's go restoring!
/disk/dar_static -x /usb_key/linux_full -R /disk -b

... and when the next USB key is needed,
umount /usb_key

then unplug it, plug the next one and mount it:
mount -t ext2 /dev/sga /usb_key

as previously. To do that, either use a second xterm / virtual console or suspend dar with CTRL-Z and wake it back up with the 'fg' command. Then press <enter> to proceed with dar

10. Once finished with the restoration of linux_full, we have to do the same with any following differential/incremental backup. However, doing so will warn you any time dar restores a more recent file (file overwriting) or any time a file that has been removed since the backup of reference, has to be removed from file system (suppression). If you don't want to press the <enter> key several thousand times:
*** here comes the option: -w
(don't warn). All file will be overwritten without warning.

All together it makes:
/disk/dar_static -x /usb_key/linux_diff1 -R /disk -b -w

Then any additional archive:
/disk/dar_static -x /usb_key/linux_diff2 -R /disk -b -w
...
/disk/dar_static -x /usb_key/linux...    -R /disk -b -w


11. Finally, remove the dar binary from the disk:
rm /disk/dar_static

12. modify the /etc/fstab with the new UUIDs you have recorded at steps 4, 5 and possibly 6 bis.

13. Last, reinstall your original boot loader from the restored data:
If you still use lilo type:

    lilo -r /disk

If your boot loader is grub/grub2 type:

update-grub
grub-install /dev/sda

14. You can reboot your machine and be happy with your brand-new hard disk with your old precious data on it:
shutdown -r now

In this operation dar will in particular restore sparse files and hard linked inodes, thus you will have no drawback and even possibly better space usage than the original filesystem as dar can even transparently convert big plain files into smaller sparse files without any impact.

STEP 4(bis): recover only some files

Gosh, you have removed an important file by mistake. Thus, you just need to restore it, not the rest of the full and differential backups.

a) First method:

We could as previously, try all archive starting from the full backup up to the most recent differential backup, and restore just the file if it is present in the archive:

dar -R / -x /usb/linux_full -g home/denis/my_precious_file

This would restore only the file /home/denis/my_precious_file from the full backup.

OK, now we would also have to restore from all differential backup the same way we did. Of course, this file may have changed since the full backup.

dar -R / -x /usb/linux_diff1 -g home/denis/my_precious_file

and so on, up to the last differential archive.

dar -R / -x /usb/linux_diff29 -g home/denis/my_precious_file


b) Second method (more efficient):

We will restore our lost file, starting from the most recent differential backup and going back *maybe* up to the full backup. Our file may or may not be present in a given differential archive, as it may have changed or not since the previous version, thus we have to check whether our file is restored, using the -v option (verbose):

dar -R / -x /usb/linux_diff29 -v -g home/denis/my_precious_file

If we can see a line like

restoring file: /home/denis/my_precious_file

Then we stop here, because we got the most recent backup version of our lost file. Otherwise we have to continue with the previous differential backup, back to the full backup if necessary. This method has an advantage over the first one, which is not to need *in all cases* to read all the backups done since the full backup.

If you are lazy (as I am) on the other hand, have a look at dar_manager (at the end of the tutorial)

OK, now you have two files to restore. No problem, just do the second method but add -r option not to override any more recent file already restored in a previous step:

dar -x /usb/linux_diff29 -R / -r -v -g home/denis/my_precious_file -g etc/fstab

Check the output to see if one or both of your files got restored. If not, continue with the previous backup, up to the time you have seen for each file a line indicating it has been restored. Note that the most recent version of each files may not be located in the same archive, thus you might get /etc/fstab restored from linux_diff28, and /home/denis/my_precious_file restored at linux_diff27. In the case /etc/fstab is also present in linux_diff27 it would not have been overwritten by an older version, thanks to the -r option.

This option is very important when restoring more than one file using the second method. Instead, if the first method is used (restoring first from the full backup, then from all the following differential backups), the -r option is not so important because if overwriting occurs when you restore lost files, you would only overwrite an older version with a newer one.

Same thing here, even if you are not lazy, dar_manager can help you a lot here to automate the restoration of a set of files.

ISOLATING A CATALOGUE

We have seen previously how to do differential backups. Doing so, dar asks for the last slice of the archive of reference. This operation is required to read the table of contents (also known as "catalogue" [this is a French word that means "catalog" in English, I will keep this French word in the following because it is also the name of the C++ class used in libdar]) which is located at the end of the archive (thus on the last slice(s)). You have the possibility to isolate (that is, to extract) a copy of this table of contents to a small file. This small file is quite exactly the same as a differential archive that holds no data in it. Let's take an example with the full backup we did previously to see how to extract a catalogue:

    dar -C /root/CAT_linux_full -A /mnt/usb/linux_full -z


Note here that we used the UPPERCASE 'C' letter, by opposition to the lowercase 'c' which is used for archive creation; here we just created an isolated catalogue, which is usually a small archive. In addition, you can use the -z option to have it compressed, -s and -S options to have it split in slices, -p option, -b option, but for an isolated catalogue this is not often necessary as it is usually rather small. The only thing we have seen for backup that you will not be able to do for isolation is to filter files (-X, -I, -g, -P, -[ and -] options are not available for that operation).

So what, now we have our extracted catalogue, what can we do with it? Two things:

First, we can use the extracted catalogue in place of the archive, as reference for a differential backup. No need to manipulate the old usb key, you can store the last backup's isolated catalogue on your hard disk instead and use it as reference for the next backup. If we had used an isolated catalogue in the previous examples, we would have built our first differential backup this way (note that here we have chosen to use the CAT_ prefix to indicate that the archive is an isolated catalogue, but the choice is yours to label isolated catalogues the way you want):

    dar -c linux_diff1 -A /root/CAT_linux_full ... (other options seen above stay the same)

Second, we can use the isolated catalogue as backup of the internal catalogue if it gets corrupted. Well, to face data corruption the best solution ever invented is Parchive, an autonomous program that builds parity files (same mechanism as the one used for RAID disks) for a given file. Here we can use Parchive to create a parity file for each slice. So, assuming you lack Parchive, and that you failed reading the full backup because the usb key is corrupted in the part used to store the internal catalogue, you can use an isolated catalogue as rescue:

    dar -x linux_full -A /root/CAT_linux_full ...
    dar -d linux_full -A /root/CAT_linux_full ...
    dar -t linux_full -A /root/CAT_linux_full ...
    dar -l /root/CAT_linux_full

An isolated catalogue can be built for any type of archive (full, differential or incremental archive, even for an already isolated catalogue, which I admit is rather useless). You can also create an isolated catalogue at the same time you do a backup, thanks to the -@ option:

    dar -c linux_diff1 -A /mnt/usb/linux_full -@ CAT_linux_diff1 ... (other options...)
    dar -c linux_full -@ CAT_linux_full ... (other options seen above stay the same for backup)

This is known as "on-fly" isolation.

DAR_MANAGER TUTORIAL

dar_manager builds a database of all your archive contents, to automatically restore the latest versions of a given set of files. Dar_manager is not targeted at the restoration of a whole filesystem; the best way to restore a whole filesystem has been described above and does not rely on dar_manager. So let's use dar_manager to restore a set of files or a whole directory. First, we have to create a "database" file:

dar_manager -C my_base.dmd

This created a file "my_base.dmd" where dmd stands for Dar Manager Database, but you are free to use any other extension.

This database is created empty. Each time you make a backup, may it be full or differential, you will have to add its table of contents (aka "catalogue") to this database using the  following command:

dar_manager -B my_base.dmd -A /mnt/usb/linux_full

This will add ("A" stands for "add") the archive contents to the base. In some cases you may not have the archive available but its extracted catalogue instead. Of course, you can use the extracted catalogue in place of the archive!

dar_manager -B my_base.dmd -A ~/Catalogues/CAT_linux_full

The problem however is that when dar_manager will need to recover a file located in this archive it will try to open the archive ~/Catalogue/CAT_linux_full for restoration, which does not contain any data because it is just the catalogue of the archive.

No problem in that case, thanks to the -b option we can change afterward the basename of the archive, and thanks to the -p option you can change afterward the path at any time. Let's now list the database contents:

dar_manager -B my_base.dmd -l

It shows the following:


dar path    :
dar options :

archive #   |    path      |    basename
------------+--------------+---------------
        1       /home/denis/Catalogues      CAT_linux_full

We should change the path of archive number 1 so that dar_manager looks on the usb key drive:

dar_manager -B my_base.dmd -p 1 /mnt/usb

and also replace the name of the extracted catalogue by the real archive name

dar_manager -B my_base.dmd -b 1 linux_full

Now we have exactly the same database as if we had used the real archive instead of its catalogue:

dar_manager -B my_base.dmd -l


dar path    :
dar options :

archive #   |    path      |    basename
------------+--------------+---------------
        1       /mnt/usb      linux_full


In place of using -b and -p options, you can also tell the path and the name of the real archive to use at restoration time, when you add the catalogue to the database:

dar_manager -B my_base.dmd -A ~/Catalogues/CAT_linux_full /mnt/usb/linux_full

This is done adding an optional argument. The first ~/Catalogue... is the archive where to read the catalogue from, and the second /mnt/usb/... is the name to keep for it. No access is done to this second archive at the time of the addition, thus it may stay unavailable at the time the command is typed.

You can add up to 65534 archives to a given database, and have as many databases as you want.

Note that we have not yet given important options in the database to be passed to dar. For example, you will likely restore from the root of your filesystem, therefore when called from dar_manager, dar must get the "-R /" option. This is done with:

dar_manager -B my_base.dmd -o -R /

All that follows -o is passed to dar as-is. You can see the options passed to dar when listing the database contents (-l option).

Let's now suppose that after each backup you took the time to update your database, and you now just have removed an important file by mistake.

 We can restore our /home/denis/my/precious/file using dar_manager that way:

dar_manager -B my_base.dmd -r home/denis/my/precious/file

dar_manager will find the proper archive to use, and call dar with the following options:

dar -x <archive> -R / -g home/denis/my/precious/file

Which in turn will ask you the corresponding slices. If you want to restore more files at a time or even a directory tree, you can add several arguments after -r option of dar_manager:

dar_manager -B my_base.dmd -r home/denis/my/precious/file etc/fstab home/joe

Once an archive becomes obsolete you can delete it from the database thanks to the -D option; you can also change archive order (-m option), get the list of archives in which a given file is located (-f option), get the list of most recent files in a given archive (-u option), and get overall statistics per archive (-s option). Lastly you can specify which dar command to use given its path (-d option); by default, dar_manager uses the PATH shell variable to choose the dar command.

A new feature for those that are really very lazy (still as I am myself): dar_manager has an interactive mode, so you don't have to remember all these command-line switches except one:

dar_manager -B my_base.dmd -i

Interactive mode allows you to do all operations except restoration, which can be done as previously explained.

TO GO FURTHER WITH DAR/LIBDAR

Well, we have reached the end of this tutorial, but dar/libdar has still a lot of features to be discovered:
- strong encryption
- archive merging
- decremental backup
- dar command-line files (DCF)
- user commands between slices (and DUC files)
- Extended Attribute manipulations
- hard links
- Sparse files
- remote backup over ssh
- suspending/resuming a database from dar before/after backing it up
- using regex in place of glob expressions in masks
- using dar with tape thanks to the sequential reading mode
- having dar adding padded zeros to slice numbers
- excluding some files from compression
- asking dar to retry saving a file if it changes at the time of the backup
- what is a "dirty" file in a dar archive
- listing an archive contents under XML format
- using conditional syntax in DCF files
- using user targets
- adding user comments in dar archive
- using DAR_DCF_PATH and DAR_DUC_PATH environment variables
- truncated archive repairing

all this is described in much details in the following documents:
FAQ, mini-howto, command-line usage notes, man pages. You can find out more precisely where, using the feature description page. However if you find something unclear, feel free to report it or ask for help on the dar-support mailing-list.




dar-2.6.8/doc/portable_cp0000755000175000017520000000120013476543220012204 00000000000000#!/bin/sh if [ -z "$1" -o -z "$2" -o ! -z "$3" ] ; then echo "usage: $0 " exit 1 fi TMP_SRC=dar_install_cp_test_src TMP_DST=dar_install_cp_test_dst if mkdir "$TMP_DST" && mkdir "$TMP_SRC" ; then if cp -dR --preserve=mode $TMP_SRC $TMP_DST ; then rm -rf "$TMP_SRC" "$TMP_DST" exec cp -dR --preserve=mode "$1" "$2" else # BSD-like Unix that does not support -d or --preserve options rm -rf "$TMP_SRC" "$TMP_DST" exec cp -pR "$1" "$2" fi else rm -rf "$TMP_SRC" "$TMP_DST" echo "Impossible to create $TMP_DST or $TMP_SRC in order to determine capabilities of the 'cp' command" exit 2 fi dar-2.6.8/doc/usage_notes.html0000644000175000017520000040546213552573443013215 00000000000000 DAR's Usage Notes
Dar Documentation


Command-line Usage Notes





Introduction

You will find here a collection of example of use for several features of dar suite command-line tools.

Contents




Dar and remote backup server

In the following you will find the three different available ways to have dar working with remote repositories (or remote server if you prefer):
  1. single pipe
  2. dual pipes
  3. native support for FTP and SFTP since release 2.6.0

The situation is the following : you have a host (called local in the following), on which resides an operational system which you want to backup regularly, without perturbing users. To face hard disk failure or local disaster, you want to store the backup on another host (called remote host in the following). Of course you have not much space on local host to store the archive, else you could do the backup first and then copy the resulting slices afterward. For restoration you would first need to download the archive then proceed to restoration, that's of course possible but still requires enough local storage space.

Between these two hosts, you could also use NFS and nothing more would be necessary to use dar as usually. but if for security reasons you don't want to use NFS (insecure network, backup over Internet, ...), but prefer to communicate through an encrypted session (using ssh for example), then you need to use dar features brought by version 1.1.0:

1 - Single pipe

dar can output its archive to its standard output instead of a given file. To activate it, use "-" as basename. Here is an example :

dar -c - -R / -z | some_program
or
dar -c - -R / -z > named_pipe_or_file

Note, that file splitting is not available as it has not much meaning when writing to a pipe. (a pipe has no name, there is no way to skip (or seek) in a pipe, while dar needs to set back a flag in a slice header when it is not the last slice of the set). At the other end of the pipe (on the remote host), the data can be redirected to a file, with proper filename (something that matches "*.1.dar").

some_other_program > backup_name.1.dar

It is also possible to redirect the output to dar_xform which can in turn on the remote host split the data flow in several files, pausing between them if necessary, exactly as dar is able to do:

some_other_program | dar_xform -s 100M - backup_name

this will create backup_name.1.dar and so on. The resulting archive is totally compatible with those directly generated by dar. OK, you are happy, you can backup the local filesystem to a remote server through a secure socket session, in a full featured dar archive without using NFS. But, now you want to make a differential backup taking this archive as reference. How to do that? The simplest way is to use the new feature called "isolation", which extracts the catalogue from the archive and stores it in a little file. On the remote backup server you would type:

dar -A backup_name -C CAT_backup_name -z

Note that without the -z option no compression is used, as is the default with dar for any operation; use the -z option: it is worth compressing an isolated catalogue.

if the isolated catalogue is too big to fit on a floppy [yes, this was written long ago :-)], you can split it as usually using dar:

dar -A backup_name -C CAT_backup_name -z -s 1440k

the generated archive (CAT_backup_name.1.dar, and so on), only contains the catalogue, but can still be used as reference for a new backup (or backup of the internal catalogue of the archive, using -x and -A at the same time). You just need to transfer it back to the local host, either using floppies or USB key,  through a secured socket session, or even directly isolating the catalogue to a pipe that goes from the remote host to the local host:

on remote host:
dar -A backup_name -C - -z | some_program

on local host:
some_other_program > CAT_backup_name.1.dar

or use dar_xform as previously if you need splitting :
some_other_program | dar_xform -s 1440k CAT_backup_name

then you can make your differential backup as usual:
dar -A CAT_backup_name -c - -z -R / | some_program

or if this time you prefer to save the archive locally:
dar -A CAT_backup_name -c backup_diff -z -R /

To read an archive from a remote host using a single pipe, you will need to run dar with the --sequential-read option. In that so-called sequential read mode, dar sequentially reads the archive contents from the beginning up to the end. This has some huge drawbacks when it comes to extracting a few files from a large archive, especially when such a large archive has to be sent over the network. For that reason let's see another method:

2 - Dual pipes

We have previously seen how to use a single pipe to generate an archive over the network, how to fetch an archive with a single pipe either to restore some files or to make a differential backup (but that way is very inefficient) and how to make differential backup by use of an isolated catalogue.

For differential backups instead of isolating the catalogue, it is also possible to read an archive or its extracted catalogue through pipes. Yes, two pipes are required for dar to be able to read an archive efficiently. The first goes from dar to the external program "dar_slave" and carries orders (asking some portions of the archive) while the other pipe goes from "dar_slave" back to "dar" and carries the requested data for reading.

By default, if you specify "-" as basename for -l, -t, -d, -x, or to -A (used with -C or -c), dar and dar_slave will use their standard input and output to communicate. Thus you need additional program to make the input of the first going to the output to the second, and vice versa. Warning: you cannot use named pipe that way, because dar and dar_slave would get blocked upon opening of the first named pipe, waiting for the peer to open it also, even before they have started (dead lock at shell level). For named pipes, there is -i and -o options that help, they receive a filename as argument, which may be a named pipe. The argument provided to -i is used instead of stdin and the one  provided to -o is used instead of stdout. Note that -i and -o options are only available if "-" is used as basename. Let's take an example:

You now want to restore an archive from your remote backup server. Thus on it you have to run dar_slave this way

on remote server:
some_prog | dar_slave backup_name | some_other_prog
or
dar_slave -o /tmp/pipe_todar -i /tmp/pipe_toslave backup_name

and on the local host you have to run dar this way:

some_prog | dar -x - -v ... | some_other_prog
or
dar -x - -i /tmp/pipe_todar -o /tmp/pipe_toslave -v ...

The order is not important: you can run dar or dar_slave first. What is important instead is to connect dar and dar_slave in a way that the output of the first goes to the input of the second and vice versa. An important point to note is also that the communication support must be perfect: no data loss, no duplication, no order change, thus communication over TCP should be fine.

Of course, you can not only extract files (-x command as above) but also isolate a catalogue through pipes, test an archive, make difference, use as catalogue of reference and even then, output the resulting archive to pipe! If using -C or -c with "-" while using -A also with "-", it is then mandatory to use -o: The output catalogue will be generated on standard output, thus to send order to dar_slave you must use another channel thanks to -o option:

       LOCAL HOST                                   REMOTE HOST
   +-----------------+                     +-----------------------------+
   |   filesystem    |                     |     backup of reference     |
   |       |         |                     |            |                |
   |       |         |                     |            |                |
   |       V         |                     |            V                |
   |    +-----+      | backup of reference |      +-----------+          |
   |    | DAR |--<-]=========================[-<--| DAR_SLAVE |          |
   |    |     |-->-]=========================[->--|           |          |
   |    +-----+      | orders to dar_slave |      +-----------+          |
   |       |         |                     |      +-----------+          |
   |       +--->---]=========================[->--| DAR_XFORM |--> backup|
   |                 |        saved data   |      +-----------+ to slices|
   +-----------------+                     +-----------------------------+

on local host :
dar -c - -A - -i /tmp/pipe_todar -o /tmp/pipe_toslave | some_prog

on the remote host :

dar_slave -i /tmp/pipe_toslave -o /tmp/pipe_todar full_backup

dar_slave provides the full_backup for dar's -A option...

some_other_prog | dar_xform - diff -s 140M -p ...

...while dar_xform make slices of the output archive provided by dar

3 - Native SFTP and FTP support

Since release 2.6.0, you can use an URL-like archive basename. Assuming you have slices test.1.dar, test.2.dar ... available in the directory Archive of an FTP server you could read, extract, list, test, ... that archive using the following syntax:

dar -t ftp://login@ftp.server.some.where/Archive/test ... <other options>

Same thing with -l, -x, -A and -@ options. Note that you still need to provide the archive base name not a slice name. This option is also compatible with slicing and slice hashing, they will be generated on remote server:

dar -c sftp://login:password@secured.server.some.where/Archive/day2/incremental -A ftp://login@ftp.server.some.where/Archive/CAT_test --hash sha512 -@ sftp://login2:password2@secured.server.some.where/Archive/day2/CAT_incremental <other options>

By default if no password is given, dar asks the user interactively. If no login is used, dar assumes the login to be "anonymous". When you add the -afile-auth option, in absence of password on command-line, dar checks for a password in the file ~/.netrc for both FTP and SFTP protocols to avoid exposing password on command-line while still have non interactive backup. See man netrc for this common file's syntax.

 In the next paragraph you will find examples of use with netcat and ssh relying on the two first options for remote operation, but just before find a summary table of the pro and cons of the different remote server access methods:

Mode
network protocol
supported
slicing
slice hashing
efficient archive reading
Note
single pipe
any reliable one (*)
using dar_xform for both reading and writing
NO
NO
only efficient for archive creation
dual pipes
any reliable one (*)
using dar_xform for writing
and dar_slave for reading
NO
YES
efficient for archive reading and using archive as reference
dar native support
FTP and SFTP only
-s and -S options
YES
(--hash option)
YES
FTP is insecure but less CPU resources

(*) any protocol that is lossless, keep byte order and do not duplicate data



dar and ssh

As reported by "DrMcCoy" in the historical forum "Dar Technical Questions", the netcat program can be very helpful if you plan to backup over the network. However netcat does not cipher the data over the network.

The context in which the following examples take place is this: a "local" host named "flower" has to be backed up or restored from/to a remote host called "honey" (OK, the names of the machines are silly...)

Example of use with netcat. Note that netcat command name is "nc"

Creating a full backup of "flower" saved on "honey"
on honey:
nc -l -p 5000 > backup.1.dar

then on flower:
dar -c - -R / -z | nc -w 3 honey 5000

but this will produce only one slice, instead you could use the following to have several slices on honey:

on honey:
nc -l -p 5000 | dar_xform -s 10M -S 5M -p - backup

on flower:
dar -c - -R / -z | nc -w 3 honey 5000

by the way note that dar_xform can also launch a user script between  slices exactly the same way as dar does, thanks to the -E and -F options.

Testing the archive
testing the archive can be done on honey but you could also do it remotely even if it is not very interesting doing it that way !

on honey:
nc -l -p 5000 | dar_slave backup | nc -l -p 5001

on flower:
nc -w 3 honey 5001 | dar -t - | nc -w 3 honey 5000

note also that dar_slave can run a script between slices, if for example you need to load slices from a robot, this can be done automatically, or if you just want to mount/unmount a removable media eject or load it and ask the user to change it ...

Comparing with original filesystem
on honey:
nc -l -p 5000 | dar_slave backup | nc -l -p 5001

on flower:
nc -w 3 honey 5001 | dar -d - -R / | nc -w 3 honey 5000

Making a differential backup
Here the problem is that dar needs two pipes to send orders and read data coming from dar_slave, and a third pipe to write out the new archive. This cannot be realized only with stdin and stdout as previously. Thus we will need a named pipe (created by the mkfifo command). 

on honey:
nc -l -p 5000 | dar_slave backup | nc -l -p 5001
nc -l -p 5002 | dar_xform -s 10M -p - diff_backup

on flower:
mkfifo toslave
nc -w 3 honey 5000 < toslave &
nc -w 3 honey 5001 | dar -A - -o toslave -c - -R / -z | nc -w 3 honey 5002


with netcat the data goes in clear over the network. You could use ssh instead if you want to have encryption over the network. The principle are the same.

Example of use with ssh

Creating full backup of "flower" saved on "honey"
we assume you have a sshd daemon on flower.
on honey:
ssh flower dar -c - -R / -z > backup.1.dar

or still on honey:
ssh flower dar -c - -R / -z | dar_xform -s 10M -S 5M -p - backup

Testing the archive
on honey:
dar -t backup

or from flower: (assuming you have a sshd daemon on honey)

ssh honey dar -t backup

Comparing with original filesystem
on flower:
mkfifo todar toslave
ssh honey dar_slave backup > todar < toslave &
dar -d - -R / -i todar -o toslave


Important. Depending on the shell you use, it may be necessary to invert the order in which "> todar" and "< toslave" are given on command line. The problem is that the shell hangs trying to open the pipes. Thanks to "/PeO" for his feedback.

or on honey:
mkfifo todar toslave
ssh flower dar -d - -R / > toslave < todar &
dar_slave -i toslave -o todar backup


Making a differential backup
on flower:
mkfifo todar toslave
ssh honey dar_slave backup > todar < toslave &

and on honey:
ssh flower dar -c - -A - -i todar -o toslave > diff_linux.1.dar
or
ssh flower dar -c - -A - -i todar -o toslave | dar_xform -s 10M -S 5M -p - diff_linux


Bytes, bits, kilo, mega etc.


you probably know a bit the metric system, where a dimension is expressed by a base unit (the meter for distance, the liter for volume, the Joule for energy, the Volt for electrical potential, the bar for pressure, the Watt for power, the second for time, etc.), and declined using prefixes:

      prefix (symbol) = ratio
    ================
deci  (d) = 0.1
centi (c) = 0.01
milli (m) = 0.001
micro (u) = 0.000,001 (symbol is not "u" but the "mu" Greek letter)
nano  (n) = 0.000,000,001
pico  (p) = 0.000,000,000,001
femto (f) = 0.000,000,000,000,001
atto  (a) = 0.000,000,000,000,000,001
zepto (z) = 0.000,000,000,000,000,000,001
yocto (y) = 0.000,000,000,000,000,000,000,001
deca (da) = 10
hecto (h) = 100
kilo  (k) = 1,000  (yes, this is a lower case letter, not an upper case! Uppercase letter 'K' is the Kelvin: temperature unit)
mega  (M) = 1,000,000
giga  (G) = 1,000,000,000
tera  (T) = 1,000,000,000,000
peta  (P) = 1,000,000,000,000,000
exa   (E) = 1,000,000,000,000,000,000
zetta (Z) = 1,000,000,000,000,000,000,000
yotta (Y) = 1,000,000,000,000,000,000,000,000

This way two milliseconds (noted "2 ms") are 0.002 second, and 5 kilometers (noted "5 km") are 5,000 meters. All was fine and nice up to the recent time when computer science appeared: In that discipline, the need to measure the size of information storage raised. The smallest size, is the bit (contraction of binary digit), binary because it has two possible states: "0" and "1". Grouping bits by 8 computer scientists called it a byte or an octet. A byte has 256 different states, (2 power 8). The ASCII (American Standard Code for Information Interchange) code arrived and assigned a letter or more generally a character to some well defined values of a byte, (A is assigned to 65, space to 32, etc). And as most text is composed of a set of character, they started to count size in byte. Time after time, following technology evolution, memory size approached 1000 bytes.

But as memory is accessed through a bus which is a fixed number of cables (or integrated circuits), on which only two possible voltages are authorized (to mean 0 or 1), the total amount of bytes that a bus can address is always a power of 2. With a two cable bus, you can have 4 values (00, 01, 10 and 11, where a digit is the state of a cable) so you can address 4 bytes. Giving a value to each cable defines an address to read or write in the memory. Unfortunately 1000 is not a power of 2 and approaching 1000 bytes, it was decided that a "kilobyte" would be 1024 bytes which is 2 power 10. Some time after, and by extension, a megabyte has been defined to be 1024 kilobytes, a gigabyte to be 1024 megabytes, a terabyte to be 1024 gigabytes, etc. at the exception of the 1.44 MB floppy where the capacity is 1440 kilobytes, thus here "mega" means 1000 kilo...

In parallel, in the telecommunications domain, going from analogical to digital signal made the bit to be used also. In place of the analogical signal, took place a flow of bits, representing the samples of the original signal. For telecommunications the problem was more a problem of size of flow: how much bit could be transmitted by second. At some ancient time appeared the 1200 bit by second, then 64000, also designed as 64 kbit/s. Thus here, kilo stays in the usual meaning of 1000 time the base unit. You can also find Ethernet 10 Mbit/s which is 10,000,000 bits by seconds, same thing with Token-Ring that had rates at 4, 16 or 100 Mbit/s (4,000,000 16,000,000 or 100,000,000 bits/s). But, even for telecommunications, kilo is not always 1000 times the base unit: the E1 bandwidth at 2Mbit/s for example, is in fact 32*64kbit/s thus 2048 kbit/s ... not 2000 kbit/s

Anyway, back to dar, you have the possibility to give the size in bytes or using a single letter as suffix (k, M, G, T, P, E, Z, Y, the base unit being implicitly the byte) thus the possibility to provide a size in kilo, mega, giga, tera, peta, exa, zetta or yotta bytes, with the computer science definition of these terms (powers of 1024) by default.

These suffixes are for simplicity, so that you do not have to compute powers of 1024 by hand. For example, if you want to fill a CD-R you will have to use the "-s 650M" option which is equivalent to "-s 681574400", choose the one you prefer, the result is the same :-). Now, if you want 2 Megabytes slices in the sense of the metric system, simply use "-s 2000000" or read below:

Starting version 2.2.0, you can alter the meaning of all the suffixes used by dar, using the following option.

--alter=SI-units

(which can be shorten to -aSI or -asi) It changes the meaning of the prefixes that follow on the command-line, to follow the metric system (or System International) way of counting, up to the end of the line or to a

--alter=binary-units

arguments (which can be shortened to -abinary), after which we are back to the computer science meaning of kilo, mega, etc. up to the end of the line or up to a next --alter=SI-units. Thus in place of -s 2000000 one could use:

   -aSI -s 2M


Yes, and to make things more confusing, marketing/sales arrived and made sellers count gigabytes a third way: I remember some time ago, I bought a hard disk which was described as "2.1 GB", (OK, that's now long ago!), but in fact it had only 2097152 kilobytes available. This is far from 2202009 kilobytes (= 2.1 GiB for the computer science meaning), and a bit more than 2,000,000 kilobytes (metric system). OK, if it had these 2202009 kilobytes (computer science meaning of 2.1 GB), this hard disk would have been sold under the label "2.5 GB"! ... just kidding :-)

Note that to distinguish kilo, mega, tera and so on, new abbreviations are officially defined, but are not used within dar:
Ki = 1024
Mi = 1024*1024
Gi = and so on...
Ti
Pi
Ei
Zi
Yi

For example, we have 1 KiB for 1 kibibyte (= 1024 bytes), and 1 Kibit for 1 kibibit (= 1024 bits) and 1 kB (= 1000 Bytes) and 1 kbit (= 1000 bits), ...



Running DAR in background


DAR can be run in background:

dar [command-line arguments] < /dev/null &



Files' extension used

dar suite programs use several type of files:
  • slices (dar, dar_xform, dar_slave, dar_manager)
  • configuration files (dar, dar_xform, dar_slave)
  • databases  (dar_manager)
  • user commands for slices (dar, dar_xform, dar_slave, using -E, -F or -~ options)
  • user commands for files (dar only, during the backup process using -= option)
  • filter lists (dar's -[ and -] options)
While for slices the extension and even the filename format (basename.slicenumber.dar) cannot be customized, there is no mandatory rule for the other types of files.

In the case you have no idea how to name these, here is the extensions I use:
"*.dcf": Dar Configuration file, aka DCF files (used with dar's -B option)
"*.dmd": Dar Manager Database, aka DMD files (used with dar_manager's -B and -C options)
"*.duc": Dar User Command, aka DUC files (used with dar's -E, -F, -~ options)
"*.dbp": Dar Backup Preparation, aka DBP files (used with dar's -= option)
"*.dfl": Dar Filter List, aka DFL files (used with dar's -[ or -] options)

but, you are totally free to use the filename you want !   ;-)




Running command or scripts from DAR


You can run command from dar at two different places:
  • when dar has finished writing a slice only in backup, isolation or merging modes, or before dar needs a slice (DUC files), in reading mode (testing, diffing, extracting, ...) and when reading an archive of reference.
  • before and after saving a given file during the backup process (DBP files)

A - Between slices:

This concerns -E, -F and -~ options. They all receive a string as argument. Thus, if the argument must be a command with its own arguments, you have to put these between quotes for they appear as a single string to the shell that interprets the dar command-line. For example if you want to call

df .

[This is two words: "df" (the command) and "." its argument] then you have to use the following on DAR command-line:

-E "df ."
or
-E 'df .'


DAR provides several substitution strings in that context:
  • %% is replaced by a single % Thus if you need a % in you command line you MUST replace it by %% in the argument string of -E, -F  or -~
  • %p is replaced by the path to the slices
  • %b is replaced by the basename of the slices
  • %n is replaced by the number of the slice
  • %N is replaced by the number of the slice with padded zeros (it may differ from %n only when --min-digits option is used)
  • %c is replaced by the context, which is "operation", "init" or "last_slice" depending on the situation.
The number of the slice (%n) is either the just written slice or the next slice to be read. For example if you create an new archive (either using -c, -C or -+), in -E option, the %n macro is the number of the last slice completed. Else (using -t, -d, -A (with -c or -C), -l or -x), this is the number of the slice that will be required very soon. While %c (the context) is substituted by "init", "operation" or "last_slice".

  • init : when the slice is asked before the catalogue is read
  • operation : once the catalogue is read and/or data treatment has begun.
  • last_slice : when the last slice has been written (archive creation only)

What the use of this feature? For example you want to burn the brand-new slices on CD as soon as they are  available.

let's build a little script for that:

%cat burner
#!/bin/bash

# usage: burner <slice filename> <slice number>
# Burns the given slice on a CD, then compares the burnt copy with the original.
if [ "$1" = "" -o "$2" = "" ] ; then
  echo "usage: $0 <filename> <number>"
  exit 1
fi

mkdir T
# quote expansions: the slice name may contain spaces
mv "$1" T
mkisofs -o /tmp/image.iso -r -J -V "archive_$2" T
cdrecord dev=0,0 speed=8 -data /tmp/image.iso
rm /tmp/image.iso
# Now assuming an automount will mount the just newly burnt CD:
if diff "/mnt/cdrom/$1" "T/$1" ; then
  rm -rf T
else
  # burnt copy differs from the original: report failure to dar
  exit 2
fi
%

This little script, receive the slice filename, and its number as argument, what it does is to burn a CD with it, and compare the resulting CD with the original slice. Upon failure, the script return 2 (or 1 if syntax is not correct on the command-line). Note that this script is only here for illustration, there are many more interesting user scripts made by several dar users. These are available in the examples part of the documentation.

One could then use it this way:

-E "./burner %p/%b.%n.dar %n"

which can lead to the following DAR command-line:

dar -c ~/tmp/example -z -R / usr/local -s 650M -E "./burner %p/%b.%n.dar %n" -p

First note that as our script does not change CD from the device, we need to pause between slices (-p option). The pause take place after the execution of the command (-E option). Thus we could add in the script a command to send a mail or play a music to inform us that the slice is burned. The advantage, here is that we don't have to come twice by slices, once the slice is ready, and once the slice is burnt.

Another example:

you want to send a huge file by email. (OK that's better to use FTP, but sometimes, people think than the less you can do the more they control you, and thus they disable many services, either by fear of the unknown, either by stupidity). So let's suppose that you only have mail available to transfer your data:

dar -c toto -s 2M my_huge_file -E "uuencode %b.%n.dar %b.%n.dar | mail -s 'slice %n' your@email.address ; rm %b.%n.dar ; sleep 300"

Here we make an archive with slices of 2 Megabytes, because our mail system does not allow larger emails. We save only one file: "my_huge_file" (but we could even save the whole filesystem it would also work). The command we execute each time a slice is ready is:

  1. uuencode the file and send the output my email to our address.
  2. remove the slice
  3. wait 5 minutes, so as to not overload the mail system too much. This is also
     useful, if you have a small mailbox, from which it takes time to retrieve mail.
Note that we did not used the %p substitution string, as the slices are saved in the current directory.

Last example, is while extracting: in the case the slices cannot all be present in the filesystem, you need a script or a command to fetch the next to be requested slice. It could be using ftp, lynx, ssh, etc. I let you do the script as an exercise. :-). Note, if you plan to share your DUC files, thanks to use the convention for DUC files.

B - Before and after saving a file:

This concerns the -=, -< and -> options. The -< (include) and -> (exclude) options, let you define which file will need a command to be run before and after their backup. While the -= option, let you define which command to run for those files.

Let's suppose you have a very large file changing often that is located in /home/my/big/file, and several databases that each consist of several files under /home/*/database/data that need to have a coherent status and are also changing very often.

Saving them without precaution, will most probably make your big file flagged as "dirty" in dar's archive, which means that the saved status of the file may be a status that never existed for that file: when dar saves a file it reads the first byte, then the second, etc. up to the end of file. While dar is reading the middle of the file, an application may change the very begin and then the very end of that file, but only modified ending of that file will be saved, leading the archive to contain a copy of the file in a state it never had.

For a database this is even worse, two or more files may need to have a coherent status. If dar saves one first file while another file is modified at the same time, this will not lead having the currently saved files flagged as "dirty", but may lead the database to have its files saved in incoherent states between them, thus leading you to have saved the database in a corrupted state.

For that situation not to occur, we will use the following options:

-R / "-<" home/my/big/file  "-<" "home/*/database/data"

First, you must pay attention to quote the -< and -> options for the shell not to consider you ask for redirection to stdout or from stdin. Back to the example, that says that for the files /home/my/big/file and for any "database/data" directory (or file) in the home directory of a user, a command will be run before and after saving that directory of file. We need thus to define the command to run using the following option:

-= "/root/scripts/before_after_backup.sh %f %p %c"

Well as you see, here too we may (and should) use substitutions macro:
  • %% is replaced by a litteral  %
  • %p is replaced by the full path (including filename) of the file/directory to be saved
  • %f is replaced by the filename (without path) of the file/directory to be saved
  • %u is the uid of the file's owner
  • %g is the gid of the file's group
  • %c is replaced by the context, which is either "start" or "end" depending on whether the file/directory is about to be saved or has been completely saved.

 And our script here could look like this:

cat /root/scripts/before_after_backup.sh
#!/bin/sh

# usage: before_after_backup.sh <filename> <dir+filename> <context>
# Called by dar (via -= option) before ("start") and after ("end") saving
# each file selected by the -< / -> masks.
if [ "$1" = "" ]; then
   echo "usage: $0 <filename> <dir+filename> <context>"
   exit 1
fi

# for better readability:
filename="$1"
path_file="$2"
context="$3"

if [ "$filename" = "data" ]; then
   if [ "$context" = "start" ]; then
       : # action to stop the database located in "$2"
   else
       : # action to restart the database located in "$2"
   fi
else
   if [ "$path_file" = "/home/my/big/file" ]; then
     if [ "$context" = "start" ]; then
       : # suspend the application that writes to that file
     else
       : # resume the application that writes to that file
     fi
   else
     : # do nothing, or warn that no action is defined for that file
   fi
fi


So now, if we run dar with all these command, dar will execute our script once before entering any database/data directory located in a home directory of some user, and once all files of that directory will have been saved. It will run our script also before and after saving our /home/my/big/file file.

If you plan to share your DBP files, thanks to use the DBP convention.



Convention for DUC files

Since version 1.2.0 dar's user can have dar calling a command or scripts between slices, thanks to the -E,  -F and -~ options, called DUC files. To be able to easily share your DUC commands or scripts, I propose you the following convention:

- use the ".duc" extension to show anyone the script/command respect the following
- must be called from dar with the following arguments:

example.duc %p %b %n %e %c [other optional arguments]

- when called without argument, it must provide brief help on what it does and what are the expected arguments. This is the standard "usage:" convention.

Then, any user, could share their DUC files and don't bother much about how to use them. Moreover it would be easy to chain them:

if for example two persons created their own script, one "burn.duc" which burns a slice onDVD-R(W) and "par.duc" which makes a Parchive redundancy file from a slice, anybody could use both at a time giving the following argument to dar:

-E "par.duc %p %b %n %e %c 1 ; burn.duc %p %b %n %e %c"

or since version 2.1.0 with the following argument:

-E "par.duc %p %b %n %e %c 1" -E "burn.duc %p %b %n %e %c"

of course a script has not to use all its arguments, in the case of burn.duc for example, the %c (context) is probably useless, and not used inside the script, while it is still possible to give it all the "normal" arguments of a DUC file, extra not used argument are simply ignored.

If you have interesting DUC scripts, you are welcome to contact me by email, for I add them on the web site and in the following releases. For now, check doc/samples directory for a few examples of DUC files.

Note that all DUC scripts are expected to return an exit status of zero meaning that the operation has succeeded. If another exit status has been returned, dar asks the user for decision (or aborts if no user has been identified, for example, when dar is not run under a controlling terminal).



Convention for DBP files

Same as above, the following convention is proposed to ease the sharing of Dar Backup Preparation files:

- use the ".dbp" extension to show anyone the script/command respect the following
- must be called from dar with the following arguments:

example.dbp %p %f %u %g %c [other optional arguments]

- when called without argument, it must provide brief help on what it does and what are the expected arguments. This is the standard "usage:" convention.

Identically to DUC files, DBP files are expected to return an exit status of zero, else the backup process is suspended for the user to decide whether to retry, ignore the failure or abort the whole backup process.



User targets in DCF

Since release 2.4.0, a DCF file (one given to the -B option) can contain user targets. A user target is an extension of the conditional syntax. So we will first make a brief review on conditional syntax.

Conditional syntax in DCF files:

The conditional syntax gives the possiblility to have options in a DCF file that are only active in a certain context:
  • archive extraction
  • archive creation
  • archive listing
  • archive testing
  • archive isolation
  • archive merging
  • no action yet defined
  • all context
  • when a archive of reference is used
  • when an auxilliary archive of reference is used
These works with the following reserved keywords (see dar's man page for an exhaustive list). Let's take an example:

cat sample.dcf
# this is a comment

all:
-K aes:

extract:
-R /

reference:
-J aes: 

auxilliary:
-~ aes:

create:
-ac
-Z "*.mp3"
-Z "*.avi"

default:
-V

This way, the -Z options are only used when creating an archive, while the -K option is used in any case. Well, now that we have briefly review the conditional syntax, you may have guess that new "targets" (or keywords) if you prefer can be added. Let's add the following in our DCF file:

compress:
-z lzo:5

In the usual situation all that follows the target "compress" up to the next target or the end of the file will not be used to configure dar, unless you provide the "compress" keyword on command-line:

dar -c test -B sample.dcf compress

Which will do exactly the same as if you have typed:

dar -c test -z lzo:5

Of course, you can use as many user target as you wish in your files, the only constraint is that it must not have the name of the reserved keyword of a conditional syntax, but you can also mix conditional syntax and user targets. Here follows an example:

cat sample.dcf
# this is a comment

all:
-K aes:

extract:
-R /

reference:
-J aes: 

auxilliary:
-~ aes:

create:
-ac
-Z "*.mp3"
-Z "*.avi"

default:
-V

# our first user target named "compress":
compress:
-z lzo:5

# a second user target named "verbose":
verbose:
-v
-vs

# a third user target named "ring":
ring:
-b

# a last user target named "hash":
hash:
--hash sha1

So now, you can use dar and activate a set of commands by simply adding the name of the target on command-line:

dar -c test -B sample.dcf compress ring verbose hash

which is equivalent to:

dar -c test -K aes:
-ac -Z "*.mp3" -Z "*.avi" -z lzo:5 -v -vs -b --hash sha1

Last for those that like complicated things, you can recusively use DCF inside user targets, which may contain conditional syntax and the same or some other user targets of you own.





 Using data protection with DAR & Parchive

Parchive (PAR in the following) is a very nice program that makes possible to recover a file which has been corrupted. It creates redundancy data stored in a separated file (or set of files), which can be used to repair the original file. This additional data may also be damaged, PAR will be able to repair the original file as well as the redundancy files, up to a certain point, of course. This point is defined by the percentage of redundancy you defined for a given file. But,... check the official PAR site here:

         http://parchive.sourceforge.net (original site no more maintained today)
         https://github.com/BlackIkeEagle/par2cmdline (fork from the official site maintained since decembre 2013)

Since version 2.4.0, dar is provided with a default /etc/darrc file. It contains a set of user targets among which is "par2". This user target invokes the dar_par.dcf file provided beside dar that automatically creates parity files for each slice during backup, and verifies and if necessary repairs slices when testing an archive. So now you only need to use dar this way to activate Parchive with dar:

dar [options] par2

Simple no?



Examples of file filtering

File filtering is what defines which files are saved, listed, restored, compared, tested, and so on. In brief, in the following we will say which files are elected for the operation, meaning by "operation", either a backup, a restoration, an archive contents listing, an archive comparison, etc.

File filtering is done using the following options -X, -I, -P, -R, -[,  -] or -g.

OK, Let's start with some concretes examples:

dar -c toto

this will backup the current directory and all what is located into it to build the toto archive, also located in the current directory. Usually you should get a warning telling you that you are about to backup the archive itself

Now let's see something less obvious:

dar -c toto -R / -g home/ftp

the -R option tell dar to consider all file under the / root directory, while the -g "home/ftp" argument tells dar to restrict the operation only on the home/ftp subdirectory of the given root directory thus here /home/ftp.

But this is a little bit different from the following:

dar -c toto -R /home/ftp

here dar will save any file under /home/ftp without any restriction. So what is the difference? Yes, exactly the same files will be saved as just above, but the file /home/ftp/welcome.msg for example, will be stored as <ROOT>/welcome.msg . Where <ROOT> will be replaced by the argument given to -R option (which defaults to "."), at restoration or comparison time. While in the previous example the same file would have been stored with the following path <ROOT>/home/ftp/welcome.msg .

dar -c toto -R / -P home/ftp/pub -g home/ftp -g etc

as previously, but the -P option make all files under the /home/ftp/pub not to be considered for the operation. Additionally the /etc directory and its subdirectories are saved.

dar -c toto -R / -P etc/password -g etc

here we save all the /etc except the /etc/password file. Arguments given to -P can be plain files also. But when they are directory this exclusion applies to the directory itself and its contents. Note that using -X to exclude "password" does have the same effect:

dar -c toto -R / -X "password" -g etc

will save all the /etc directory except any file with name equal to "password". thus of course /etc/password will no be saved, but if it exists, /etc/rc.d/password will not be saved neither if it is not a directory. Yes, if a directory /etc/rc.d/password exist, it will not be affected by the -X option. As well as -I option, -X option do not apply to directories. The reason is to be able to filter some kind of file without excluding a particular directory for example you want to save all mp3 files and only MP3 files,

dar -c toto -R / -I "*.mp3" -I "*.MP3" home/ftp

will save any mp3 or MP3 ending files under the /home/ftp directories and subdirectories. If instead -I (or -X) applied to directories, we would only be able to recurse in subdirectories ending by ".mp3" or ".MP3". If you had a directory named "/home/ftp/Music" for example, full of mp3, you would not have been able to save it.

Note that the glob expressions (where comes the shell-like wild-card '*' '?' and so on), can do much more complicated things like "*.[mM][pP]3". You could thus replace the previous example by:

dar -c toto -R / -I "*.[mM][pP]3" home/ftp

this would cover all .mp3 .mP3 .Mp3 and .MP3 files. One step further, the -acase option makes following filtering arguments become case sensitive (which is the default), while the -ano-case (alias -an in short) set to case insensitive mode filters arguments that follows it. In shorter we could have:

dar -c toto -R / -an -I "*.mp3" home/ftp

And, instead of using glob expression, you can use regular expressions (regex) using the -aregex option. You can also use alternatively both of them using -aglob to return back to glob expressions. Each option -aregex / -aglob define the expected type of expression in the -I/-X/-P/-g/-u/-U/-Z/-Y options that follows, up to end of line or to the next -aregex / -aglob option.

Last a more complete example:

dar -c toto -R / -P "*/.mozilla/*/[Cc]ache" -X ".*~" -X ".*~" -I "*.[Mm][pP][123]" -g home/ftp -g "fake"

so what ?

OK, here we save all under /home/ftp and /fake but we do not save the contents of "*/.mozilla/*/[Cc]ache" like for example "/home/ftp/.mozilla/ftp/abcd.slt/Cache" directory and its contents. In these directories we save any file matching "*.[Mm][pP][123]" files except those ending by a tilde (~ character), Thus for example file which name is "toto.mp3" or ".bloup.Mp2"

Now the inside algorithm:

 a file is elected for operation if
 1 - its name does not match any -X option or it is a directory
*and*
 2 - if some -I is given, file is either a directory or match at least one of the -I option given.
*and*
 3 - path and filename do not match any -P option
*and*
 4 - if some -g options are given, the path to the file matches at least one of the -g options.

The algorithm we detailed above is the default one, which is historical and called the unordered method; since version 2.2.x there is also an ordered method (activated adding -am option) which gives even more power to filters, the dar man page will give you all the details.

In parallel of file filtering, you will find Extended Attributes filtering thanks to the -u and -U options (they work the same as -X and -I option but apply to EA), you will also find the file compression filtering (-Z and -Y options) that defines which file to compress or to not compress, here too the way they work is the same as seen with -X and -I options, the -ano-case / -acase options do also apply here, as well as the -am option. Last all these filtering (file, EA, compression) can also use regular expression in place of glob expression (thanks to the -ag / -ar options).

Note in very last point, that the --backup-hook-include and --backup-hook-exclude options act the same as -P and -g options but apply to the files about to be saved and provides to the user the possibility to perform an action (--backup-hook-execute) before and after saving files matching the masks options. The dar man page will give you all the necessary details to use this new feature.




Decremental Backup


Well, you have already heard about "Full" backup, in which all files are completely saved in such a way that let you use this backup alone to completely restore your data. You have also probably heard about "differential" backup in which is only stored the changes that occurred since an archive of reference was made. There is also the "incremental" backup, which in substance, is the same as "differential" ones. The difference resides in the nature of the archive of reference: "Differential" backup use only a "full" backup as reference, while "incremental" may use a "full" backup, a "differential" backup or another "incremental" backup as reference (Well, in dar's documentation the term "differential" is commonly used in place of "incremental", since there is no conceptual difference from the point of view of  dar software).

Well, here we will describe what is meant by "decremental" backup. All started by a feature request from Yuraukar on dar-support mailing-list:

In the full/differential backup scheme, for a given file, you have as many versions as changes that were detected from backup to backup. That's fair in terms of storage space required, as you do not store twice the same file in the same state, which you would do if you were doing only full backups. But the drawback is that you do not know by advance in which backup to find the latest version of a given file. Another drawback comes when you want to restore your entire system to the latest state available from your backup set, you need to restore the most ancient backup (the latest full backup), then the others one by one in chronological order (the incremental/differential backups). This may take some time, yes. This is moreover inefficient, because, you will restore N old revisions of a file that have changed often before restoring the last and more recent version.

Yuraukar idea was to have all latest versions of files in the latest backup done. Thus the most recent archive would always stay a full backup. But, to still be able to restore a file in an older state than the most recent (in case of accidental suppression), we need a so called decremental backup. This backup's archive of reference is in the future (a more recent decremental backup or the latest backup done, which is a full backup in this scheme). This so called "decremental" backup stores all the file differences from this archive of reference that let you get from the reference state to an older state.

Assuming it is more probable to restore the latest version of a filesystem than any older state available, decremental backups seem an interesting alternative to incremental backups, as in that case you only have to use one archive (the latest) and each file gets restored only once (old data does not get overwritten at each archive restoration as is the case with incremental restoration).

Let's take an example: We have 4 files in the system named f1, f2, f3 and f4. We make backups at four different times t1, t2, t3 and t4 in chronological order. We will also perform some changes in filesystem along this period: f1 will be removed from the system between t3 and t4, while f4 will only appear between t3 and t4. f2 will be modified between t2 and t3 while f3 will be changed between t3 and t4.

All this can be represented this way, where lines are the state at a given date while each column represents a given file.
 
time
   ^
   |                       * represents the version 1 of a file
t4 +         #    #    *   # represents the version 2 of a file
   |
t3 +    *    #    *  
   |
t2 +    *    *    *
   |
t1 +    *    *    *
   |
   +----+----+----+----+---
        f1   f2   f3   f4  


Now we will represent the contents of backups at these different times, first using only full backup, then using incremental backups and at last using decremental backups. We will use the symbol 'O' in place of data if a given file's data is not stored in the archive because it has not changed since the archive of reference was made. We will also use an 'x' to represent the information that a given file has been recorded in an archive as deleted since the archive of reference was made. This information is used at restoration time to remove a file from filesystem to be able to get the exact state of files seen at the date the backup was made.

FULL BACKUPS

   ^
   |
t4 +         #    #    *           
   |
t3 +    *    #    *  
   |
t2 +    *    *    *
   |
t1 +    *    *    *
   |
   +----+----+----+----+---
        f1   f2   f3   f4  

Yes, this is easy, each backup contains all the files that existed at the time the backup was made. To restore in the state the system had at a given date, we only use one backup, which is the one that best corresponds to the date we want. The drawback is that we saved three times the files f1 and f3 version 1, and twice f2 version 2, which corresponds to a waste of storage space.


FULL/INCREMENTAL BACKUPS


   ^
   |
t4 +    x    0    #    *     0 represents a file which only state is recorded
   |                         as such, no data is stored in the archive
t3 +    0    #    0          very little space is consumed by such entry
   |
t2 +    0    0    0          x represents an entry telling that the corresponding
   |                         file has to be removed
t1 +    *    *    *
   |
   +----+----+----+----+---
        f1   f2   f3   f4  

Now we see that the archive done at date 't2' does not contain any data as no change has been detected between t1 and t2. This backup is quite small and needs only little storage. The archive at date t3 only stores f2's new version, and at t4 the archive stores the new file f4 and f3's new version. We also see that f1 is marked as removed from filesystem since date t3 as it no longer exists in the filesystem but existed in the archive of reference done at t3.

As you see, restoring to the latest state is more complicated compared to only using full backups, it is not simple either to know in which backup to look for a given file's data at date t3 for example, but yes, we do not waste storage space anymore. The restoration process the user has to follow is to restore in turn:
- archive done at t1, which will put old version of files and restore f1 that have been removed at t4
- archive done at t2, that will do nothing at all
- archive done at t3, that will replace f2's old version by its new one
- archive done at t4, that will remove f1, add f4 and replace f3's old version by its latest version.

The latest version of files is scattered over the two last archives here, but in common systems, much of the data does not change at all and can only be found in the first backup (the full backup).

FULL/DECREMENTAL BACKUP

Here is represented the contents of backups using the decremental approach. The most recent (t4) backup is always a full backup. Older backups are decremental backups based on the next more recent one (t3 is a difference based on t4, t1 is a difference based on t2). At the opposite of incremental backups, the reference of the archive is in the future not in the past.

   ^
   |
t4 +         #    #    *           
   |
t3 +    *    0    *    x
   |
t2 +    0    *    0
   |
t1 +    0    0    0
   |
   +----+----+----+----+---
        f1   f2   f3   f4  

Thus obtaining the latest version of the system is as easy as when using only full backups. And you also see that the space required to store these decremental backups is equivalent to what is needed to store the incremental backups. However, the problem still exists of locating the archive in which to find a given file's data at a given date. But also, you may see that the backup done at time t1 can safely be removed as it became useless because it does not store any data, and losing the archives done at t1 and t2 is not a big problem, you just lose old state data.

Now if we want to restore the filesystem in the state it had at time t3, we have to restore the archive done at t4 then restore the archive done at t3. This last step will have the consequence of creating f1, replacing f3 by its older version and deleting f4 which did not exist at time t3 (file which is marked 'x' meaning that it has to be removed). If we want to go further in the past, we will restore the decremental backup t2 which will only replace f2's new version by the older version 1. Last, restoring t1 will have no effect as no changes were made between t1 and t2.

What about dar_manager? Well, in nature, there is no difference between a decremental backup and a differential/incremental backup. The only difference resides in the way (the order) they have to be used.

So, even if you can add decremental backups in a dar_manager database, it is not designed to handle them correctly. It is thus better to keep dar_manager only for incremental/differential/full backups.

But how to build a decremental backup, as the reference is in the future and does not exist yet?

Assuming you have a full backup describing your system at date t1, can we have in one shot both the new full backup for time t2 and also transform the full backup of time t1 into a decremental backup relative to time t2? In theory, yes. But there is a risk in case of failure (filesystem full, lack of electric power, bug, ...): you may lose both backups, the one which was under construction as well as the one we took as reference and which was under process of transformation to decremental backup.

Seen this, the libdar implementation is to let the user do a normal full backup at each step [Doing just a differential backup sounds better at first, but this would end in more archive manipulation, as we would have to generate both decremental and new full backup, and we would manipulate at least the same amount of data]. Then with the two full backups the user would have to use archive merging to create the decremental backup using the -ad option. Last, once the resulting (decremental) archive has been tested and the user is sure this decremental backup is viable, he can remove the older full backup and store the new decremental backup beside older ones and the new full backup. This last step only will save you disk space and let you easily recover your system using the latest (full) backup.

Can one use an extracted catalogue instead of the old full backup to perform a decremental backup? No. The full backup to transform must have the whole data in it to be able to create a decremental backup with data in it. Only the new full backup can be replaced by its extracted catalogue.

This last part about decremental backup is extracted from a discussion with Dan Masson on dar-support mailing-list:
Seriously, stop that long-winded considerations! Tell me what I need to do!

OK, OK, ... First start by a full backup to initiate the cycle:

dar -c /mnt/backup/FULL-2015-04-10 -R / -z -g /mnt/backup -D

Then at each new cycle:
Do a new full backup

dar -c /mnt/backup/FULL-2015-04-11 -R / -z -g /mnt/backup -D

To save space, build a decremental backup from the previous full one:

dar -+ /mnt/backup/DECR-2015-04-10 -A /mnt/backup/FULL-2015-04-10 -@ /mnt/backup/FULL-2015-04-11 -ad -ak

By precaution test that the decremental archive is viable

dar -t /mnt/backup/DECR-2015-04-10

Then make space by removing the old full backup

rm /mnt/backup/FULL-2015-04-10.*.dar

Assuming you run this cycle each day, you get the following at each
new step/day:

The 2015-04-10 you have:
    FULL-2015-04-10


The 2015-04-11 you have:
    FULL-2015-04-11

    DECR-2015-04-10

The 2015-04-12 you have:
    FULL-2015-04-12

    DECR-2015-04-11
    DECR-2015-04-10

The 2015-04-13 you have:
    FULL-2015-04-13

    DECR-2015-04-12
    DECR-2015-04-11
    DECR-2015-04-10

and so on.

So yes, <decremental backup t2> is different from <old full backup t2> The differences are:
  1. <old full backup t2> (here FULL-2015-04-10 or FULL-2015-04-11 for example) contains all files of your system.
  2. <decremental backup t2> contains only files that have changed between two consecutive full backups: DECR-2015-04-10 contains all files and only those that have changed between 2015-04-10 and 2015-04-11, DECR-2015-04-11 contains all files and only those that have changed between 2015-04-11 and 2015-04-12, and so on.
So, what?

Scenario 1: today 2015-04-17 you have lost your system, you want to restore it as it was at the time of the last backup => use the last backup it is a full one, it is the latest backup, nothing more.

dar -x /mnt/backup/FULL-2015-04-16 -R /

Scenario 2: today 2015-04-17 you have lost your system due to a virus or your system had been compromised and you know it started the 2015-04-12 so you want to restore your system at the time of 2015-04-11. First, restore the last full archive (FULL-2015-04-16) then in reverse order all the decremental ones: DECR-2015-04-15 then DECR-2015-04-14, then DECR-2015-04-13, then DECR-2015-04-12 then DECR-2015-04-11. The decremental backups are small, their restoration is usually quick (depending on how many files changed in the day). So here we get in the exact same situation you would have reached restoring only FULL-2015-04-11, but you did not have to store all the full backups, just the latest.

You want the command-line for that? sure:

dar -x /mnt/backup/FULL-2015-04-16 -R /
dar -x /mnt/backup/DECR-2015-04-15 -R / -w
dar -x /mnt/backup/DECR-2015-04-14 -R / -w
dar -x /mnt/backup/DECR-2015-04-13 -R / -w
dar -x /mnt/backup/DECR-2015-04-12 -R / -w
dar -x /mnt/backup/DECR-2015-04-11 -R / -w

Is that serious?

Yes! :)


Door inodes (Solaris)

A door inode is a dynamic object that is created on top of an empty file, it does exist only when a process has a reference to it, it is thus not possible to restore it. But the empty file it is mounted on can be restored instead. As such, dar restores a door inode as an empty file having the same parameters as the door inode.

If a door inode is hard linked several times in the file system dar will restore a plain file having as many hard links to the corresponding locations.

Dar is also able to handle Extended Attributes associated to a door file, if any. Last, if you list an archive containing door inodes, you will see the 'D' letter as their type (by opposition to 'd' for directories), this is conform to what the 'ls' command displays for such entries.


How to use "delta compression", "binary diff" or "rsync like increment" with dar

Terminology

"delta compression", "binary diff" or "rsync increment" all point to the same feature: a way to avoid resaving a whole file during a differential/incremental backup and only save the modified part of it instead. This solution is of course interesting for large files that change often but only for little parts of them (Microsoft exchange mailboxes, for example). Dar implements this feature relying on librsync library, feature which we will call binary delta in the following

Librsync specific concepts

Before looking at the way to use dar, several concepts from librsync have to be known:

In order to make a binary delta of a file "foo" which at time t1 contained data F1 and at time t2 contained data F2, librsync requires first that a "delta signature" be made against F1. Then using that signature and data F2, librsync is able to build a delta patch P1 that, if applied to F1 will provide content F2:

          backing up file "foo"
              |
              V
time t1  content = F1    ---------> delta signature of F1
              |                              |
              |                              |
              |                              +------------->  )       building delta patch "P1"
              V                                               )---->  containing the difference
time t2  content = F2    ---------------------------------->  )       from F1 to F2

At restoration time dar has then first to restore F1, from a full backup or from a previous differential backup, then using librsync applying the patch "P1" to modify F1 into F2.

         restoring file "foo"
              |
              V
time t3  content = F1    <---  from a previous backup
              |                             
              +------>--------------->----------------+
              .                                       |
              .                                       V
              .                                       + <----- applying patch "P1"
              .                                       |
              +-----<---------------<-------------<---+
              |
              V

time t4  content = F2

Usage with dar

First, delta signature is not activated by default, you have to tell dar you want to generate delta signature using the --delta sig option at archive creation/isolation/merging time. Then as soon as a file has a delta signature in the archive of reference, dar will perform a delta binary if such file has changed since the archive of reference was done. But better an example than a long explanation:

Case of a differential backup scheme:

First, doing a full backup, we add the --delta sig option for the resulting archive to contain the necessary signatures to be provided to librsync later on in order to setup delta patches. This has the drawback of additional space requirement but the advantage of space economy at incremental/differential backups:

 dar -c full -R / -z --delta sig
 
Then there is nothing more specific to delta signature, this is the same way as you were used to do with previous releases of dar: you just need to rely on an archive of reference containing delta signatures for dar to activate binary delta. Here below, the diff1 archive will eventually contain delta patches of files modified since the full archive was created, but will not contain any delta signature.

dar -c diff1 -A full -R / -z

The next differential backups will be done the same, based on the full backup:

dar -c diff2 -A full -R / -z

Looking at archive content, you will see the "[Delta]" flag in place of the "[Saved]" flag for files that have been saved as a delta patch rather than having their whole data saved in the backup:

[Data ][D][ EA ][FSA][Compr][S]| Permission | User | Group | Size | Date | filename
--------------------------------+------------+-------+-------+-------+-------------------------------+------------
[Delta][ ] [-L-][ 99%][X] -rwxr-xr-x 1000 1000 919 kio Tue Mar 22 20:22:34 2016 bash

Case of incremental backup scheme:

Doing incremental backups, the first one is always a full backup and is done the same as above for differential backup:

dar -c full -R / -z --delta sig

At the opposite of differential backups, incremental backups are also used as reference for the next backup. Thus if you want to continue doing binary delta, delta signatures must be present beside the delta patch in the resulting archives:

dar -c incr1 -A full -R / -z --delta sig

Here the --delta sig switch leads dar to copy from the full backup into the new backup all the delta signatures of unchanged files and to recompute new delta signature of files that have changed.

Case of catalogue isolation:

If you do not want having the previous differential, incremental or full backup around in order to make a new backup, you can still use isolated catalogues to do so. The point to take care about here is the way to build this isolated catalogue: If you want to perform a binary difference, the signature of reference files must be present in the isolated catalogue:  

dar -C CAT_full -A full -z --delta sig

Note that if the archive of reference does not hold any delta signature, the previous command will lead dar to compute on-fly delta signatures of saved files while performing catalogue isolation. You can thus choose not to include delta signatures inside the full backup while still being able to let dar use binary delta. However as dar cannot compute a delta signature without data, files that have been recorded as unchanged since the archive of reference was made cannot have their delta signature computed at isolation time. Same point if a file is stored as a delta patch without delta signature associated with it, dar will not be able to add a delta signature at isolation time for that file.

Yes, this is as simple as adding --delta sig to what you were used to do before. The resulting isolated catalogue will be much larger than without delta signatures but still much smaller than the full backup itself. The incremental or differential backup can then be done the same as before but using CAT_full in place of full:

dar -c diff1 -A CAT_full -R / -z

dar -c incr1 -A CAT_full -R / -z --delta sig

Case of archive merging:

You may need to merge two archives or make a subset of a single archive or even a mix of these two operations, which has been available using the --merge operation for a long time now. Here too if you want to keep the delta signatures that could be present in the source archives you will have to use the --delta sig option:

dar --merge merged_backup -A archive1 -@archive2 -z --delta sig


Case of restoration:

No special option has to be provided at restoration time. Dar will figure out by itself whether the data for a file is plain data that can replace the whole current data when overwriting is allowed, or is a delta patch that has to be applied to the existing file lying on the filesystem. Before patching the file dar will calculate and check its CRC. If the CRC is the expected one, the file will be patched, else a warning will be issued and the file will not be modified at all.

The point with restoration is to *always* restore all previous backups in order from the full backup to the latest incremental one (or the full backup and the latest differential one), for dar be able to apply stored patches. Else restoration can fail for some or all files. Dar_manager can be of great help here as it will know which archive to skip and which not to skip in order to restore a particular set of files.

Performing binary difference only for some files and normal backup for others:

You can exclude files from delta difference operation by avoiding creating a delta signature for them in the archive of reference, using  the option --exclude-delta-sig. You can also include only some files for delta signatures by use of --include-delta-sig option. Of course as with other masks-related options like -I, -X, -U, -u, -Z, -Y, ... it is possible to combine them to have an even greater and more accurate definition of files for which you want to have delta signature being built for.

dar -c full -R / -z --delta sig --include-delta-sig "*.opt" --include-delta-sig "*.pst" --exclude-delta-sig "home/joe/*"

Independently of this filtering mechanism based on path+filename, delta signature is never calculated for files smaller than 10 kio because it is not worth performing delta difference for them. You can change that behavior using the option --delta-sig-min-size <size in byte>

dar -c full -R / -z --delta sig --delta-sig-min-size 20k


Archive listing:

Archive listing received an ad hoc addition to show which files have a delta signature and which ones have been saved as a delta patch. The [Data ] column shows [Delta] in place of [Saved] when a delta patch is used, and a new column entitled [D] shows [D] when a delta signature is present for that file.

See man page about --delta related options for even more details.

Comparing the different way to perform remote backup

Since release 2.6.0 dar can directly use ftp or sftp to operate remotely. This new feature sometimes has some advantages over the methods described above with ssh or netcat and sometimes it has not; the objective here is to clarify this situation.

Operation
dar with dar_slave and dar_xform
through ssh or netcat
(direct access mode)
dar alone
through ssh or netcat
(sequential read mode)
sftp/ftp within dar
Backup
  • best solution if you want to keep a local copy of the backup or if you want to push the resulting archive to several destinations
  • workaround if you hit the sftp known_hosts limitation
  • if sftp not available, only ssh is
  • on-fly hash file is written locally (where dar_xform is run) and is computed by dar_xform which cannot see network transmission errors
  • efficient but does not support slicing, for the rest this is an as good solution as with dar_xform
  • best solution if you do not have space on local disks to store the resulting backup
  • requires on-fly isolation to local disk if you want to feed a local dar_manager database with the new archive
  • if ssh not available, only sftp is
  • on-fly hash file is written to the remote directory beside the slice
Testing
Diffing
Listing
  • workaround if you hit the sftp known_hosts limitation
  • sftp not available only ssh
  • relies on dar <-> dar_slave exchanges which protocol is not designed for long latency exchanges and gives slow network performances in that situation
  • very slow as it requires reading the whole archive
  • maybe a simpler command line to execute
  • best solution if filtering a few files from a large archive dar will fetch over the network only the necessary data.
  • ssh not available only sftp
Restoration
  • very slow as it requires reading the whole archive
  • efficient and simple
  • ssh not available only sftp
Merging

(should be done locally rather than over network if possible!!!)
  • complicated with the many pipes to setup
  • not supported!
  • not adapted if you need to feed the merging result to a local dar_manager database (on-fly isolation not available with merging with dar)
Isolation
  • very slow as it requires reading the whole archive
  • efficient and simple, transfers the less possible data over the network
  • ssh not available only sftp
Repairing

(should be done locally rather than over network if possible!!!)
  • not supported!
  • probably the best way to repair remotely for efficiency, as this operation uses sequential reading
  • ssh not available only sftp





Multi recipient signed archive weakness

As described in the usage notes it is possible to encrypt an archive that can be readable by several recipients using their own gnupg private key. So far, so good! It is also possible to embed your gnupg signature within such archive for your recipient to have a proof the archive comes from you. But there is a known weakness in this signing approach as implemented in libdar, weakness that could be exploited by an expert to fake your signature with a different archive.

Well, if this type of attack should be accessible by an expert guy with some constraints, it can only take place between a set of friends! Exchanging secret data within a group implicitly means having a certain trust level of the members of that group for this secret data not to become public, this is in that sense I mean "friends". So if you do not fully trust one person in a group and want to share data by mean of signed/gnupg encrypted dar archive you have several options:
  • encrypt the archive with dar but sign it outside dar, by using gpg on the generated slices (the signature should not modify the slices, but be stored in a separate file) (gpg --sign -b <slice>.x.dar)
  • Use the --hash option while creating your gnupg encrypted dar archive, and sign the generated hash files (still, the signature should not modify the hash for it can be transparently used by sha512sum for example, so store the hash signature in a separate file)
example of use:
dar -c my_secret_group_stuff -z -K gnupg:recipents1@group.group,recipient2@group.group -R /home/secret --hash sha512

# check the archive has not been corrupted
sha512sum -c my_secret_group_stuff.1.dar.sha512

#sign the hash file
gpg --sign -b my_secret_group_stuff.1.dar.sha512

#send all three files to your recipients:

my_secret_group_stuff.1.dar
my_secret_group_stuff.1.dar.sha512
my_secret_group_stuff.1.dar.sha512.sig



dar-2.6.8/doc/README0000644000175000017520000000026213476543220010653 00000000000000 Dar Documentation Main Directory All the documentation has been moved to HTML. To access it, please point your web browser to the index.html file found in this directory. dar-2.6.8/doc/Known_Bugs.html0000644000175000017520000014044713476543220012747 00000000000000 Known Bugs
Dar Documentation


Known Bugs





Here follows the description and status of old bugs. For recent bugs please consult the bug tracker on sourceforge.


#1:
description: files pointed to by a symbolic links get their times (mtime & atime) not properly restored

problem: when restoring time of a symbolic link the utime() system call does not change the inode of the symbolic link itself but the one of the file pointed to by the symbolic link

solution: as the utime() call behaves the same on many Unix platforms, (HP-UX, Solaris, Linux, etc.), the times for the symbolic links are no more restored, thus files pointed to by symbolic links are not affected by the utime() strange behavior. Dar, behaves not the same as touch, tar, etc.

affects version: 1.0.0
fixed in version: 1.0.1


#2:
description: dar immediately exits with message "file mode is neither read nor write" while creating an archive on 64 bits platforms.

problem: missing mask with O_ACCMODE for flags returned by fcntl() system call

solution: O_ACCMODE mask added

affects versions: 1.0.0, 1.0.1
fixed in version: 1.0.2


#3:
description: making differential backup with some directory implicitly or explicitly (-P) excluded, sometime lead to full backup.

problem: when entering a directory that was ignored, the comparison in the catalogue of reference was not going back to the parent directory, leading the comparison to be no more synchronized with the file being read.

solution: added a line to go back to parent directory when treating an ignored directory

affects versions: 1.0.0, 1.0.1
fixed in version: 1.0.2


#4:
description: on Linux 64 bits, when an interaction from the user is required, (escape or return key), the keys have to be pressed four times before dar can read a character.

problem: unknown, same code works fine on other platforms, and if bad characters remained in the pipe, the message would be displayed several times, which is not the case here. It seems that the user has to press four keys for the first one to be accessible to dar.

solution: none actually

affects versions: 1.0.0, 1.0.1 , 1.0.2 and probably also 1.1.0
fixed in version: none

#5:
description: Dar stops one of the two following error:
--- exception nature = [alive]  exception type = [BUG] ----------
[source]
       file compressor.cpp line 244 : it seems to be a bug here
[most outside call]
 -----------------------------------

or

--- exception nature = [alive]  exception type = [BUG] ----------
[source]
      file compressor.cpp line 254 : it seems to be a bug here
[most outside call]
-----------------------------------

problem: the zlib deflate() calls returned an unexpected value (Z_STREAM_END) and inflate() in some rare cases was called without data to process, when reaching end of file, which made inflate() to return Z_BUF_ERROR code.

solution: adding code for taking care of the returned code Z_STREAM_END by deflate(), and code for taking care of Z_BUF_ERROR returned at end of file.

affects version: 1.0.0, 1.0.1, 1.0.2
fixed in version: 1.0.3


#6:
description: Dar does not properly display dates on Linux 64 bits systems.

problem: non portable cast from 'time_t' to 'unsigned long int'

solution: changed code not to have to use 'unsigned long int'

affects version: 1.0.x, 1.1.x
fixed in version: 1.2.0


#7:
description: while saving, Dar stops when a given inode could not be read, even if the inode had not to be saved

problem: Dar reads all file in the directory to save, and then applies filters to their filename. Error appears before the filter, leading to a global saving error.

solution: read errors are now caught sooner: a message is displayed that the file or directory will be ignored, and the saving process continues. As this error occurs before testing the filename against filters, an error message is still displayed (which may seem strange when the file was about to be excluded by the filter mechanism)

Just remember that dar reads a directory's contents first, and then checks filenames against filters. At the time of the message, Dar does not know the file would not be saved. Thus, don't worry if you see this kind of message, they don't mean that dar was about to save the given file but rather exactly what the message says, that the file or directory could not be read. ;-)

affects version: 1.0.x, 1.1.x
fixed in version: 1.2.0


#8:
description: when saving hard linked file using filter that does not save the first hard link, dar stops with SEGFAULT.

problem: The first hard link is used as reference for the following occurrences. As it is not covered by the filter it is destroyed in filter module, but filesystem module keeps using it as reference for following hard links on that inode.

solution: added a call in filesystem module to "forget" a given reference. This way, if a file_etiquette is not covered, filter module can inform filesystem module to delete the corresponding entry on the hard link map. Subsequent hard link on that inode will generate first a new file_etiquette, which will be used as reference (unless it is not covered by filter too).

affects version: 1.1.x
fixed in version: 1.2.0


#9:
description: failed to read an archive under windows NT and 2000

problem: error concerning the integer macro definition (module integer.hpp)

solution: replaced the macro by a proper definition.

affects version: 1.2.0
fixed in version: 1.2.1

#10:
description: when using dar on windows95 if giving c:/ or other driver to -R option, dar stops with the following error:

file filesystem.cpp line 186 : it seems to be a bug here

problem: dar checks first that c:/ is a directory. Cygwin says yes, thus dar continues: from c:/ dar completes the path with a dot, thus dar tries to open the path c:/. which Cygwin does not report as an existing directory.

solution: dar does not try to open c:/. but tries to open c:/ . Moreover additional code has been added to detect the incoherence if such a problem occurs in some other conditions. Dar will report the exact problem (path does not exist / or path is not a directory)

affects version: 1.2.1, 1.2.0
fixed in version: 1.3.0

#11:
description: when giving to dar_manager an absolute path to -r option dar_manager stops with INTERNAL ERROR message.

problem: a check is missing to display error message in such condition

solution: added an error message if an absolute path is given to -r

affects dar_manager version   : 1.0.0
fixed in dar_manager version  : 1.0.1

#12:
description: memory leakage when using -Z or -Y option. This bug has been found and explained by David Rose, who proposed a good patch, which I have just a little adapted.

problem: the dar code that calls the zlib library when changing the compression algorithm did properly release memory allocated by dar but forgot to execute a call of the zlib library that releases memory allocated by the library itself.

solution: add a call to release memory used by the zlib library when changing the compression algorithm

affects dar version   : 1.2.0, 1.2.1
fixed in dar version  : 1.3.0

#13:
description: archives created by dar under Windows NT SP6, XP, 2000, cannot be read or tested. All actions on a generated archive finish with a CRC error message, incoherent catalogue structure, or badly formatted infinint.

problem: under windows, Dar uses Cygwin. As Unix and Window do not define end of line the same way (\n for Unix and \r\n for Windows), Cygwin translates end of line on the fly when reading a file (Windows to Unix), and when writing (Unix to Windows). At writing time especially, each \n anywhere in the archive is written \r\n, thus two bytes get written in place of one. In another hand, to get the global position in the whole archive dar counts bytes it writes to file. Thus a shift occurs between the position it thinks it writes the catalogue and the position it actually does. Thus reading the archive, the given position is wrong, and archive cannot be opened.

solution: deactivate the translation when generating the archive. Cygwin adds two special flags to pass to open(2), O_TEXT and O_BINARY, that change the way Cygwin handles file read(2) and write(2).

affects all dar suite program version 1.0.x, 1.1.x, 1.2.x
fixed in dar version 1.3.0

#14:
description: Dar aborts with the following message

---- exception nature = [alive] exception type = [BUG] ----------
[source]
    file compressor.cpp line 320 : it seems to be a bug here
[most outside call]
-----------------------------------

It appears while doing a backup using compression and scrambling (could not (yet) be reproduced twice).

problem: this message is issued because zlib library reported an inconsistency in call order or in call arguments given to zlib calls. Actually I cannot figure out what is wrong, nor how this bug may happens.

solution: none actually at the time of 2.0.0 release.

But, if you are face to face with this bug, thanks to try reproducing it, re-issuing the same command in the same condition,  and if possible to reproduce then as much as possible reduce the necessary environment condition that make this bug happen. Then please, contact me (either via Sourceforge or via email).

affects version 1.2.1 (at least)
not yet fixed (check Sourceforge for fresh news), nor reproduced.

#15:
description: Dar aborts with the following message when giving -y0 as argument

---- exception nature = [alive]  exception type = [BUG] ----------
[source]
    file compressor.cpp line 96 : it seems to be a bug here
[most outside call]
-----------------------------------

problem: correct compression level for zlib are from 0 to 9, 0 meaning no compression, while for libbz2 correct compression levels are from 1 to 9, thus zero is not a valid value. libbz2 returns an error code which triggers a libdar internal sanity check code, which throws this BUG exception.


solution: forbid the use of -z0 or -y0. If you don't want compression, you will need to give neither -z nor -y.

affects version 1.3.0
fixed in version 2.0.0

#16:
description: Dar include does not ignore comments in included file as given by -B option

problem: when a line with only one carriage-return is put in a file, the following line is considered as a normal line (not a comment), whatever it is, in particular if it starts with a hash (#).

solution: make the algorithm consider a line as a normal line once a character other than space, tab, hash or carriage-return is met (added the carriage-return to the list).

affects version 1.2.0, 1.2.1, 1.3.0
fixed in version 2.0.0


#17:
description: Dar warns that the archive is about to save itself while a -P option is used to prevent this eventuality.

problem: testing against this situation did not check that a parent directory of the path where is saved the archive is excluded by a -P option.

solution: updating the test for the warning only appear in real auto-saving conditions.

affects version 2.0.0
fixed in version 2.0.1

#17bis:
description: same as #17, but in the case where the archive is under the current directory, and some -P options exclude it.

problem: the current directory "." is not substituted by its full path, and the testing algorithm treats "." as directly under the root "/" directory; no -P option could thus exclude it from the files to save.

solution: for testing and preventing self-backup, "." is replaced by its full path counterpart. affects version 2.0.0, 2.0.1, fixed in version 2.0.2

#18:
description: Dar does not save files or just stores empty directories when using -B option or having a ~/.darrc or /etc/darrc file

problem: the parsing code that strips the comments from files does not count empty lines ('\n') between two lines of commands (i.e.: uncommented lines). This strips the last uncommented line before the next line of comment, usually leading to the removal of an ending '\n' in the uncommented line, implying the fusion of two commands that were initially separated by a '\n' and one or several lines of comments: the last command before the comment and the next command on any uncommented line after it. This wrongly interpreted command most of the time appears as a member of [list of path] and thus restricts the backup to only a subdirectory that most of the time does not exist. If the -D option is used, all first level directories are excluded from the backup and thus stored as empty directories.

solution: bug fix in no_comment.cpp. A workaround is to remove comments or empty lines from configuration files.

affects versions 2.0.0, 2.0.1
fixed in versions 2.0.2

#19:
description: Dar hangs when using --nodump with named pipe to save

problem: to get the nodump flag value, dar needs to call the open() system call in read-only mode. For named pipes this  system call does return only when another process opens the same pipe in write mode.

solution: as proposed by Joshua Neal, the open will be set to non blocking mode to return immediately.

affects version 1.3.0, 2.0.0, 2.0.1
fixed in version 2.0.2

#20:
description: dar_manager did not completely removed reference to an archive when it was removed from a database

problem: data_dir is a C++ class that inherits from data_tree. This one has a virtual method remove_all_from() that was redefined in the data_dir class with a very little difference, making it not be a redefinition of the inherited class's method (argument was "archive_num" in place of "const & archive_num"). This redefined method was never called, and the removal never recursed into subdirectories.

solution: make the two signature identical in the mother and inherited classes.

affects version 1.3.0, 2.0.0, 2.0.1, 2.0.2
fixed in version 2.0.3

#21:
description: under FreeBSD using -B option leads to parse error

problem: under FreeBSD, getopt() call is not reset just by setting optind to zero like under Linux, but the variable optreset must be set to 1.

solution: configure script now check for the presence of optreset variable which allows to properly reset the getopt call according to the system nature.

affects version 2.1.0 and older
fixed in version 2.1.1

#22:
description: dar stops when fed with an empty file for -B option

problem: the comment stripping class lacks a variable initialization

solution: initialize variable before use

affects version 2.1.0 and older
fixed in version 2.1.1


#23:
description: -z -y -H and -w do not work under FreeBSD

problem: the getopt() version does not support optional arguments

solution: document this problem, and make possible to reach any feature

in the meanwhile, replace
-z    by   -z 9
-y    by   -y 9
-H    by   -H 1
-w    by   -w d (added for the circumstances)

affects all versions
documented starting version 2.1.2 (see dar's man page)

#24:
description: under windows 2000 SP4 failed to properly save a text file with DOS carriage return. Testing the archive reports CRC error.

problem: The Cygwin adaptation layer, needed for dar to be able to run under windows, requires a non-standard flag mode to not automatically translate carriage returns between windows and unix. This bug is related to bug #13, whose fix was not properly applied in one single place.

solution: properly apply the fix of bug #13 in this single place too.

affects version 2.1.1 and older
fixed in version 2.1.2

#25:
description: under some hypothetical conditions (when the archive is larger than 10^16 Terabytes, 1 followed by 16 zeros), dar is not able to read the archive.

problem: In this situation, the terminateur is not properly formed and cannot fulfill its role: point to the start of the catalogue. A byte 0x00 is missing near the end of the archive, that indicate the end of the terminateur.

solution: properly write down the terminateur string of the archive. Note that archives produced by earlier versions cannot be read by dar versions that have the fix, but don't worry: if you have such a big archive (more than 10^16 Terabytes), tell me and I will create a little program that fixes the archive ;-) . Note: "terminateur" is the French word for terminator; the word has been kept here because it is used in the source code.

affects version 2.1.2 and older
fixed in version 2.1.3

#26:
description: in configuration file using conditional syntax (see man page), when two targets follow each other on two directly following lines (the first condition is thus empty), the second condition is skipped. This condition also occurs if there is only comments between the two targets.

problem: when looking for the first target, an extra character is read (the first of the second target), as there is no contents in this target no reposition (seek) in the file is done, and the search for the next target start at that point (second character of the second target), which makes the second target not recognized because of the missing initial character.

solution: avoid reading this extra character so the next character to read is the first character (not the second) of the following target.

affects version 1.2.0 and older versions
fixed in version 2.1.3


#27:
description: when restoring only more recent files (-r), with hourshift activated (-H) the -H feature was randomly not activated.

problem: a test comparing the dates is made on objects' addresses rather than on the objects themselves. This leads to the -H test being activated randomly depending on the addresses of the two objects to compare.

solution: make the test on objects rather than on their addresses in memory.

affects version from 2.0.0 to 2.1.3
fixed in version 2.1.4

#28:
description: dar does not restore root Extended Attributes

problem: a collision occurred in the naming of the enumeration listing the extended attributes available domains, and the variables carrying user choice to consider or not user or root extended attributes for the operation (backup, restoration, comparison, etc.)

solution: rename both the domain enumeration and the variables carrying user choice about extended attribute comportment.

affects version from 2.1.0 to 2.1.3
fixed in version 2.1.4

#29:
description: dar does Segmentation fault during backup when hard links are involved

problem: when a plain file inode linked several times to the directory tree (a hard linked file) is not readable due to a lack of permission, the filesystem module generates a C++ object and stores its address for further reference and hard link handling. When the file opening comes, an exception is thrown, leading to this object's destruction, but the reference stays in the filesystem module, leading the next link to this inode to be referenced by a destroyed object.

solution: remove the reference when exception is caught. This removal was properly done when such an inode was not to be saved (and the corresponding C++ object was destroyed), the case of the exception had been forgotten.

affects version up to 2.1.4
fixed in version 2.1.5

#30:
description: compilation failed with syntax error reported in libstdc++ header files (reported on Solaris and openBSD).

problem: the configure script does detect the presence of the mbstate_t type, and redefines it. This conflicts with the system header files and leads to a syntax error.

workaround 1:
will update the configure script to check if that fixes the problem. Else there is a workaround. Once configure has finished, edit the config.h file adding the line
#define HAVE_MBSTATE_T 1
and commenting out the line
#define mbstate_t int
Thanks to Steve Evans for this workaround.

solution: undefine the mbstate_t in dar's source code. This is implemented and seems to work. No action is required for your part.

may affects version up to 2.2.0
fixed in version 2.2.1


#31:
description: dar segfault when restoring Extended Attributes

problem: to speed up the reading of the catalogue from an archive, an intermediate layer has been added thanks to a "cache" object. This object is temporary, unlike the object it does caching on. When the catalogue reading has finished, the reference given to inodes becomes invalid because it is the caching object in place of the real underlying archive object.

solution: keep doing archive reading through the caching object but give the real archive object to the inode contained in the catalogue for they have a valid reference when it is time to restore EA.

affects version 2.2.0
fixed in version 2.2.1

#32:
description: dar is unable to read blowfish encrypted archives under certain conditions

problem: this bug appears when using infinint (no --enable-mode on the command-line). The implicit cast from an unsigned integer to an infinint in an operation was not properly made by the compiler. Instead of converting the system integer to an infinint, which is possible thanks to the infinint class constructor, the compiler downcast the unsigned integer to unsigned char.

solution: use explicit cast conversion when doing operations which concern both infinint and unsigned integers.

affects version 2.2.0
fixed in version 2.2.1

#33:
description: dar hangs when the last line of a DCF file starts with a few spaces or tabs just following a comment.

problem: The layer that removes comments was counting the bytes from the beginning of the line while the first byte retained as significant was the first non-space of the line, leading it to provide to the upper layer wrong information about the number of bytes in the file.

solution: start counting bytes from the first significative byte of the line instead of the first byte of the line.

affects versions 2.1.x up to 2.2.0
fixed in version 2.2.1

#34:
description: when restoring files in a directory, this one gets all permissions set for everybody. This makes a security hole on multi-user systems.

problem: when restoring in a directory as a non privileged user it is necessary to have write access to that directory, thus to temporarily change its permissions. The permissions are thus changed, but the temporary change is too permissive and opens a security hole, allowing local users to delete/create/replace files under the restored directory.

solution: only add the user write access (if necessary) to the directory when dar is run as non privileged user.

affects versions 1.x.x up to 2.2.0
fixed in version 2.2.1

#35:
description: when producing archive to stdout sometimes the message "Error while saving data: Bad file descriptor given" is displayed and dar aborts its execution

problem: health check done on an uninitialized object field in place of a constructor's argument that will be used to set this field.

solution: test the method's argument in place of the object field

affects version 2.2.x up to 2.2.3
fixed in version 2.2.4

#36:
description: when using -I or -X with -l option directory names are also filtered while they should not be, as described in the man page

solution: avoid having -X/-I filtering directories

affected versions 2.2.x up to 2.2.5
fixed in 2.2.6

#37:
description: when restoring a file which all EA has been dropped since the archive of reference, dar reports CRC error.

problem: to store that EA have been dropped, dar saves an empty EA list, but did not calculate the CRC value

solution: make the CRC value set when storing an empty list of EA. Note that this bug did not cause any problem except a CRC error message.

affects versions up to 2.2.6
fixed in 2.2.7




dar-2.6.8/doc/man/0000755000175000017520000000000013617552352010631 500000000000000dar-2.6.8/doc/man/Makefile.am0000644000175000017520000000075513530031622012576 00000000000000dist_noinst_DATA=index.html if USE_GROFF TARGET=copyman SUFFIXES = .html .1 copyman: cp ../../man/*1 . $(MAKE) dar.html dar_slave.html dar_xform.html dar_manager.html dar_cp.html dar_split.html touch copyman .1.html: sed -e 's%\-%\\-%g' < ./$< | groff -man -Thtml | sed -e 's% $@ all-local: $(TARGET) clean-local: rm -f $(TARGET) *.1 copyman dar.html dar_slave.html dar_xform.html dar_manager.html dar_cp.html dar_split.html endif dar-2.6.8/doc/man/Makefile.in0000644000175000017520000003317113617552304012620 00000000000000# Makefile.in generated by automake 1.15 from Makefile.am. # @configure_input@ # Copyright (C) 1994-2014 Free Software Foundation, Inc. # This Makefile.in is free software; the Free Software Foundation # gives unlimited permission to copy and/or distribute it, # with or without modifications, as long as this notice is preserved. # This program is distributed in the hope that it will be useful, # but WITHOUT ANY WARRANTY, to the extent permitted by law; without # even the implied warranty of MERCHANTABILITY or FITNESS FOR A # PARTICULAR PURPOSE. @SET_MAKE@ VPATH = @srcdir@ am__is_gnu_make = { \ if test -z '$(MAKELEVEL)'; then \ false; \ elif test -n '$(MAKE_HOST)'; then \ true; \ elif test -n '$(MAKE_VERSION)' && test -n '$(CURDIR)'; then \ true; \ else \ false; \ fi; \ } am__make_running_with_option = \ case $${target_option-} in \ ?) 
;; \ *) echo "am__make_running_with_option: internal error: invalid" \ "target option '$${target_option-}' specified" >&2; \ exit 1;; \ esac; \ has_opt=no; \ sane_makeflags=$$MAKEFLAGS; \ if $(am__is_gnu_make); then \ sane_makeflags=$$MFLAGS; \ else \ case $$MAKEFLAGS in \ *\\[\ \ ]*) \ bs=\\; \ sane_makeflags=`printf '%s\n' "$$MAKEFLAGS" \ | sed "s/$$bs$$bs[$$bs $$bs ]*//g"`;; \ esac; \ fi; \ skip_next=no; \ strip_trailopt () \ { \ flg=`printf '%s\n' "$$flg" | sed "s/$$1.*$$//"`; \ }; \ for flg in $$sane_makeflags; do \ test $$skip_next = yes && { skip_next=no; continue; }; \ case $$flg in \ *=*|--*) continue;; \ -*I) strip_trailopt 'I'; skip_next=yes;; \ -*I?*) strip_trailopt 'I';; \ -*O) strip_trailopt 'O'; skip_next=yes;; \ -*O?*) strip_trailopt 'O';; \ -*l) strip_trailopt 'l'; skip_next=yes;; \ -*l?*) strip_trailopt 'l';; \ -[dEDm]) skip_next=yes;; \ -[JT]) skip_next=yes;; \ esac; \ case $$flg in \ *$$target_option*) has_opt=yes; break;; \ esac; \ done; \ test $$has_opt = yes am__make_dryrun = (target_option=n; $(am__make_running_with_option)) am__make_keepgoing = (target_option=k; $(am__make_running_with_option)) pkgdatadir = $(datadir)/@PACKAGE@ pkgincludedir = $(includedir)/@PACKAGE@ pkglibdir = $(libdir)/@PACKAGE@ pkglibexecdir = $(libexecdir)/@PACKAGE@ am__cd = CDPATH="$${ZSH_VERSION+.}$(PATH_SEPARATOR)" && cd install_sh_DATA = $(install_sh) -c -m 644 install_sh_PROGRAM = $(install_sh) -c install_sh_SCRIPT = $(install_sh) -c INSTALL_HEADER = $(INSTALL_DATA) transform = $(program_transform_name) NORMAL_INSTALL = : PRE_INSTALL = : POST_INSTALL = : NORMAL_UNINSTALL = : PRE_UNINSTALL = : POST_UNINSTALL = : build_triplet = @build@ host_triplet = @host@ subdir = doc/man ACLOCAL_M4 = $(top_srcdir)/aclocal.m4 am__aclocal_m4_deps = $(top_srcdir)/m4/gettext.m4 \ $(top_srcdir)/m4/iconv.m4 $(top_srcdir)/m4/lib-ld.m4 \ $(top_srcdir)/m4/lib-link.m4 $(top_srcdir)/m4/lib-prefix.m4 \ $(top_srcdir)/m4/nls.m4 $(top_srcdir)/m4/po.m4 \ $(top_srcdir)/m4/progtest.m4 
$(top_srcdir)/configure.ac am__configure_deps = $(am__aclocal_m4_deps) $(CONFIGURE_DEPENDENCIES) \ $(ACLOCAL_M4) DIST_COMMON = $(srcdir)/Makefile.am $(dist_noinst_DATA) \ $(am__DIST_COMMON) mkinstalldirs = $(install_sh) -d CONFIG_HEADER = $(top_builddir)/config.h CONFIG_CLEAN_FILES = CONFIG_CLEAN_VPATH_FILES = AM_V_P = $(am__v_P_@AM_V@) am__v_P_ = $(am__v_P_@AM_DEFAULT_V@) am__v_P_0 = false am__v_P_1 = : AM_V_GEN = $(am__v_GEN_@AM_V@) am__v_GEN_ = $(am__v_GEN_@AM_DEFAULT_V@) am__v_GEN_0 = @echo " GEN " $@; am__v_GEN_1 = AM_V_at = $(am__v_at_@AM_V@) am__v_at_ = $(am__v_at_@AM_DEFAULT_V@) am__v_at_0 = @ am__v_at_1 = SOURCES = DIST_SOURCES = am__can_run_installinfo = \ case $$AM_UPDATE_INFO_DIR in \ n|no|NO) false;; \ *) (install-info --version) >/dev/null 2>&1;; \ esac DATA = $(dist_noinst_DATA) am__tagged_files = $(HEADERS) $(SOURCES) $(TAGS_FILES) $(LISP) am__DIST_COMMON = $(srcdir)/Makefile.in DISTFILES = $(DIST_COMMON) $(DIST_SOURCES) $(TEXINFOS) $(EXTRA_DIST) ACLOCAL = @ACLOCAL@ AMTAR = @AMTAR@ AM_DEFAULT_VERBOSITY = @AM_DEFAULT_VERBOSITY@ AR = @AR@ AUTOCONF = @AUTOCONF@ AUTOHEADER = @AUTOHEADER@ AUTOMAKE = @AUTOMAKE@ AWK = @AWK@ CC = @CC@ CCDEPMODE = @CCDEPMODE@ CFLAGS = @CFLAGS@ CPP = @CPP@ CPPFLAGS = @CPPFLAGS@ CXX = @CXX@ CXXCPP = @CXXCPP@ CXXDEPMODE = @CXXDEPMODE@ CXXFLAGS = @CXXFLAGS@ CXXSTDFLAGS = @CXXSTDFLAGS@ CYGPATH_W = @CYGPATH_W@ DEFS = @DEFS@ DEPDIR = @DEPDIR@ DLLTOOL = @DLLTOOL@ DOXYGEN_PROG = @DOXYGEN_PROG@ DSYMUTIL = @DSYMUTIL@ DUMPBIN = @DUMPBIN@ ECHO_C = @ECHO_C@ ECHO_N = @ECHO_N@ ECHO_T = @ECHO_T@ EGREP = @EGREP@ EXEEXT = @EXEEXT@ FGREP = @FGREP@ GETTEXT_MACRO_VERSION = @GETTEXT_MACRO_VERSION@ GMSGFMT = @GMSGFMT@ GMSGFMT_015 = @GMSGFMT_015@ GPGME_CFLAGS = @GPGME_CFLAGS@ GPGME_CONFIG = @GPGME_CONFIG@ GPGME_LIBS = @GPGME_LIBS@ GREP = @GREP@ HAS_DOT = @HAS_DOT@ INSTALL = @INSTALL@ INSTALL_DATA = @INSTALL_DATA@ INSTALL_PROGRAM = @INSTALL_PROGRAM@ INSTALL_SCRIPT = @INSTALL_SCRIPT@ INSTALL_STRIP_PROGRAM = @INSTALL_STRIP_PROGRAM@ INTLLIBS = 
@INTLLIBS@ INTL_MACOSX_LIBS = @INTL_MACOSX_LIBS@ LD = @LD@ LDFLAGS = @LDFLAGS@ LIBCURL_CFLAGS = @LIBCURL_CFLAGS@ LIBCURL_LIBS = @LIBCURL_LIBS@ LIBICONV = @LIBICONV@ LIBINTL = @LIBINTL@ LIBOBJS = @LIBOBJS@ LIBS = @LIBS@ LIBTHREADAR_CFLAGS = @LIBTHREADAR_CFLAGS@ LIBTHREADAR_LIBS = @LIBTHREADAR_LIBS@ LIBTOOL = @LIBTOOL@ LIPO = @LIPO@ LN_S = @LN_S@ LTLIBICONV = @LTLIBICONV@ LTLIBINTL = @LTLIBINTL@ LTLIBOBJS = @LTLIBOBJS@ LT_SYS_LIBRARY_PATH = @LT_SYS_LIBRARY_PATH@ MAKEINFO = @MAKEINFO@ MANIFEST_TOOL = @MANIFEST_TOOL@ MKDIR_P = @MKDIR_P@ MSGFMT = @MSGFMT@ MSGFMT_015 = @MSGFMT_015@ MSGMERGE = @MSGMERGE@ NM = @NM@ NMEDIT = @NMEDIT@ OBJDUMP = @OBJDUMP@ OBJEXT = @OBJEXT@ OTOOL = @OTOOL@ OTOOL64 = @OTOOL64@ PACKAGE = @PACKAGE@ PACKAGE_BUGREPORT = @PACKAGE_BUGREPORT@ PACKAGE_NAME = @PACKAGE_NAME@ PACKAGE_STRING = @PACKAGE_STRING@ PACKAGE_TARNAME = @PACKAGE_TARNAME@ PACKAGE_URL = @PACKAGE_URL@ PACKAGE_VERSION = @PACKAGE_VERSION@ PATH_SEPARATOR = @PATH_SEPARATOR@ PKG_CONFIG = @PKG_CONFIG@ PKG_CONFIG_LIBDIR = @PKG_CONFIG_LIBDIR@ PKG_CONFIG_PATH = @PKG_CONFIG_PATH@ POSUB = @POSUB@ PYEXT = @PYEXT@ PYFLAGS = @PYFLAGS@ RANLIB = @RANLIB@ SED = @SED@ SET_MAKE = @SET_MAKE@ SHELL = @SHELL@ STRIP = @STRIP@ UPX_PROG = @UPX_PROG@ USE_NLS = @USE_NLS@ VERSION = @VERSION@ XGETTEXT = @XGETTEXT@ XGETTEXT_015 = @XGETTEXT_015@ XGETTEXT_EXTRA_OPTIONS = @XGETTEXT_EXTRA_OPTIONS@ abs_builddir = @abs_builddir@ abs_srcdir = @abs_srcdir@ abs_top_builddir = @abs_top_builddir@ abs_top_srcdir = @abs_top_srcdir@ ac_ct_AR = @ac_ct_AR@ ac_ct_CC = @ac_ct_CC@ ac_ct_CXX = @ac_ct_CXX@ ac_ct_DUMPBIN = @ac_ct_DUMPBIN@ am__include = @am__include@ am__leading_dot = @am__leading_dot@ am__quote = @am__quote@ am__tar = @am__tar@ am__untar = @am__untar@ bindir = @bindir@ build = @build@ build_alias = @build_alias@ build_cpu = @build_cpu@ build_os = @build_os@ build_vendor = @build_vendor@ builddir = @builddir@ datadir = @datadir@ datarootdir = @datarootdir@ docdir = @docdir@ dot = @dot@ doxygen = @doxygen@ dvidir = 
@dvidir@ exec_prefix = @exec_prefix@ groff = @groff@ host = @host@ host_alias = @host_alias@ host_cpu = @host_cpu@ host_os = @host_os@ host_vendor = @host_vendor@ htmldir = @htmldir@ includedir = @includedir@ infodir = @infodir@ install_sh = @install_sh@ libdir = @libdir@ libexecdir = @libexecdir@ localedir = @localedir@ localstatedir = @localstatedir@ mandir = @mandir@ mkdir_p = @mkdir_p@ oldincludedir = @oldincludedir@ pdfdir = @pdfdir@ pkgconfigdir = @pkgconfigdir@ prefix = @prefix@ program_transform_name = @program_transform_name@ psdir = @psdir@ runstatedir = @runstatedir@ sbindir = @sbindir@ sharedstatedir = @sharedstatedir@ srcdir = @srcdir@ sysconfdir = @sysconfdir@ target_alias = @target_alias@ tmp = @tmp@ top_build_prefix = @top_build_prefix@ top_builddir = @top_builddir@ top_srcdir = @top_srcdir@ upx = @upx@ dist_noinst_DATA = index.html @USE_GROFF_TRUE@TARGET = copyman @USE_GROFF_TRUE@SUFFIXES = .html .1 all: all-am .SUFFIXES: .SUFFIXES: .html .1 $(srcdir)/Makefile.in: $(srcdir)/Makefile.am $(am__configure_deps) @for dep in $?; do \ case '$(am__configure_deps)' in \ *$$dep*) \ ( cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh ) \ && { if test -f $@; then exit 0; else break; fi; }; \ exit 1;; \ esac; \ done; \ echo ' cd $(top_srcdir) && $(AUTOMAKE) --gnu doc/man/Makefile'; \ $(am__cd) $(top_srcdir) && \ $(AUTOMAKE) --gnu doc/man/Makefile Makefile: $(srcdir)/Makefile.in $(top_builddir)/config.status @case '$?' 
in \ *config.status*) \ cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh;; \ *) \ echo ' cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe)'; \ cd $(top_builddir) && $(SHELL) ./config.status $(subdir)/$@ $(am__depfiles_maybe);; \ esac; $(top_builddir)/config.status: $(top_srcdir)/configure $(CONFIG_STATUS_DEPENDENCIES) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(top_srcdir)/configure: $(am__configure_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(ACLOCAL_M4): $(am__aclocal_m4_deps) cd $(top_builddir) && $(MAKE) $(AM_MAKEFLAGS) am--refresh $(am__aclocal_m4_deps): mostlyclean-libtool: -rm -f *.lo clean-libtool: -rm -rf .libs _libs tags TAGS: ctags CTAGS: cscope cscopelist: distdir: $(DISTFILES) @srcdirstrip=`echo "$(srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ topsrcdirstrip=`echo "$(top_srcdir)" | sed 's/[].[^$$\\*]/\\\\&/g'`; \ list='$(DISTFILES)'; \ dist_files=`for file in $$list; do echo $$file; done | \ sed -e "s|^$$srcdirstrip/||;t" \ -e "s|^$$topsrcdirstrip/|$(top_builddir)/|;t"`; \ case $$dist_files in \ */*) $(MKDIR_P) `echo "$$dist_files" | \ sed '/\//!d;s|^|$(distdir)/|;s,/[^/]*$$,,' | \ sort -u` ;; \ esac; \ for file in $$dist_files; do \ if test -f $$file || test -d $$file; then d=.; else d=$(srcdir); fi; \ if test -d $$d/$$file; then \ dir=`echo "/$$file" | sed -e 's,/[^/]*$$,,'`; \ if test -d "$(distdir)/$$file"; then \ find "$(distdir)/$$file" -type d ! -perm -700 -exec chmod u+rwx {} \;; \ fi; \ if test -d $(srcdir)/$$file && test $$d != $(srcdir); then \ cp -fpR $(srcdir)/$$file "$(distdir)$$dir" || exit 1; \ find "$(distdir)/$$file" -type d ! 
-perm -700 -exec chmod u+rwx {} \;; \ fi; \ cp -fpR $$d/$$file "$(distdir)$$dir" || exit 1; \ else \ test -f "$(distdir)/$$file" \ || cp -p $$d/$$file "$(distdir)/$$file" \ || exit 1; \ fi; \ done check-am: all-am check: check-am @USE_GROFF_FALSE@all-local: all-am: Makefile $(DATA) all-local installdirs: install: install-am install-exec: install-exec-am install-data: install-data-am uninstall: uninstall-am install-am: all-am @$(MAKE) $(AM_MAKEFLAGS) install-exec-am install-data-am installcheck: installcheck-am install-strip: if test -z '$(STRIP)'; then \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ install; \ else \ $(MAKE) $(AM_MAKEFLAGS) INSTALL_PROGRAM="$(INSTALL_STRIP_PROGRAM)" \ install_sh_PROGRAM="$(INSTALL_STRIP_PROGRAM)" INSTALL_STRIP_FLAG=-s \ "INSTALL_PROGRAM_ENV=STRIPPROG='$(STRIP)'" install; \ fi mostlyclean-generic: clean-generic: distclean-generic: -test -z "$(CONFIG_CLEAN_FILES)" || rm -f $(CONFIG_CLEAN_FILES) -test . = "$(srcdir)" || test -z "$(CONFIG_CLEAN_VPATH_FILES)" || rm -f $(CONFIG_CLEAN_VPATH_FILES) maintainer-clean-generic: @echo "This command is intended for maintainers to use" @echo "it deletes files that may require special tools to rebuild." 
@USE_GROFF_FALSE@clean-local: clean: clean-am clean-am: clean-generic clean-libtool clean-local mostlyclean-am distclean: distclean-am -rm -f Makefile distclean-am: clean-am distclean-generic dvi: dvi-am dvi-am: html: html-am html-am: info: info-am info-am: install-data-am: install-dvi: install-dvi-am install-dvi-am: install-exec-am: install-html: install-html-am install-html-am: install-info: install-info-am install-info-am: install-man: install-pdf: install-pdf-am install-pdf-am: install-ps: install-ps-am install-ps-am: installcheck-am: maintainer-clean: maintainer-clean-am -rm -f Makefile maintainer-clean-am: distclean-am maintainer-clean-generic mostlyclean: mostlyclean-am mostlyclean-am: mostlyclean-generic mostlyclean-libtool pdf: pdf-am pdf-am: ps: ps-am ps-am: uninstall-am: .MAKE: install-am install-strip .PHONY: all all-am all-local check check-am clean clean-generic \ clean-libtool clean-local cscopelist-am ctags-am distclean \ distclean-generic distclean-libtool distdir dvi dvi-am html \ html-am info info-am install install-am install-data \ install-data-am install-dvi install-dvi-am install-exec \ install-exec-am install-html install-html-am install-info \ install-info-am install-man install-pdf install-pdf-am \ install-ps install-ps-am install-strip installcheck \ installcheck-am installdirs maintainer-clean \ maintainer-clean-generic mostlyclean mostlyclean-generic \ mostlyclean-libtool pdf pdf-am ps ps-am tags-am uninstall \ uninstall-am .PRECIOUS: Makefile @USE_GROFF_TRUE@copyman: @USE_GROFF_TRUE@ cp ../../man/*1 . 
@USE_GROFF_TRUE@ $(MAKE) dar.html dar_slave.html dar_xform.html dar_manager.html dar_cp.html dar_split.html @USE_GROFF_TRUE@ touch copyman @USE_GROFF_TRUE@.1.html: @USE_GROFF_TRUE@ sed -e 's%\-%\\-%g' < ./$< | groff -man -Thtml | sed -e 's% $@ @USE_GROFF_TRUE@all-local: $(TARGET) @USE_GROFF_TRUE@clean-local: @USE_GROFF_TRUE@ rm -f $(TARGET) *.1 copyman dar.html dar_slave.html dar_xform.html dar_manager.html dar_cp.html dar_split.html # Tell versions [3.59,3.63) of GNU make to not export all variables. # Otherwise a system limit (for SysV at least) may be exceeded. .NOEXPORT: dar-2.6.8/doc/man/index.html0000644000175000017520000000516713530031622012541 00000000000000 Dar - Dynamically Generated Documentation
Dar Documentation


Dynamically Generated Documentation





The following documentation has not been built manually. If you cannot access the following links while reading this page from a source package, this is maybe because you have not typed 'make' or because you lack some requirements like Groff and Doxygen. You can also access the Dar Documentation on dar's homepage or mirror page with all dynamically generated documentation available.


Libdar's API Documentation:

dar-2.6.8/doc/api_tutorial.html0000644000175000017520000051726613552573443013403 00000000000000 Libdar API - Tutorial
Dar Documentation

LIBDAR

APPLICATION INTERFACE

TUTORIAL

for API version 6.0.x and later




Presentation

The Libdar library provides a complete abstraction layer for handling Dar archives. The general operations provided are:

  • archive creation,
  • file extraction from archive,
  • archive listing,
  • archive testing,
  • archive comparison,
  • catalogue isolation,
  • archive merging,
  • archive reparation
  • dar_manager database manipulations
  • dar_slave steering
  • dar_xform operation
Note that Disk ARchive and libdar have been released under the Gnu General Public License (GPL). All code linked to libdar (statically or dynamically), must also be covered by the GPL. Commercial use is prohibited unless a contract has been agreed with libdar's author.

This tutorial will show you how to use the libdar API. Since release 2.0.0 the dar command-line executable also relies on this API; looking at its code may provide a good illustration of the way to use libdar. The file src/dar_suite/dar.cpp is the primary consumer of the libdar API.

The sample code provided here is solely illustrative and is not guaranteed to compile. More detailed API documentation is contained in the source code and can be compiled to the doc/html directory using Doxygen, which is also provided online and is referred to below as the API reference documentation.



Let's Start

Conventions

Language

Dar and libdar are written in C++, and so is the libdar API, for other languages check for the existing bindings, like for example the python bindings.

Header files


Only one include file is required in your program to have access to libdar:

#include <dar/libdar.hpp>

Libdar namespace

All libdar symbols are defined under the libdar namespace. You can either add the using namespace libdar; line at the beginning of your source files:

using namespace libdar;

get_version();

 or, as shown below, you can explicitly use the namespace in front of libdar symbols, we will use this notation in the following:


libdar::get_version();

Exceptions

The library makes use of exception to report unexpected conditions. These contain the reason and context the error occurred in and can be caught by your code to display this information. All exceptions used within libdar inherit from the pure virtual class libdar::Egeneric

Most of the time you will use only one of the following two methods:
  • std::string & get_message() const
  • std::string & dump_str() const
get_message() returns a message string describing the error met
dump_str() returns a text paragraph with additional information about the stack as well as context the error occurred in.

We will only focus on one specific exception type libdar::Ebug that is used within libdar when a-situation-that-should-never-occur is met and is assumed to be a bug in libdar. Using the get_message() method in that situation would not provide all necessary details to understand and fix the bug, so it is advised to always use dump_str() for that specific type of exception.

try
{
    // calls to libdar
   ...
    //
}
catch(libdar::Ebug & e)
{
    std::string msg = e.dump_str();

    // do something with msg like for example:
    std::cerr << msg
}
catch(libdar::Egeneric & e)
{
    std::string msg = e.get_message();

    // do something with msg like for example
    std::cerr << msg
}


 1 - First we must initialize libdar

the libdar initialization is performed by calling the libdar::get_version() function.

This function can be called several times though only once is necessary, but this call has to complete before any other call to libdar.

In a multi-thread context libthreadar initialization is not re-entrant. In other words, the first call to libdar::get_version() must complete before any other call to libdar can take place. Once libdar has been initialized, you can call libdar::get_version() concurrently from several threads at the same time.



libdar::get_version();


2 - We should prepare the end right now

Libdar uses some data structures (mutex, secured memory, etc.) that need to be released properly before ending the program. It is important to invoke the following function before exiting your program if you invoked get_version() previously.


libdar::close_and_clean()


Note: close_and_clean() does what is necessary for memory to be released in the proper order. Not calling close_and_clean() at the end of your program may result in uncaught exception messages from libdar at the end of the execution. This depends on the compiler, libc and options activated in libdar at compilation time.


All in one, at the highest level, your code should look like the following


try
{
    libdar::get_version();
    // calls to libdar
    // thing we will see in next

       ...
       ...
   
}
catch(libdar::Ebug & e)
{
    std::string msg = e.dump_str();

    // do something with msg like for example:
    std::cerr << msg
}
catch(libdar::Egeneric & e)
{
    std::string msg = e.get_message();

    // do something with msg like for example
    std::cerr << msg
}

libdar::close_and_clean();



3 - Intercepting signals

libdar by itself does not make use of any signal (see signal(2) and kill(2)). However, gpgme library with which libdar may be linked with in order to support asymmetrical strong encryption (i.e. encryption using public/private keys) may trigger the PIPE signal. Your application shall thus either ignore it (signal(SIGPIPE, SIG_IGN)) or provide an adhoc handle. By default the PIPE signal leads the receiving process to terminate.



4 - Libdar classes

The main components of libdar are four classes:
  • class libdar::archive to play with dar archives
  • class libdar::database to play with dar_manager databases
  • class libdar::libdar_slave to take the role of dar_slave
  • class libdar::libdar_xform to re-slice existing archives like dar_xform does
In the following we will first see class libdar::archive which will take most of our effort as other classes which we will see at the end are very simple to use.

5 - Multithreaded environment

Except some specific cases that will be mentioned, a given libdar object can only be manipulated by a single thread. You can however perform several operations concurrently from different threads, each having its own set of objects. Though, if one thread is creating an archive by means of a first object and at the same time another thread by means of a second object is trying to read the same archive under construction, things might not work as expected. But these are obvious considerations we will not dig any further, assuming you know what you are doing.

6 - Let's create a simple archive

Creating a libdar::archive object depending on the constructor used, leads to either:
  • the creation of a brand new archive on filesystem, thus performing a backup (full, incremental, differential, decremental, ...)
  • the opening of an archive existing on filesystem, for further operation (listing, file restoration, archive testing, archive difference, ...)
  • the merging of two existing archives into a new one
  • the reparation of an archive which catalogue is missing or damaged. The catalogue (which means catalog in French) is the table of content of an archive.

6.1 - Basic archive creation

For archive creation the constructor format is the following one:

        archive::archive(const std::shared_ptr<user_interaction> & dialog,
                         const path & fs_root,
                         const path & sauv_path,
                         const std::string & filename,
                         const std::string & extension,
                         const archive_options_create & options,
                         statistics * progressive_report);

For now we will leave aside several parameters, seen in detail later:
  • dialog can be set to std::nullptr for now, this means that all interaction with the user will be done by mean of standard input, output and error.
  • fs_root is the directory to take as root of the backup. The libdar::path class can be setup from a std::string
  • sauv_path is the path where to write the archive to, here also a std::string will do the job
  • filename is the slice name of the archive to create
  • extension is the archive extension to use. There is no reason to not use the string "dar" here
  • options is a class that carries all optional parameters, it contains a constructor without argument so all options are set to their default that way
  • statistics can receive the address of an existing object that another thread can read while a first one is doing a backup operation, we will see this feature later on, but for now let's set this to a null pointer (i.e.: std::nullptr)
Once the object has been created (the constructor has returned), the archive operation has completed and a new file has been completely written on disk.

libdar::archive my_first_backup(nullptr,
                                "/home/me",
                                "/tmp",
                                "first_backup",
                                "dar",
                                archive_options_create(),
                                nullptr);

Thus, the previous command will create a single sliced archive "first_backup.1.dar" located under /tmp. It will contain the content of the directory /home/me and its sub-directories, without compression and without ciphering. As you have guessed, compression, slicing and ciphering can be set by passing an adhoc archive_options_create object to this archive constructor, we will see that later.

Once the object has been created there is only little we can do with it, like archive listing or archive isolation. But archive extraction, testing or diffing needs creating an object with a "read" constructor first.

We could also have allocated the archive on the heap, in that case we would have just added the delete operation after the construction has ended:

libdar::archive* my_first_backup = new libdar::archive(nullptr,
                                                                        "/home/me",
                                                                        "/tmp",
                                                                        "first_backup",
                                                                        "dar", 
                                                                         archive_options_create(),
                                                                          nullptr);
  // we assume std::bad_alloc would be thrown if an allocation problem had occurred
  // same thing if libdar throws an exception at constructor time, the object would
  // not be created and would not have to be deleted.
  // So now we can delete the created object:

delete my_first_backup;

6.2 - Progressive report

During the operation we get nothing shown unless an error occurs. To have more visibility on the process we will use a libdar::statistics object passed as last argument of this constructor. The useful methods of class libdar::statistics are:
  • std::string get_treated_str()
  • std::string get_hard_links_str()
  • std::string get_skipped_str()
  • std::string get_inode_only_str()
  • std::string get_ignored_str()
  • std::string get_tooold_str()
  • std::string get_errored_str()
  • std::string get_deleted_str()
  • std::string get_ea_treated_str()
  • std::string get_byte_amount_str()
  • std::string get_fsa_treated_str()
If you have a doubt about the meaning and use of a particular counter in a particular operation, please refer to the API reference documentation of class libdar::statistics, the private fields corresponding to these counters are explicitly defined there.

libdar::statistics stats;

libdar::archive my_first_backup(nullptr,

                                "/home/me",
                                "/tmp",
                                "first_backup",
                                "dar",
                                archive_options_create(),
                                & stats);

std::cout << stats.get_treated_str() << " file(s) saved" << std::endl;
std::cout << stats.get_errored_str() << " file(s) failed to backup" << std::endl;
std::cout << stats.get_ea_treated_str() << " Extended Attributes saved" << std::endl;


6.3 - Archive creation options

In the previous example, we have created an object of class libdar::archive_options_create and passed it on-fly to the archive constructor without modifying it. Thus we used the default options for this operation. But a lot of options are available, each one can be modified by a specific method, and you will almost never use all of them. Following is a subset of the available options. We won't detail them all, but you can refer to the doxygen documentation of class libdar::archive_options_create for more information.
  • void set_reference(std::shared_ptr<archive> ref_arch)
  • void set_selection(const mask & selection)
  • void set_subtree(const mask & subtree)
  • void set_allow_over(bool allow_over)
  • void set_warn_over(bool warn_over)
  • void set_info_details(bool info_details)
  • void set_display_treated(bool display_treated, bool only_dir)
  • void set_display_skipped(bool display_skipped)
  • void set_display_finished(bool display_finished)
  • void set_pause(const infinint & pause)
  • void set_empty_dir(bool empty_dir)
  • void set_compression(compression compr_algo)
  • void set_compression_level(U_I compression_level)
  • void set_slicing(const infinint & file_size, const infinint & first_file_size)
  • void set_ea_mask(const mask & ea_mask)
  • void set_execute(const std::string & execute)
  • void set_crypto_algo(crypto_algo crypto)
  • void set_crypto_pass(const secu_string & pass)
  • void set_compr_mask(const mask & compr_mask);
  • void set_min_compr_size(const infinint & min_compr_size)
  • void set_nodump(bool nodump)
  • void set_exclude_by_ea(const std::string & ea_name)
  • void set_what_to_check(comparison_fields what_to_check)
  • void set_hourshift(const infinint & hourshift)
  • void set_empty(bool empty)
  • void set_alter_atime(bool alter_atime)
  • void set_furtive_read_mode(bool furtive_read)
  • void set_same_fs(bool same_fs)
  • void set_snapshot(bool snapshot)
  • void set_cache_directory_tagging(bool cache_directory_tagging)
  • void set_fixed_date(const infinint & fixed_date)
  • void set_slice_permission(const std::string & slice_permission)
  • void set_slice_user_ownership(const std::string & slice_user_ownership)
  • void set_slice_group_ownership(const std::string & slice_group_ownership)
  • void set_retry_on_change(const infinint & count_max_per_file, const infinint & global_max_byte_overhead)
  • void set_security_check(bool check)
  • void set_user_comment(const std::string & comment)
  • void set_hash_algo(hash_algo hash)
  • void set_slice_min_digits(infinint val)
  • void set_backup_hook(const std::string & execute, const mask & which_files);
  • void set_delta_diff(bool val)
  • void set_delta_signature(bool val)
  • void set_delta_mask(const mask & delta_mask)
First, you may have found some strange types in arguments, we will briefly explain how to set them:
  • std::shared_ptr<archive> C++11 shared smart-pointer to an existing archive object. We will see how to use it next when performing differential backup
  • infinint can be set from a classical unsigned int, unsigned long or other unsigned integer type
  • mask is a bit more complex, we will see how to use it in a specific paragraph of this tutorial
  • compression is an enumeration with values like
    • libdar::compression::gzip
    • libdar::compression::bzip2
    • libdar::compression::xz
    • libdar::compression::lzo
  • U_I is to be considered an unsigned int
  • crypto_algo is also an enumeration with values like
    • libdar::crypto_algo::scrambling
    • libdar::crypto_algo::blowfish
    • libdar::crypto_algo::aes256
    • libdar::crypto_algo::twofish256
    • libdar::crypto_algo::serpent256
    • libdar::crypto_algo::camellia256
  • secu_string is a class securely storing password and sensible cryptographic information. It can be setup from a char* or from a filedescriptor. its main constructor is:
    • secu_string(const char* ptr, U_I size)
  • comparison_fields is an enumeration with values like
    • libdar::comparison_fields::all
    • libdar::comparison_fields::ignore_owner
    • libdar::comparison_fields::mtime
    • libdar::comparison_fields::inode_type

libdar::archive_options_create opt;

opt.set_allow_over(false);       // forbids slice overwriting
opt.set_display_finished(true);  // show a summary after each completed directory
opt.set_slicing(1024000, 2000);  // slices of 1000 kiB initial slice of 2000 bytes
opt.set_pause(2);                // pause every two slices
opt.set_execute("echo slice %N completed"); // command executed after each slice
opt.set_crypto_algo(libdar::crypto_algo::aes256);

 // providing an empty secu_string leads dar to interactively ask the passphrase in a secure manner
opt.set_crypto_pass(secu_string());
 // this previous call is useless as an empty secu_string is the default
 // though one could have setup a secu_string from a std::string this way:
std::string my_pass("hello world!");
libdar::secu_string my_secupass(my_pass.c_str(), my_pass.size());
opt.set_crypto_pass(my_secupass);

opt.set_compression(libdar::compression::xz);
opt.set_compression_level(9);   // this is the default
opt.set_min_compr_size(10240);  // not trying compressing file smaller than 10 kiB

libdar::archive my_first_backup(nullptr,

                                "/home/me",
                                "/tmp",
                                "first_backup",
                                "dar",
                                opt,
                                nullptr);

Of course, you can use both libdar::statistics and libdar::archive_options_create at the same time.

7 - Creating a differential or incremental backup

Maybe you have guessed? Compared to the previous operation (full backup), doing a differential or incremental backup only requires opening in read-mode an existing archive and passing this object as argument to the archive_options_create::set_reference() method seen just above.

The read-only constructor for class archive is the following:

    archive(const std::shared_ptr<user_interaction> & dialog,
            const path & chem,
            const std::string & basename,
            const std::string & extension,
            const archive_options_read & options);


same as before:
  • dialog can be set to a null pointer, we will see later on how to play with user_interaction class
  • chem is the path leading to the archive to read, it can be provided as a std::string
  • basename is the archive basename to read
  • extension should be "dar" unless you want to confuse people
  • options can be set to an empty object for default options, we will see this class in more details with archive listing

  // first we open the previously created archive in read mode:

std::shared_ptr<libdar::archive> ref_archive(new libdar::archive(nullptr,
                                                  "/home/me",
                                                  "first_backup",
                                                  "dar",
                                                  archive_options_read()));

  // here we dynamically allocated the object to be able to setup
  // a shared_ptr in order to passed it to
  // the archive_options_create::set_reference() method:

libdar::archive_options_create opt;
opt.set_reference(ref_archive);


libdar::archive my_second_backup(nullptr,

                                "/home/me",
                                "/tmp",
                                "diff_backup",
                                "dar",
                                opt,
                                nullptr);

Creating an incremental backup is exactly the same, the difference is the nature of the archive of reference. We describe as a differential backup one that has a full backup as reference, while an incremental backup has another incremental or differential backup as reference (not a full backup).

8 - Archive listing

Archive listing operation consists of the creation of an archive object in read-mode as we just did above and invoking a method on that newly created object to see all or a sub-directory content of the archive. Before looking at the listing method let's zoom on the class libdar::archive_options_read we just skipped over previously.

8.1 - Archive reading options

 The same as the class archive_options_create detailed above, the class archive_options_read has a constructor without argument that sets the different options to their default value. You can change them one by one by means of specific methods. The most usual ones are:
  • void set_execute(const std::string & execute)
  • void set_info_details(bool info_details)
  • void set_sequential_read(bool val)
  • void set_slice_min_digits(infinint val)
set_execute() runs a command before reading a new slice of the archive. See the API reference documentation for details. You will meet that class in order to test an archive, compare an archive with filesystem, isolate an archive and repair an archive.

8.2 - Listing methods

There are several ways to read a given archive's contents:
  • void op_listing(archive_listing_callback callback,
                            void *context,
                            const archive_options_listing & options) const;
  • bool get_children_of(archive_listing_callback callback,
                                 void *context,
                                 const std::string & dir,
                                 bool fetch_ea = false);
  • const std::vector<list_entry> get_children_in_table(const std::string & dir, bool fetch_ea = false) const
archive::op_listing() makes use of a callback function that will be called in turn for each entry of the archive even special entries that flag the end of a directory and the next entry will be located in the parent directory.
archive::get_children_of() use the same callback but only for the different entries of a given directory, that has to exist in the archive of course. It returns false when the end of the directory has been reached.
archive::get_children_in_table() is like the previous listing a given directory but returns a vector of objects libdar::list_entry that provide detailed information about each entry, no callback is used here.

For the two first methods you have to define a callback function of the following form

void (*)(const std::string & the_path,
         const list_entry & entry,
         void *context);


This callback will receive as argument the full path of the object, a libdar::list_entry object providing much details on it and the "context" value passed as argument of archive::op_listing() or archive::get_children_of()

Last point to see before going forward with an example is this libdar::list_entry class, we will use here only a few of the rich set of fields/methods this class provides in the following examples:


  // we first create a read-mode archive object that will be used in the three following examples
 // we will also illustrate the use of libdar::archive_options_read

libdar::archive_options_read opt;

opt.set_info_details(true);
opt.set_execute("echo 'about to read slice %p/%b.%N.%e with context %c'");

libdar::archive my_backup(nullptr,   // this is user_interaction we will see further
                         
"/home/me",
                          "diff_backup",
                          "dar",
                           opt);


  // we will also need a callback function for the first two methods
  // let's define it:

void my_listing_callback(const std::string & the_path,
                         const libdar::list_entry & entry,
                         void *context)
{
    std::cout << the_path;
    if(entry.is_dir())
       std::cout << " is a directory";
    std::cout << " with permission " << entry.get_perm();
    std::cout << " located in slices " << entry.get_slices().display();
    std::cout << std::endl;
     // yep, we do not need context, this
     // is available if you need it though

    if(entry.is_eod())
    {
       // only op_listing() provides such type of object
       // which occurs when we reached the End Of Directory
       // next entry will be located in the parent directory.
       //
       // Note for op_listing: when reading a directory we recurs in it,
       // meaning that the next entry this callback will be
       // invoked for will be located in that directory
       //
       // for get_children_of() no recursion or eod object is
       // performed about directory. The next entry following
       // a directory is still located in the same parent directory
       // which when fully read stops the get_children_of() routine
       // at the difference of op_listing() which parse the whole
       // directory tree.
       //
       // For example, reading an empty directory will provide
       // that directory info, then an eod object at the next
       // callback invocation.
    }
}


8.3 - archive listing using archive::op_listing()


  // first possibility: we can pass nullptr as callback function to archive::op_listing, all will be displayed in stdout

my_backup.op_listing(nullptr, // no callback function
                     nullptr, // we don't care of context here
                     archive_options_listing()) // and use default listing options



  // second possibility: we use the callback defined above

my_backup.op_listing(my_listing_callback,
                     nullptr, // we still don't care of context here
                     archive_options_listing()) // and still the default listing options


  // in complement of both previous variant we can of course set non default listing options

libdar::archive_options_listing opt;

opt.set_filter_unsaved(true); // skip entry that have not been saved since the archive of reference
opt.set_slice_location(true); // necessary if we want to have slicing information available in the callback function
opt.set_fetch_ea(false);  // this is the default. Set it to true if you want to use list_entry::get_ea_reset_read()/get_ea_next_read()

my_backup.op_listing(my_listing_callback,
                     nullptr, // we still don't care of context here
                     opt) // and still the default listing options



8.4 - archive listing using archive::get_children_of()


  // With this method we only list one directory

my_backup.get_children_of(my_listing_callback,
                          nullptr,  // we still don't care of context here
                          "",       // we read the root directory of the archive
                          true);    // and ask for EA retrieval, but as we do not
                                    // use list_entry::get_ea_read_next() in the
                                    // callback this is just wasting CPU and memory

Of course, if you have a sub-directory /home/me/.gnupg/private-keys-v1.d in your home directory and you want to check how it is saved in the archive, as we defined the root of the backup as /home/me and as you always have to pass a relative path (no leading /), you could do that by calling the following:

my_backup.get_children_of(my_listing_callback,
                          nullptr,
                          "
.gnupg/private-keys-v1.d");                    



8.5 - archive listing using archive::get_children_in_table()


// still listing a single directory but this time without callback function:

my_backup.init_catalogue(); // necessary to read the whole catalogue into memory
                            // in particular if archive has been opened in sequential read mode

std::vector<libdar::list_entry> result = my_backup.get_children_in_table(
".gnupg/private-keys-v1.d");

// now reading the std::vector

std::vector<libdar::list_entry>::iterator it = result.begin();
while(it != result.end())
{
   if(it->is_dir())
       std::cout << " is a directory";
    std::cout << " with permission " << it->get_perm();
    std::cout << " located in slices " << it->get_slices().display();
    std::cout << std::endl;
    ++it; // do not forget to advance the iterator, else this loop never ends
}



9 - Testing an archive

As seen for the listing operation, we assume an archive object has been created in read mode. Testing the coherence of the relative archive files on disk is done by calling the archive::op_test method:

        statistics op_test(const archive_options_test & options,
                           statistics * progressive_report);


You may recognize the libdar::statistics type we saw for archive creation. It is present as argument and the provided libdar::statistics object can be read during the whole testing operation by another thread. But if you just want to know the result, you'd better just use the returned value as it makes the operation quicker due to the absence of multithread management.


   // for the exercise, we will change some default options:

archive_options_test opt;
opt.set_info_details(true); // to have a verbose output

libdar::statistics stats;
stats = my_backup.op_test(opt,       // the non default options set above
                          nullptr);  // we will just use the returned value

std::cout << stats.get_treated_str() << " file(s) tested" << std::endl;
std::cout << stats.get_errored_str() << " file(s) with errors" << std::endl;
std::cout << stats.get_ea_treated_str() << " Extended Attributes tested" << std::endl;


10 - Comparing an archive

As simple as previously, but using the archive::op_diff method:

        statistics op_diff(const path & fs_root,
                           const archive_options_diff & options,
                           statistics * progressive_report);

      

Over the type of the option field, you see the fs_root argument which define which directory of the filesystem to compare the archive to


   // for the exercise, we will change some default options:

archive_options_diff opt;
opt.set_info_details(true); // to have a verbose output
opt.set_what_to_check(libdar::comparison_fields::ignore_owner);
      // this option above will consider equal two files which
      // only change due to user or group ownership difference
      // by default any difference will be considered a difference

(void) my_backup.op_diff("/home/me",
                          opt,       // the non default options set above
                          nullptr);  // not using it for this example




11 - Isolating an archive

As simple as previously, but using the archive::op_isolate method:

        void op_isolate(const path &sauv_path,
                        const std::string & filename,
                        const std::string & extension,
                        const archive_options_isolate & options);
      
You will find similarities with the archive creation, though here this is not a constructor

sauv_path is the directory where to create the isolated version of the current archive
filename is the archive basename to create
extension should still be "dar" here too
options are options for isolation like slicing, compression, encryption similar to the archive_options_create class we saw at the beginning of this tutorial

   // for the exercise, we will change some default options:

archive_options_isolate opt;
opt.set_warn_over(false); 
   // by default overwriting is allowed but a warning is issued first
   // here overwriting will take place without warning

opt.set_compression(libdar::compression::gzip);
opt.set_compression_level(9);   // this is the default
opt.set_min_compr_size(10240);  // not trying compressing file smaller than 10 kiB


my_backup.op_isolate("/tmp",
                     "CAT_diff_backup",
                     "dar",
                      opt); // the non default options set above
 
   // have you noted? There is no libdar statistics field returned nor as argument.


12 - Restoring files from an archive

Quite as simple as previously, here we use the archive::op_extract method:

        statistics op_extract(const path &fs_root,
                              const archive_options_extract & options,
                              statistics *progressive_report);


      
fs_root is the directory under which to restore the files and directory
options defines how and what to restore
progressive_report has already been seen several time previously

   // as we still do not have seen masks, we will restore all files contained in the backup
   // such mask would be provided to the
   // archive_options_extract::set_selection() and/or
   // to the archive_options_extract::set_subtree() methods
   // to precisely define what files to restore

archive_options_extract opt;

opt.set_dirty_behavior(false, false); // dirty files are not restored

(void) my_backup.op_extract("/home/me/my_home_copy",
                            opt,
                            nullptr); // we have seen previously how to use statistics
 



13 - Merging archives

Here we will need two archive objects open in read-mode and we will invoke a specific archive constructor passing these two objects as argument, once the constructor will have completed the merging operation will be done:


        archive(const std::shared_ptr<user_interaction> & dialog,
                const path & sauv_path,
                std::shared_ptr<archive> ref_arch1,
                const std::string & filename,
                const std::string & extension,
                const archive_options_merge & options,
                statistics * progressive_report);
      
dialog will still be set to a null pointer for now
sauv_path is the directory where to create the resulting merging archive
ref_arch1 is the first (and mandatory) archive, the second is optional and may be given to the options argument
filename is the resulting archive basename
extension as always should be set to "dar"
options is a set of optional parameters
progressive_report is as seen above the ability to have another thread showing progression info during the operation

  // assuming you have two backups:
  // the first is /tmp/home_backup.*.dar
  // the second is /var/tmp/system_backup.*.dar
  // we will create /tmp/merged.*.dar as result of the merging
  // of these two backups

  // 1 - first things first: opening the first backup

libdar::archive_options_read opt;

opt.set_info_details(true);
opt.set_execute("echo 'about to read slice %p/%b.%N.%e with context %c'");

std::shared_ptr<libdar::archive> home_backup(new libdar::archive(nullptr,   // this is user_interaction we will see further
                                                      "/tmp",
                                                      "home_backup",
                                                      "dar",
                                                       opt));


  // 2 - opening the second backup

std::shared_ptr<libdar::archive> system_backup(new libdar::archive(nullptr,
                                                       "/var/tmp",
                                                       "system_backup",
                                                       "dar",
                                                       opt));

  // 3 - setting up the options for merging

libdar::archive_options_merge opt_merge;

opt_merge.set_auxiliary_ref(system_backup);
opt_merge.set_slicing(1048576, 0); // all slice would have 1 MiB at most
opt_merge.set_compression(libdar::compression::bzip2);
opt_merge.set_keep_compressed(true);
opt_merge.set_user_comment("archive resulting of the merging of home_backup and system_backup");
opt_merge.set_hash_algo(libdar::hash_algo::sha512); // will generate on-fly hash file for each slice

  // 4 - now performing the merging operation

libdar::archive merged(nullptr,  // still the user_interaction we will see further
                       "/tmp",
                       home_backup,
                       "merged",
                       "dar",
                       opt_merge,
                       nullptr);  // progressive_report we don't use here




14 - Decremental backup

Decremental backup is an operation that from two full backups an old and a recent one creates a backward differential backup corresponding to the old full backup based on the new full backup. In other words, instead of keeping two full backups, you can keep the latest and replace the oldest by its decremental counterpart. This will save you space while letting you restore as if you had the old full backup by restoring first the recent backup then the decremental backup.

Creating a decremental backup is exactly the same as creating a merging backup, you need just to set the archive_options_merge::set_decremental_mode() before proceeding to the merging. To avoid duplication we will just illustrate the last step of the previous operation modified for decremental backup:

  // [...]

libdar::archive_options_merge opt_merge;

  // [...]
opt_merge.set_decremental_mode(true);

  // 4 - now performing the merging operation (here decremental backup)

libdar::archive merged(nullptr,  // still the user_interaction we will see further
                       "/tmp",
                       home_backup,
                       "merged",
                       "dar",
                       opt_merge,
                       nullptr);  // progressive_report we don't use here




15 - Archive repairing

If an archive has been truncated due to lack of disk space and if sequential marks (aka tape marks) had not been disabled, it is possible to rebuild a sane archive beside this truncated one.

We just need to invoke a specific libdar::archive constructor which form follows:

        archive(const std::shared_ptr<user_interaction> & dialog,
                const path & chem_src,
                const std::string & basename_src,
                const std::string & extension_src,
                const archive_options_read & options_read,
                const path & chem_dst,
                const std::string & basename_dst,
                const std::string & extension_dst,
                const archive_options_repair & options_repair);


You should now be familiarized with the different types and variable uses. As you can note, this constructor takes in charge the work to read the damaged archive, so you won't have to do it first. As always, this constructor will end only once the operation will have completed.


  // assuming the archive /tmp/home_backup.*.dar is damaged
  // and you want to have repaired archive as /tmp/home_backup_repaired.*.dar

libdar::archive repaired(nullptr,   // still the user_interaction we have not yet seen
                         "/tmp",
                         "home_backup",
                         "dar",
                         archive_options_read(),
                         "/tmp",
                         "home_backup_repaired",
                         "dar",
                         archive_options_repair());

  // we have not done fancy things with the two option classes, but we did above
  // enough time for you get all the necessary information from the API reference
  // documentation




16 - Looking at some details

we have covered the different operations the class libdar::archive can be used for, still remains some concepts to details:
  • user_interaction
  • masks
  • how to cleanly interrupt a running libdar routine
  • how to know which compile-time features have been activated
Then we will see the three other more simple classes :
  • class database
  • class libdar_slave
  • class libdar_xform
For now, maybe you remember that we had to initialize libdar before use, by calling libdar::get_version()? This routine also exists with arguments that will provide as its name suggests the libdar version:

void get_version(U_I & major, U_I & medium, U_I & minor, bool init_libgcrypt = true);

It is advised to use this form to fetch the libdar version major, medium and minor numbers for the following reasons:

you should check that the library you've dynamically linked with is compatible with the features you will be using. The major number must be the same, for no compatibility is assured between two libdar versions of different major numbers. While run-time compatibility is assured between medium numbers, the medium number must be greater or equal to the one used at compilation time to be sure that all the features you want are available in the libdar library you dynamically linked with. Changes between minor versions correspond to bug fixes and is not to imply any API change, thus no constraints is present there (just note the presence of more bugs in lower numbers).

If you use libgcrypt beside libdar in your application you should initialize libgcrypt yourself and not let it be done by libdar; the last argument of this form should be set to false in that case. Note that libgcrypt documentation indicates that libgcrypt must be initialized directly from the application, not from an intermediate library.

Follows an example of test that can be performed while initializing libdar:

U_I major, medium, minor;

libdar::get_version(major, medium, minor);

if(major != libdar::LIBDAR_COMPILE_TIME_MAJOR ||
   medium < libdar::LIBDAR_COMPILE_TIME_MEDIUM)
{
    std::cout << "libdar version we link with is too old for this code" << std::endl;
    // throw an exception or anything else appropriate to that condition
}


17 - checking compile-time features activation

once we have called one of the get_version* function it is possible to access the list of features activated at compilation time thanks to a set of function located in the compile_time nested namespace inside libdar:



void my_sample_function()
{
      
bool ea = libdar::compile_time::ea();
bool largefile = libdar::compile_time::largefile();
bool nodump = libdar::compile_time::nodump();
bool special_alloc = libdar::compile_time::special_alloc();
U_I bits = libdar::compile_time::bits();
// bits is equal to zero for infinint,
// else it is equal to 32 or 64 depending on
// the compilation mode used.

bool thread = libdar::compile_time::thread_safe();
bool libz = libdar::compile_time::libz();
bool libbz2 = libdar::compile_time::libbz2();
bool liblzo = libdar::compile_time::liblzo();
bool libxz = libdar::compile_time::libxz();
bool libcrypto = libdar::compile_time::libgcrypt();
bool furtive_read = libdar::compile_time::furtive_read();

   // for details see the compile_time namespace in the API reference documentation

}


18 - User Interaction

we have seen std::shared_pointer on class libdar::user_interaction previously but did not used this feature.

18.1 - Defining your own user_interaction class

class libdar::user_interaction defines the way libdar interact with the user during an operation, like an archive creation, restoration, testing and so on. Only four types of interaction are used by libdar:

        void message(const std::string & message);
        void pause(const std::string & message);
        std::string get_string(const std::string & message, bool echo);
        secu_string get_secu_string(const std::string & message, bool echo);


By default an inherited class of libdar::user_interaction called libdar::shell_interaction is used and implements these four types of exchange by means of a text terminal:
  • message() sends the std::string provided by libdar to stdout
  • pause() does the same and ask for the user to press either return or escape to answer yes or no
  • get_string() reads a string from stdin
  • get_secu_string() reads a string into a secu_string object from stdin too
For a GUI you will probably not want stdin and stdout to be used. For that you have the possibility to implement your own inherited class from user_interaction. It should look like the following:


class my_user_interaction: public libdar::user_interaction
{
protected:
      // display of informational message
   virtual void inherited_message(const std::string & message) override;

      // display of a question and returns the answer from user as true/false
   virtual bool inherited_pause(const std::string & message) override;

      // display the message and returns a string from the user, with or without display what the user typed (echo)
   virtual std::string inherited_get_string(const std::string & message, bool echo) override;

      // same as the previous but the user provided string is returned as a secu_string
   virtual secu_string inherited_get_secu_string(const std::string & message, bool echo) override;
};


18.2 - Relying on the pre-defined user_interaction_callback class

As an alternative to defining your own inherited class from libdar::user_interaction, libdar provides a class called user_interaction_callback which is an implementation of the user interaction, based on callback functions.

You will need to implement four callback functions:

        using message_callback = void (*)(const std::string &x, void *context);
        using pause_callback = bool (*)(const std::string &x, void *context);
        using get_string_callback = std::string (*)(const std::string &x, bool echo, void *context);
        using get_secu_string_callback = secu_string (*)(const std::string &x, bool echo, void *context);


Then you can create an libdar::user_interaction_callback object using this constructor:

        user_interaction_callback(message_callback x_message_callback,
                                  pause_callback x_answer_callback,
                                  get_string_callback x_string_callback,
                                  get_secu_string_callback x_secu_string_callback,
                                  void *context_value);


Here follows an example of use:

void my_message_cb(const std::string & x, void *context)
{
    cout << x << endl;
}

bool my_pause_cb(const std::string & x, void *context)
{
   char a;

   cout << x << endl;
   cin >> a;
   return a == 'y';
}

std::string my_string_cb(const std::string & x, bool echo, void *context)
{
  // to be defined
}

libdar::secu_string my_secu_string_cb(const std::string & x, bool echo, void *context)
{
  // to be defined
}

   // eventually using a context_value that will be passed to the callback of the object
void *context_value = (void *)(& some_datastructure);

std::shared_ptr<libdar::user_interaction> my_user_interaction(new libdar::user_interaction_callback(my_message_cb,
                                                                                                    my_pause_cb,
                                                                                                    my_string_cb,
                                                                                                    my_secu_string_cb,
                                                                                                    context_value));


You will also find predefined classes like libdar::user_interaction_blind which always answers no in the name of the user, displays nothing and provides empty strings, as well as libdar::shell_interaction_emulator which, given a user_interaction object, sends to it formatted information as if it was a shell_interaction object, letting one emulate libdar's default behavior under any type of "terminal".

IMPORTANT: all libdar::user_interaction inherited classes provided by libdar are not designed to be manipulated by more than one thread at a time. The use of std::shared_ptr is only here to let the caller not have to manage such object and let libdar release it when no more needed or to let the caller to reuse the same user_interaction object for a subsequent call to libdar which would not be possible if a std::unique_ptr was used.

Now if you design your own user_interaction inherited class and provide it with mechanisms (mutex, ...) that allow it to be used simultaneously by several threads, there is no issue passing such an object as argument to different libdar objects used by different threads.


19 - Masks

Mask are used to define which string will be considered and which will not. Libdar implements masks as several classes that all inherit from a virtual class that defines the way masks are used. This root class is the class mask and provides the mask::is_covered(const std::string & expression) method which libdar uses to determine which string are considered for an operation which are not.

Strings applied to masks may correspond to filename only, to full path or maybe to other things. That's in the context where the mask is used that the string meaning take place.

 There are several different basic mask classes you can use to build fairly complex masks. While it is possible, you should not need to define your own mask classes; if the need arises, please contact the libdar developer if you think an additional class should take place beside the following ones:

class libdar::bool_mask
boolean mask, either always true or false, it matches either all files or no files at all
class libdar::simple_mask
matches as done by the shell on the command lines (see "man 7 glob")
class libdar::regular_mask
matches regular expressions (see "man 7 regex")
class libdar::not_mask
negation of another mask
class libdar::et_mask
makes an *AND* operator between two or more masks
class libdar::ou_mask
makes the *OR* operator between  two or more masks
class libdar::simple_path_mask

matches if it is subdirectory of mask or is a directory that contains the specified path itself

class libdar::same_path_mask
matches if the string is exactly the given mask (no wild card expression)
class libdar::exclude_dir_mask
matches if string is the given string or a sub directory of it
class libdar::mask_list
matches a list of files defined in a given file

Let's play with some masks :


      // all files will be elected by this mask
  libdar::bool_mask m1(true);   

      // all string that match the glob expression "A*~" will match.
      // the second argument of the constructor tell whether the match is case sensitive so here
      // any file beginning by 'A' or by 'a' and ending by '~' will be selected by this mask:
  libdar::simple_mask m2(std::string("A*~"), false);

      // m3 is the negation if m2. This mask will thus match
      // any string that does not begin by 'A' or 'a' or finishing by '~'
  libdar::not_mask m3(m2);

      // this mask matches any string that is a subdirectory of "/home/joe"
      // and any directory that contains /home/joe, meaning
      // "/", "/home", "/home/joe" and any subdirectory are matched.
      // here, the second argument is also case sensitivity (so
      //  "/HoMe" will not be selected by this mask as we set it to "true").
  libdar::simple_path_mask m4 = simple_path_mask("/home/joe", true);

      // now let's do some more complex things:
      // m5 will now match only strings that are selected by both m2 AND m4
  libdar::et_mask m5;
  m5.add_mask(m2);
  m5.add_mask(m4);
     
      // we can make more interesting things like this, where m5 will select files
      // that match m2 AND m4 AND m3. But m3 = not m2 so now m5 will never
      // match any file...
  m5.add_mask(m3);

      // but we could do the same with an "ou_mask" and would get a silly
      // counterpart of m1 (a mask that matches any files)
  libdar::ou_mask m6;
  m6.add_mask(m2);
  m6.add_mask(m4);
  m6.add_mask(m3);

      // lastly, the NOT, AND and OR operation can be used recursively.
      // Frankly, it's possible to have masks referring each other!
  libdar::not_mask m7(m6);
  m6.add_mask(m7);



The idea here is not to create object manually, but to link their creation to the action and choices the user makes from the user interface (Graphical User Interface of your application, for example)

Now that you've seen the power of these masks, you should know that in libdar masks are used at several places:
  • A first place is to select files against their names (without path information) this the argument of the set_selection() method of libdar::archive_options_* classes. The mask here does not apply to directories.
  • A second place is to select files against their path+name and it applies here to all types of files including directories, this is the argument of the set_subtree() method of libdar::archive_options_* classes. So with it, you can prune directories, or in any other way restrict the operation to a particular subdirectory, as well as to a particular plain file for example. Important note about this second mask: what your own mask will be compared to by libdar is the absolute path of the file under consideration. If you want to exclude /usr/local/bin from the operation whatever is the fs_root value (which corresponds to the -R option of dar) using here a libdar::simple_mask("/usr/local/bin") as argument of libdar::archive_options_*::set_subtree() will do the trick.
An exception is the archive testing operation, which has no fs_root argument (because the operation is not relative to an existing filesystem), however the subtree argument exist to receive a mask for comparing the path of file to include or exclude from the testing operation. In this case the situation is as if the fs_root was set to the value "<ROOT>". For example, masks will be compared to "<ROOT>/some/file" when performing an archive test operation.

instead of using explicit string "<ROOT>" you can use libdar::PSEUDO_ROOT const std::string variable
  • A third place concerns Extended Attributes (EA), this is the argument of the set_ea_mask() method of archive_options classes. It is applied to the full EA name in the form <domain>.<name> where <domain> is any string value like but not limited to the usual "user" or "system" domains.
  • A fourth place concerns the file to compress or to avoid compressing. This is the argument of the set_compr_mask() method of libdar::archive_options_* classes. it is works the same as set_selection() seen above, based only to filename without any path consideration.
  • A fifth place concerns files that need to be prepared for backup, this is the argument of the set_backup_hook() method of the libdar::archive_option_create class. It has to be used the same as set_subtree(). For more about this feature see the backup-hook feature in dar man page (-<, -> and -= options).


20 - Aborting an Operation

If the POSIX thread support is available, libdar will be built in a thread-safe manner, thus you may have several threads using libdar calls at the same time (but on different objects except concerning the libdar::statistics which can be shared between threads). You may then wish to interrupt a given thread. But aborting a thread from the outside (like sending it a KILL signal) will most of the time leave some memory allocated or even worse can lead to a dead-lock situation, when the killed thread was inside a critical section and had not got the opportunity to release a mutex. For that reason, libdar proposes a set of calls to abort any processing libdar call which is run by a given thread.

   // next is the thread ID in which we want to have libdar calls canceled
   // here for simplicity we don't describe the way the ID has been obtained
   // but it could be for example the result of a call to pthread_self() as
   // defined in <pthread.h> system header file
pthread_t thread_id = 161720;
  
   // the most simple call is:
libdar::cancel_thread(thread_id);
   // this will make any libdar call in this thread be canceled immediately

   // but you can use something a bit more interesting:
libdar::cancel_thread(thread_id, false);
   // this second argument is true for immediate cancellation,
   // of false for a delayed cancellation, in which case libdar aborts the operation
   // but produces something usable. For example, if you were backing up something
   // you get a real usable archive which only contains files saved so far, in place
   // of having a broken archive which misses a catalogue at the end. Note that this
   // delayed cancellation needs a bit more time to complete, depending on the
   // size of the archive under process.


As seen above, cancellation can be very simple. Now what happens when you ask for a cancellation this way? Well, an exception of type Ethread_cancel is thrown. All along its path, memory is released and mutexes are freed. Last, the exception appears to the libdar caller. So, you can catch it to define a specific behavior. And if you don't want to use exceptions a special returned code is used.

try
{
   
libdar::archive my_arch(...);
    ...
}
catch(libdar::Ethread_cancel & e)
{
    ... do something specific when thread has been canceled;
}



Some helper routines are available to know the cancellation status for a particular thread or to abort a cancellation process if it has not yet been engaged.

 pthread_t tid;
  
   // how to know if the thread tid is under cancellation process?
if(libdar::cancel_status(tid))
     cout << "thread cancellation is under progress for thread : " << tid << endl;
else
     cout << "no thread cancellation is under progress for thread : " << tid << endl;

   // how to cancel a pending thread cancellation ?
if(libdar::cancel_clear(tid))
    cout << "pending thread cancellation has been reset, thread " << tid << " has not been canceled" << endl;
else
   cout << "too late, could not avoid thread cancellation for thread "<< tid << endl;


Last point, back to the Ethread_cancel exception, this class has two methods you may find useful, when you catch it:

try
{
   ... some libdar calls
}
catch(libdar::Ethread_cancel & e)
{
   if(e.immediate_cancel())
       cout << "cancel_thread() has been called with \"true\" as second argument" << endl;
   else
      cout << "cancel_thread() has been called with \"false\" as second argument" << endl;

   U64 flag = e.get_flag();
    ... do something with the flag variable...
}

    // what is this flag stored in this exception?
    // You must consider that the complete definition of cancel_thread() is the following:
    // void cancel_thread(pthread_t tid, bool immediate = true, U_64 flag = 0);
   
// thus, any argument given in third is passed to the thrown Ethread_cancel exception,
    // value which can be retrieved thanks to its get_flag() method. The value given to this
    // flag is not used by libdar itself, it is a facility for user program to have the possibility
    // to include additional information about the thread cancellation.

    // supposing the thread cancellation has been invoked by:
libdar::cancel_thread(thread_id, true, 19);
   // then the flag variable in the catch() statement above would have received
   // the value 19.




21 - Dar_manager API


For more about dar_manager, please read the man page where are described in detail the available features. Note that for dar_manager there is not a "without exception" flavor, your program must be able to handle exceptions, which by the way are the same as the ones described above.

To get dar_manager features you need to use the class database which is defined in the libdar/database.hpp header file so you first need to include that file. Most of the methods of the database class do use options. For the same reason as previously seen for archive manipulation, these options are passed thanks to a container class. These container classes for options used by the database class are defined in the libdar/database_options.hpp file. Let's see the different method of the class database :

Database object construction

Two constructor are available. The first creates a brand-new but empty database in memory

database(const std::shared_ptr<user_interaction> & dialog);

As seen for libdar::archive dialog can be set to a null pointer if the default interaction mode (stdin/stdout/stderr) suits your need.
The second constructor opens an existing database from filesystem and stores its contents into memory ready for further use.

database(const std::shared_ptr<user_interaction> & dialog,
                 const std::string & base,
                 const database_open_options & opt);


  • dialog here to can be set to a null pointer or can point to an user_interaction object of your own
  • base is the path and filename of the database to read
  • opt is an object containing a few options. As seen with libdar::archive we can use a default temporary object to use default options

    database base(nullptr);
       // we have created an empty database (no archive in it) called "base"

    database other  = database(nullptr,
                               "/tmp/existing_base.dmd",
                               database_open_options());
      // we have created a database object called "other" which contains
      // (in RAM) all information that were contained in the
      // database file "/tmp/existing_base.dmd"
    

    database_open_options opt;
    opt.set_partial(true);
    opt.set_warn_order(false);
    database other2 = database(nullptr,
                               "/tmp/existing_base.dmd",
                               opt);
      // we have created yet another database object called "other2" which differs
      // from "other" by the option we used. While "other" is a fully loaded
      // database, "other2" is a partial database. This notion is explained
      // below


  • database_open_options::set_partial(bool value) leads dar to only load the database header into memory, which is quicker than loading the full database. But some operations we will see below need a fully loaded database, the others can work with both
  • database_open_options::set_partial_read_only(bool value) in addition to have only the header the archive is open in read-only mode which of course forbids any modification to the database but is even faster than just a partial read-write database. For just database listing this is perfectly adapted.
  • database_open_options::set_warn_order(bool value) avoid warning about ordering problem between archive
In the following we will indicate whether a database operation can be applied to a partially loaded database or not. All operations can be applied to a fully loaded database.

Database's methods

First we will see methods that can work with a partial and read-only database
  • show_contents() : list the archives used to build the database
  • get_options() : list the options that will be passed to dar (as defined with the set_options() method)
  • get_dar_path() : return the path to dar (or empty string if relying on the PATH variable)
Second we will see methods that can work with  partially loaded databases:
  • all methods seen above
  • dump(...) : it is used to write back the database to a file.
  • change_name() : change the basename of the archive which index is given in argument
  • set_path() : change the path to the archive which index is given in argument
  • set_options() : change the default options to always pass to dar when performing restoration
  • set_dar_path() : specify the path to dar (use empty string to rely on the PATH variable)
last, let's see the database methods that can work with completely loaded databases:
  • all methods seen above
  • add_archive() : add an archive to the database
  • remove_archive() : remove an archive from the database
  • set_permutation() : change archive relative order within the database
  • show_files() : list the files which are present in the given archive
  • show_version() : list the archive where the given file is saved
  • show_most_recent_stats() :  compute statistics about the location of most recent file versions
  • restore() : restore a set of given files given in argument.
Well, you might now say that as description this is a bit light for a tutorial, yes. In fact these call are really very simple to use, you can find a complete description in the API reference documentation. This documentation is built if doxygen is available and is put under doc/html after calling make in the source package. It is also available from dar's homepage.



22 - dar_slave API

dar_slave role is to read an archive while interacting with a dar process through a pair of pipes. Dar asks portion of the archive or information about the archive in the first pipe from dar to dar_slave. And dar_slave sends the requested information into the other pipe toward dar.

Since API 6.0.x, dar_slave has an API. It is implemented by the class libdar::libdar_slave. You first need to create an object using the following constructor:

libdar_slave(std::shared_ptr<user_interaction> & dialog,
             const std::string & folder,
             const std::string & basename,
             const std::string & extension,
             bool input_pipe_is_fd,
             const std::string & input_pipe,
             bool output_pipe_is_fd,
             const std::string & output_pipe,
             const std::string & execute,
             const infinint & min_digits);

  • dialog as seen for other libdar classes can be set to a null pointer for interaction on stdin and stdout
  • folder is the directory where resides the archive to read
  • basename is the basename of the archive
  • extension should always be set to "dar"
  • input_pipe_is_fd if set to true, the next argument is not the path to a named pipe but a number corresponding to a file descriptor open in read mode
  • input_pipe is the path of a named pipe to read from. It can also be an empty string to use stdin as input pipe
  • output_pipe_is_fd if set to true, the next argument is not the path to a named pipe but a number corresponding to a file descriptor open in write mode
  • output_pipe is the path of a named pipe to write to. It can also be an empty string to use stdout as output pipe
Once the object is created, you will need to call the libdar_slave::run() method, which will end when the dar process at the other end no longer needs this slave

libdar::libdar_slave slave(nullptr,
                           "/tmp",
                           "first_backup",
                           "dar",
                           false,
                           "/tmp/toslave", // assuming this is an existing named pipe
                           false,
                           "/tmp/todar",   // assuming this is also an existing named pipe
                           "echo 'reading slice %p/%b.%N.%e in context %c'",
                           0);

slave.run();
 
  // once run() has returned, you can launch it again for another process or the same
  // one as previously has access to the /tmp/first_backup.*.dar archive



23 - dar_xform API

dar_xform creates a copy of a given archive modifying its slicing. It does not require decompressing nor deciphering the archive to do so. There are different constructors depending on whether the archive is read from filesystem, from a named pipe or from a provided file descriptor

Reading from a file

libdar_xform(const std::shared_ptr<user_interaction> & ui,
             const std::string & chem,
             const std::string & basename,
             const std::string & extension,
             const infinint & min_digits,
             const std::string & execute);

  • ui as seen for other libdar classes can be set to a null pointer for interaction on stdin and stdout
  • chem is the directory where resides the archive to read
  • basename is the basename of the archive
  • extension should always be set to "dar"
  • min_digits is the minimum number of digits slice number in filename have been created with (use zero if you don't know what it is)
Reading from a named pipe

libdar_xform(const std::shared_ptr<user_interaction> & dialog,
             const std::string & pipename);
  • dialog as seen for other libdar classes, it can be set to nullptr
  • pipename complete path to the named pipe to read the archive from
Reading from a file descriptor

libdar_xform(const std::shared_ptr<user_interaction> & dialog,
             int filedescriptor);
  • dialog same as above
  • filedescriptor is a file descriptor open in read mode to read the archive from
Once the libdar::libdar_xform object is created it can copy the referred archive to another location in another form thanks to one of the two libdar_xform::xform_to methods. There is no link between the constructor used and the libdar_xform::xform_to flavor used, any combination is possible.

Creating a single or multi-sliced archive on filesystem

void xform_to(const std::string & path,
              const std::string & basename,
              const std::string & extension,
              bool allow_over,
              bool warn_over,
              const infinint & pause,
              const infinint & first_slice_size,
              const infinint & slice_size,
              const std::string & slice_perm,
              const std::string & slice_user,
              const std::string & slice_group,
              hash_algo hash,
              const infinint & min_digits,
              const std::string & execute);

Creating a single sliced archive toward a filedescriptor

void xform_to(int filedescriptor,
              const std::string & execute);

Here follows an example of use. We will convert a possibly multi-sliced archive to a single slice one generating a sha512 hash file on-fly

libdar::libdar_xform transform(nullptr,
                               "/tmp",
                               "my_first_archive",
                               "dar",
                               0,
                               "echo 'reading slice %p/%b.%N.%e context is %c'");

transform.xform_to("/tmp",
                   "my_other_first_archive",
                   "dar",
                   false,     // no overwriting allowed
                   true,      // does not matter whether we warn or not as we do not allow overwriting
                   0,         // no pause between slices
                   0,         // no specific first slice
                   0,         // no slicing at all (previous argument is thus not used anyway)
                   "",        // using default permission for created slices
                   "",        // using default user ownership for created slices
                   "",        // using default group ownership for created slices
                   libdar::hash_algo::sha512, // the hash algo to use (for no hashing use hash_none instead)
                   0,         // min_digits ... not using this feature here
                   "echo 'Slice %p/%b.%N.%e has been written. Context is %c'");





24 - Compilation & Linking

Compilation

All the symbols found in the libdar API except the one relative to dar_manager (see below) are defined from <dar/libdar.h>. So you should only need to include this header. If the header file is not located in a standard directory, in order to compile your code, you may need some extra flags to pass to the compiler (like -I/opt/...). The pkg-config tool can help here to avoid system dependent invocation:


> cat my_prog.cpp

#include <dar/libdar.h>

main()
{
   libdar::get_version(...);
   ...
}

> gcc `pkg-config --cflags libdar` -c my_prog.cpp



Linking


Of course, you need to link your program with libdar. This is done by adding -ldar plus other libraries libdar can rely on like libz, libbzip2, liblzo or libgcrypt, depending on the features activated at compilation time. Here too, pkg-config can provide a great help to avoid having system dependent invocation:


> gcc `pkg-config --libs libdar` my_prog.o -o my_prog


Libdar's different flavors


Well, all the compilation and linking steps described above assume you have a "full" libdar library. Beside the full (alias infinint) libdar flavor, libdar also comes in 32 and 64 bits versions. In these last ones, in place of internally relying on a special type (which is a C++ class called infinint) to handle arbitrary large integers, libdar32 relies on 32 bits integers and libdar64 relies on 64 bits integers (there are limitations which are described in doc/LIMITATIONS). But all these libdar versions (infinint, 32bits, 64bits) have the same interface and must be used the same way, except for compilation and linking.

These different libdar versions can coexist on the same system, they share the same include files. But the LIBDAR_MODE macro must be set to 32 or 64 when compiling or linking with libdar32 or libdar64 respectively. The LIBDAR_MODE macro defines the way the "class infinint" type is implemented in libdar, and thus changes the way the libdar headers files are interpreted by the compiler. pkg-config --cflags will set the correct LIBDAR_MODE, so you should only bother calling it with either libdar, libdar32 or libdar64 depending on your need : "pkg-config --cflags libdar64" for example.

> cat my_prog.cpp
#include <dar/libdar.h>

main()
{
   libdar::get_version(...);
   ...
}
> gcc -c `pkg-config --cflags libdar32` my_prog.cpp


> gcc `pkg-config --libs libdar32` my_prog.o -o my_prog


and replace 32 by 64 to link with libdar64.




dar-2.6.8/doc/Doxyfile0000644000175000017520000031221013552573442011504 00000000000000# Doxyfile 1.8.8 # This file describes the settings to be used by the documentation system # doxygen (www.doxygen.org) for a project. # # All text after a double hash (##) is considered a comment and is placed in # front of the TAG it is preceding. # # All text after a single hash (#) is considered a comment and will be ignored. # The format is: # TAG = value [value, ...] # For lists, items can also be appended using: # TAG += value [value, ...] # Values that contain spaces should be placed between quotes (\" \"). #--------------------------------------------------------------------------- # Project related configuration options #--------------------------------------------------------------------------- # This tag specifies the encoding used for all characters in the config file # that follow. The default is UTF-8 which is also the encoding used for all text # before the first occurrence of this tag. Doxygen uses libiconv (or the iconv # built into libc) for the transcoding. See http://www.gnu.org/software/libiconv # for the list of possible encodings. # The default value is: UTF-8. DOXYFILE_ENCODING = UTF-8 # The PROJECT_NAME tag is a single word (or a sequence of words surrounded by # double-quotes, unless you are using Doxywizard) that should identify the # project for which the documentation is generated. This name is used in the # title of most generated pages and in a few other places. # The default value is: My Project. PROJECT_NAME = "Disk ARchive" # The PROJECT_NUMBER tag can be used to enter a project or revision number. This # could be handy for archiving the generated documentation or if some version # control system is used. PROJECT_NUMBER = "##VERSION##" # Using the PROJECT_BRIEF tag one can provide an optional one line description # for a project that appears at the top of each page and should give viewer a # quick idea about the purpose of the project. 
Keep the description short. PROJECT_BRIEF = "Full featured and portable backup and archiving tool" # With the PROJECT_LOGO tag one can specify an logo or icon that is included in # the documentation. The maximum height of the logo should not exceed 55 pixels # and the maximum width should not exceed 200 pixels. Doxygen will copy the logo # to the output directory. PROJECT_LOGO = doc/dar_doc.jpg # The OUTPUT_DIRECTORY tag is used to specify the (relative or absolute) path # into which the generated documentation will be written. If a relative path is # entered, it will be relative to the location where doxygen was started. If # left blank the current directory will be used. OUTPUT_DIRECTORY = ./doc # If the CREATE_SUBDIRS tag is set to YES, then doxygen will create 4096 sub- # directories (in 2 levels) under the output directory of each output format and # will distribute the generated files over these directories. Enabling this # option can be useful when feeding doxygen a huge amount of source files, where # putting all generated files in the same directory would otherwise causes # performance problems for the file system. # The default value is: NO. CREATE_SUBDIRS = NO # If the ALLOW_UNICODE_NAMES tag is set to YES, doxygen will allow non-ASCII # characters to appear in the names of generated files. If set to NO, non-ASCII # characters will be escaped, for example _xE3_x81_x84 will be used for Unicode # U+3044. # The default value is: NO. ALLOW_UNICODE_NAMES = NO # The OUTPUT_LANGUAGE tag is used to specify the language in which all # documentation generated by doxygen is written. Doxygen will use this # information to generate all constant output in the proper language. 
# Possible values are: Afrikaans, Arabic, Armenian, Brazilian, Catalan, Chinese, # Chinese-Traditional, Croatian, Czech, Danish, Dutch, English (United States), # Esperanto, Farsi (Persian), Finnish, French, German, Greek, Hungarian, # Indonesian, Italian, Japanese, Japanese-en (Japanese with English messages), # Korean, Korean-en (Korean with English messages), Latvian, Lithuanian, # Macedonian, Norwegian, Persian (Farsi), Polish, Portuguese, Romanian, Russian, # Serbian, Serbian-Cyrillic, Slovak, Slovene, Spanish, Swedish, Turkish, # Ukrainian and Vietnamese. # The default value is: English. OUTPUT_LANGUAGE = English # If the BRIEF_MEMBER_DESC tag is set to YES doxygen will include brief member # descriptions after the members that are listed in the file and class # documentation (similar to Javadoc). Set to NO to disable this. # The default value is: YES. BRIEF_MEMBER_DESC = YES # If the REPEAT_BRIEF tag is set to YES doxygen will prepend the brief # description of a member or function before the detailed description # # Note: If both HIDE_UNDOC_MEMBERS and BRIEF_MEMBER_DESC are set to NO, the # brief descriptions will be completely suppressed. # The default value is: YES. REPEAT_BRIEF = YES # This tag implements a quasi-intelligent brief description abbreviator that is # used to form the text in various listings. Each string in this list, if found # as the leading text of the brief description, will be stripped from the text # and the result, after processing the whole list, is used as the annotated # text. Otherwise, the brief description is used as-is. If left blank, the # following values are used ($name is automatically replaced with the name of # the entity):The $name class, The $name widget, The $name file, is, provides, # specifies, contains, represents, a, an and the. 
ABBREVIATE_BRIEF = "The $name class" \ "The $name widget" \ "The $name file" \ is \ provides \ specifies \ contains \ represents \ a \ an \ the # If the ALWAYS_DETAILED_SEC and REPEAT_BRIEF tags are both set to YES then # doxygen will generate a detailed section even if there is only a brief # description. # The default value is: NO. ALWAYS_DETAILED_SEC = NO # If the INLINE_INHERITED_MEMB tag is set to YES, doxygen will show all # inherited members of a class in the documentation of that class as if those # members were ordinary class members. Constructors, destructors and assignment # operators of the base classes will not be shown. # The default value is: NO. INLINE_INHERITED_MEMB = NO # If the FULL_PATH_NAMES tag is set to YES doxygen will prepend the full path # before files name in the file list and in the header files. If set to NO the # shortest path that makes the file name unique will be used # The default value is: YES. FULL_PATH_NAMES = NO # The STRIP_FROM_PATH tag can be used to strip a user-defined part of the path. # Stripping is only done if one of the specified strings matches the left-hand # part of the path. The tag can be used to show relative paths in the file list. # If left blank the directory from which doxygen is run is used as the path to # strip. # # Note that you can specify absolute paths here, but also relative paths, which # will be relative from the directory where doxygen is started. # This tag requires that the tag FULL_PATH_NAMES is set to YES. STRIP_FROM_PATH = # The STRIP_FROM_INC_PATH tag can be used to strip a user-defined part of the # path mentioned in the documentation of a class, which tells the reader which # header file to include in order to use a class. If left blank only the name of # the header file containing the class definition is used. Otherwise one should # specify the list of include paths that are normally passed to the compiler # using the -I flag. 
STRIP_FROM_INC_PATH = # If the SHORT_NAMES tag is set to YES, doxygen will generate much shorter (but # less readable) file names. This can be useful is your file systems doesn't # support long names like on DOS, Mac, or CD-ROM. # The default value is: NO. SHORT_NAMES = NO # If the JAVADOC_AUTOBRIEF tag is set to YES then doxygen will interpret the # first line (until the first dot) of a Javadoc-style comment as the brief # description. If set to NO, the Javadoc-style will behave just like regular Qt- # style comments (thus requiring an explicit @brief command for a brief # description.) # The default value is: NO. JAVADOC_AUTOBRIEF = NO # If the QT_AUTOBRIEF tag is set to YES then doxygen will interpret the first # line (until the first dot) of a Qt-style comment as the brief description. If # set to NO, the Qt-style will behave just like regular Qt-style comments (thus # requiring an explicit \brief command for a brief description.) # The default value is: NO. QT_AUTOBRIEF = NO # The MULTILINE_CPP_IS_BRIEF tag can be set to YES to make doxygen treat a # multi-line C++ special comment block (i.e. a block of //! or /// comments) as # a brief description. This used to be the default behavior. The new default is # to treat a multi-line C++ comment block as a detailed description. Set this # tag to YES if you prefer the old behavior instead. # # Note that setting this tag to YES also means that rational rose comments are # not recognized any more. # The default value is: NO. MULTILINE_CPP_IS_BRIEF = NO # If the INHERIT_DOCS tag is set to YES then an undocumented member inherits the # documentation from any documented member that it re-implements. # The default value is: YES. INHERIT_DOCS = YES # If the SEPARATE_MEMBER_PAGES tag is set to YES, then doxygen will produce a # new page for each member. If set to NO, the documentation of a member will be # part of the file/class/namespace that contains it. # The default value is: NO. 
SEPARATE_MEMBER_PAGES = NO # The TAB_SIZE tag can be used to set the number of spaces in a tab. Doxygen # uses this value to replace tabs by spaces in code fragments. # Minimum value: 1, maximum value: 16, default value: 4. TAB_SIZE = 4 # This tag can be used to specify a number of aliases that act as commands in # the documentation. An alias has the form: # name=value # For example adding # "sideeffect=@par Side Effects:\n" # will allow you to put the command \sideeffect (or @sideeffect) in the # documentation, which will result in a user-defined paragraph with heading # "Side Effects:". You can put \n's in the value part of an alias to insert # newlines. ALIASES = # This tag can be used to specify a number of word-keyword mappings (TCL only). # A mapping has the form "name=value". For example adding "class=itcl::class" # will allow you to use the command class in the itcl::class meaning. TCL_SUBST = # Set the OPTIMIZE_OUTPUT_FOR_C tag to YES if your project consists of C sources # only. Doxygen will then generate output that is more tailored for C. For # instance, some of the names that are used will be different. The list of all # members will be omitted, etc. # The default value is: NO. OPTIMIZE_OUTPUT_FOR_C = NO # Set the OPTIMIZE_OUTPUT_JAVA tag to YES if your project consists of Java or # Python sources only. Doxygen will then generate output that is more tailored # for that language. For instance, namespaces will be presented as packages, # qualified scopes will look different, etc. # The default value is: NO. OPTIMIZE_OUTPUT_JAVA = NO # Set the OPTIMIZE_FOR_FORTRAN tag to YES if your project consists of Fortran # sources. Doxygen will then generate output that is tailored for Fortran. # The default value is: NO. OPTIMIZE_FOR_FORTRAN = NO # Set the OPTIMIZE_OUTPUT_VHDL tag to YES if your project consists of VHDL # sources. Doxygen will then generate output that is tailored for VHDL. # The default value is: NO. 
OPTIMIZE_OUTPUT_VHDL = NO # Doxygen selects the parser to use depending on the extension of the files it # parses. With this tag you can assign which parser to use for a given # extension. Doxygen has a built-in mapping, but you can override or extend it # using this tag. The format is ext=language, where ext is a file extension, and # language is one of the parsers supported by doxygen: IDL, Java, Javascript, # C#, C, C++, D, PHP, Objective-C, Python, Fortran (fixed format Fortran: # FortranFixed, free formatted Fortran: FortranFree, unknown formatted Fortran: # Fortran. In the later case the parser tries to guess whether the code is fixed # or free formatted code, this is the default for Fortran type files), VHDL. For # instance to make doxygen treat .inc files as Fortran files (default is PHP), # and .f files as C (default is Fortran), use: inc=Fortran f=C. # # Note For files without extension you can use no_extension as a placeholder. # # Note that for custom extensions you also need to set FILE_PATTERNS otherwise # the files are not read by doxygen. EXTENSION_MAPPING = # If the MARKDOWN_SUPPORT tag is enabled then doxygen pre-processes all comments # according to the Markdown format, which allows for more readable # documentation. See http://daringfireball.net/projects/markdown/ for details. # The output of markdown processing is further processed by doxygen, so you can # mix doxygen, HTML, and XML commands with Markdown formatting. Disable only in # case of backward compatibilities issues. # The default value is: YES. MARKDOWN_SUPPORT = YES # When enabled doxygen tries to link words that correspond to documented # classes, or namespaces to their corresponding documentation. Such a link can # be prevented in individual cases by by putting a % sign in front of the word # or globally by setting AUTOLINK_SUPPORT to NO. # The default value is: YES. AUTOLINK_SUPPORT = YES # If you use STL classes (i.e. std::string, std::vector, etc.) 
but do not want # to include (a tag file for) the STL sources as input, then you should set this # tag to YES in order to let doxygen match functions declarations and # definitions whose arguments contain STL classes (e.g. func(std::string); # versus func(std::string) {}). This also make the inheritance and collaboration # diagrams that involve STL classes more complete and accurate. # The default value is: NO. BUILTIN_STL_SUPPORT = YES # If you use Microsoft's C++/CLI language, you should set this option to YES to # enable parsing support. # The default value is: NO. CPP_CLI_SUPPORT = NO # Set the SIP_SUPPORT tag to YES if your project consists of sip (see: # http://www.riverbankcomputing.co.uk/software/sip/intro) sources only. Doxygen # will parse them like normal C++ but will assume all classes use public instead # of private inheritance when no explicit protection keyword is present. # The default value is: NO. SIP_SUPPORT = NO # For Microsoft's IDL there are propget and propput attributes to indicate # getter and setter methods for a property. Setting this option to YES will make # doxygen to replace the get and set methods by a property in the documentation. # This will only work if the methods are indeed getting or setting a simple # type. If this is not the case, or you want to show the methods anyway, you # should set this option to NO. # The default value is: YES. IDL_PROPERTY_SUPPORT = YES # If member grouping is used in the documentation and the DISTRIBUTE_GROUP_DOC # tag is set to YES, then doxygen will reuse the documentation of the first # member in the group (if any) for the other members of the group. By default # all members of a group must be documented explicitly. # The default value is: NO. DISTRIBUTE_GROUP_DOC = NO # Set the SUBGROUPING tag to YES to allow class member groups of the same type # (for instance a group of public functions) to be put as a subgroup of that # type (e.g. under the Public Functions section). 
Set it to NO to prevent # subgrouping. Alternatively, this can be done per class using the # \nosubgrouping command. # The default value is: YES. SUBGROUPING = YES # When the INLINE_GROUPED_CLASSES tag is set to YES, classes, structs and unions # are shown inside the group in which they are included (e.g. using \ingroup) # instead of on a separate page (for HTML and Man pages) or section (for LaTeX # and RTF). # # Note that this feature does not work in combination with # SEPARATE_MEMBER_PAGES. # The default value is: NO. INLINE_GROUPED_CLASSES = NO # When the INLINE_SIMPLE_STRUCTS tag is set to YES, structs, classes, and unions # with only public data fields or simple typedef fields will be shown inline in # the documentation of the scope in which they are defined (i.e. file, # namespace, or group documentation), provided this scope is documented. If set # to NO, structs, classes, and unions are shown on a separate page (for HTML and # Man pages) or section (for LaTeX and RTF). # The default value is: NO. INLINE_SIMPLE_STRUCTS = NO # When TYPEDEF_HIDES_STRUCT tag is enabled, a typedef of a struct, union, or # enum is documented as struct, union, or enum with the name of the typedef. So # typedef struct TypeS {} TypeT, will appear in the documentation as a struct # with name TypeT. When disabled the typedef will appear as a member of a file, # namespace, or class. And the struct will be named TypeS. This can typically be # useful for C code in case the coding convention dictates that all compound # types are typedef'ed and only the typedef is referenced, never the tag name. # The default value is: NO. TYPEDEF_HIDES_STRUCT = NO # The size of the symbol lookup cache can be set using LOOKUP_CACHE_SIZE. This # cache is used to resolve symbols given their name and scope. Since this can be # an expensive process and often the same symbol appears multiple times in the # code, doxygen keeps a cache of pre-resolved symbols. 
If the cache is too small # doxygen will become slower. If the cache is too large, memory is wasted. The # cache size is given by this formula: 2^(16+LOOKUP_CACHE_SIZE). The valid range # is 0..9, the default is 0, corresponding to a cache size of 2^16=65536 # symbols. At the end of a run doxygen will report the cache usage and suggest # the optimal cache size from a speed point of view. # Minimum value: 0, maximum value: 9, default value: 0. LOOKUP_CACHE_SIZE = 0 #--------------------------------------------------------------------------- # Build related configuration options #--------------------------------------------------------------------------- # If the EXTRACT_ALL tag is set to YES doxygen will assume all entities in # documentation are documented, even if no documentation was available. Private # class members and static file members will be hidden unless the # EXTRACT_PRIVATE respectively EXTRACT_STATIC tags are set to YES. # Note: This will also disable the warnings about undocumented members that are # normally produced when WARNINGS is set to YES. # The default value is: NO. EXTRACT_ALL = NO # If the EXTRACT_PRIVATE tag is set to YES all private members of a class will # be included in the documentation. # The default value is: NO. EXTRACT_PRIVATE = YES # If the EXTRACT_PACKAGE tag is set to YES all members with package or internal # scope will be included in the documentation. # The default value is: NO. EXTRACT_PACKAGE = NO # If the EXTRACT_STATIC tag is set to YES all static members of a file will be # included in the documentation. # The default value is: NO. EXTRACT_STATIC = NO # If the EXTRACT_LOCAL_CLASSES tag is set to YES classes (and structs) defined # locally in source files will be included in the documentation. If set to NO # only classes defined in header files are included. Does not have any effect # for Java sources. # The default value is: YES. EXTRACT_LOCAL_CLASSES = YES # This flag is only useful for Objective-C code. 
When set to YES local methods, # which are defined in the implementation section but not in the interface are # included in the documentation. If set to NO only methods in the interface are # included. # The default value is: NO. EXTRACT_LOCAL_METHODS = NO # If this flag is set to YES, the members of anonymous namespaces will be # extracted and appear in the documentation as a namespace called # 'anonymous_namespace{file}', where file will be replaced with the base name of # the file that contains the anonymous namespace. By default anonymous namespace # are hidden. # The default value is: NO. EXTRACT_ANON_NSPACES = NO # If the HIDE_UNDOC_MEMBERS tag is set to YES, doxygen will hide all # undocumented members inside documented classes or files. If set to NO these # members will be included in the various overviews, but no documentation # section is generated. This option has no effect if EXTRACT_ALL is enabled. # The default value is: NO. HIDE_UNDOC_MEMBERS = NO # If the HIDE_UNDOC_CLASSES tag is set to YES, doxygen will hide all # undocumented classes that are normally visible in the class hierarchy. If set # to NO these classes will be included in the various overviews. This option has # no effect if EXTRACT_ALL is enabled. # The default value is: NO. HIDE_UNDOC_CLASSES = YES # If the HIDE_FRIEND_COMPOUNDS tag is set to YES, doxygen will hide all friend # (class|struct|union) declarations. If set to NO these declarations will be # included in the documentation. # The default value is: NO. HIDE_FRIEND_COMPOUNDS = NO # If the HIDE_IN_BODY_DOCS tag is set to YES, doxygen will hide any # documentation blocks found inside the body of a function. If set to NO these # blocks will be appended to the function's detailed documentation block. # The default value is: NO. HIDE_IN_BODY_DOCS = NO # The INTERNAL_DOCS tag determines if documentation that is typed after a # \internal command is included. If the tag is set to NO then the documentation # will be excluded. 
Set it to YES to include the internal documentation. # The default value is: NO. INTERNAL_DOCS = YES # If the CASE_SENSE_NAMES tag is set to NO then doxygen will only generate file # names in lower-case letters. If set to YES upper-case letters are also # allowed. This is useful if you have classes or files whose names only differ # in case and if your file system supports case sensitive file names. Windows # and Mac users are advised to set this option to NO. # The default value is: system dependent. CASE_SENSE_NAMES = YES # If the HIDE_SCOPE_NAMES tag is set to NO then doxygen will show members with # their full class and namespace scopes in the documentation. If set to YES the # scope will be hidden. # The default value is: NO. HIDE_SCOPE_NAMES = NO # If the SHOW_INCLUDE_FILES tag is set to YES then doxygen will put a list of # the files that are included by a file in the documentation of that file. # The default value is: YES. SHOW_INCLUDE_FILES = YES # If the SHOW_GROUPED_MEMB_INC tag is set to YES then Doxygen will add for each # grouped member an include statement to the documentation, telling the reader # which file to include in order to use the member. # The default value is: NO. SHOW_GROUPED_MEMB_INC = NO # If the FORCE_LOCAL_INCLUDES tag is set to YES then doxygen will list include # files with double quotes in the documentation rather than with sharp brackets. # The default value is: NO. FORCE_LOCAL_INCLUDES = NO # If the INLINE_INFO tag is set to YES then a tag [inline] is inserted in the # documentation for inline members. # The default value is: YES. INLINE_INFO = YES # If the SORT_MEMBER_DOCS tag is set to YES then doxygen will sort the # (detailed) documentation of file and class members alphabetically by member # name. If set to NO the members will appear in declaration order. # The default value is: YES. 
SORT_MEMBER_DOCS = YES # If the SORT_BRIEF_DOCS tag is set to YES then doxygen will sort the brief # descriptions of file, namespace and class members alphabetically by member # name. If set to NO the members will appear in declaration order. Note that # this will also influence the order of the classes in the class list. # The default value is: NO. SORT_BRIEF_DOCS = NO # If the SORT_MEMBERS_CTORS_1ST tag is set to YES then doxygen will sort the # (brief and detailed) documentation of class members so that constructors and # destructors are listed first. If set to NO the constructors will appear in the # respective orders defined by SORT_BRIEF_DOCS and SORT_MEMBER_DOCS. # Note: If SORT_BRIEF_DOCS is set to NO this option is ignored for sorting brief # member documentation. # Note: If SORT_MEMBER_DOCS is set to NO this option is ignored for sorting # detailed member documentation. # The default value is: NO. SORT_MEMBERS_CTORS_1ST = NO # If the SORT_GROUP_NAMES tag is set to YES then doxygen will sort the hierarchy # of group names into alphabetical order. If set to NO the group names will # appear in their defined order. # The default value is: NO. SORT_GROUP_NAMES = NO # If the SORT_BY_SCOPE_NAME tag is set to YES, the class list will be sorted by # fully-qualified names, including namespaces. If set to NO, the class list will # be sorted only by class name, not including the namespace part. # Note: This option is not very useful if HIDE_SCOPE_NAMES is set to YES. # Note: This option applies only to the class list, not to the alphabetical # list. # The default value is: NO. SORT_BY_SCOPE_NAME = NO # If the STRICT_PROTO_MATCHING option is enabled and doxygen fails to do proper # type resolution of all parameters of a function it will reject a match between # the prototype and the implementation of a member function even if there is # only one candidate or it is obvious which candidate to choose by doing a # simple string match. 
By disabling STRICT_PROTO_MATCHING doxygen will still # accept a match between prototype and implementation in such cases. # The default value is: NO. STRICT_PROTO_MATCHING = NO # The GENERATE_TODOLIST tag can be used to enable ( YES) or disable ( NO) the # todo list. This list is created by putting \todo commands in the # documentation. # The default value is: YES. GENERATE_TODOLIST = YES # The GENERATE_TESTLIST tag can be used to enable ( YES) or disable ( NO) the # test list. This list is created by putting \test commands in the # documentation. # The default value is: YES. GENERATE_TESTLIST = YES # The GENERATE_BUGLIST tag can be used to enable ( YES) or disable ( NO) the bug # list. This list is created by putting \bug commands in the documentation. # The default value is: YES. GENERATE_BUGLIST = YES # The GENERATE_DEPRECATEDLIST tag can be used to enable ( YES) or disable ( NO) # the deprecated list. This list is created by putting \deprecated commands in # the documentation. # The default value is: YES. GENERATE_DEPRECATEDLIST= YES # The ENABLED_SECTIONS tag can be used to enable conditional documentation # sections, marked by \if ... \endif and \cond # ... \endcond blocks. ENABLED_SECTIONS = # The MAX_INITIALIZER_LINES tag determines the maximum number of lines that the # initial value of a variable or macro / define can have for it to appear in the # documentation. If the initializer consists of more lines than specified here # it will be hidden. Use a value of 0 to hide initializers completely. The # appearance of the value of individual variables and macros / defines can be # controlled using \showinitializer or \hideinitializer command in the # documentation regardless of this setting. # Minimum value: 0, maximum value: 10000, default value: 30. MAX_INITIALIZER_LINES = 30 # Set the SHOW_USED_FILES tag to NO to disable the list of files generated at # the bottom of the documentation of classes and structs. 
If set to YES the list # will mention the files that were used to generate the documentation. # The default value is: YES. SHOW_USED_FILES = YES # Set the SHOW_FILES tag to NO to disable the generation of the Files page. This # will remove the Files entry from the Quick Index and from the Folder Tree View # (if specified). # The default value is: YES. SHOW_FILES = YES # Set the SHOW_NAMESPACES tag to NO to disable the generation of the Namespaces # page. This will remove the Namespaces entry from the Quick Index and from the # Folder Tree View (if specified). # The default value is: YES. SHOW_NAMESPACES = YES # The FILE_VERSION_FILTER tag can be used to specify a program or script that # doxygen should invoke to get the current version for each file (typically from # the version control system). Doxygen will invoke the program by executing (via # popen()) the command command input-file, where command is the value of the # FILE_VERSION_FILTER tag, and input-file is the name of an input file provided # by doxygen. Whatever the program writes to standard output is used as the file # version. For an example see the documentation. FILE_VERSION_FILTER = # The LAYOUT_FILE tag can be used to specify a layout file which will be parsed # by doxygen. The layout file controls the global structure of the generated # output files in an output format independent way. To create the layout file # that represents doxygen's defaults, run doxygen with the -l option. You can # optionally specify a file name after the option, if omitted DoxygenLayout.xml # will be used as the name of the layout file. # # Note that if you run doxygen from a directory containing a file called # DoxygenLayout.xml, doxygen will parse it automatically even if the LAYOUT_FILE # tag is left empty. LAYOUT_FILE = # The CITE_BIB_FILES tag can be used to specify one or more bib files containing # the reference definitions. This must be a list of .bib files. 
The .bib # extension is automatically appended if omitted. This requires the bibtex tool # to be installed. See also http://en.wikipedia.org/wiki/BibTeX for more info. # For LaTeX the style of the bibliography can be controlled using # LATEX_BIB_STYLE. To use this feature you need bibtex and perl available in the # search path. See also \cite for info how to create references. CITE_BIB_FILES = #--------------------------------------------------------------------------- # Configuration options related to warning and progress messages #--------------------------------------------------------------------------- # The QUIET tag can be used to turn on/off the messages that are generated to # standard output by doxygen. If QUIET is set to YES this implies that the # messages are off. # The default value is: NO. QUIET = NO # The WARNINGS tag can be used to turn on/off the warning messages that are # generated to standard error ( stderr) by doxygen. If WARNINGS is set to YES # this implies that the warnings are on. # # Tip: Turn warnings on while writing the documentation. # The default value is: YES. WARNINGS = YES # If the WARN_IF_UNDOCUMENTED tag is set to YES, then doxygen will generate # warnings for undocumented members. If EXTRACT_ALL is set to YES then this flag # will automatically be disabled. # The default value is: YES. WARN_IF_UNDOCUMENTED = NO # If the WARN_IF_DOC_ERROR tag is set to YES, doxygen will generate warnings for # potential errors in the documentation, such as not documenting some parameters # in a documented function, or documenting parameters that don't exist or using # markup commands wrongly. # The default value is: YES. WARN_IF_DOC_ERROR = YES # This WARN_NO_PARAMDOC option can be enabled to get warnings for functions that # are documented, but have no documentation for their parameters or return # value. If set to NO doxygen will only warn about wrong or incomplete parameter # documentation, but not about the absence of documentation. 
# The default value is: NO. WARN_NO_PARAMDOC = NO # The WARN_FORMAT tag determines the format of the warning messages that doxygen # can produce. The string should contain the $file, $line, and $text tags, which # will be replaced by the file and line number from which the warning originated # and the warning text. Optionally the format may contain $version, which will # be replaced by the version of the file (if it could be obtained via # FILE_VERSION_FILTER) # The default value is: $file:$line: $text. WARN_FORMAT = "$file:$line: $text" # The WARN_LOGFILE tag can be used to specify a file to which warning and error # messages should be written. If left blank the output is written to standard # error (stderr). WARN_LOGFILE = #--------------------------------------------------------------------------- # Configuration options related to the input files #--------------------------------------------------------------------------- # The INPUT tag is used to specify the files and/or directories that contain # documented source files. You may enter file names like myfile.cpp or # directories like /usr/src/myproject. Separate the files or directories with # spaces. # Note: If this tag is empty the current directory is searched. INPUT = # This tag can be used to specify the character encoding of the source files # that doxygen parses. Internally doxygen uses the UTF-8 encoding. Doxygen uses # libiconv (or the iconv built into libc) for the transcoding. See the libiconv # documentation (see: http://www.gnu.org/software/libiconv) for the list of # possible encodings. # The default value is: UTF-8. INPUT_ENCODING = UTF-8 # If the value of the INPUT tag contains directories, you can use the # FILE_PATTERNS tag to specify one or more wildcard patterns (like *.cpp and # *.h) to filter out the source-files in the directories. 
If left blank the # following patterns are tested:*.c, *.cc, *.cxx, *.cpp, *.c++, *.java, *.ii, # *.ixx, *.ipp, *.i++, *.inl, *.idl, *.ddl, *.odl, *.h, *.hh, *.hxx, *.hpp, # *.h++, *.cs, *.d, *.php, *.php4, *.php5, *.phtml, *.inc, *.m, *.markdown, # *.md, *.mm, *.dox, *.py, *.f90, *.f, *.for, *.tcl, *.vhd, *.vhdl, *.ucf, # *.qsf, *.as and *.js. FILE_PATTERNS = *.h \ *.hpp # The RECURSIVE tag can be used to specify whether or not subdirectories should # be searched for input files as well. # The default value is: NO. RECURSIVE = YES # The EXCLUDE tag can be used to specify files and/or directories that should be # excluded from the INPUT source files. This way you can easily exclude a # subdirectory from a directory tree whose root is specified with the INPUT tag. # # Note that relative paths are relative to the directory from which doxygen is # run. EXCLUDE = doc \ intl \ m4 \ man \ misc \ po \ src/testing \ src/check \ config.h \ gettext.h \ my_config.h # The EXCLUDE_SYMLINKS tag can be used to select whether or not files or # directories that are symbolic links (a Unix file system feature) are excluded # from the input. # The default value is: NO. EXCLUDE_SYMLINKS = NO # If the value of the INPUT tag contains directories, you can use the # EXCLUDE_PATTERNS tag to specify one or more wildcard patterns to exclude # certain files from those directories. # # Note that the wildcards are matched against the file with absolute path, so to # exclude all test directories for example use the pattern */test/* EXCLUDE_PATTERNS = # The EXCLUDE_SYMBOLS tag can be used to specify one or more symbol names # (namespaces, classes, functions, etc.) that should be excluded from the # output. The symbol name can be a fully qualified name, a word, or if the # wildcard * is used, a substring. 
Examples: ANamespace, AClass, # AClass::ANamespace, ANamespace::*Test # # Note that the wildcards are matched against the file with absolute path, so to # exclude all test directories use the pattern */test/* EXCLUDE_SYMBOLS = # The EXAMPLE_PATH tag can be used to specify one or more files or directories # that contain example code fragments that are included (see the \include # command). EXAMPLE_PATH = # If the value of the EXAMPLE_PATH tag contains directories, you can use the # EXAMPLE_PATTERNS tag to specify one or more wildcard pattern (like *.cpp and # *.h) to filter out the source-files in the directories. If left blank all # files are included. EXAMPLE_PATTERNS = * # If the EXAMPLE_RECURSIVE tag is set to YES then subdirectories will be # searched for input files to be used with the \include or \dontinclude commands # irrespective of the value of the RECURSIVE tag. # The default value is: NO. EXAMPLE_RECURSIVE = NO # The IMAGE_PATH tag can be used to specify one or more files or directories # that contain images that are to be included in the documentation (see the # \image command). IMAGE_PATH = doc/dar_s_doc.jpg # The INPUT_FILTER tag can be used to specify a program that doxygen should # invoke to filter for each input file. Doxygen will invoke the filter program # by executing (via popen()) the command: # # # # where is the value of the INPUT_FILTER tag, and is the # name of an input file. Doxygen will then use the output that the filter # program writes to standard output. If FILTER_PATTERNS is specified, this tag # will be ignored. # # Note that the filter must not add or remove lines; it is applied before the # code is scanned, but not when the output code is generated. If lines are added # or removed, the anchors will not be placed correctly. INPUT_FILTER = # The FILTER_PATTERNS tag can be used to specify filters on a per file pattern # basis. Doxygen will compare the file name with each pattern and apply the # filter if there is a match. 
The filters are a list of the form: pattern=filter # (like *.cpp=my_cpp_filter). See INPUT_FILTER for further information on how # filters are used. If the FILTER_PATTERNS tag is empty or if none of the # patterns match the file name, INPUT_FILTER is applied. FILTER_PATTERNS = # If the FILTER_SOURCE_FILES tag is set to YES, the input filter (if set using # INPUT_FILTER ) will also be used to filter the input files that are used for # producing the source files to browse (i.e. when SOURCE_BROWSER is set to YES). # The default value is: NO. FILTER_SOURCE_FILES = NO # The FILTER_SOURCE_PATTERNS tag can be used to specify source filters per file # pattern. A pattern will override the setting for FILTER_PATTERN (if any) and # it is also possible to disable source filtering for a specific pattern using # *.ext= (so without naming a filter). # This tag requires that the tag FILTER_SOURCE_FILES is set to YES. FILTER_SOURCE_PATTERNS = # If the USE_MDFILE_AS_MAINPAGE tag refers to the name of a markdown file that # is part of the input, its contents will be placed on the main page # (index.html). This can be useful if you have a project on for instance GitHub # and want to reuse the introduction page also for the doxygen output. USE_MDFILE_AS_MAINPAGE = #--------------------------------------------------------------------------- # Configuration options related to source browsing #--------------------------------------------------------------------------- # If the SOURCE_BROWSER tag is set to YES then a list of source files will be # generated. Documented entities will be cross-referenced with these sources. # # Note: To get rid of all source code in the generated output, make sure that # also VERBATIM_HEADERS is set to NO. # The default value is: NO. SOURCE_BROWSER = YES # Setting the INLINE_SOURCES tag to YES will include the body of functions, # classes and enums directly into the documentation. # The default value is: NO. 
INLINE_SOURCES = NO # Setting the STRIP_CODE_COMMENTS tag to YES will instruct doxygen to hide any # special comment blocks from generated source code fragments. Normal C, C++ and # Fortran comments will always remain visible. # The default value is: YES. STRIP_CODE_COMMENTS = YES # If the REFERENCED_BY_RELATION tag is set to YES then for each documented # function all documented functions referencing it will be listed. # The default value is: NO. REFERENCED_BY_RELATION = YES # If the REFERENCES_RELATION tag is set to YES then for each documented function # all documented entities called/used by that function will be listed. # The default value is: NO. REFERENCES_RELATION = YES # If the REFERENCES_LINK_SOURCE tag is set to YES and SOURCE_BROWSER tag is set # to YES, then the hyperlinks from functions in REFERENCES_RELATION and # REFERENCED_BY_RELATION lists will link to the source code. Otherwise they will # link to the documentation. # The default value is: YES. REFERENCES_LINK_SOURCE = YES # If SOURCE_TOOLTIPS is enabled (the default) then hovering a hyperlink in the # source code will show a tooltip with additional information such as prototype, # brief description and links to the definition and documentation. Since this # will make the HTML file larger and loading of large files a bit slower, you # can opt to disable this feature. # The default value is: YES. # This tag requires that the tag SOURCE_BROWSER is set to YES. SOURCE_TOOLTIPS = YES # If the USE_HTAGS tag is set to YES then the references to source code will # point to the HTML generated by the htags(1) tool instead of doxygen built-in # source browser. The htags tool is part of GNU's global source tagging system # (see http://www.gnu.org/software/global/global.html). You will need version # 4.8.6 or higher. 
# # To use it do the following: # - Install the latest version of global # - Enable SOURCE_BROWSER and USE_HTAGS in the config file # - Make sure the INPUT points to the root of the source tree # - Run doxygen as normal # # Doxygen will invoke htags (and that will in turn invoke gtags), so these # tools must be available from the command line (i.e. in the search path). # # The result: instead of the source browser generated by doxygen, the links to # source code will now point to the output of htags. # The default value is: NO. # This tag requires that the tag SOURCE_BROWSER is set to YES. USE_HTAGS = NO # If the VERBATIM_HEADERS tag is set the YES then doxygen will generate a # verbatim copy of the header file for each class for which an include is # specified. Set to NO to disable this. # See also: Section \class. # The default value is: YES. VERBATIM_HEADERS = NO # If the CLANG_ASSISTED_PARSING tag is set to YES, then doxygen will use the # clang parser (see: http://clang.llvm.org/) for more accurate parsing at the # cost of reduced performance. This can be particularly helpful with template # rich C++ code for which doxygen's built-in parser lacks the necessary type # information. # Note: The availability of this option depends on whether or not doxygen was # compiled with the --with-libclang option. # The default value is: NO. CLANG_ASSISTED_PARSING = NO # If clang assisted parsing is enabled you can provide the compiler with command # line options that you would normally use when invoking the compiler. Note that # the include paths will already be set by doxygen for the files and directories # specified with INPUT and INCLUDE_PATH. # This tag requires that the tag CLANG_ASSISTED_PARSING is set to YES. 
CLANG_OPTIONS = #--------------------------------------------------------------------------- # Configuration options related to the alphabetical class index #--------------------------------------------------------------------------- # If the ALPHABETICAL_INDEX tag is set to YES, an alphabetical index of all # compounds will be generated. Enable this if the project contains a lot of # classes, structs, unions or interfaces. # The default value is: YES. ALPHABETICAL_INDEX = YES # The COLS_IN_ALPHA_INDEX tag can be used to specify the number of columns in # which the alphabetical index list will be split. # Minimum value: 1, maximum value: 20, default value: 5. # This tag requires that the tag ALPHABETICAL_INDEX is set to YES. COLS_IN_ALPHA_INDEX = 5 # In case all classes in a project start with a common prefix, all classes will # be put under the same header in the alphabetical index. The IGNORE_PREFIX tag # can be used to specify a prefix (or a list of prefixes) that should be ignored # while generating the index headers. # This tag requires that the tag ALPHABETICAL_INDEX is set to YES. IGNORE_PREFIX = #--------------------------------------------------------------------------- # Configuration options related to the HTML output #--------------------------------------------------------------------------- # If the GENERATE_HTML tag is set to YES doxygen will generate HTML output # The default value is: YES. GENERATE_HTML = YES # The HTML_OUTPUT tag is used to specify where the HTML docs will be put. If a # relative path is entered the value of OUTPUT_DIRECTORY will be put in front of # it. # The default directory is: html. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_OUTPUT = html # The HTML_FILE_EXTENSION tag can be used to specify the file extension for each # generated HTML page (for example: .htm, .php, .asp). # The default value is: .html. # This tag requires that the tag GENERATE_HTML is set to YES. 
HTML_FILE_EXTENSION = .html # The HTML_HEADER tag can be used to specify a user-defined HTML header file for # each generated HTML page. If the tag is left blank doxygen will generate a # standard header. # # To get valid HTML the header file that includes any scripts and style sheets # that doxygen needs, which is dependent on the configuration options used (e.g. # the setting GENERATE_TREEVIEW). It is highly recommended to start with a # default header using # doxygen -w html new_header.html new_footer.html new_stylesheet.css # YourConfigFile # and then modify the file new_header.html. See also section "Doxygen usage" # for information on how to generate the default header that doxygen normally # uses. # Note: The header is subject to change so you typically have to regenerate the # default header when upgrading to a newer version of doxygen. For a description # of the possible markers and block names see the documentation. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_HEADER = # The HTML_FOOTER tag can be used to specify a user-defined HTML footer for each # generated HTML page. If the tag is left blank doxygen will generate a standard # footer. See HTML_HEADER for more information on how to generate a default # footer and what special commands can be used inside the footer. See also # section "Doxygen usage" for information on how to generate the default footer # that doxygen normally uses. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_FOOTER = # The HTML_STYLESHEET tag can be used to specify a user-defined cascading style # sheet that is used by each HTML page. It can be used to fine-tune the look of # the HTML output. If left blank doxygen will generate a default style sheet. # See also section "Doxygen usage" for information on how to generate the style # sheet that doxygen normally uses. 
# Note: It is recommended to use HTML_EXTRA_STYLESHEET instead of this tag, as # it is more robust and this tag (HTML_STYLESHEET) will in the future become # obsolete. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_STYLESHEET = # The HTML_EXTRA_STYLESHEET tag can be used to specify additional user-defined # cascading style sheets that are included after the standard style sheets # created by doxygen. Using this option one can overrule certain style aspects. # This is preferred over using HTML_STYLESHEET since it does not replace the # standard style sheet and is therefore more robust against future updates. # Doxygen will copy the style sheet files to the output directory. # Note: The order of the extra stylesheet files is of importance (e.g. the last # stylesheet in the list overrules the setting of the previous ones in the # list). For an example see the documentation. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_EXTRA_STYLESHEET = # The HTML_EXTRA_FILES tag can be used to specify one or more extra images or # other source files which should be copied to the HTML output directory. Note # that these files will be copied to the base HTML output directory. Use the # $relpath^ marker in the HTML_HEADER and/or HTML_FOOTER files to load these # files. In the HTML_STYLESHEET file, use the file name only. Also note that the # files will be copied as-is; there are no commands or markers available. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_EXTRA_FILES = # The HTML_COLORSTYLE_HUE tag controls the color of the HTML output. Doxygen # will adjust the colors in the stylesheet and background images according to # this color. Hue is specified as an angle on a colorwheel, see # http://en.wikipedia.org/wiki/Hue for more information. For instance the value # 0 represents red, 60 is yellow, 120 is green, 180 is cyan, 240 is blue, 300 # purple, and 360 is red again. 
# Minimum value: 0, maximum value: 359, default value: 220. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_COLORSTYLE_HUE = 220 # The HTML_COLORSTYLE_SAT tag controls the purity (or saturation) of the colors # in the HTML output. For a value of 0 the output will use grayscales only. A # value of 255 will produce the most vivid colors. # Minimum value: 0, maximum value: 255, default value: 100. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_COLORSTYLE_SAT = 100 # The HTML_COLORSTYLE_GAMMA tag controls the gamma correction applied to the # luminance component of the colors in the HTML output. Values below 100 # gradually make the output lighter, whereas values above 100 make the output # darker. The value divided by 100 is the actual gamma applied, so 80 represents # a gamma of 0.8, The value 220 represents a gamma of 2.2, and 100 does not # change the gamma. # Minimum value: 40, maximum value: 240, default value: 80. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_COLORSTYLE_GAMMA = 80 # If the HTML_TIMESTAMP tag is set to YES then the footer of each generated HTML # page will contain the date and time when the page was generated. Setting this # to NO can help when comparing the output of multiple runs. # The default value is: YES. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_TIMESTAMP = YES # If the HTML_DYNAMIC_SECTIONS tag is set to YES then the generated HTML # documentation will contain sections that can be hidden and shown after the # page has loaded. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_DYNAMIC_SECTIONS = YES # With HTML_INDEX_NUM_ENTRIES one can control the preferred number of entries # shown in the various tree structured indices initially; the user can expand # and collapse entries dynamically later on. 
Doxygen will expand the tree to # such a level that at most the specified number of entries are visible (unless # a fully collapsed tree already exceeds this amount). So setting the number of # entries 1 will produce a full collapsed tree by default. 0 is a special value # representing an infinite number of entries and will result in a full expanded # tree by default. # Minimum value: 0, maximum value: 9999, default value: 100. # This tag requires that the tag GENERATE_HTML is set to YES. HTML_INDEX_NUM_ENTRIES = 100 # If the GENERATE_DOCSET tag is set to YES, additional index files will be # generated that can be used as input for Apple's Xcode 3 integrated development # environment (see: http://developer.apple.com/tools/xcode/), introduced with # OSX 10.5 (Leopard). To create a documentation set, doxygen will generate a # Makefile in the HTML output directory. Running make will produce the docset in # that directory and running make install will install the docset in # ~/Library/Developer/Shared/Documentation/DocSets so that Xcode will find it at # startup. See http://developer.apple.com/tools/creatingdocsetswithdoxygen.html # for more information. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_DOCSET = NO # This tag determines the name of the docset feed. A documentation feed provides # an umbrella under which multiple documentation sets from a single provider # (such as a company or product suite) can be grouped. # The default value is: Doxygen generated docs. # This tag requires that the tag GENERATE_DOCSET is set to YES. DOCSET_FEEDNAME = "Doxygen generated docs" # This tag specifies a string that should uniquely identify the documentation # set bundle. This should be a reverse domain-name style string, e.g. # com.mycompany.MyDocSet. Doxygen will append .docset to the name. # The default value is: org.doxygen.Project. # This tag requires that the tag GENERATE_DOCSET is set to YES. 
DOCSET_BUNDLE_ID = org.doxygen.Project # The DOCSET_PUBLISHER_ID tag specifies a string that should uniquely identify # the documentation publisher. This should be a reverse domain-name style # string, e.g. com.mycompany.MyDocSet.documentation. # The default value is: org.doxygen.Publisher. # This tag requires that the tag GENERATE_DOCSET is set to YES. DOCSET_PUBLISHER_ID = org.doxygen.Publisher # The DOCSET_PUBLISHER_NAME tag identifies the documentation publisher. # The default value is: Publisher. # This tag requires that the tag GENERATE_DOCSET is set to YES. DOCSET_PUBLISHER_NAME = Publisher # If the GENERATE_HTMLHELP tag is set to YES then doxygen generates three # additional HTML index files: index.hhp, index.hhc, and index.hhk. The # index.hhp is a project file that can be read by Microsoft's HTML Help Workshop # (see: http://www.microsoft.com/en-us/download/details.aspx?id=21138) on # Windows. # # The HTML Help Workshop contains a compiler that can convert all HTML output # generated by doxygen into a single compiled HTML file (.chm). Compiled HTML # files are now used as the Windows 98 help format, and will replace the old # Windows help format (.hlp) on all Windows platforms in the future. Compressed # HTML files also contain an index, a table of contents, and you can search for # words in the documentation. The HTML workshop also contains a viewer for # compressed HTML files. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_HTMLHELP = NO # The CHM_FILE tag can be used to specify the file name of the resulting .chm # file. You can add a path in front of the file if the result should not be # written to the html output directory. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. CHM_FILE = # The HHC_LOCATION tag can be used to specify the location (absolute path # including file name) of the HTML help compiler ( hhc.exe). 
If non-empty # doxygen will try to run the HTML help compiler on the generated index.hhp. # The file has to be specified with full path. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. HHC_LOCATION = # The GENERATE_CHI flag controls if a separate .chi index file is generated ( # YES) or that it should be included in the master .chm file ( NO). # The default value is: NO. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. GENERATE_CHI = NO # The CHM_INDEX_ENCODING is used to encode HtmlHelp index ( hhk), content ( hhc) # and project file content. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. CHM_INDEX_ENCODING = # The BINARY_TOC flag controls whether a binary table of contents is generated ( # YES) or a normal table of contents ( NO) in the .chm file. Furthermore it # enables the Previous and Next buttons. # The default value is: NO. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. BINARY_TOC = NO # The TOC_EXPAND flag can be set to YES to add extra items for group members to # the table of contents of the HTML help documentation and to the tree view. # The default value is: NO. # This tag requires that the tag GENERATE_HTMLHELP is set to YES. TOC_EXPAND = NO # If the GENERATE_QHP tag is set to YES and both QHP_NAMESPACE and # QHP_VIRTUAL_FOLDER are set, an additional index file will be generated that # can be used as input for Qt's qhelpgenerator to generate a Qt Compressed Help # (.qch) of the generated HTML documentation. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_QHP = NO # If the QHG_LOCATION tag is specified, the QCH_FILE tag can be used to specify # the file name of the resulting .qch file. The path specified is relative to # the HTML output folder. # This tag requires that the tag GENERATE_QHP is set to YES. QCH_FILE = # The QHP_NAMESPACE tag specifies the namespace to use when generating Qt Help # Project output. 
For more information please see Qt Help Project / Namespace # (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#namespace). # The default value is: org.doxygen.Project. # This tag requires that the tag GENERATE_QHP is set to YES. QHP_NAMESPACE = # The QHP_VIRTUAL_FOLDER tag specifies the namespace to use when generating Qt # Help Project output. For more information please see Qt Help Project / Virtual # Folders (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#virtual- # folders). # The default value is: doc. # This tag requires that the tag GENERATE_QHP is set to YES. QHP_VIRTUAL_FOLDER = doc # If the QHP_CUST_FILTER_NAME tag is set, it specifies the name of a custom # filter to add. For more information please see Qt Help Project / Custom # Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom- # filters). # This tag requires that the tag GENERATE_QHP is set to YES. QHP_CUST_FILTER_NAME = # The QHP_CUST_FILTER_ATTRS tag specifies the list of the attributes of the # custom filter to add. For more information please see Qt Help Project / Custom # Filters (see: http://qt-project.org/doc/qt-4.8/qthelpproject.html#custom- # filters). # This tag requires that the tag GENERATE_QHP is set to YES. QHP_CUST_FILTER_ATTRS = # The QHP_SECT_FILTER_ATTRS tag specifies the list of the attributes this # project's filter section matches. Qt Help Project / Filter Attributes (see: # http://qt-project.org/doc/qt-4.8/qthelpproject.html#filter-attributes). # This tag requires that the tag GENERATE_QHP is set to YES. QHP_SECT_FILTER_ATTRS = # The QHG_LOCATION tag can be used to specify the location of Qt's # qhelpgenerator. If non-empty doxygen will try to run qhelpgenerator on the # generated .qhp file. # This tag requires that the tag GENERATE_QHP is set to YES. QHG_LOCATION = # If the GENERATE_ECLIPSEHELP tag is set to YES, additional index files will be # generated, together with the HTML files, they form an Eclipse help plugin. 
To # install this plugin and make it available under the help contents menu in # Eclipse, the contents of the directory containing the HTML and XML files needs # to be copied into the plugins directory of eclipse. The name of the directory # within the plugins directory should be the same as the ECLIPSE_DOC_ID value. # After copying Eclipse needs to be restarted before the help appears. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_ECLIPSEHELP = NO # A unique identifier for the Eclipse help plugin. When installing the plugin # the directory name containing the HTML and XML files should also have this # name. Each documentation set should have its own identifier. # The default value is: org.doxygen.Project. # This tag requires that the tag GENERATE_ECLIPSEHELP is set to YES. ECLIPSE_DOC_ID = org.doxygen.Project # If you want full control over the layout of the generated HTML pages it might # be necessary to disable the index and replace it with your own. The # DISABLE_INDEX tag can be used to turn on/off the condensed index (tabs) at top # of each HTML page. A value of NO enables the index and the value YES disables # it. Since the tabs in the index contain the same information as the navigation # tree, you can set this option to YES if you also set GENERATE_TREEVIEW to YES. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. DISABLE_INDEX = NO # The GENERATE_TREEVIEW tag is used to specify whether a tree-like index # structure should be generated to display hierarchical information. If the tag # value is set to YES, a side panel will be generated containing a tree-like # index structure (just like the one that is generated for HTML Help). For this # to work a browser that supports JavaScript, DHTML, CSS and frames is required # (i.e. any modern browser). Windows users are probably better off using the # HTML help feature. 
Via custom stylesheets (see HTML_EXTRA_STYLESHEET) one can # further fine-tune the look of the index. As an example, the default style # sheet generated by doxygen has an example that shows how to put an image at # the root of the tree instead of the PROJECT_NAME. Since the tree basically has # the same information as the tab index, you could consider setting # DISABLE_INDEX to YES when enabling this option. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. GENERATE_TREEVIEW = NO # The ENUM_VALUES_PER_LINE tag can be used to set the number of enum values that # doxygen will group on one line in the generated HTML documentation. # # Note that a value of 0 will completely suppress the enum values from appearing # in the overview section. # Minimum value: 0, maximum value: 20, default value: 4. # This tag requires that the tag GENERATE_HTML is set to YES. ENUM_VALUES_PER_LINE = 4 # If the treeview is enabled (see GENERATE_TREEVIEW) then this tag can be used # to set the initial width (in pixels) of the frame in which the tree is shown. # Minimum value: 0, maximum value: 1500, default value: 250. # This tag requires that the tag GENERATE_HTML is set to YES. TREEVIEW_WIDTH = 250 # When the EXT_LINKS_IN_WINDOW option is set to YES doxygen will open links to # external symbols imported via tag files in a separate window. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. EXT_LINKS_IN_WINDOW = NO # Use this tag to change the font size of LaTeX formulas included as images in # the HTML documentation. When you change the font size after a successful # doxygen run you need to manually remove any form_*.png images from the HTML # output directory to force them to be regenerated. # Minimum value: 8, maximum value: 50, default value: 10. # This tag requires that the tag GENERATE_HTML is set to YES. 
FORMULA_FONTSIZE = 10 # Use the FORMULA_TRANSPARENT tag to determine whether or not the images # generated for formulas are transparent PNGs. Transparent PNGs are not # supported properly for IE 6.0, but are supported on all modern browsers. # # Note that when changing this option you need to delete any form_*.png files in # the HTML output directory before the changes have effect. # The default value is: YES. # This tag requires that the tag GENERATE_HTML is set to YES. FORMULA_TRANSPARENT = YES # Enable the USE_MATHJAX option to render LaTeX formulas using MathJax (see # http://www.mathjax.org) which uses client side Javascript for the rendering # instead of using prerendered bitmaps. Use this if you do not have LaTeX # installed or if you want the formulas to look prettier in the HTML output. When # enabled you may also need to install MathJax separately and configure the path # to it using the MATHJAX_RELPATH option. # The default value is: NO. # This tag requires that the tag GENERATE_HTML is set to YES. USE_MATHJAX = NO # When MathJax is enabled you can set the default output format to be used for # the MathJax output. See the MathJax site (see: # http://docs.mathjax.org/en/latest/output.html) for more details. # Possible values are: HTML-CSS (which is slower, but has the best # compatibility), NativeMML (i.e. MathML) and SVG. # The default value is: HTML-CSS. # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_FORMAT = HTML-CSS # When MathJax is enabled you need to specify the location relative to the HTML # output directory using the MATHJAX_RELPATH option. The destination directory # should contain the MathJax.js script. For instance, if the mathjax directory # is located at the same level as the HTML output directory, then # MATHJAX_RELPATH should be ../mathjax. The default value points to the MathJax # Content Delivery Network so you can quickly see the result without installing # MathJax. 
However, it is strongly recommended to install a local copy of # MathJax from http://www.mathjax.org before deployment. # The default value is: http://cdn.mathjax.org/mathjax/latest. # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_RELPATH = http://cdn.mathjax.org/mathjax/latest # The MATHJAX_EXTENSIONS tag can be used to specify one or more MathJax # extension names that should be enabled during MathJax rendering. For example # MATHJAX_EXTENSIONS = TeX/AMSmath TeX/AMSsymbols # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_EXTENSIONS = # The MATHJAX_CODEFILE tag can be used to specify a file with javascript pieces # of code that will be used on startup of the MathJax code. See the MathJax site # (see: http://docs.mathjax.org/en/latest/output.html) for more details. For an # example see the documentation. # This tag requires that the tag USE_MATHJAX is set to YES. MATHJAX_CODEFILE = # When the SEARCHENGINE tag is enabled doxygen will generate a search box for # the HTML output. The underlying search engine uses javascript and DHTML and # should work on any modern browser. Note that when using HTML help # (GENERATE_HTMLHELP), Qt help (GENERATE_QHP), or docsets (GENERATE_DOCSET) # there is already a search function so this one should typically be disabled. # For large projects the javascript based search engine can be slow, then # enabling SERVER_BASED_SEARCH may provide a better solution. It is possible to # search using the keyboard; to jump to the search box use <access key> + S # (what the <access key> is depends on the OS and browser, but it is typically # <CTRL>, <ALT>/